From 3eec4615b43750061bbc4f38df45a3bbf765dd58 Mon Sep 17 00:00:00 2001 From: Cosmic Vagabond <121588426+cosmic-vagabond@users.noreply.github.com> Date: Mon, 17 Jun 2024 12:03:20 +0200 Subject: [PATCH] ci: use post-upgrade-snapshot-generator (#571) * ci: use post-upgrade-snapshot-generator * ci: update delete snapshot CI to use post upgrade snapshot gen binary --- .github/workflows/delete-branch-snapshot.yml | 26 +- .github/workflows/software-upgrade-test.yml | 768 ++---------------- .goreleaser.yaml | 24 - Makefile | 20 +- cmd/delete-snapshot/main.go | 76 -- cmd/upgrade-assure/README.md | 209 ----- cmd/upgrade-assure/account-unmarshal-json.go | 38 - cmd/upgrade-assure/add-genesis-account.go | 19 - cmd/upgrade-assure/add-key.go | 45 - cmd/upgrade-assure/add-peers.go | 25 - .../backup-genesis-init-file.go | 21 - cmd/upgrade-assure/check-tx-status.go | 15 - cmd/upgrade-assure/collect-gentxs.go | 19 - cmd/upgrade-assure/colors.go | 9 - cmd/upgrade-assure/contains.go | 10 - .../copy-data-from-node-to-node.go | 32 - cmd/upgrade-assure/create-validator.go | 40 - .../download-and-run-version.go | 109 --- cmd/upgrade-assure/export.go | 64 -- cmd/upgrade-assure/filter-accounts.go | 17 - .../filter-balances-by-denoms.go | 39 - cmd/upgrade-assure/filter-balances.go | 24 - cmd/upgrade-assure/gen-tx.go | 19 - .../generate-priv-validator-state.go | 32 - cmd/upgrade-assure/get-args.go | 24 - cmd/upgrade-assure/get-flags.go | 309 ------- cmd/upgrade-assure/init-node.go | 19 - cmd/upgrade-assure/is-linux.go | 8 - cmd/upgrade-assure/is-service-running.go | 30 - cmd/upgrade-assure/listen-for-signals.go | 22 - cmd/upgrade-assure/parse-tx-hash.go | 23 - .../query-and-calc-upgrade-block-height.go | 26 - cmd/upgrade-assure/query-block-height.go | 25 - cmd/upgrade-assure/query-next-proposal-id.go | 42 - cmd/upgrade-assure/query-node-id.go | 23 - cmd/upgrade-assure/query-operator-address.go | 23 - cmd/upgrade-assure/query-upgrade-applied.go | 20 - cmd/upgrade-assure/query-validator-pubkey.go | 23 - cmd/upgrade-assure/read-genesis-file.go | 23 - cmd/upgrade-assure/remove-home.go | 19 - .../restore-genesis-init-file.go | 21 - cmd/upgrade-assure/retrieve-snapshot.go | 47 -- cmd/upgrade-assure/sed.go | 22 - cmd/upgrade-assure/should-filter-account.go | 15 - cmd/upgrade-assure/start.go | 63 -- cmd/upgrade-assure/stop.go | 19 - cmd/upgrade-assure/submit-upgrade-proposal.go | 50 -- cmd/upgrade-assure/types.go | 531 ------------ cmd/upgrade-assure/unbond-validator.go | 45 - cmd/upgrade-assure/update-config.go | 32 - cmd/upgrade-assure/update-genesis.go | 138 ---- cmd/upgrade-assure/upgrade-assure.go | 301 ------- cmd/upgrade-assure/validate-genesis.go | 19 - .../vote-on-upgrade-proposal.go | 41 - cmd/upgrade-assure/wait-for-block-height.go | 30 - cmd/upgrade-assure/wait-for-next-block.go | 54 -- .../wait-for-service-to-start.go | 21 - .../wait-for-tx-confirmation.go | 27 - cmd/upgrade-assure/write-genesis-file.go | 28 - cmd/upload-snapshot/main.go | 113 --- 60 files changed, 61 insertions(+), 3915 deletions(-) delete mode 100644 cmd/delete-snapshot/main.go delete mode 100644 cmd/upgrade-assure/README.md delete mode 100644 cmd/upgrade-assure/account-unmarshal-json.go delete mode 100644 cmd/upgrade-assure/add-genesis-account.go delete mode 100644 cmd/upgrade-assure/add-key.go delete mode 100644 cmd/upgrade-assure/add-peers.go delete mode 100644 cmd/upgrade-assure/backup-genesis-init-file.go delete mode 100644 cmd/upgrade-assure/check-tx-status.go delete mode 100644 cmd/upgrade-assure/collect-gentxs.go delete mode 100644 
cmd/upgrade-assure/colors.go delete mode 100644 cmd/upgrade-assure/contains.go delete mode 100644 cmd/upgrade-assure/copy-data-from-node-to-node.go delete mode 100644 cmd/upgrade-assure/create-validator.go delete mode 100644 cmd/upgrade-assure/download-and-run-version.go delete mode 100644 cmd/upgrade-assure/export.go delete mode 100644 cmd/upgrade-assure/filter-accounts.go delete mode 100644 cmd/upgrade-assure/filter-balances-by-denoms.go delete mode 100644 cmd/upgrade-assure/filter-balances.go delete mode 100644 cmd/upgrade-assure/gen-tx.go delete mode 100644 cmd/upgrade-assure/generate-priv-validator-state.go delete mode 100644 cmd/upgrade-assure/get-args.go delete mode 100644 cmd/upgrade-assure/get-flags.go delete mode 100644 cmd/upgrade-assure/init-node.go delete mode 100644 cmd/upgrade-assure/is-linux.go delete mode 100644 cmd/upgrade-assure/is-service-running.go delete mode 100644 cmd/upgrade-assure/listen-for-signals.go delete mode 100644 cmd/upgrade-assure/parse-tx-hash.go delete mode 100644 cmd/upgrade-assure/query-and-calc-upgrade-block-height.go delete mode 100644 cmd/upgrade-assure/query-block-height.go delete mode 100644 cmd/upgrade-assure/query-next-proposal-id.go delete mode 100644 cmd/upgrade-assure/query-node-id.go delete mode 100644 cmd/upgrade-assure/query-operator-address.go delete mode 100644 cmd/upgrade-assure/query-upgrade-applied.go delete mode 100644 cmd/upgrade-assure/query-validator-pubkey.go delete mode 100644 cmd/upgrade-assure/read-genesis-file.go delete mode 100644 cmd/upgrade-assure/remove-home.go delete mode 100644 cmd/upgrade-assure/restore-genesis-init-file.go delete mode 100644 cmd/upgrade-assure/retrieve-snapshot.go delete mode 100644 cmd/upgrade-assure/sed.go delete mode 100644 cmd/upgrade-assure/should-filter-account.go delete mode 100644 cmd/upgrade-assure/start.go delete mode 100644 cmd/upgrade-assure/stop.go delete mode 100644 cmd/upgrade-assure/submit-upgrade-proposal.go delete mode 100644 cmd/upgrade-assure/types.go delete mode 100644 cmd/upgrade-assure/unbond-validator.go delete mode 100644 cmd/upgrade-assure/update-config.go delete mode 100644 cmd/upgrade-assure/update-genesis.go delete mode 100644 cmd/upgrade-assure/upgrade-assure.go delete mode 100644 cmd/upgrade-assure/validate-genesis.go delete mode 100644 cmd/upgrade-assure/vote-on-upgrade-proposal.go delete mode 100644 cmd/upgrade-assure/wait-for-block-height.go delete mode 100644 cmd/upgrade-assure/wait-for-next-block.go delete mode 100644 cmd/upgrade-assure/wait-for-service-to-start.go delete mode 100644 cmd/upgrade-assure/wait-for-tx-confirmation.go delete mode 100644 cmd/upgrade-assure/write-genesis-file.go delete mode 100644 cmd/upload-snapshot/main.go diff --git a/.github/workflows/delete-branch-snapshot.yml b/.github/workflows/delete-branch-snapshot.yml index 7a5511586..ab670641d 100644 --- a/.github/workflows/delete-branch-snapshot.yml +++ b/.github/workflows/delete-branch-snapshot.yml @@ -10,26 +10,12 @@ jobs: runs-on: ubuntu-latest steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Set up Go - uses: actions/setup-go@v5 - with: - go-version: "1.21" - - - name: Cache delete-snapshot binary - id: cache-delete-snapshot - uses: actions/cache@v4 - with: - path: | - ./build - key: ${{ runner.os }}-delete-snapshot-${{ hashFiles('cmd/delete-snapshot/*.go') }} - - - name: Build delete-snapshot binary - if: steps.cache-delete-snapshot.outputs.cache-hit != 'true' + - name: Retrieve post upgrade snapshot generator binary run: | - make build-delete-snapshot + 
DOWNLOAD_URL=https://github.com/elys-network/post-upgrade-snapshot-generator/releases/download/v0.1.0/post-upgrade-snapshot-generator-v0.1.0-linux-amd64 + POST_UPGRADE_SNAPSHOT_GENERATOR_PATH=/tmp/post-upgrade-snapshot-generator-v0.1.0 + curl -L $DOWNLOAD_URL -o $POST_UPGRADE_SNAPSHOT_GENERATOR_PATH && chmod +x $POST_UPGRADE_SNAPSHOT_GENERATOR_PATH + echo "POST_UPGRADE_SNAPSHOT_GENERATOR_PATH=$POST_UPGRADE_SNAPSHOT_GENERATOR_PATH" >> $GITHUB_ENV - name: Run delete-snapshot env: @@ -40,4 +26,4 @@ jobs: run: | SANITIZED_EVENT_REF=${{ github.event.ref }} SANITIZED_EVENT_REF=$(echo "$SANITIZED_EVENT_REF" | sed 's|refs/heads/||; s|/|_|g') - ./build/delete-snapshot "$SANITIZED_EVENT_REF" + ${POST_UPGRADE_SNAPSHOT_GENERATOR_PATH} delete-snapshot "$SANITIZED_EVENT_REF" diff --git a/.github/workflows/software-upgrade-test.yml b/.github/workflows/software-upgrade-test.yml index 1854fbdee..dcef127c2 100644 --- a/.github/workflows/software-upgrade-test.yml +++ b/.github/workflows/software-upgrade-test.yml @@ -7,12 +7,9 @@ on: - main jobs: - retrieve-latest-tag: + create-new-snapshot: runs-on: elys-runner - outputs: - LATEST_TAG: ${{ steps.get-latest-tag.outputs.LATEST_TAG }} - steps: - name: Checkout repository uses: actions/checkout@v4 @@ -20,27 +17,42 @@ jobs: fetch-depth: 0 fetch-tags: true + - name: Set up Go + uses: actions/setup-go@v5 + with: + go-version: "1.21" + - name: Get latest tag - id: get-latest-tag run: | git fetch --tags LATEST_TAG=$(git describe --tags `git rev-list --tags --max-count=1`) echo "LATEST_TAG=$LATEST_TAG" >> $GITHUB_ENV - echo "LATEST_TAG=$LATEST_TAG" >> $GITHUB_OUTPUT echo "Latest tag: $LATEST_TAG" - retrieve-snapshot: - runs-on: elys-runner + - name: Retrieve latest binary + run: | + DOWNLOAD_URL=https://github.com/elys-network/elys/releases/download/${LATEST_TAG}/elysd-${LATEST_TAG}-linux-amd64 + OLD_BINARY_PATH=/tmp/elysd-${LATEST_TAG} + curl -L $DOWNLOAD_URL -o $OLD_BINARY_PATH && chmod +x $OLD_BINARY_PATH + echo "OLD_BINARY_PATH=$OLD_BINARY_PATH" >> $GITHUB_ENV - needs: retrieve-latest-tag + - name: Retrieve post upgrade snapshot generator binary + run: | + DOWNLOAD_URL=https://github.com/elys-network/post-upgrade-snapshot-generator/releases/download/v0.1.0/post-upgrade-snapshot-generator-v0.1.0-linux-amd64 + POST_UPGRADE_SNAPSHOT_GENERATOR_PATH=/tmp/post-upgrade-snapshot-generator-v0.1.0 + curl -L $DOWNLOAD_URL -o $POST_UPGRADE_SNAPSHOT_GENERATOR_PATH && chmod +x $POST_UPGRADE_SNAPSHOT_GENERATOR_PATH + echo "POST_UPGRADE_SNAPSHOT_GENERATOR_PATH=$POST_UPGRADE_SNAPSHOT_GENERATOR_PATH" >> $GITHUB_ENV - outputs: - SNAPSHOT_DOWNLOAD_URL: ${{ steps.retrieve-info-json.outputs.SNAPSHOT_DOWNLOAD_URL }} - SNAPSHOT_FILE_PATH: ${{ steps.retrieve-info-json.outputs.SNAPSHOT_FILE_PATH }} + - name: Build new binary + run: | + # create new git tag + git tag -f v999.999.999 + # build new elys binary + make build + NEW_BINARY_PATH=./build/elysd + echo "NEW_BINARY_PATH=$NEW_BINARY_PATH" >> $GITHUB_ENV - steps: - name: Retrieve info.json and set snapshot path - id: retrieve-info-json run: | DOWNLOAD_URL=https://snapshots-testnet.stake-town.com/elys/info.json curl -L $DOWNLOAD_URL -o /tmp/info.json @@ -54,12 +66,10 @@ jobs: # set snapshot download url SNAPSHOT_DOWNLOAD_URL=https://snapshots-testnet.stake-town.com/elys/elystestnet-1_latest.tar.lz4 echo "SNAPSHOT_DOWNLOAD_URL=$SNAPSHOT_DOWNLOAD_URL" >> $GITHUB_ENV - echo "SNAPSHOT_DOWNLOAD_URL=$SNAPSHOT_DOWNLOAD_URL" >> $GITHUB_OUTPUT # set snapshot file path SNAPSHOT_FILE_PATH=/tmp/snapshot.tar.lz4 echo 
"SNAPSHOT_FILE_PATH=$SNAPSHOT_FILE_PATH" >> $GITHUB_ENV - echo "SNAPSHOT_FILE_PATH=$SNAPSHOT_FILE_PATH" >> $GITHUB_OUTPUT - name: Cache Snapshot uses: elys-network/actions-cache-s3@eba1d2b54699fda7ee03d826049bc67dcf514887 @@ -67,7 +77,7 @@ jobs: with: path: | ${{ env.SNAPSHOT_FILE_PATH }} - key: ${{ runner.os }}-snapshot-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} + key: ${{ runner.os }}-snapshot-${{ env.LATEST_TAG }} lookup-only: true aws-access-key-id: ${{ secrets.R2_ACCESS_KEY }} aws-secret-access-key: ${{ secrets.R2_SECRET_KEY }} @@ -82,193 +92,14 @@ jobs: curl -L $SNAPSHOT_DOWNLOAD_URL -o $SNAPSHOT_FILE_PATH if: steps.cache-snapshot.outputs.cache-hit != 'true' - retrieve-old-binary: - runs-on: elys-runner - - needs: retrieve-latest-tag - - outputs: - OLD_BINARY_PATH: ${{ steps.set-old-binary-path.outputs.OLD_BINARY_PATH }} - - steps: - - name: Set old binary path - id: set-old-binary-path - run: | - OLD_BINARY_PATH=/tmp/elysd-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - echo "OLD_BINARY_PATH=$OLD_BINARY_PATH" >> $GITHUB_ENV - echo "OLD_BINARY_PATH=$OLD_BINARY_PATH" >> $GITHUB_OUTPUT - - - name: Cache old binary - uses: actions/cache@v4 - id: cache-old-binary - with: - path: | - ${{ env.OLD_BINARY_PATH }} - key: ${{ runner.os }}-retrieve-old-binary-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - lookup-only: true - - - name: Retrieve latest binary - run: | - DOWNLOAD_URL=https://github.com/elys-network/elys/releases/download/${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }}/elysd-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }}-linux-amd64 - curl -L $DOWNLOAD_URL -o $OLD_BINARY_PATH && chmod +x $OLD_BINARY_PATH - - # TODO: retrieve upgrade-assure and upload-snapshot binaries - if: steps.cache-old-binary.outputs.cache-hit != 'true' - - build-new-binary: - runs-on: elys-runner - - outputs: - NEW_BINARY_PATH: ${{ steps.set-new-binary-paths.outputs.NEW_BINARY_PATH }} - NEW_UPGRADE_ASSURE_BINARY_PATH: ${{ steps.set-new-binary-paths.outputs.NEW_UPGRADE_ASSURE_BINARY_PATH }} - UPLOAD_SNAPSHOT_BINARY_PATH: ${{ steps.set-new-binary-paths.outputs.UPLOAD_SNAPSHOT_BINARY_PATH }} - CACHE_KEY: ${{ steps.set-new-binary-paths.outputs.CACHE_KEY }} - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Set up Go - uses: actions/setup-go@v5 - with: - go-version: "1.21" - - - name: Set new binary paths - id: set-new-binary-paths - run: | - NEW_BINARY_PATH=./build/elysd - echo "NEW_BINARY_PATH=$NEW_BINARY_PATH" >> $GITHUB_ENV - echo "NEW_BINARY_PATH=$NEW_BINARY_PATH" >> $GITHUB_OUTPUT - - NEW_UPGRADE_ASSURE_BINARY_PATH=./build/new-upgrade-assure - echo "NEW_UPGRADE_ASSURE_BINARY_PATH=$NEW_UPGRADE_ASSURE_BINARY_PATH" >> $GITHUB_ENV - echo "NEW_UPGRADE_ASSURE_BINARY_PATH=$NEW_UPGRADE_ASSURE_BINARY_PATH" >> $GITHUB_OUTPUT - - UPLOAD_SNAPSHOT_BINARY_PATH=./build/upload-snapshot - echo "UPLOAD_SNAPSHOT_BINARY_PATH=$UPLOAD_SNAPSHOT_BINARY_PATH" >> $GITHUB_ENV - echo "UPLOAD_SNAPSHOT_BINARY_PATH=$UPLOAD_SNAPSHOT_BINARY_PATH" >> $GITHUB_OUTPUT - - CACHE_KEY=${{ runner.os }}-build-new-binary-${{ hashFiles('**/go.mod', '**/*.go') }} - echo "CACHE_KEY=$CACHE_KEY" >> $GITHUB_ENV - echo "CACHE_KEY=$CACHE_KEY" >> $GITHUB_OUTPUT - - - name: Cache new binary - uses: actions/cache@v4 - id: cache-new-binary - with: - path: | - ${{ env.NEW_BINARY_PATH }} - ${{ env.NEW_UPGRADE_ASSURE_BINARY_PATH}} - ${{ env.UPLOAD_SNAPSHOT_BINARY_PATH }} - key: ${{ env.CACHE_KEY }} - lookup-only: true - - - name: Create git tag - run: git tag 
v999.999.999 - if: steps.cache-new-binary.outputs.cache-hit != 'true' - - - name: Build new binaries - id: build-new-binaries - run: | - # build new elys binary - make build - - # build new upgrade assure binary - make build-upgrade-assure - mv ./build/upgrade-assure $NEW_UPGRADE_ASSURE_BINARY_PATH - - # build upload snapshot binary - make build-upload-snapshot - if: steps.cache-new-binary.outputs.cache-hit != 'true' - - build-old-binary: - runs-on: elys-runner - - needs: [retrieve-latest-tag, build-new-binary] - - outputs: - OLD_UPGRADE_ASSURE_BINARY_PATH: ${{ steps.set-old-binary-path.outputs.OLD_UPGRADE_ASSURE_BINARY_PATH }} - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Set up Go - uses: actions/setup-go@v5 - with: - go-version: "1.21" - - - name: Set old binary path - id: set-old-binary-path - run: | - OLD_UPGRADE_ASSURE_BINARY_PATH=./build/old-upgrade-assure - echo "OLD_UPGRADE_ASSURE_BINARY_PATH=$OLD_UPGRADE_ASSURE_BINARY_PATH" >> $GITHUB_ENV - echo "OLD_UPGRADE_ASSURE_BINARY_PATH=$OLD_UPGRADE_ASSURE_BINARY_PATH" >> $GITHUB_OUTPUT - - - name: Cache old binaries - uses: actions/cache@v4 - id: cache-old-binaries - with: - path: | - ${{ env.OLD_UPGRADE_ASSURE_BINARY_PATH }} - key: ${{ runner.os }}-build-old-binary-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - lookup-only: true - - - name: Check out latest tag - run: git checkout ${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - if: steps.cache-old-binaries.outputs.cache-hit != 'true' - - - name: Copy old upgrade assure types.go file - run: | - cp -a ./cmd/upgrade-assure/types.go ./cmd/upgrade-assure/types.go.old - if: steps.cache-old-binaries.outputs.cache-hit != 'true' - - - name: Check out previous branch - run: git checkout - - if: steps.cache-old-binaries.outputs.cache-hit != 'true' - - # TODO: disable for now - # - name: Apply old upgrade assure types.go file - # run: | - # cp -a ./cmd/upgrade-assure/types.go.old ./cmd/upgrade-assure/types.go - # if: steps.cache-old-binaries.outputs.cache-hit != 'true' - - - name: Build old binaries - id: build-old-binaries - run: | - # build old upgrade assure binary - make build-upgrade-assure - mv ./build/upgrade-assure $OLD_UPGRADE_ASSURE_BINARY_PATH - if: steps.cache-old-binaries.outputs.cache-hit != 'true' - - chain-snapshot-and-export: - runs-on: elys-runner - - needs: - [ - retrieve-latest-tag, - retrieve-snapshot, - retrieve-old-binary, - build-new-binary, - build-old-binary, - ] - - steps: - - name: Cache Directories + - name: Check submit new proposal from cache exists uses: elys-network/actions-cache-s3@eba1d2b54699fda7ee03d826049bc67dcf514887 - id: cache-chain-snapshot-and-export + id: cache-submit-new-proposal with: path: | /home/runner/.elys /home/runner/.elys2 - /tmp/genesis.json - key: ${{ runner.os }}-chain-snapshot-and-export-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} + key: ${{ runner.os }}-submit-new-proposal-${{ env.LATEST_TAG }} lookup-only: true aws-access-key-id: ${{ secrets.R2_ACCESS_KEY }} aws-secret-access-key: ${{ secrets.R2_SECRET_KEY }} @@ -278,473 +109,54 @@ jobs: aws-s3-bucket-endpoint: false aws-s3-force-path-style: true - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - if: steps.cache-chain-snapshot-and-export.outputs.cache-hit != 'true' - - - name: Set up Go - uses: actions/setup-go@v5 - with: - go-version: "1.21" - if: steps.cache-chain-snapshot-and-export.outputs.cache-hit != 'true' - - - name: Restore 
snapshot from cache - uses: elys-network/actions-cache-s3@eba1d2b54699fda7ee03d826049bc67dcf514887 - with: - path: | - ${{ needs.retrieve-snapshot.outputs.SNAPSHOT_FILE_PATH }} - key: ${{ runner.os }}-snapshot-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - aws-access-key-id: ${{ secrets.R2_ACCESS_KEY }} - aws-secret-access-key: ${{ secrets.R2_SECRET_KEY }} - aws-endpoint: ${{ secrets.R2_ENDPOINT }} - aws-s3-bucket: ${{ secrets.R2_BUCKET_NAME }} - aws-region: auto - aws-s3-bucket-endpoint: false - aws-s3-force-path-style: true - if: steps.cache-chain-snapshot-and-export.outputs.cache-hit != 'true' - - - name: Restore new binary from cache - uses: actions/cache@v4 - with: - path: | - ${{ needs.build-new-binary.outputs.NEW_BINARY_PATH }} - ${{ needs.build-new-binary.outputs.NEW_UPGRADE_ASSURE_BINARY_PATH}} - ${{ needs.build-new-binary.outputs.UPLOAD_SNAPSHOT_BINARY_PATH }} - key: ${{ needs.build-new-binary.outputs.CACHE_KEY }} - if: steps.cache-chain-snapshot-and-export.outputs.cache-hit != 'true' - - - name: Restore old binaries from cache - uses: actions/cache@v4 - with: - path: | - ${{ needs.build-old-binary.outputs.OLD_UPGRADE_ASSURE_BINARY_PATH }} - key: ${{ runner.os }}-build-old-binary-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - if: steps.cache-chain-snapshot-and-export.outputs.cache-hit != 'true' - - - name: Restore old binary from cache - uses: actions/cache@v4 - with: - path: | - ${{ needs.retrieve-old-binary.outputs.OLD_BINARY_PATH }} - key: ${{ runner.os }}-retrieve-old-binary-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - if: steps.cache-chain-snapshot-and-export.outputs.cache-hit != 'true' - - name: Chain snapshot and export run: | - GOMEMLIMIT=8GiB \ - ${{ needs.build-old-binary.outputs.OLD_UPGRADE_ASSURE_BINARY_PATH }} \ - ${{ needs.retrieve-snapshot.outputs.SNAPSHOT_FILE_PATH }} \ - ${{ needs.retrieve-old-binary.outputs.OLD_BINARY_PATH }} \ - ${{ needs.build-new-binary.outputs.NEW_BINARY_PATH }} \ - --skip-chain-init \ - --skip-node-start \ + ${POST_UPGRADE_SNAPSHOT_GENERATOR_PATH} chain-snapshot-export \ + ${SNAPSHOT_FILE_PATH} \ + ${OLD_BINARY_PATH} \ --timeout-next-block 100000 \ --timeout-wait-for-node 100000 - if: steps.cache-chain-snapshot-and-export.outputs.cache-hit != 'true' - - chain-init: - runs-on: elys-runner - - needs: - [ - retrieve-latest-tag, - retrieve-snapshot, - retrieve-old-binary, - build-new-binary, - build-old-binary, - chain-snapshot-and-export, - ] - - steps: - - name: Cache Directories - uses: elys-network/actions-cache-s3@eba1d2b54699fda7ee03d826049bc67dcf514887 - id: cache-chain-init - with: - path: | - /home/runner/.elys - /home/runner/.elys2 - key: ${{ runner.os }}-chain-init-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - lookup-only: true - aws-access-key-id: ${{ secrets.R2_ACCESS_KEY }} - aws-secret-access-key: ${{ secrets.R2_SECRET_KEY }} - aws-endpoint: ${{ secrets.R2_ENDPOINT }} - aws-s3-bucket: ${{ secrets.R2_BUCKET_NAME }} - aws-region: auto - aws-s3-bucket-endpoint: false - aws-s3-force-path-style: true - - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - if: steps.cache-chain-init.outputs.cache-hit != 'true' - - - name: Set up Go - uses: actions/setup-go@v5 - with: - go-version: "1.21" - if: steps.cache-chain-init.outputs.cache-hit != 'true' - - - name: Restore chain snapshot and export from cache - uses: elys-network/actions-cache-s3@eba1d2b54699fda7ee03d826049bc67dcf514887 - with: - path: | - /home/runner/.elys - /home/runner/.elys2 - /tmp/genesis.json 
- key: ${{ runner.os }}-chain-snapshot-and-export-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - aws-access-key-id: ${{ secrets.R2_ACCESS_KEY }} - aws-secret-access-key: ${{ secrets.R2_SECRET_KEY }} - aws-endpoint: ${{ secrets.R2_ENDPOINT }} - aws-s3-bucket: ${{ secrets.R2_BUCKET_NAME }} - aws-region: auto - aws-s3-bucket-endpoint: false - aws-s3-force-path-style: true - if: steps.cache-chain-init.outputs.cache-hit != 'true' - - - name: Restore new binary from cache - uses: actions/cache@v4 - with: - path: | - ${{ needs.build-new-binary.outputs.NEW_BINARY_PATH }} - ${{ needs.build-new-binary.outputs.NEW_UPGRADE_ASSURE_BINARY_PATH}} - ${{ needs.build-new-binary.outputs.UPLOAD_SNAPSHOT_BINARY_PATH }} - key: ${{ needs.build-new-binary.outputs.CACHE_KEY }} - if: steps.cache-chain-init.outputs.cache-hit != 'true' - - - name: Restore old binaries from cache - uses: actions/cache@v4 - with: - path: | - ${{ needs.build-old-binary.outputs.OLD_UPGRADE_ASSURE_BINARY_PATH }} - key: ${{ runner.os }}-build-old-binary-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - if: steps.cache-chain-init.outputs.cache-hit != 'true' - - - name: Restore old binary from cache - uses: actions/cache@v4 - with: - path: | - ${{ needs.retrieve-old-binary.outputs.OLD_BINARY_PATH }} - key: ${{ runner.os }}-retrieve-old-binary-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - if: steps.cache-chain-init.outputs.cache-hit != 'true' + if: steps.cache-submit-new-proposal.outputs.cache-hit != 'true' - name: Chain initialization run: | - GOMEMLIMIT=8GiB \ - ${{ needs.build-new-binary.outputs.NEW_UPGRADE_ASSURE_BINARY_PATH }} \ - ${{ needs.retrieve-snapshot.outputs.SNAPSHOT_FILE_PATH }} \ - ${{ needs.retrieve-old-binary.outputs.OLD_BINARY_PATH }} \ - ${{ needs.build-new-binary.outputs.NEW_BINARY_PATH }} \ - --skip-snapshot \ - --skip-node-start \ + ${POST_UPGRADE_SNAPSHOT_GENERATOR_PATH} chain-init \ + ${OLD_BINARY_PATH} \ --timeout-next-block 100000 \ --timeout-wait-for-node 100000 - if: steps.cache-chain-init.outputs.cache-hit != 'true' - - create-second-validator: - runs-on: elys-runner - - needs: - [ - retrieve-latest-tag, - retrieve-snapshot, - retrieve-old-binary, - build-new-binary, - chain-init, - ] - - steps: - - name: Cache Directories - uses: elys-network/actions-cache-s3@eba1d2b54699fda7ee03d826049bc67dcf514887 - id: cache-create-second-validator - with: - path: | - /home/runner/.elys - /home/runner/.elys2 - key: ${{ runner.os }}-create-second-validator-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - lookup-only: true - aws-access-key-id: ${{ secrets.R2_ACCESS_KEY }} - aws-secret-access-key: ${{ secrets.R2_SECRET_KEY }} - aws-endpoint: ${{ secrets.R2_ENDPOINT }} - aws-s3-bucket: ${{ secrets.R2_BUCKET_NAME }} - aws-region: auto - aws-s3-bucket-endpoint: false - aws-s3-force-path-style: true - - - name: Restore chain snapshot and export from cache - uses: elys-network/actions-cache-s3@eba1d2b54699fda7ee03d826049bc67dcf514887 - with: - path: | - /home/runner/.elys - /home/runner/.elys2 - key: ${{ runner.os }}-chain-init-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - aws-access-key-id: ${{ secrets.R2_ACCESS_KEY }} - aws-secret-access-key: ${{ secrets.R2_SECRET_KEY }} - aws-endpoint: ${{ secrets.R2_ENDPOINT }} - aws-s3-bucket: ${{ secrets.R2_BUCKET_NAME }} - aws-region: auto - aws-s3-bucket-endpoint: false - aws-s3-force-path-style: true - if: steps.cache-create-second-validator.outputs.cache-hit != 'true' - - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - 
fetch-tags: true - if: steps.cache-create-second-validator.outputs.cache-hit != 'true' - - - name: Set up Go - uses: actions/setup-go@v5 - with: - go-version: "1.21" - if: steps.cache-create-second-validator.outputs.cache-hit != 'true' - - - name: Restore new binary from cache - uses: actions/cache@v4 - with: - path: | - ${{ needs.build-new-binary.outputs.NEW_BINARY_PATH }} - ${{ needs.build-new-binary.outputs.NEW_UPGRADE_ASSURE_BINARY_PATH}} - ${{ needs.build-new-binary.outputs.UPLOAD_SNAPSHOT_BINARY_PATH }} - key: ${{ needs.build-new-binary.outputs.CACHE_KEY }} - if: steps.cache-create-second-validator.outputs.cache-hit != 'true' - - - name: Restore old binary from cache - uses: actions/cache@v4 - with: - path: | - ${{ needs.retrieve-old-binary.outputs.OLD_BINARY_PATH }} - key: ${{ runner.os }}-retrieve-old-binary-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - if: steps.cache-create-second-validator.outputs.cache-hit != 'true' + if: steps.cache-submit-new-proposal.outputs.cache-hit != 'true' - name: Create second validator run: | - GOMEMLIMIT=8GiB \ - ${{ needs.build-new-binary.outputs.NEW_UPGRADE_ASSURE_BINARY_PATH }} \ - ${{ needs.retrieve-snapshot.outputs.SNAPSHOT_FILE_PATH }} \ - ${{ needs.retrieve-old-binary.outputs.OLD_BINARY_PATH }} \ - ${{ needs.build-new-binary.outputs.NEW_BINARY_PATH }} \ - --skip-snapshot \ - --skip-chain-init \ - --skip-prepare-validator-data \ - --skip-submit-proposal \ - --skip-upgrade-to-new-binary \ + ${POST_UPGRADE_SNAPSHOT_GENERATOR_PATH} create-second-validator \ + ${OLD_BINARY_PATH} \ --timeout-next-block 100000 \ --timeout-wait-for-node 100000 - if: steps.cache-create-second-validator.outputs.cache-hit != 'true' - - prepare-validator-data: - runs-on: elys-runner - - needs: - [ - retrieve-latest-tag, - retrieve-snapshot, - retrieve-old-binary, - build-new-binary, - create-second-validator, - ] - - steps: - - name: Cache Directories - uses: elys-network/actions-cache-s3@eba1d2b54699fda7ee03d826049bc67dcf514887 - id: cache-prepare-validator-data - with: - path: | - /home/runner/.elys - /home/runner/.elys2 - key: ${{ runner.os }}-prepare-validator-data-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - lookup-only: true - aws-access-key-id: ${{ secrets.R2_ACCESS_KEY }} - aws-secret-access-key: ${{ secrets.R2_SECRET_KEY }} - aws-endpoint: ${{ secrets.R2_ENDPOINT }} - aws-s3-bucket: ${{ secrets.R2_BUCKET_NAME }} - aws-region: auto - aws-s3-bucket-endpoint: false - aws-s3-force-path-style: true - - - name: Restore create second validator from cache - uses: elys-network/actions-cache-s3@eba1d2b54699fda7ee03d826049bc67dcf514887 - with: - path: | - /home/runner/.elys - /home/runner/.elys2 - key: ${{ runner.os }}-create-second-validator-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - aws-access-key-id: ${{ secrets.R2_ACCESS_KEY }} - aws-secret-access-key: ${{ secrets.R2_SECRET_KEY }} - aws-endpoint: ${{ secrets.R2_ENDPOINT }} - aws-s3-bucket: ${{ secrets.R2_BUCKET_NAME }} - aws-region: auto - aws-s3-bucket-endpoint: false - aws-s3-force-path-style: true - if: steps.cache-prepare-validator-data.outputs.cache-hit != 'true' - - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - if: steps.cache-prepare-validator-data.outputs.cache-hit != 'true' - - - name: Set up Go - uses: actions/setup-go@v5 - with: - go-version: "1.21" - if: steps.cache-prepare-validator-data.outputs.cache-hit != 'true' - - - name: Restore new binary from cache - uses: actions/cache@v4 - with: - path: | - ${{ 
needs.build-new-binary.outputs.NEW_BINARY_PATH }} - ${{ needs.build-new-binary.outputs.NEW_UPGRADE_ASSURE_BINARY_PATH }} - ${{ needs.build-new-binary.outputs.UPLOAD_SNAPSHOT_BINARY_PATH }} - key: ${{ needs.build-new-binary.outputs.CACHE_KEY }} - if: steps.cache-prepare-validator-data.outputs.cache-hit != 'true' - - - name: Restore old binary from cache - uses: actions/cache@v4 - with: - path: | - ${{ needs.retrieve-old-binary.outputs.OLD_BINARY_PATH }} - key: ${{ runner.os }}-retrieve-old-binary-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - if: steps.cache-prepare-validator-data.outputs.cache-hit != 'true' + if: steps.cache-submit-new-proposal.outputs.cache-hit != 'true' - name: Prepare validator data run: | - GOMEMLIMIT=8GiB \ - ${{ needs.build-new-binary.outputs.NEW_UPGRADE_ASSURE_BINARY_PATH }} \ - ${{ needs.retrieve-snapshot.outputs.SNAPSHOT_FILE_PATH }} \ - ${{ needs.retrieve-old-binary.outputs.OLD_BINARY_PATH }} \ - ${{ needs.build-new-binary.outputs.NEW_BINARY_PATH }} \ - --skip-snapshot \ - --skip-chain-init \ - --skip-create-validator \ - --skip-submit-proposal \ - --skip-upgrade-to-new-binary \ + ${POST_UPGRADE_SNAPSHOT_GENERATOR_PATH} prepare-validator-data \ --timeout-next-block 100000 \ --timeout-wait-for-node 100000 - if: steps.cache-prepare-validator-data.outputs.cache-hit != 'true' - - submit-new-proposal: - runs-on: elys-runner - - needs: - [ - retrieve-latest-tag, - retrieve-snapshot, - retrieve-old-binary, - build-new-binary, - prepare-validator-data, - ] - - steps: - - name: Cache Directories - uses: elys-network/actions-cache-s3@eba1d2b54699fda7ee03d826049bc67dcf514887 - id: cache-submit-new-proposal - with: - path: | - /home/runner/.elys - /home/runner/.elys2 - key: ${{ runner.os }}-submit-new-proposal-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - lookup-only: true - aws-access-key-id: ${{ secrets.R2_ACCESS_KEY }} - aws-secret-access-key: ${{ secrets.R2_SECRET_KEY }} - aws-endpoint: ${{ secrets.R2_ENDPOINT }} - aws-s3-bucket: ${{ secrets.R2_BUCKET_NAME }} - aws-region: auto - aws-s3-bucket-endpoint: false - aws-s3-force-path-style: true - - - name: Restore prepare validator data from cache - uses: elys-network/actions-cache-s3@eba1d2b54699fda7ee03d826049bc67dcf514887 - with: - path: | - /home/runner/.elys - /home/runner/.elys2 - key: ${{ runner.os }}-prepare-validator-data-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} - aws-access-key-id: ${{ secrets.R2_ACCESS_KEY }} - aws-secret-access-key: ${{ secrets.R2_SECRET_KEY }} - aws-endpoint: ${{ secrets.R2_ENDPOINT }} - aws-s3-bucket: ${{ secrets.R2_BUCKET_NAME }} - aws-region: auto - aws-s3-bucket-endpoint: false - aws-s3-force-path-style: true - if: steps.cache-submit-new-proposal.outputs.cache-hit != 'true' - - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - if: steps.cache-submit-new-proposal.outputs.cache-hit != 'true' - - - name: Set up Go - uses: actions/setup-go@v5 - with: - go-version: "1.21" - if: steps.cache-submit-new-proposal.outputs.cache-hit != 'true' - - - name: Restore new binary from cache - uses: actions/cache@v4 - with: - path: | - ${{ needs.build-new-binary.outputs.NEW_BINARY_PATH }} - ${{ needs.build-new-binary.outputs.NEW_UPGRADE_ASSURE_BINARY_PATH}} - ${{ needs.build-new-binary.outputs.UPLOAD_SNAPSHOT_BINARY_PATH }} - key: ${{ needs.build-new-binary.outputs.CACHE_KEY }} - if: steps.cache-submit-new-proposal.outputs.cache-hit != 'true' - - - name: Restore old binary from cache - uses: actions/cache@v4 - with: - path: | - ${{ 
needs.retrieve-old-binary.outputs.OLD_BINARY_PATH }} - key: ${{ runner.os }}-retrieve-old-binary-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} if: steps.cache-submit-new-proposal.outputs.cache-hit != 'true' - name: Submit new proposal run: | - GOMEMLIMIT=8GiB \ - ${{ needs.build-new-binary.outputs.NEW_UPGRADE_ASSURE_BINARY_PATH }} \ - ${{ needs.retrieve-snapshot.outputs.SNAPSHOT_FILE_PATH }} \ - ${{ needs.retrieve-old-binary.outputs.OLD_BINARY_PATH }} \ - ${{ needs.build-new-binary.outputs.NEW_BINARY_PATH }} \ - --skip-snapshot \ - --skip-chain-init \ - --skip-create-validator \ - --skip-prepare-validator-data \ - --skip-upgrade-to-new-binary \ + ${POST_UPGRADE_SNAPSHOT_GENERATOR_PATH} submit-new-proposal \ + ${OLD_BINARY_PATH} \ + ${NEW_BINARY_PATH} \ --timeout-next-block 100000 \ --timeout-wait-for-node 100000 if: steps.cache-submit-new-proposal.outputs.cache-hit != 'true' - upgrade-to-new-binary: - runs-on: elys-runner - - needs: - [ - retrieve-latest-tag, - retrieve-snapshot, - build-new-binary, - submit-new-proposal, - ] - - steps: - - name: Cache Directories - uses: elys-network/actions-cache-s3@eba1d2b54699fda7ee03d826049bc67dcf514887 - id: cache-upgrade-to-new-binary + - name: Save submit new proposal to cache + uses: elys-network/actions-cache-s3/save@eba1d2b54699fda7ee03d826049bc67dcf514887 with: path: | /home/runner/.elys /home/runner/.elys2 - key: ${{ needs.build-new-binary.outputs.CACHE_KEY }}-upgrade-to-new-binary - lookup-only: true + key: ${{ runner.os }}-submit-new-proposal-${{ env.LATEST_TAG }} aws-access-key-id: ${{ secrets.R2_ACCESS_KEY }} aws-secret-access-key: ${{ secrets.R2_SECRET_KEY }} aws-endpoint: ${{ secrets.R2_ENDPOINT }} @@ -752,14 +164,15 @@ jobs: aws-region: auto aws-s3-bucket-endpoint: false aws-s3-force-path-style: true + if: steps.cache-submit-new-proposal.outputs.cache-hit != 'true' - name: Restore submit new proposal from cache - uses: elys-network/actions-cache-s3@eba1d2b54699fda7ee03d826049bc67dcf514887 + uses: elys-network/actions-cache-s3/restore@eba1d2b54699fda7ee03d826049bc67dcf514887 with: path: | /home/runner/.elys /home/runner/.elys2 - key: ${{ runner.os }}-submit-new-proposal-${{ needs.retrieve-latest-tag.outputs.LATEST_TAG }} + key: ${{ runner.os }}-submit-new-proposal-${{ env.LATEST_TAG }} aws-access-key-id: ${{ secrets.R2_ACCESS_KEY }} aws-secret-access-key: ${{ secrets.R2_SECRET_KEY }} aws-endpoint: ${{ secrets.R2_ENDPOINT }} @@ -767,87 +180,14 @@ jobs: aws-region: auto aws-s3-bucket-endpoint: false aws-s3-force-path-style: true - if: steps.cache-upgrade-to-new-binary.outputs.cache-hit != 'true' - - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - if: steps.cache-upgrade-to-new-binary.outputs.cache-hit != 'true' - - - name: Set up Go - uses: actions/setup-go@v5 - with: - go-version: "1.21" - if: steps.cache-upgrade-to-new-binary.outputs.cache-hit != 'true' - - - name: Restore new binary from cache - uses: actions/cache@v4 - with: - path: | - ${{ needs.build-new-binary.outputs.NEW_BINARY_PATH }} - ${{ needs.build-new-binary.outputs.NEW_UPGRADE_ASSURE_BINARY_PATH}} - ${{ needs.build-new-binary.outputs.UPLOAD_SNAPSHOT_BINARY_PATH }} - key: ${{ needs.build-new-binary.outputs.CACHE_KEY }} - if: steps.cache-upgrade-to-new-binary.outputs.cache-hit != 'true' + if: steps.cache-submit-new-proposal.outputs.cache-hit == 'true' - name: Upgrade to new binary run: | - GOMEMLIMIT=8GiB \ - ${{ needs.build-new-binary.outputs.NEW_UPGRADE_ASSURE_BINARY_PATH }} \ - ${{ 
needs.retrieve-snapshot.outputs.SNAPSHOT_FILE_PATH }} \ - ${{ needs.build-new-binary.outputs.NEW_BINARY_PATH }} \ - ${{ needs.build-new-binary.outputs.NEW_BINARY_PATH }} \ - --skip-snapshot \ - --skip-chain-init \ - --skip-create-validator \ - --skip-prepare-validator-data \ - --skip-submit-proposal \ + ${POST_UPGRADE_SNAPSHOT_GENERATOR_PATH} upgrade-to-new-binary \ + ${NEW_BINARY_PATH} \ --timeout-next-block 100000 \ --timeout-wait-for-node 100000 - if: steps.cache-upgrade-to-new-binary.outputs.cache-hit != 'true' - - create-new-snapshot-file: - runs-on: elys-runner - - needs: [retrieve-latest-tag, build-new-binary, upgrade-to-new-binary] - - steps: - - name: Restore upgrade to new binary from cache - uses: elys-network/actions-cache-s3@eba1d2b54699fda7ee03d826049bc67dcf514887 - with: - path: | - /home/runner/.elys - /home/runner/.elys2 - key: ${{ needs.build-new-binary.outputs.CACHE_KEY }}-upgrade-to-new-binary - aws-access-key-id: ${{ secrets.R2_ACCESS_KEY }} - aws-secret-access-key: ${{ secrets.R2_SECRET_KEY }} - aws-endpoint: ${{ secrets.R2_ENDPOINT }} - aws-s3-bucket: ${{ secrets.R2_BUCKET_NAME }} - aws-region: auto - aws-s3-bucket-endpoint: false - aws-s3-force-path-style: true - - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Set up Go - uses: actions/setup-go@v5 - with: - go-version: "1.21" - - - name: Restore new binary from cache - uses: actions/cache@v4 - with: - path: | - ${{ needs.build-new-binary.outputs.NEW_BINARY_PATH }} - ${{ needs.build-new-binary.outputs.NEW_UPGRADE_ASSURE_BINARY_PATH}} - ${{ needs.build-new-binary.outputs.UPLOAD_SNAPSHOT_BINARY_PATH }} - key: ${{ needs.build-new-binary.outputs.CACHE_KEY }} - name: Create new snapshot file run: | @@ -867,7 +207,7 @@ jobs: R2_ENDPOINT: ${{ secrets.R2_ENDPOINT }} R2_BUCKET_NAME: ${{ secrets.R2_BUCKET_NAME }} run: | - ${{ needs.build-new-binary.outputs.UPLOAD_SNAPSHOT_BINARY_PATH }} $NEW_SNAPSHOT_PATH + ${POST_UPGRADE_SNAPSHOT_GENERATOR_PATH} upload-snapshot $NEW_SNAPSHOT_PATH - name: Info about the snapshot run: | diff --git a/.goreleaser.yaml b/.goreleaser.yaml index cca3e9a94..60b06e114 100644 --- a/.goreleaser.yaml +++ b/.goreleaser.yaml @@ -120,30 +120,6 @@ builds: - muslc - osusergo - pebbledb - - id: upgrade-assure-linux-amd64 - main: ./cmd/upgrade-assure - binary: upgrade-assure - goos: - - linux - goarch: - - amd64 - env: - - CC=x86_64-linux-gnu-gcc - flags: - - -mod=readonly - - -trimpath - - id: upload-snapshot-linux-amd64 - main: ./cmd/upload-snapshot - binary: upload-snapshot - goos: - - linux - goarch: - - amd64 - env: - - CC=x86_64-linux-gnu-gcc - flags: - - -mod=readonly - - -trimpath universal_binaries: - id: elysd-darwin-universal ids: diff --git a/Makefile b/Makefile index be00fba8a..0e3a4d1d8 100644 --- a/Makefile +++ b/Makefile @@ -64,24 +64,6 @@ build: check-version go.sum @-mkdir -p $(BUILD_FOLDER) 2> /dev/null @GOFLAGS=$(GOFLAGS) go build $(BUILD_FLAGS) -o $(BUILD_FOLDER) ./cmd/$(BINARY) -## build-upgrade-assure: Build the binary for upgrade assure -build-upgrade-assure: check-version go.sum - @echo Building Upgrade assure binary... - @-mkdir -p $(BUILD_FOLDER) 2> /dev/null - @GOFLAGS=$(GOFLAGS) go build -o $(BUILD_FOLDER) ./cmd/upgrade-assure - -## build-upload-snapshot: Build the binary for upload snapshot -build-upload-snapshot: check-version go.sum - @echo Building Upload snapshot binary... 
- @-mkdir -p $(BUILD_FOLDER) 2> /dev/null - @GOFLAGS=$(GOFLAGS) go build -o $(BUILD_FOLDER) ./cmd/upload-snapshot - -## build-delete-snapshot: Build the binary for delete snapshot -build-delete-snapshot: check-version go.sum - @echo Building Delete snapshot binary... - @-mkdir -p $(BUILD_FOLDER) 2> /dev/null - @GOFLAGS=$(GOFLAGS) go build -o $(BUILD_FOLDER) ./cmd/delete-snapshot - ## build-all: Build binaries for all platforms build-all: @echo Building Elysd binaries for all platforms... @@ -97,7 +79,7 @@ do-checksum: ## build-with-checksum: Build binaries for all platforms and generate checksums build-with-checksum: build-all do-checksum -.PHONY: install build build-all do-checksum build-with-checksum build-upgrade-assure build-upload-snapshot build-delete-snapshot +.PHONY: install build build-all do-checksum build-with-checksum ## mocks: Generate mocks mocks: diff --git a/cmd/delete-snapshot/main.go b/cmd/delete-snapshot/main.go deleted file mode 100644 index 30459d68c..000000000 --- a/cmd/delete-snapshot/main.go +++ /dev/null @@ -1,76 +0,0 @@ -package main - -import ( - "context" - "fmt" - "os" - "strings" - - "github.com/aws/aws-sdk-go-v2/aws" - "github.com/aws/aws-sdk-go-v2/config" - "github.com/aws/aws-sdk-go-v2/credentials" - "github.com/aws/aws-sdk-go-v2/service/s3" -) - -func main() { - if len(os.Args) != 2 { - fmt.Println("Usage: ") - os.Exit(1) - } - - // Fetch credentials and configuration from environment variables - accessKey := os.Getenv("R2_ACCESS_KEY") - secretKey := os.Getenv("R2_SECRET_KEY") - s3URL := os.Getenv("R2_ENDPOINT") - bucketName := os.Getenv("R2_BUCKET_NAME") - branchName := os.Args[1] - - // Ensure all required environment variables are set - if accessKey == "" || secretKey == "" || s3URL == "" || bucketName == "" { - fmt.Println("Please set R2_ACCESS_KEY, R2_SECRET_KEY, R2_ENDPOINT, and R2_BUCKET_NAME environment variables") - os.Exit(1) - } - - // Load AWS configuration with credentials - cfg, err := config.LoadDefaultConfig( - context.TODO(), - config.WithCredentialsProvider( - credentials.NewStaticCredentialsProvider(accessKey, secretKey, ""), - ), - config.WithRegion("auto"), // Ensure this region is appropriate or set it via environment variable if needed - config.WithEndpointResolverWithOptions( - aws.EndpointResolverWithOptionsFunc( - func(service, region string, options ...interface{}) (aws.Endpoint, error) { - return aws.Endpoint{ - URL: s3URL, - }, nil - }, - ), - ), - ) - if err != nil { - fmt.Fprintf(os.Stderr, "failed to load configuration, %v", err) - os.Exit(1) - } - - // Create an S3 client - client := s3.NewFromConfig(cfg) - - // Replace '/' with '_' in the branch name - safeBranchName := strings.ReplaceAll(branchName, "/", "_") - - // Construct the key for the snapshot file - key := fmt.Sprintf("elys-snapshot-%s.tar.lz4", safeBranchName) - - // Delete the file - _, err = client.DeleteObject(context.TODO(), &s3.DeleteObjectInput{ - Bucket: &bucketName, - Key: &key, - }) - if err != nil { - fmt.Fprintf(os.Stderr, "failed to delete file %q from bucket %q, %v", key, bucketName, err) - os.Exit(1) - } - - fmt.Printf("Successfully deleted %q from %q\n", key, bucketName) -} diff --git a/cmd/upgrade-assure/README.md b/cmd/upgrade-assure/README.md deleted file mode 100644 index 4e4ee76b1..000000000 --- a/cmd/upgrade-assure/README.md +++ /dev/null @@ -1,209 +0,0 @@ -# Localnet Upgrade and Data Migration Guide for ELYS Network - -This document offers a detailed framework for upgrading the localnet used in the ELYS network, encompassing version changes, 
data migrations, and utilizing latest testnet snapshots in newer versions for local development purposes. - -## Prerequisites - -Ensure the following prerequisites are met before proceeding with the upgrade and migration: - -- Git is installed on your machine. -- Go programming language is installed. -- Access to the project repository. -- `curl` and `make` utilities are installed. -- Appropriate permissions to execute the commands listed below. - -## Upgrade Steps - -### Step 1: Checkout the Branch containing the new implementation and build the upgrade assure binary - -Switch to the branch containing the new implementation: - -```bash -git checkout -``` - -```bash -make build-upgrade-assure -mv build/upgrade-assure build/new-upgrade-assure -``` - -### Step 2: Create a New Tag using semantic versioning and Install It. - -i.e for tag v0.31.0, tag the new release and install it: - -```bash -git tag -d v0.31.0 -git tag v0.31.0 -make install -``` - -### Step 3: Retrieve the current binary depending on your OS - -i.e In case of current Binary is v0.30.0: -For MacOS/Darwin users: - -```bash -curl -L https://github.com/elys-network/elys/releases/download/v0.30.0/elysd-v0.30.0-darwin-arm64 -o /tmp/elysd-v0.30.0 -``` - -For Linux users: - -```bash -curl -L https://github.com/elys-network/elys/releases/download/v0.30.0/elysd-v0.30.0-linux-amd64 -o /tmp/elysd-v0.30.0 -``` - -### Step 4: Retrieve Testnet Snapshot - -Fetch the latest testnet snapshot, necessary for data migration using `curl`: - -```bash -curl -L https://snapshots-testnet.stake-town.com/elys/elystestnet-1_latest.tar.lz4 -o /tmp/snapshot.tar.lz4 -``` - -### Step 5: Checkout the latest working tag available in https://github.com/elys-network/elys/releases/tag - -Switch to the previously stable version. - -```bash -git checkout tags/ -``` - -### Step 6: Run Upgrade-Assure Script - -#### 6a: Build old upgrade assure binary - -```bash -make build-upgrade-assure -mv build/upgrade-assure build/old-upgrade-assure -``` - -#### 6a: Initial Run - -Run the upgrade-assure script without starting the node: - -```bash -./build/old-upgrade-assure /tmp/snapshot.tar.lz4 /tmp/elysd-v0.30.0 ~/go/bin/elysd --skip-node-start -``` - -Notice that /tmp/elysd-v0.30.0 is the current binary retrieved in step 3. - -#### 6b: Handle Potential Errors - -Address any type errors, such as difficulties in unmarshaling strings into integer fields in Go struct fields. - -#### 6c: Update the Script - -Modify `cmd/upgrade-assure/types.go` to reflect data structure changes necessary to resolve type errors. - -The `types.go` file employs the `elys` data structure types to serialize the genesis state into JSON format for initializing localnet. This file predominantly handles conversion issues where Go struggles with fields defined as integers. To address this, such fields are overridden as `json.Number`. - -During the `read-genisis-file` step of the `upgrade-assure` process, if parsing of the genesis JSON file fails, an error is returned. This issue generally arises from integer fields that must be redefined to `json.Number`. - -#### 6d: Retry Upgrade-Assure - -Repeat the process after updating the script: - -```bash -./build/old-upgrade-assure /tmp/snapshot.tar.lz4 /tmp/elysd-v0.30.0 ~/go/bin/elysd --skip-node-start -``` - -Notice that /tmp/elysd-v0.30.0 is the current binary retrieved in step 3. 
- -### Step 7: Checkout to Latest Changes Branch or tag you used in step 1 - -Switch back to the main branch to incorporate the latest changes: - -```bash -git checkout -``` - -### Step 8: Final Upgrade Command - -Execute the final upgrade command to complete the upgrade process: - -```bash -./build/new-upgrade-assure /tmp/snapshot.tar.lz4 /tmp/elysd-v0.30.0 ~/go/bin/elysd --skip-snapshot --skip-chain-init -``` - -Notice that /tmp/elysd-v0.30.0 is the current binary retrieved in step 3 and we are using the new upgrade assure binary. - -This command will execute both Alice and Bob nodes and it takes some time to execute. You will know it sucessfully finishes after the process -kills itself. The final logs are: - -``` -2024/05/01 15:14:00 Process killed successfully -2024/05/01 15:14:00 Process killed successfully -``` - -### Step 9: Run the chain - -1. Run the first node with `elysd start` -2. After running and the log prints "No addresses to dial" you have to start the second node: - `elysd start --home ~/.elys2 --rpc.laddr tcp://127.0.0.1:26667 --p2p.laddr tcp://0.0.0.0:26666`. - -### Step 10 (optional): - -You can make a backup copy of .elys and .elys2 folders available in your home directory in case you want -to start a fresh copy of the chain without going to this process again. - -### Step 11: Start the Nodes manually (optional) - -If something went wrong while you were starting the node at step 8, you can start the nodes manually with the new binary by using the following command: - -```bash -./build/new-upgrade-assure /tmp/snapshot.tar.lz4 /tmp/elysd-v0.30.0 ~/go/bin/elysd --only-start-with-new-binary -``` - -Notice that /tmp/elysd-v0.30.0 is the current binary retrieved in step 3. - -## Testnet Snapshots Usage - -**Snapshot Sources and Installation Procedures:** - -### High Stakes Testnet - -- **Snapshot Source:** [Download the latest snapshot for High Stakes Testnet](https://tools.highstakes.ch/files/elys.tar.gz). -- **Installation Commands:** - ```bash - make install build-upgrade-assure - ./build/upgrade-assure https://tools.highstakes.ch/files/elys.tar.gz ~/go/bin/elysd ~/go/bin/elysd --skip-proposal - ``` - -### Stake Town Testnet - -- **Snapshot Source:** [Download the latest snapshot for Stake Town Testnet](https://snapshots-testnet.stake-town.com/elys/elystestnet-1_latest.tar.lz4). -- **Installation Commands:** - ```bash - make install build-upgrade-assure - ./build/upgrade-assure https://snapshots-testnet.stake-town.com/elys/elystestnet-1_latest.tar.lz4 ~/go/bin/elysd ~/go/bin/elysd --skip-proposal - ``` - -## Troubleshooting - -**Common Issues and Solutions:** - -- **Memory Limitation:** Address processes terminated due to insufficient RAM by creating a swap file as detailed [here](https://wiki.manjaro.org/index.php?title=Swap#Using_a_Swapfile). -- **Timeout Issues:** Modify timeout settings for node responsiveness or block processing delays: - -```bash ---timeout-wait-for-node=600 # Time in seconds ---timeout-next-block=15 # Time in minutes -``` - -**My nodes crashed after the upgrade. What should I do?** - -Run the following command to start the nodes manually: - -```bash -make build-upgrade-assure -./build/upgrade-assure /tmp/snapshot.tar.lz4 ~/go/bin/elysd ~/go/bin/elysd --only-start-with-new-binary -``` - -**Debug Mode** - -By default the nodes run in `info` mode. 
To enable debug mode, add the following flag to the command: - -```bash -make install build-upgrade-assure -LOG_LEVEL=debug ./build/upgrade-assure /tmp/snapshot.tar.lz4 ~/go/bin/elysd ~/go/bin/elysd --only-start-with-new-binary -``` diff --git a/cmd/upgrade-assure/account-unmarshal-json.go b/cmd/upgrade-assure/account-unmarshal-json.go deleted file mode 100644 index f80a07bd3..000000000 --- a/cmd/upgrade-assure/account-unmarshal-json.go +++ /dev/null @@ -1,38 +0,0 @@ -package main - -import ( - "encoding/json" - "fmt" -) - -func (a *Account) UnmarshalJSON(data []byte) error { - var raw map[string]interface{} - if err := json.Unmarshal(data, &raw); err != nil { - return err - } - - // Set the Type field from the raw data - typeStr, ok := raw["@type"].(string) - if !ok { - return fmt.Errorf("type field is missing or invalid") - } - a.Type = typeStr - - switch a.Type { - case "/cosmos.auth.v1beta1.BaseAccount": - var ba BaseAccount - if err := json.Unmarshal(data, &ba); err != nil { - return err - } - a.BaseAccount = &ba - case "/cosmos.auth.v1beta1.ModuleAccount": - var ma ModuleAccount - if err := json.Unmarshal(data, &ma); err != nil { - return err - } - a.ModuleAccount = &ma - default: - return fmt.Errorf("unknown account type: %s", a.Type) - } - return nil -} diff --git a/cmd/upgrade-assure/add-genesis-account.go b/cmd/upgrade-assure/add-genesis-account.go deleted file mode 100644 index f4ba4c2d2..000000000 --- a/cmd/upgrade-assure/add-genesis-account.go +++ /dev/null @@ -1,19 +0,0 @@ -package main - -import ( - "log" - "os/exec" -) - -func addGenesisAccount(cmdPath, address, balance, homePath string) { - // Command and arguments - args := []string{"add-genesis-account", address, balance + "uelys," + balance + "ibc/2180E84E20F5679FCC760D8C165B60F42065DEF7F46A72B447CFF1B7DC6C0A65," + balance + "ibc/E2D2F6ADCC68AA3384B2F5DFACCA437923D137C14E86FB8A10207CF3BED0C8D4," + balance + "ibc/B4314D0E670CB43C88A5DCA09F76E5E812BD831CC2FEC6E434C9E5A9D1F57953", "--home", homePath} - - // Execute the command - if err := exec.Command(cmdPath, args...).Run(); err != nil { - log.Fatalf(ColorRed+"Command execution failed: %v", err) // nolint: goconst - } - - // If execution reaches here, the command was successful - log.Printf(ColorYellow+"add genesis account with address %s, balance: %s and home path %s successfully", address, balance, homePath) -} diff --git a/cmd/upgrade-assure/add-key.go b/cmd/upgrade-assure/add-key.go deleted file mode 100644 index e7253f17e..000000000 --- a/cmd/upgrade-assure/add-key.go +++ /dev/null @@ -1,45 +0,0 @@ -package main - -import ( - "encoding/json" - "log" - "os/exec" -) - -func addKey(cmdPath, name, mnemonic, homePath, keyringBackend string) string { - // Prepare the command - args := []string{"keys", "add", name, "--recover", "--home", homePath, "--keyring-backend", keyringBackend, "--output", "json"} - cmd := exec.Command(cmdPath, args...) 
- - // Get the stdin pipe to send the mnemonic - stdinPipe, err := cmd.StdinPipe() - if err != nil { - log.Fatalf(ColorRed+"Failed to create stdin pipe: %v", err) - } - - // Write the mnemonic to the stdin pipe - go func() { - defer stdinPipe.Close() - _, err := stdinPipe.Write([]byte(mnemonic + "\n")) - if err != nil { - log.Fatalf(ColorRed+"Failed to write mnemonic to stdin: %v", err) - } - }() - - // Run the command and wait for it to finish - output, err := cmd.CombinedOutput() - if err != nil { - log.Fatalf(ColorRed+"Command execution failed: %v", err) - } - - // Unmarshal the JSON output - var keyOutput KeyOutput - if err := json.Unmarshal(output, &keyOutput); err != nil { - log.Fatalf(ColorRed+"Failed to unmarshal JSON output: %v", err) - } - - // Log the address - log.Printf(ColorYellow+"Added key with name %s, home path: %s, keyring backend %s and address %s successfully", name, homePath, keyringBackend, keyOutput.Address) - - return keyOutput.Address -} diff --git a/cmd/upgrade-assure/add-peers.go b/cmd/upgrade-assure/add-peers.go deleted file mode 100644 index b75af0f4c..000000000 --- a/cmd/upgrade-assure/add-peers.go +++ /dev/null @@ -1,25 +0,0 @@ -package main - -import ( - "log" - "strings" -) - -func addPeers(homePath, p2p, nodeId string) { - // Path to config files - configPath := homePath + "/config/config.toml" - - // update p2p url to remove the `tcp://` or `http://` or `https://` prefix - p2p = strings.TrimPrefix(p2p, "tcp://") - p2p = strings.TrimPrefix(p2p, "http://") - p2p = strings.TrimPrefix(p2p, "https://") - - // escape the `:` character from p2p - p2p = strings.ReplaceAll(p2p, ":", "\\:") - // escape the `.` character from p2p - p2p = strings.ReplaceAll(p2p, ".", "\\.") - - sed("s/^persistent_peers =.*/persistent_peers = \\\""+nodeId+"\\@"+p2p+"\\\"/", configPath) - - log.Printf(ColorYellow + "peers have been added successfully.") -} diff --git a/cmd/upgrade-assure/backup-genesis-init-file.go b/cmd/upgrade-assure/backup-genesis-init-file.go deleted file mode 100644 index 74e0fba21..000000000 --- a/cmd/upgrade-assure/backup-genesis-init-file.go +++ /dev/null @@ -1,21 +0,0 @@ -package main - -import ( - "log" - "os/exec" -) - -func backupGenesisInitFile(homePath string) { - // Copy genesis.json to genesis_init.json - args := []string{ - homePath + "/config/genesis.json", - homePath + "/config/genesis_init.json", - } - - if err := exec.Command("cp", args...).Run(); err != nil { - log.Fatalf(ColorRed+"Failed to copy genesis.json to genesis_init.json: %v", err) - } - - // If execution reaches here, the command was successful - log.Printf(ColorYellow + "Genesis file copied to genesis_init.json") -} diff --git a/cmd/upgrade-assure/check-tx-status.go b/cmd/upgrade-assure/check-tx-status.go deleted file mode 100644 index b66afa1ae..000000000 --- a/cmd/upgrade-assure/check-tx-status.go +++ /dev/null @@ -1,15 +0,0 @@ -package main - -import ( - "fmt" - "os/exec" -) - -func checkTxStatus(cmdPath, node, txHash string) (bool, error) { - args := []string{"q", "tx", txHash, "--node", node, "--output", "json"} - _, err := exec.Command(cmdPath, args...).CombinedOutput() - if err != nil { - return false, fmt.Errorf("failed to query tx status: %w", err) - } - return true, nil -} diff --git a/cmd/upgrade-assure/collect-gentxs.go b/cmd/upgrade-assure/collect-gentxs.go deleted file mode 100644 index 9a867e5aa..000000000 --- a/cmd/upgrade-assure/collect-gentxs.go +++ /dev/null @@ -1,19 +0,0 @@ -package main - -import ( - "log" - "os/exec" -) - -func collectGentxs(cmdPath, homePath 
string) { - // Command and arguments - args := []string{"collect-gentxs", "--home", homePath} - - // Execute the command - if err := exec.Command(cmdPath, args...).Run(); err != nil { - log.Fatalf(ColorRed+"Command execution failed: %v", err) - } - - // If execution reaches here, the command was successful - log.Printf(ColorYellow+"collect gen txs with home path %s successfully", homePath) -} diff --git a/cmd/upgrade-assure/colors.go b/cmd/upgrade-assure/colors.go deleted file mode 100644 index 76a0b7c80..000000000 --- a/cmd/upgrade-assure/colors.go +++ /dev/null @@ -1,9 +0,0 @@ -package main - -// Colors -const ( - ColorReset = "\033[0m" - ColorRed = "\033[31m" - ColorGreen = "\033[32m" - ColorYellow = "\033[33m" -) diff --git a/cmd/upgrade-assure/contains.go b/cmd/upgrade-assure/contains.go deleted file mode 100644 index f65500bcd..000000000 --- a/cmd/upgrade-assure/contains.go +++ /dev/null @@ -1,10 +0,0 @@ -package main - -func contains(slice []string, item string) bool { - for _, s := range slice { - if s == item { - return true - } - } - return false -} diff --git a/cmd/upgrade-assure/copy-data-from-node-to-node.go b/cmd/upgrade-assure/copy-data-from-node-to-node.go deleted file mode 100644 index 2e06c08e2..000000000 --- a/cmd/upgrade-assure/copy-data-from-node-to-node.go +++ /dev/null @@ -1,32 +0,0 @@ -package main - -import ( - "log" - "os/exec" -) - -func copyDataFromNodeToNode(homePath, homePath2 string) { - // Delete first data folder if exists on homePath2 - args := []string{ - "-rf", - homePath2 + "/data", - } - - if err := exec.Command("rm", args...).Run(); err != nil { - log.Fatalf(ColorRed+"Failed to delete data folder on node 2: %v", err) - } - - // Copy data from node 1 to node 2 - args = []string{ - "-r", - homePath + "/data", - homePath2 + "/data", - } - - if err := exec.Command("cp", args...).Run(); err != nil { - log.Fatalf(ColorRed+"Failed to copy data from node 1 to node 2: %v", err) - } - - // If execution reaches here, the command was successful - log.Printf(ColorYellow + "Data copied from node 1 to node 2") -} diff --git a/cmd/upgrade-assure/create-validator.go b/cmd/upgrade-assure/create-validator.go deleted file mode 100644 index dba566386..000000000 --- a/cmd/upgrade-assure/create-validator.go +++ /dev/null @@ -1,40 +0,0 @@ -package main - -import ( - "log" - "os/exec" -) - -func createValidator(cmdPath, name, selfDelegation, moniker, pubkey, homePath, keyringBackend, chainId, node, broadcastMode string) { - // Command and arguments - args := []string{ - "tx", - "staking", - "create-validator", - "--amount", selfDelegation + "uelys", - "--pubkey", pubkey, - "--moniker", moniker, - "--commission-rate", "0.05", - "--commission-max-rate", "0.50", - "--commission-max-change-rate", "0.01", - "--min-self-delegation", "1", - "--from", name, - "--keyring-backend", keyringBackend, - "--chain-id", chainId, - "--node", node, - "--broadcast-mode", broadcastMode, - "--fees", "100000uelys", - "--gas", "3000000", - "--gas-adjustment", "1.5", - "--home", homePath, - "--yes", - } - - // Execute the command - if err := exec.Command(cmdPath, args...).Run(); err != nil { - log.Fatalf(ColorRed+"Failed to create validator: %v", err) - } - - // If execution reaches here, the command was successful - log.Printf(ColorYellow+"Validator %s created successfully", moniker) -} diff --git a/cmd/upgrade-assure/download-and-run-version.go b/cmd/upgrade-assure/download-and-run-version.go deleted file mode 100644 index 0f02314a4..000000000 --- a/cmd/upgrade-assure/download-and-run-version.go 
+++ /dev/null @@ -1,109 +0,0 @@ -// nolint: nakedret -package main - -import ( - "errors" - "fmt" - "io" - "io/ioutil" - "net/http" - "os" - "os/exec" - "regexp" - "strings" -) - -func isURL(str string) bool { - return strings.HasPrefix(str, "http://") || strings.HasPrefix(str, "https://") -} - -func downloadAndRunVersion(binaryPathOrURL string, skipDownload bool) (path string, version string, err error) { - if !isURL(binaryPathOrURL) { - // If the input is a local path - path = binaryPathOrURL - - // Check if the path exists - if _, err = os.Stat(path); os.IsNotExist(err) { - err = errors.New(fmt.Sprintf("binary file does not exist at the specified path: %v", path)) - return - } - - // Run the command 'binary version' - cmd := exec.Command(path, "version") - var versionOutput []byte - versionOutput, err = cmd.CombinedOutput() - if err != nil { - return - } - version = strings.TrimSpace(string(versionOutput)) - - return - } - - if skipDownload { - // Extract version from the URL - re := regexp.MustCompile(`v[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9]+)?`) - versionMatches := re.FindStringSubmatch(binaryPathOrURL) - if len(versionMatches) == 0 { - err = errors.New("no version found in URL") - return - } - version = versionMatches[0] - - // Set the binary path based on the version - path = "/tmp/elysd-" + version - - // Check if the path exists - if _, err = os.Stat(path); os.IsNotExist(err) { - err = errors.New(fmt.Sprintf("binary file does not exist at the specified path: %v", path)) - } - - return - } - - // Download the binary - resp, err := http.Get(binaryPathOrURL) // nolint: gosec - if err != nil { - return - } - defer resp.Body.Close() - - // Create a temporary file - tmpFile, err := ioutil.TempFile("", "binary-*") - if err != nil { - return - } - tmpFilePath := tmpFile.Name() - defer os.Remove(tmpFilePath) // Clean up - - // Write the downloaded content to the file - _, err = io.Copy(tmpFile, resp.Body) - tmpFile.Close() - if err != nil { - return - } - - // Make the file executable - err = os.Chmod(tmpFilePath, 0755) - if err != nil { - return - } - - // Run the command 'binary version' - cmd := exec.Command(tmpFilePath, "version") - versionOutput, err := cmd.CombinedOutput() - if err != nil { - return - } - version = strings.TrimSpace(string(versionOutput)) - - // Rename the temporary file - newFilePath := "/tmp/elysd-" + version - err = os.Rename(tmpFilePath, newFilePath) - if err != nil { - return - } - path = newFilePath - - return -} diff --git a/cmd/upgrade-assure/export.go b/cmd/upgrade-assure/export.go deleted file mode 100644 index e6c7a4abc..000000000 --- a/cmd/upgrade-assure/export.go +++ /dev/null @@ -1,64 +0,0 @@ -package main - -import ( - "log" - "os/exec" - "strings" -) - -func export(cmdPath, homePath, genesisFilePath string) { - // Define modules in a slice - modules := []string{ - "amm", - "assetprofile", - "auth", - "authz", - // "bank", // FIXME: causes the init genesis state from genesis.json to OOM (kill process) - "burner", - "capability", - "clock", - // "commitment", // FIXME: causes the balance mismatch error - "consensus", - "crisis", - "distribution", - "epochs", - "estaking", - "evidence", - "feegrant", - "genutil", - // "gov", // FIXME: should be re-enabled after gov proposal fixes - "group", - "ibc", - "incentive", - "interchainaccounts", - "leveragelp", - "masterchef", - "perpetual", - "oracle", - "parameter", - "params", - "poolaccounted", - "stablestake", - "staking", - "tokenomics", - "transfer", - "transferhook", - "upgrade", - "vesting", - } - - // 
Combine the modules into a comma-separated string - modulesStr := strings.Join(modules, ",") - - // Command and arguments - args := []string{"export", "--home", homePath, "--output-document", genesisFilePath, "--modules-to-export", modulesStr} - - // Execute the command and capture the output - cmd := exec.Command(cmdPath, args...) - out, err := cmd.CombinedOutput() - if err != nil { - log.Fatalf("Command execution failed: %v\nOutput: %s", err, out) - } - - log.Printf("Output successfully written to %s", genesisFilePath) -} diff --git a/cmd/upgrade-assure/filter-accounts.go b/cmd/upgrade-assure/filter-accounts.go deleted file mode 100644 index 5929930e9..000000000 --- a/cmd/upgrade-assure/filter-accounts.go +++ /dev/null @@ -1,17 +0,0 @@ -package main - -func filterAccounts(accounts []Account, filterAddresses []string) []Account { - filterMap := make(map[string]struct{}) - for _, addr := range filterAddresses { - filterMap[addr] = struct{}{} - } - - newAccounts := []Account{} - for _, account := range accounts { - if shouldFilterAccount(account, filterMap) { - continue - } - newAccounts = append(newAccounts, account) - } - return newAccounts -} diff --git a/cmd/upgrade-assure/filter-balances-by-denoms.go b/cmd/upgrade-assure/filter-balances-by-denoms.go deleted file mode 100644 index d8b769309..000000000 --- a/cmd/upgrade-assure/filter-balances-by-denoms.go +++ /dev/null @@ -1,39 +0,0 @@ -package main - -import ( - sdk "github.com/cosmos/cosmos-sdk/types" - banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" -) - -func filterBalancesByDenoms(balances []banktypes.Balance, addressDenomMap map[string][]string) ([]banktypes.Balance, sdk.Coins) { - newBalances := []banktypes.Balance{} - var coinsToRemove sdk.Coins - - for _, balance := range balances { - denomsToKeep, specified := addressDenomMap[balance.Address] - if !specified { - // Address is not specified, keep it unchanged - newBalances = append(newBalances, balance) - continue - } - - // Filter the coins for the specified address - var filteredCoins sdk.Coins - for _, coin := range balance.Coins { - if contains(denomsToKeep, coin.Denom) { - filteredCoins = append(filteredCoins, coin) - } else { - coinsToRemove = coinsToRemove.Add(coin) - } - } - - if len(filteredCoins) > 0 { - newBalances = append(newBalances, banktypes.Balance{ - Address: balance.Address, - Coins: filteredCoins, - }) - } - } - - return newBalances, coinsToRemove -} diff --git a/cmd/upgrade-assure/filter-balances.go b/cmd/upgrade-assure/filter-balances.go deleted file mode 100644 index 8d58246a1..000000000 --- a/cmd/upgrade-assure/filter-balances.go +++ /dev/null @@ -1,24 +0,0 @@ -package main - -import ( - sdk "github.com/cosmos/cosmos-sdk/types" - banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" -) - -func filterBalances(balances []banktypes.Balance, filterAddresses []string) ([]banktypes.Balance, sdk.Coins) { - filterMap := make(map[string]struct{}) - for _, addr := range filterAddresses { - filterMap[addr] = struct{}{} - } - - newBalances := []banktypes.Balance{} - var coinsToRemove sdk.Coins - for _, balance := range balances { - if _, exists := filterMap[balance.Address]; exists { - coinsToRemove = coinsToRemove.Add(balance.Coins...) 
- continue - } - newBalances = append(newBalances, balance) - } - return newBalances, coinsToRemove -} diff --git a/cmd/upgrade-assure/gen-tx.go b/cmd/upgrade-assure/gen-tx.go deleted file mode 100644 index ab3bb0c8a..000000000 --- a/cmd/upgrade-assure/gen-tx.go +++ /dev/null @@ -1,19 +0,0 @@ -package main - -import ( - "log" - "os/exec" -) - -func genTx(cmdPath, name, amount, chainId, homePath, keyringBackend string) { - // Command and arguments - args := []string{"gentx", name, amount + "uelys", "--chain-id", chainId, "--home", homePath, "--keyring-backend", keyringBackend} - - // Execute the command - if err := exec.Command(cmdPath, args...).Run(); err != nil { - log.Fatalf(ColorRed+"Command execution failed: %v", err) - } - - // If execution reaches here, the command was successful - log.Printf(ColorYellow+"gen tx with name %s, amount: %s, chain id %s, home path %s and keyring backend %s successfully", name, amount, chainId, homePath, keyringBackend) -} diff --git a/cmd/upgrade-assure/generate-priv-validator-state.go b/cmd/upgrade-assure/generate-priv-validator-state.go deleted file mode 100644 index bee86329d..000000000 --- a/cmd/upgrade-assure/generate-priv-validator-state.go +++ /dev/null @@ -1,32 +0,0 @@ -package main - -import ( - "log" - "os/exec" -) - -func generatePrivValidatorState(homePath string) { - // generate priv_validator_state.json with the following content: - // { - // "height": "0", - // "round": 0, - // "step": 0 - // } - - // Command and arguments - args := []string{ - "-c", - "echo", - "{\"height\": \"0\", \"round\": 0, \"step\": 0}", - ">", - homePath + "/data/priv_validator_state.json", - } - - // Execute the command - if err := exec.Command("sh", args...).Run(); err != nil { - log.Fatalf(ColorRed+"Failed to generate priv_validator_state.json: %v", err) - } - - // If execution reaches here, the command was successful - log.Printf(ColorYellow + "priv_validator_state.json generated successfully") -} diff --git a/cmd/upgrade-assure/get-args.go b/cmd/upgrade-assure/get-args.go deleted file mode 100644 index 5d43c4d34..000000000 --- a/cmd/upgrade-assure/get-args.go +++ /dev/null @@ -1,24 +0,0 @@ -package main - -import ( - "log" -) - -func getArgs(args []string) (snapshotUrl, oldBinaryUrl, newBinaryUrl string) { - snapshotUrl = args[0] // https://snapshots.polkachu.com/testnet-snapshots/elys/elys_4392223.tar.lz4 - if snapshotUrl == "" { - log.Fatalf(ColorRed + "snapshot url is required") - } - - oldBinaryUrl = args[1] // https://github.com/elys-network/elys/releases/download/v0.19.0/elysd-v0.19.0-darwin-arm64 - if oldBinaryUrl == "" { - log.Fatalf(ColorRed + "old binary url is required") - } - - newBinaryUrl = args[2] // https://github.com/elys-network/elys/releases/download/v0.20.0/elysd-v0.20.0-darwin-arm64 - if newBinaryUrl == "" { - log.Fatalf(ColorRed + "new binary url is required") - } - - return -} diff --git a/cmd/upgrade-assure/get-flags.go b/cmd/upgrade-assure/get-flags.go deleted file mode 100644 index 253661705..000000000 --- a/cmd/upgrade-assure/get-flags.go +++ /dev/null @@ -1,309 +0,0 @@ -// nolint: nakedret -package main - -import ( - "log" - - "github.com/spf13/cobra" -) - -const ( - // global - flagOnlyStartWithNewBinary = "only-start-with-new-binary" - flagSkipSnapshot = "skip-snapshot" - flagSkipChainInit = "skip-chain-init" - flagSkipNodeStart = "skip-node-start" - flagSkipProposal = "skip-proposal" - flagSkipBinary = "skip-binary" - flagSkipCreateValidator = "skip-create-validator" - flagSkipPrepareValidatorData = "skip-prepare-validator-data" 
- flagSkipSubmitProposal = "skip-submit-proposal" - flagSkipUpgradeToNewBinary = "skip-upgrade-to-new-binary" - flagSkipUnbondValidator = "skip-unbond-validator" - flagChainId = "chain-id" - flagKeyringBackend = "keyring-backend" - flagGenesisFilePath = "genesis-file-path" - flagBroadcastMode = "broadcast-mode" - flagDbEngine = "db-engine" - - // timeout - flagTimeOutToWaitForService = "timeout-wait-for-node" - flagTimeOutNextBlock = "timeout-next-block" - - // node 1 - flagHome = "home" - flagMoniker = "moniker" - flagValidatorKeyName = "validator-key-name" - flagValidatorBalance = "validator-balance" - flagValidatorSelfDelegation = "validator-self-delegation" - flagValidatorMnemonic = "validator-mnemonic" - flagRpc = "rpc" - flagP2p = "p2p" - flagPprof = "pprof" - flagApi = "api" - - // node 2 - flagHome2 = "home-2" - flagMoniker2 = "moniker-2" - flagValidatorKeyName2 = "validator-key-name-2" - flagValidatorBalance2 = "validator-balance-2" - flagValidatorSelfDelegation2 = "validator-self-delegation-2" - flagValidatorMnemonic2 = "validator-mnemonic-2" - flagRpc2 = "rpc-2" - flagP2p2 = "p2p-2" - flagPprof2 = "pprof-2" - flagApi2 = "api-2" -) - -func getFlags(cmd *cobra.Command) ( - // global - onlyStartWithNewBinaries bool, - skipSnapshot bool, - skipChainInit bool, - skipNodeStart bool, - skipProposal bool, - skipBinary bool, - skipCreateValidator bool, - skipPrepareValidatorData bool, - skipSubmitProposal bool, - skipUpgradeToNewBinary bool, - skipUnbondValidator bool, - chainId string, - keyringBackend string, - genesisFilePath string, - broadcastMode string, - dbEngine string, - - //timeouts - timeOutWaitForNode int, - timeOutNextBlock int, - - // node 1 - homePath string, - moniker string, - validatorKeyName string, - validatorBalance string, - validatorSelfDelegation string, - validatorMnemonic string, - rpc string, - p2p string, - pprof string, - api string, - - // node 2 - homePath2 string, - moniker2 string, - validatorKeyName2 string, - validatorBalance2 string, - validatorSelfDelegation2 string, - validatorMnemonic2 string, - rpc2 string, - p2p2 string, - pprof2 string, - api2 string, -) { - // global - onlyStartWithNewBinaries, _ = cmd.Flags().GetBool(flagOnlyStartWithNewBinary) - if onlyStartWithNewBinaries { - log.Printf(ColorYellow + "only starting with new binaries, skipping all other steps") - } - - skipSnapshot, _ = cmd.Flags().GetBool(flagSkipSnapshot) - if skipSnapshot { - log.Printf(ColorYellow + "skipping snapshot retrieval") - } - - skipChainInit, _ = cmd.Flags().GetBool(flagSkipChainInit) - if skipChainInit { - log.Printf(ColorYellow + "skipping chain init") - } - - skipNodeStart, _ = cmd.Flags().GetBool(flagSkipNodeStart) - if skipNodeStart { - log.Printf(ColorYellow + "skipping node start") - } - - skipProposal, _ = cmd.Flags().GetBool(flagSkipProposal) - if skipProposal { - log.Printf(ColorYellow + "skipping proposal") - } - - skipBinary, _ = cmd.Flags().GetBool(flagSkipBinary) - if skipBinary { - log.Printf(ColorYellow + "skipping binary download") - } - - skipCreateValidator, _ = cmd.Flags().GetBool(flagSkipCreateValidator) - if skipCreateValidator { - log.Printf(ColorYellow + "skipping create validator") - } - - skipPrepareValidatorData, _ = cmd.Flags().GetBool(flagSkipPrepareValidatorData) - if skipPrepareValidatorData { - log.Printf(ColorYellow + "skipping prepare validator data") - } - - skipSubmitProposal, _ = cmd.Flags().GetBool(flagSkipSubmitProposal) - if skipSubmitProposal { - log.Printf(ColorYellow + "skipping submit proposal") - } - - 
skipUpgradeToNewBinary, _ = cmd.Flags().GetBool(flagSkipUpgradeToNewBinary) - if skipUpgradeToNewBinary { - log.Printf(ColorYellow + "skipping upgrade to new binary") - } - - skipUnbondValidator, _ = cmd.Flags().GetBool(flagSkipUnbondValidator) - if skipUnbondValidator { - log.Printf(ColorYellow + "skipping unbond validator") - } - - chainId, _ = cmd.Flags().GetString(flagChainId) - if chainId == "" { - log.Fatalf(ColorRed + "chain id is required") - } - - keyringBackend, _ = cmd.Flags().GetString(flagKeyringBackend) - if keyringBackend == "" { - log.Fatalf(ColorRed + "keyring backend is required") - } - - genesisFilePath, _ = cmd.Flags().GetString(flagGenesisFilePath) - if genesisFilePath == "" { - log.Fatalf(ColorRed + "genesis file path is required") - } - - broadcastMode, _ = cmd.Flags().GetString(flagBroadcastMode) - if broadcastMode == "" { - log.Fatalf(ColorRed + "broadcast mode is required") - } - - dbEngine, _ = cmd.Flags().GetString(flagDbEngine) - if dbEngine == "" { - log.Fatalf(ColorRed + "database engine is required") - } - - timeOutWaitForNode, err := cmd.Flags().GetInt(flagTimeOutToWaitForService) - - if err != nil { - log.Fatalf(ColorRed + err.Error()) - } - - if timeOutWaitForNode == 0 { - log.Fatalf(ColorRed + "time out to wait for service is required") - } - - timeOutNextBlock, err = cmd.Flags().GetInt(flagTimeOutNextBlock) - - if err != nil { - log.Fatalf(ColorRed + err.Error()) - } - - if timeOutNextBlock == 0 { - log.Fatalf(ColorRed + "time out next block is required") - } - - // node 1 - homePath, _ = cmd.Flags().GetString(flagHome) - if homePath == "" { - log.Fatalf(ColorRed + "home path is required") - } - - moniker, _ = cmd.Flags().GetString(flagMoniker) - if moniker == "" { - log.Fatalf(ColorRed + "moniker is required") - } - - validatorKeyName, _ = cmd.Flags().GetString(flagValidatorKeyName) - if validatorKeyName == "" { - log.Fatalf(ColorRed + "validator key name is required") - } - - validatorBalance, _ = cmd.Flags().GetString(flagValidatorBalance) - if validatorBalance == "" { - log.Fatalf(ColorRed + "validator balance is required") - } - - validatorSelfDelegation, _ = cmd.Flags().GetString(flagValidatorSelfDelegation) - if validatorSelfDelegation == "" { - log.Fatalf(ColorRed + "validator self delegation is required") - } - - validatorMnemonic, _ = cmd.Flags().GetString(flagValidatorMnemonic) - if validatorMnemonic == "" { - log.Fatalf(ColorRed + "validator mnemonic is required") - } - - rpc, _ = cmd.Flags().GetString(flagRpc) - if rpc == "" { - log.Fatalf(ColorRed + "rpc is required") - } - - p2p, _ = cmd.Flags().GetString(flagP2p) - if p2p == "" { - log.Fatalf(ColorRed + "p2p is required") - } - - pprof, _ = cmd.Flags().GetString(flagPprof) - if pprof == "" { - log.Fatalf(ColorRed + "pprof is required") - } - - api, _ = cmd.Flags().GetString(flagApi) - if api == "" { - log.Fatalf(ColorRed + "api is required") - } - - // node 2 - homePath2, _ = cmd.Flags().GetString(flagHome2) - if homePath2 == "" { - log.Fatalf(ColorRed + "home path 2 is required") - } - - moniker2, _ = cmd.Flags().GetString(flagMoniker2) - if moniker2 == "" { - log.Fatalf(ColorRed + "moniker 2 is required") - } - - validatorKeyName2, _ = cmd.Flags().GetString(flagValidatorKeyName2) - if validatorKeyName2 == "" { - log.Fatalf(ColorRed + "validator key name 2 is required") - } - - validatorBalance2, _ = cmd.Flags().GetString(flagValidatorBalance2) - if validatorBalance2 == "" { - log.Fatalf(ColorRed + "validator balance 2 is required") - } - - validatorSelfDelegation2, _ = 
cmd.Flags().GetString(flagValidatorSelfDelegation2) - if validatorSelfDelegation2 == "" { - log.Fatalf(ColorRed + "validator self delegation 2 is required") - } - - validatorMnemonic2, _ = cmd.Flags().GetString(flagValidatorMnemonic2) - if validatorMnemonic2 == "" { - log.Fatalf(ColorRed + "validator mnemonic 2 is required") - } - - rpc2, _ = cmd.Flags().GetString(flagRpc2) - if rpc2 == "" { - log.Fatalf(ColorRed + "rpc 2 is required") - } - - p2p2, _ = cmd.Flags().GetString(flagP2p2) - if p2p2 == "" { - log.Fatalf(ColorRed + "p2p 2 is required") - } - - pprof2, _ = cmd.Flags().GetString(flagPprof2) - if pprof2 == "" { - log.Fatalf(ColorRed + "pprof 2 is required") - } - - api2, _ = cmd.Flags().GetString(flagApi2) - if api2 == "" { - log.Fatalf(ColorRed + "api 2 is required") - } - - return -} diff --git a/cmd/upgrade-assure/init-node.go b/cmd/upgrade-assure/init-node.go deleted file mode 100644 index dbbf8e4ad..000000000 --- a/cmd/upgrade-assure/init-node.go +++ /dev/null @@ -1,19 +0,0 @@ -package main - -import ( - "log" - "os/exec" -) - -func initNode(cmdPath, moniker, chainId, homePath string) { - // Command and arguments - args := []string{"init", moniker, "--chain-id", chainId, "--home", homePath} - - // Execute the command - if err := exec.Command(cmdPath, args...).Run(); err != nil { - log.Fatalf(ColorRed+"Command execution failed: %v", err) - } - - // If execution reaches here, the command was successful - log.Printf(ColorYellow+"init node with moniker %s, chain id %s and home path: %s successfully", moniker, chainId, homePath) -} diff --git a/cmd/upgrade-assure/is-linux.go b/cmd/upgrade-assure/is-linux.go deleted file mode 100644 index 156cfb9e2..000000000 --- a/cmd/upgrade-assure/is-linux.go +++ /dev/null @@ -1,8 +0,0 @@ -package main - -import "runtime" - -// is linux? 
-func isLinux() bool { - return runtime.GOOS == "linux" -} diff --git a/cmd/upgrade-assure/is-service-running.go b/cmd/upgrade-assure/is-service-running.go deleted file mode 100644 index 725bb5c40..000000000 --- a/cmd/upgrade-assure/is-service-running.go +++ /dev/null @@ -1,30 +0,0 @@ -package main - -import ( - "net" - "net/http" - "strings" -) - -func isServiceRunning(url string) bool { - // Remove the "tcp://" prefix if present - if strings.HasPrefix(url, "tcp://") { - url = strings.TrimPrefix(url, "tcp://") - } - - // Attempt to make a TCP connection - conn, err := net.Dial("tcp", url) - if err == nil { - conn.Close() - return true - } - - // If TCP connection fails, attempt an HTTP GET request - resp, err := http.Get("http://" + url) - if err == nil { - resp.Body.Close() - return resp.StatusCode == http.StatusOK - } - - return false -} diff --git a/cmd/upgrade-assure/listen-for-signals.go b/cmd/upgrade-assure/listen-for-signals.go deleted file mode 100644 index 15175dd58..000000000 --- a/cmd/upgrade-assure/listen-for-signals.go +++ /dev/null @@ -1,22 +0,0 @@ -package main - -import ( - "os" - "os/exec" - "os/signal" - "syscall" -) - -func listenForSignals(cmds ...*exec.Cmd) { - // Set up channel to listen for signals - sigChan := make(chan os.Signal, 1) - signal.Notify(sigChan, os.Interrupt, syscall.SIGTERM) - - // Block until a signal is received - <-sigChan - - for _, cmd := range cmds { - // Stop the process when a signal is received - stop(cmd) - } -} diff --git a/cmd/upgrade-assure/parse-tx-hash.go b/cmd/upgrade-assure/parse-tx-hash.go deleted file mode 100644 index d2d8344f4..000000000 --- a/cmd/upgrade-assure/parse-tx-hash.go +++ /dev/null @@ -1,23 +0,0 @@ -package main - -import ( - "encoding/json" - "fmt" -) - -// TxResponse represents the structure of the transaction output -type TxResponse struct { - TxHash string `json:"txhash"` -} - -// parseTxHash takes raw output from a command and extracts the transaction hash. 
-func parseTxHash(rawOutput []byte) (string, error) { - var resp TxResponse - if err := json.Unmarshal(rawOutput, &resp); err != nil { - return "", fmt.Errorf("failed to unmarshal transaction response: %w", err) - } - if resp.TxHash == "" { - return "", fmt.Errorf("transaction hash not found in the response") - } - return resp.TxHash, nil -} diff --git a/cmd/upgrade-assure/query-and-calc-upgrade-block-height.go b/cmd/upgrade-assure/query-and-calc-upgrade-block-height.go deleted file mode 100644 index fd5c1db30..000000000 --- a/cmd/upgrade-assure/query-and-calc-upgrade-block-height.go +++ /dev/null @@ -1,26 +0,0 @@ -package main - -import ( - "log" - "strconv" -) - -func queryAndCalcUpgradeBlockHeight(cmdPath, node string) string { - // query block height - blockHeight, err := queryBlockHeight(cmdPath, node) - if err != nil { - log.Fatalf(ColorRed+"Failed to query block height: %v", err) - } - - // Convert blockHeight from string to int - blockHeightInt, err := strconv.Atoi(blockHeight) - if err != nil { - log.Fatalf(ColorRed+"Failed to convert blockHeight to integer: %v", err) - } - - // set upgrade block height - upgradeBlockHeight := blockHeightInt + 20 - - // return upgrade block height as a string - return strconv.Itoa(upgradeBlockHeight) -} diff --git a/cmd/upgrade-assure/query-block-height.go b/cmd/upgrade-assure/query-block-height.go deleted file mode 100644 index 757afad24..000000000 --- a/cmd/upgrade-assure/query-block-height.go +++ /dev/null @@ -1,25 +0,0 @@ -package main - -import ( - "encoding/json" - "os/exec" -) - -func queryBlockHeight(cmdPath, node string) (string, error) { - // Command and arguments - args := []string{"status", "--node", node} - - // Execute the command - output, err := exec.Command(cmdPath, args...).CombinedOutput() - if err != nil { - return "-1", err - } - - // Unmarshal the JSON output - var statusOutput StatusOutput - if err := json.Unmarshal(output, &statusOutput); err != nil { - return "-1", err - } - - return statusOutput.SyncInfo.LatestBlockHeight, nil -} diff --git a/cmd/upgrade-assure/query-next-proposal-id.go b/cmd/upgrade-assure/query-next-proposal-id.go deleted file mode 100644 index 40254748a..000000000 --- a/cmd/upgrade-assure/query-next-proposal-id.go +++ /dev/null @@ -1,42 +0,0 @@ -package main - -import ( - "encoding/json" - "errors" - "os/exec" - "strconv" -) - -func queryNextProposalId(cmdPath, node string) (string, error) { - // Command and arguments - args := []string{"q", "gov", "proposals", "--node", node, "--limit", "1", "--reverse", "--output", "json"} - - // Execute the command - output, err := exec.Command(cmdPath, args...).CombinedOutput() - if err != nil { - return "-1", err - } - - // Unmarshal the JSON output - var proposalsOutput ProposalsOutput - if err := json.Unmarshal(output, &proposalsOutput); err != nil { - return "-1", err - } - - // check if there are any proposals - if len(proposalsOutput.Proposals) == 0 { - return "1", errors.New("no proposals found") - } - - // increment proposal id - proposalId := proposalsOutput.Proposals[0].Id - proposalIdInt, err := strconv.Atoi(proposalId) - if err != nil { - return "-1", err - } - proposalIdInt++ - // convert back to string - proposalId = strconv.Itoa(proposalIdInt) - - return proposalId, nil -} diff --git a/cmd/upgrade-assure/query-node-id.go b/cmd/upgrade-assure/query-node-id.go deleted file mode 100644 index 38be22b58..000000000 --- a/cmd/upgrade-assure/query-node-id.go +++ /dev/null @@ -1,23 +0,0 @@ -package main - -import ( - "log" - "os/exec" - "strings" -) - -func 
queryNodeId(cmdPath, home string) string { - // Command and arguments - args := []string{"tendermint", "show-node-id", "--home", home} - - // Execute the command - output, err := exec.Command(cmdPath, args...).CombinedOutput() - if err != nil { - log.Fatalf(ColorRed+"Failed to query node id: %v", err) - } - - // trim the output - outputStr := strings.TrimSpace(string(output)) - - return outputStr -} diff --git a/cmd/upgrade-assure/query-operator-address.go b/cmd/upgrade-assure/query-operator-address.go deleted file mode 100644 index 25f87f195..000000000 --- a/cmd/upgrade-assure/query-operator-address.go +++ /dev/null @@ -1,23 +0,0 @@ -package main - -import ( - "log" - "os/exec" - "strings" -) - -func queryOperatorAddress(cmdPath, homePath, keyringBackend, validatorName string) string { - // Command and arguments - args := []string{"keys", "show", validatorName, "--bech", "val", "--home", homePath, "--keyring-backend", keyringBackend, "--address"} - - // Execute the command - output, err := exec.Command(cmdPath, args...).CombinedOutput() - if err != nil { - log.Fatalf(ColorRed+"Failed to query validator pubkey: %v", err) - } - - // trim the output - outputStr := strings.TrimSpace(string(output)) - - return outputStr -} diff --git a/cmd/upgrade-assure/query-upgrade-applied.go b/cmd/upgrade-assure/query-upgrade-applied.go deleted file mode 100644 index ab58803d2..000000000 --- a/cmd/upgrade-assure/query-upgrade-applied.go +++ /dev/null @@ -1,20 +0,0 @@ -package main - -import ( - "log" - "os/exec" -) - -func queryUpgradeApplied(cmdPath, node, newVersion string) { - // Command and arguments - args := []string{"q", "upgrade", "applied", newVersion, "--node", node} - - // Execute the command - err := exec.Command(cmdPath, args...).Run() - if err != nil { - log.Fatalf("Failed to retrieve applied upgrade: %v", err) - } - - // If execution reaches here, the command was successful - log.Printf("Successfully retrieved applied upgrade: %s", newVersion) -} diff --git a/cmd/upgrade-assure/query-validator-pubkey.go b/cmd/upgrade-assure/query-validator-pubkey.go deleted file mode 100644 index c061c57fb..000000000 --- a/cmd/upgrade-assure/query-validator-pubkey.go +++ /dev/null @@ -1,23 +0,0 @@ -package main - -import ( - "log" - "os/exec" - "strings" -) - -func queryValidatorPubkey(cmdPath, home string) string { - // Command and arguments - args := []string{"tendermint", "show-validator", "--home", home} - - // Execute the command - output, err := exec.Command(cmdPath, args...).CombinedOutput() - if err != nil { - log.Fatalf(ColorRed+"Failed to query validator pubkey: %v", err) - } - - // trim the output - outputStr := strings.TrimSpace(string(output)) - - return outputStr -} diff --git a/cmd/upgrade-assure/read-genesis-file.go b/cmd/upgrade-assure/read-genesis-file.go deleted file mode 100644 index afeb432d0..000000000 --- a/cmd/upgrade-assure/read-genesis-file.go +++ /dev/null @@ -1,23 +0,0 @@ -package main - -import ( - "bufio" - "encoding/json" - "fmt" - "os" -) - -func readGenesisFile(filePath string) (Genesis, error) { - var genesis Genesis - file, err := os.Open(filePath) - if err != nil { - return genesis, fmt.Errorf("error opening file: %w", err) - } - defer file.Close() - - if err := json.NewDecoder(bufio.NewReader(file)).Decode(&genesis); err != nil { - return genesis, fmt.Errorf("error decoding JSON: %w", err) - } - - return genesis, nil -} diff --git a/cmd/upgrade-assure/remove-home.go b/cmd/upgrade-assure/remove-home.go deleted file mode 100644 index 6519fcac4..000000000 --- 
a/cmd/upgrade-assure/remove-home.go +++ /dev/null @@ -1,19 +0,0 @@ -package main - -import ( - "log" - "os/exec" -) - -func removeHome(homePath string) { - // Command and arguments - args := []string{"-rf", homePath} - - // Execute the command - if err := exec.Command("rm", args...).Run(); err != nil { - log.Fatalf(ColorRed+"Command execution failed: %v", err) - } - - // If execution reaches here, the command was successful - log.Printf(ColorYellow+"removed home path %s successfully", homePath) -} diff --git a/cmd/upgrade-assure/restore-genesis-init-file.go b/cmd/upgrade-assure/restore-genesis-init-file.go deleted file mode 100644 index e61a0963b..000000000 --- a/cmd/upgrade-assure/restore-genesis-init-file.go +++ /dev/null @@ -1,21 +0,0 @@ -package main - -import ( - "log" - "os/exec" -) - -func restoreGenesisInitFile(homePath string) { - // Copy genesis_init.json to genesis.json - args := []string{ - homePath + "/config/genesis_init.json", - homePath + "/config/genesis.json", - } - - if err := exec.Command("cp", args...).Run(); err != nil { - log.Fatalf(ColorRed+"Failed to copy genesis_init.json to genesis.json: %v", err) - } - - // If execution reaches here, the command was successful - log.Printf(ColorYellow + "Genesis file copied to genesis.json") -} diff --git a/cmd/upgrade-assure/retrieve-snapshot.go b/cmd/upgrade-assure/retrieve-snapshot.go deleted file mode 100644 index 605141d72..000000000 --- a/cmd/upgrade-assure/retrieve-snapshot.go +++ /dev/null @@ -1,47 +0,0 @@ -package main - -import ( - "log" - "os/exec" - "strings" -) - -func retrieveSnapshot(snapshotUrl, homePath string) { - var cmdString string - isUrl := strings.HasPrefix(snapshotUrl, "http://") || strings.HasPrefix(snapshotUrl, "https://") - - // Check the file type and construct the command accordingly - if strings.HasSuffix(snapshotUrl, ".tar.lz4") { - if isUrl { - cmdString = "curl -o - -L " + snapshotUrl + " | lz4 -c -d - | tar -x -C " + homePath - } else { - cmdString = "lz4 -c -d " + snapshotUrl + " | tar -x -C " + homePath - } - } else if strings.HasSuffix(snapshotUrl, ".tar.gz") { - if isUrl { - cmdString = "curl -o - -L " + snapshotUrl + " | tar -xz -C " + homePath - } else { - cmdString = "tar -xz -f " + snapshotUrl + " -C " + homePath - } - } else if strings.HasSuffix(snapshotUrl, ".tar") { - if isUrl { - cmdString = "curl -o - -L " + snapshotUrl + " | tar -x -C " + homePath - } else { - cmdString = "tar -x -f " + snapshotUrl + " -C " + homePath - } - } else { - log.Fatalf(ColorRed+"Invalid snapshot url or path: %s", snapshotUrl) - } - - // Print cmdString - log.Printf(ColorGreen+"Retrieving snapshot using command: %s", cmdString) - - // Execute the command using /bin/sh - cmd := exec.Command("/bin/sh", "-c", cmdString) - if err := cmd.Run(); err != nil { - log.Fatalf(ColorRed+"Command execution failed: %v", err) - } - - // If execution reaches here, the command was successful - log.Printf(ColorYellow+"Snapshot retrieved and extracted to path: %s", homePath) -} diff --git a/cmd/upgrade-assure/sed.go b/cmd/upgrade-assure/sed.go deleted file mode 100644 index ba9be9ced..000000000 --- a/cmd/upgrade-assure/sed.go +++ /dev/null @@ -1,22 +0,0 @@ -package main - -import ( - "log" - "os/exec" -) - -func sed(pattern, file string) { - // Update config.toml for cors_allowed_origins - var args []string - - if isLinux() { - args = []string{"-i", pattern, file} - } else { - args = []string{"-i", "", pattern, file} - } - - // Execute the sed command - if err := exec.Command("sed", args...).Run(); err != nil { - 
log.Fatalf(ColorRed+"Error updating "+file+": %v\n", err) - } -} diff --git a/cmd/upgrade-assure/should-filter-account.go b/cmd/upgrade-assure/should-filter-account.go deleted file mode 100644 index f361c3776..000000000 --- a/cmd/upgrade-assure/should-filter-account.go +++ /dev/null @@ -1,15 +0,0 @@ -package main - -func shouldFilterAccount(account Account, filterAddresses map[string]struct{}) bool { - if account.BaseAccount != nil { - if _, exists := filterAddresses[account.BaseAccount.Address]; exists { - return true - } - } - if account.ModuleAccount != nil { - if _, exists := filterAddresses[account.ModuleAccount.BaseAccount.Address]; exists { - return true - } - } - return false -} diff --git a/cmd/upgrade-assure/start.go b/cmd/upgrade-assure/start.go deleted file mode 100644 index 75a3a0924..000000000 --- a/cmd/upgrade-assure/start.go +++ /dev/null @@ -1,63 +0,0 @@ -package main - -import ( - "log" - "os" - "os/exec" - "syscall" -) - -// PromptWriter wraps an io.Writer and adds color codes to the output -type PromptWriter struct { - w *os.File - color string - moniker string -} - -// Write adds color codes to the data and writes it to the log -func (cw PromptWriter) Write(data []byte) (int, error) { - // Add color codes to the data - coloredData := []byte(cw.color + "[" + cw.moniker + "]" + ColorReset + " " + string(data)) - _, err := cw.w.Write(coloredData) - if err != nil { - log.Fatalf("Error writing to log: %v", err) - } - return len(data), err -} - -func start(cmdPath, homePath, rpc, p2p, pprof, api, moniker, successColor, errorColor string) *exec.Cmd { - // Set the log level - logLevel := "info" - if os.Getenv("LOG_LEVEL") != "" { - logLevel = os.Getenv("LOG_LEVEL") - } - - // Command and arguments - args := []string{"start", "--home", homePath, "--rpc.laddr", rpc, "--p2p.laddr", p2p, "--rpc.pprof_laddr", pprof, "--api.address", api, "--log_level", logLevel, "--x-crisis-skip-assert-invariants"} - - // Set up the command - cmd := exec.Command(cmdPath, args...) 
- - // Use PromptWriter to handle logging for standard output and error - cmd.Stdout = PromptWriter{w: os.Stdout, color: successColor, moniker: moniker} // ColorGreen for stdout - cmd.Stderr = PromptWriter{w: os.Stderr, color: errorColor, moniker: moniker} // ColorRed for stderr - - // Execute the command and stream the output in a goroutine to avoid blocking - go func() { - err := cmd.Run() - if err != nil { - // Check if the error is because of the process being killed - if exitErr, ok := err.(*exec.ExitError); ok { - // If the process was killed, log it as a non-fatal error - if status, ok := exitErr.Sys().(syscall.WaitStatus); ok && status.Signaled() { - log.Printf("Process was killed: %v", err) - return - } - } - // For other errors, log them as fatal - log.Fatalf("Command execution failed: %v", err) - } - }() - - return cmd -} diff --git a/cmd/upgrade-assure/stop.go b/cmd/upgrade-assure/stop.go deleted file mode 100644 index 0fa9bb7c0..000000000 --- a/cmd/upgrade-assure/stop.go +++ /dev/null @@ -1,19 +0,0 @@ -package main - -import ( - "log" - "os/exec" -) - -func stop(cmds ...*exec.Cmd) { - for _, cmd := range cmds { - // Stop the process - if cmd != nil && cmd.Process != nil { - err := cmd.Process.Kill() - if err != nil { - log.Fatalf(ColorRed+"Failed to kill process: %v", err) - } - log.Println(ColorYellow + "Process killed successfully") - } - } -} diff --git a/cmd/upgrade-assure/submit-upgrade-proposal.go b/cmd/upgrade-assure/submit-upgrade-proposal.go deleted file mode 100644 index b75ffec00..000000000 --- a/cmd/upgrade-assure/submit-upgrade-proposal.go +++ /dev/null @@ -1,50 +0,0 @@ -package main - -import ( - "log" - "os/exec" -) - -func submitUpgradeProposal(cmdPath, name, newVersion, upgradeHeight, homePath, keyringBackend, chainId, node, broadcastMode string) string { - // Command and arguments - args := []string{ - "tx", - "gov", - "submit-legacy-proposal", - "software-upgrade", - newVersion, - "--title", newVersion, - "--description", newVersion, - "--upgrade-height", upgradeHeight, - "--no-validate", - "--from", name, - "--keyring-backend", keyringBackend, - "--chain-id", chainId, - "--node", node, - "--broadcast-mode", broadcastMode, - "--fees", "100000uelys", - "--gas", "1000000", - "--deposit", "10000000uelys", - "--home", homePath, - "--output", "json", - "--yes", - } - - // Execute the command - output, err := exec.Command(cmdPath, args...).CombinedOutput() - if err != nil { - log.Fatalf(ColorRed+"Command execution failed: %v", err) - } - - // Parse output to find the transaction hash - txHash, err := parseTxHash(output) - if err != nil { - log.Fatalf(ColorRed+"Failed to parse transaction hash: %v", err) - } - - // If execution reaches here, the command was successful - log.Printf(ColorYellow+"Submitted upgrade proposal: %s, upgrade block height: %s", newVersion, upgradeHeight) - - // Return the transaction hash - return txHash -} diff --git a/cmd/upgrade-assure/types.go b/cmd/upgrade-assure/types.go deleted file mode 100644 index 78478c3a4..000000000 --- a/cmd/upgrade-assure/types.go +++ /dev/null @@ -1,531 +0,0 @@ -package main - -import ( - "encoding/json" - "time" - - wasmtypes "github.com/CosmWasm/wasmd/x/wasm/types" - authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" - authz "github.com/cosmos/cosmos-sdk/x/authz" - banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" - capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" - crisistypes "github.com/cosmos/cosmos-sdk/x/crisis/types" - distributiontypes 
"github.com/cosmos/cosmos-sdk/x/distribution/types" - evidencetypes "github.com/cosmos/cosmos-sdk/x/evidence/types" - govtypes "github.com/cosmos/cosmos-sdk/x/gov/types/v1" - minttypes "github.com/cosmos/cosmos-sdk/x/mint/types" - slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" - stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" - transfertypes "github.com/cosmos/ibc-go/v7/modules/apps/transfer/types" - ibcclienttypes "github.com/cosmos/ibc-go/v7/modules/core/02-client/types" - ibcconnectiontypes "github.com/cosmos/ibc-go/v7/modules/core/03-connection/types" - ibcchanneltypes "github.com/cosmos/ibc-go/v7/modules/core/04-channel/types" - ibctypes "github.com/cosmos/ibc-go/v7/modules/core/types" - accountedpooltypes "github.com/elys-network/elys/x/accountedpool/types" - ammtypes "github.com/elys-network/elys/x/amm/types" - assetprofiletypes "github.com/elys-network/elys/x/assetprofile/types" - burnertypes "github.com/elys-network/elys/x/burner/types" - clocktypes "github.com/elys-network/elys/x/clock/types" - commitmenttypes "github.com/elys-network/elys/x/commitment/types" - epochstypes "github.com/elys-network/elys/x/epochs/types" - incentivetypes "github.com/elys-network/elys/x/incentive/types" - leveragelptypes "github.com/elys-network/elys/x/leveragelp/types" - oracletypes "github.com/elys-network/elys/x/oracle/types" - parametertypes "github.com/elys-network/elys/x/parameter/types" - perpetualtypes "github.com/elys-network/elys/x/perpetual/types" - stablestaketypes "github.com/elys-network/elys/x/stablestake/types" - tokenomicstypes "github.com/elys-network/elys/x/tokenomics/types" - transferhooktypes "github.com/elys-network/elys/x/transferhook/types" - - // genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" - estakingtypes "github.com/elys-network/elys/x/estaking/types" - mastercheftypes "github.com/elys-network/elys/x/masterchef/types" -) - -type Genesis struct { - GenesisTime time.Time `json:"genesis_time"` - ChainID string `json:"chain_id"` - InitialHeight string `json:"initial_height"` - ConsensusParams ConsensusParams `json:"consensus_params"` - AppHash string `json:"app_hash"` - AppState AppState `json:"app_state"` - // Include other top-level fields as needed -} - -type ConsensusParams struct { - Version Version `json:"version"` - Block Block `json:"block"` - Evidence Evidence `json:"evidence"` - Validator Validator `json:"validator"` -} - -type Version struct { - App string `json:"app"` -} - -type Validator struct { - PubKeyTypes []string `json:"pub_key_types"` -} - -type Evidence struct { - MaxAgeNumBlocks string `json:"max_age_num_blocks"` - MaxAgeDuration string `json:"max_age_duration"` - MaxBytes string `json:"max_bytes,omitempty"` -} - -type Block struct { - MaxBytes string `json:"max_bytes"` - MaxGas string `json:"max_gas"` -} - -type AppState struct { - Amm Amm `json:"amm"` - AssetProfile AssetProfile `json:"assetprofile"` - Auth Auth `json:"auth"` - AuthZ authz.GenesisState `json:"authz"` - Bank banktypes.GenesisState `json:"bank"` - Burner burnertypes.GenesisState `json:"burner"` - Capability Capability `json:"capability"` - Clock Clock `json:"clock"` - Commitment Commitment `json:"commitment"` - Crisis crisistypes.GenesisState `json:"crisis"` - Distribution Distribution `json:"distribution"` - Epochs Epochs `json:"epochs"` - Estaking Estaking `json:"estaking"` - Evidence EvidenceState `json:"evidence"` - Genutil Genutil `json:"genutil"` - Gov Gov `json:"gov"` - Ibc Ibc `json:"ibc"` - Incentive Incentive `json:"incentive"` - 
LeverageLP LeverageLP `json:"leveragelp"` - Perpetual Perpetual `json:"perpetual"` - Masterchef Masterchef `json:"masterchef"` - Mint Mint `json:"mint"` - Oracle Oracle `json:"oracle"` - Parameter Parameter `json:"parameter"` - Params interface{} `json:"params"` - PoolAccounted accountedpooltypes.GenesisState `json:"poolaccounted"` - Slashing Slashing `json:"slashing"` - StableStake StableStake `json:"stablestake"` - Staking Staking `json:"staking"` - Tokenomics Tokenomics `json:"tokenomics"` - Transfer transfertypes.GenesisState `json:"transfer"` - TransferHook transferhooktypes.GenesisState `json:"transferhook"` - Upgrade struct{} `json:"upgrade"` - Wasm wasmtypes.GenesisState `json:"wasm"` - // Include other fields as needed -} - -type Masterchef struct { - mastercheftypes.GenesisState - - ExternalIncentiveIndex json.Number `json:"external_incentive_index"` - PoolInfos []MasterchefPoolInfo `json:"pool_infos"` - PoolRewardInfos []MasterchefPoolRewardInfo `json:"pool_reward_infos"` - UserRewardInfos []MasterchefUserRewardInfo `json:"user_reward_infos"` -} - -type MasterchefUserRewardInfo struct { - mastercheftypes.UserRewardInfo - - PoolId json.Number `json:"pool_id"` -} - -type MasterchefPoolRewardInfo struct { - mastercheftypes.PoolRewardInfo - - PoolId json.Number `json:"pool_id"` - LastUpdatedBlock json.Number `json:"last_updated_block"` -} - -type MasterchefPoolInfo struct { - mastercheftypes.PoolInfo - - PoolId json.Number `json:"pool_id"` -} - -type Estaking struct { - estakingtypes.GenesisState -} - -type Tokenomics struct { - tokenomicstypes.GenesisState - - AirdropList []interface{} `json:"airdrop_list"` - GenesisInflation TokenomicsGenesisInflation `json:"genesis_inflation"` - TimeBasedInflationList []interface{} `json:"time_based_inflation_list"` -} - -type TokenomicsGenesisInflation struct { - tokenomicstypes.GenesisInflation - - Inflation TokenomicsInflationEntry `json:"inflation"` - SeedVesting json.Number `json:"seed_vesting"` - StrategicSalesVesting json.Number `json:"strategic_sales_vesting"` -} - -type TokenomicsInflationEntry struct { - tokenomicstypes.InflationEntry - - LmRewards json.Number `json:"lm_rewards"` - IcsStakingRewards json.Number `json:"ics_staking_rewards"` - CommunityFund json.Number `json:"community_fund"` - StrategicReserve json.Number `json:"strategic_reserve"` - TeamTokensVested json.Number `json:"team_tokens_vested"` -} - -type StableStake struct { - stablestaketypes.GenesisState - - Params StableStakeParams `json:"params"` -} - -type StableStakeParams struct { - stablestaketypes.Params - - EpochLength json.Number `json:"epoch_length"` -} - -type Incentive struct { - incentivetypes.GenesisState -} - -type Epochs struct { - epochstypes.GenesisState - - Epochs []interface{} `json:"epochs"` -} - -type Commitment struct { - commitmenttypes.GenesisState - - Params CommitmentParams `json:"params"` - Commitments []interface{} `json:"commitments"` -} - -type CommitmentParams struct { - commitmenttypes.Params - - VestingInfos []CommitmentVestingInfo `json:"vesting_infos"` - NumberOfCommitments json.Number `json:"number_of_commitments"` -} - -type CommitmentVestingInfo struct { - commitmenttypes.VestingInfo - - NumBlocks json.Number `json:"num_blocks"` - NumMaxVestings json.Number `json:"num_max_vestings"` -} - -type Clock struct { - clocktypes.GenesisState - - Params ClockParams `json:"params"` -} - -type ClockParams struct { - clocktypes.Params - - ContractGasLimit json.Number `json:"contract_gas_limit"` -} - -type AssetProfile struct { - 
assetprofiletypes.GenesisState - - EntryList []interface{} `json:"entry_list"` -} - -type Amm struct { - ammtypes.GenesisState - - Params AmmParams `json:"params"` - PoolList []interface{} `json:"pool_list"` - SlippageTracks []interface{} `json:"slippage_tracks"` -} - -type AmmParams struct { - ammtypes.Params - - SlippageTrackDuration json.Number `json:"slippage_track_duration"` -} - -type Genutil struct { - // genutiltypes.GenesisState - - GenTxs []interface{} `json:"gen_txs"` -} - -type EvidenceState struct { - evidencetypes.GenesisState - - Evidence []interface{} `json:"evidence"` -} - -type Oracle struct { - oracletypes.GenesisState - - Params OracleParams `json:"params"` - AssetInfos []interface{} `json:"asset_infos"` - Prices []interface{} `json:"prices"` -} - -type OracleParams struct { - oracletypes.Params - - OracleScriptID json.Number `json:"oracle_script_id"` - Multiplier json.Number `json:"multiplier"` - AskCount json.Number `json:"ask_count"` - MinCount json.Number `json:"min_count"` - PrepareGas json.Number `json:"prepare_gas"` - ExecuteGas json.Number `json:"execute_gas"` - PriceExpiryTime json.Number `json:"price_expiry_time"` - LifeTimeInBlocks json.Number `json:"life_time_in_blocks"` -} - -type Parameter struct { - parametertypes.GenesisState - - Params ParameterParams `json:"params"` -} - -type ParameterParams struct { - parametertypes.Params - - TotalBlocksPerYear json.Number `json:"total_blocks_per_year"` - WasmMaxLabelSize json.Number `json:"wasm_max_label_size"` - WasmMaxSize json.Number `json:"wasm_max_size"` - WasmMaxProposalWasmSize json.Number `json:"wasm_max_proposal_wasm_size"` -} - -type Capability struct { - capabilitytypes.GenesisState - - Index json.Number `json:"index"` - Owners []interface{} `json:"owners"` -} - -type Slashing struct { - slashingtypes.GenesisState - - Params SlashingParams `json:"params"` - SigningInfos []interface{} `json:"signing_infos"` - MissedBlocks []interface{} `json:"missed_blocks"` -} - -type SlashingParams struct { - slashingtypes.Params - - SignedBlocksWindow json.Number `json:"signed_blocks_window"` - DowntimeJailDuration string `json:"downtime_jail_duration"` -} - -type Mint struct { - minttypes.GenesisState - - Params MintParams `json:"params"` -} - -type MintParams struct { - minttypes.Params - - BlocksPerYear json.Number `json:"blocks_per_year"` -} - -type Gov struct { - govtypes.GenesisState - - StartingProposalId json.Number `json:"starting_proposal_id"` - Deposits []interface{} `json:"deposits"` - Votes []interface{} `json:"votes"` - Proposals []interface{} `json:"proposals"` - DepositParams GovDepositParams `json:"deposit_params"` - VotingParams GovVotingParams `json:"voting_params"` - Params GovParams `json:"params"` -} - -type GovParams struct { - govtypes.Params - - MaxDepositPeriod string `json:"max_deposit_period"` - VotingPeriod string `json:"voting_period"` -} - -type GovDepositParams struct { - govtypes.DepositParams - - MaxDepositPeriod string `json:"max_deposit_period"` -} - -type GovVotingParams struct { - govtypes.VotingParams - - VotingPeriod string `json:"voting_period"` -} - -type Staking struct { - stakingtypes.GenesisState - - Params StakingParams `json:"params"` - LastValidatorPowers []interface{} `json:"last_validator_powers"` - Validators []interface{} `json:"validators"` - Delegations []interface{} `json:"delegations"` - UnbondingDelegations []interface{} `json:"unbonding_delegations"` - ColorRedelegations []interface{} `json:"redelegations"` -} - -type StakingParams struct { - stakingtypes.Params 
- - UnbondingTime string `json:"unbonding_time"` - MaxValidators json.Number `json:"max_validators"` - MaxEntries json.Number `json:"max_entries"` - HistoricalEntries json.Number `json:"historical_entries"` -} - -type Distribution struct { - distributiontypes.GenesisState - - DelegatorWithdrawInfos []interface{} `json:"delegator_withdraw_infos"` - OutstandingRewards []interface{} `json:"outstanding_rewards"` - ValidatorAccumulatedCommissions []interface{} `json:"validator_accumulated_commissions"` - ValidatorHistoricalRewards []interface{} `json:"validator_historical_rewards"` - ValidatorCurrentRewards []interface{} `json:"validator_current_rewards"` - DelegatorStartingInfos []interface{} `json:"delegator_starting_infos"` - ValidatorSlashEvents []interface{} `json:"validator_slash_events"` -} - -type Ibc struct { - ibctypes.GenesisState - - ClientGenesis ClientGenesis `json:"client_genesis"` - ConnectionGenesis ConnectionGenesis `json:"connection_genesis"` - ChannelGenesis ChannelGenesis `json:"channel_genesis"` -} - -type ClientGenesis struct { - ibcclienttypes.GenesisState - - Clients []interface{} `json:"clients"` - ClientsConsensus []interface{} `json:"clients_consensus"` - ClientsMetadata []interface{} `json:"clients_metadata"` - Params ibcclienttypes.Params `json:"params"` - NextClientSequence json.Number `json:"next_client_sequence"` -} - -type ConnectionGenesis struct { - ibcconnectiontypes.GenesisState - - Connections []interface{} `json:"connections"` - ClientConnectionPaths []interface{} `json:"client_connection_paths"` - NextConnectionSequence json.Number `json:"next_connection_sequence"` - Params ConnectionGenesisParams `json:"params"` -} - -type ConnectionGenesisParams struct { - ibcconnectiontypes.Params - - MaxExpectedTimePerBlock json.Number `json:"max_expected_time_per_block"` -} - -type ChannelGenesis struct { - ibcchanneltypes.GenesisState - - Channels []interface{} `json:"channels"` - Acknowledgements []interface{} `json:"acknowledgements"` - Commitments []interface{} `json:"commitments"` - Receipts []interface{} `json:"receipts"` - SendSequences []interface{} `json:"send_sequences"` - RecvSequences []interface{} `json:"recv_sequences"` - AckSequences []interface{} `json:"ack_sequences"` - NextChannelSequence json.Number `json:"next_channel_sequence"` -} - -type LeverageLP struct { - leveragelptypes.GenesisState - - Params LeverageLPParams `json:"params"` - PoolList []interface{} `json:"pool_list"` - PositionList []interface{} `json:"position_list"` -} - -type LeverageLPParams struct { - leveragelptypes.Params - - EpochLength json.Number `json:"epoch_length"` - MaxOpenPositions json.Number `json:"max_open_positions"` -} - -type Perpetual struct { - perpetualtypes.GenesisState - - Params PerpetualParams `json:"params"` - PoolList []interface{} `json:"pool_list"` - MtpList []interface{} `json:"mtp_list"` -} - -type PerpetualParams struct { - perpetualtypes.Params - - EpochLength json.Number `json:"epoch_length"` - MaxOpenPositions json.Number `json:"max_open_positions"` -} - -type AuthParams struct { - authtypes.Params - - MaxMemoCharacters json.Number `json:"max_memo_characters"` - TxSigLimit json.Number `json:"tx_sig_limit"` - TxSizeCostPerByte json.Number `json:"tx_size_cost_per_byte"` - SigVerifyCostEd25519 json.Number `json:"sig_verify_cost_ed25519"` - SigVerifyCostSecp256K1 json.Number `json:"sig_verify_cost_secp256k1"` -} - -type BaseAccount struct { - Address string `json:"address"` - PubKey interface{} `json:"pub_key"` - AccountNumber json.Number 
`json:"account_number"` - Sequence json.Number `json:"sequence"` -} - -type ModuleAccount struct { - BaseAccount BaseAccount `json:"base_account"` - Name string `json:"name"` - Permissions []string `json:"permissions"` -} - -type Account struct { - *BaseAccount - *ModuleAccount - - Type string `json:"@type"` -} - -type Auth struct { - authtypes.GenesisState - - Params AuthParams `json:"params"` - Accounts []Account `json:"accounts"` -} - -// KeyOutput represents the JSON structure of the output from the add key command -type KeyOutput struct { - Name string `json:"name"` - Type string `json:"type"` - Address string `json:"address"` - PubKey string `json:"pubkey"` - Mnemonic string `json:"mnemonic"` -} - -// StatusOutput represents the JSON structure of the output from the status command -type StatusOutput struct { - SyncInfo struct { - LatestBlockHeight string `json:"latest_block_height"` - } `json:"SyncInfo"` -} - -// ProposalsOutput represents the JSON structure of the output from the query proposals command -type ProposalsOutput struct { - Proposals []struct { - Id string `json:"id"` - } `json:"proposals"` -} diff --git a/cmd/upgrade-assure/unbond-validator.go b/cmd/upgrade-assure/unbond-validator.go deleted file mode 100644 index de2ce7f54..000000000 --- a/cmd/upgrade-assure/unbond-validator.go +++ /dev/null @@ -1,45 +0,0 @@ -package main - -import ( - "log" - "os/exec" - "time" -) - -func unbondValidator(cmdPath, validatorKeyName, operatorAddress, validatorSelfDelegation, keyringBackend, chainId, rpc, broadcastMode, homePath string) { - // Command and arguments - args := []string{ - "tx", - "staking", - "unbond", - operatorAddress, - validatorSelfDelegation + "uelys", - "--from", validatorKeyName, - "--keyring-backend", keyringBackend, - "--chain-id", chainId, - "--node", rpc, - "--broadcast-mode", broadcastMode, - "--fees", "100000uelys", - "--gas", "1000000", - "--home", homePath, - "--output", "json", - "--yes", - } - - // Execute the command - output, err := exec.Command(cmdPath, args...).CombinedOutput() - if err != nil { - log.Fatalf(ColorRed+"Command execution failed: %v", err) - } - - // Parse output to find the transaction hash - txHash, err := parseTxHash(output) - if err != nil { - log.Fatalf(ColorRed+"Failed to parse transaction hash: %v", err) - } - - // If execution reaches here, the command was successful - log.Printf(ColorYellow+"Unbonded validator: %s, self-delegation: %s", operatorAddress, validatorSelfDelegation) - - waitForTxConfirmation(cmdPath, rpc, txHash, 5*time.Minute) -} diff --git a/cmd/upgrade-assure/update-config.go b/cmd/upgrade-assure/update-config.go deleted file mode 100644 index 31d73fcab..000000000 --- a/cmd/upgrade-assure/update-config.go +++ /dev/null @@ -1,32 +0,0 @@ -package main - -import ( - "log" -) - -func updateConfig(homePath, dbEngine string) { - // Path to config files - configPath := homePath + "/config/config.toml" - appPath := homePath + "/config/app.toml" - clientPath := homePath + "/config/client.toml" - - // Update config.toml for cors_allowed_origins - sed("s/^cors_allowed_origins =.*/cors_allowed_origins = [\\\"*\\\"]/", configPath) - - // Update config.toml for timeout_broadcast_tx_commit - sed("s/^timeout_broadcast_tx_commit =.*/timeout_broadcast_tx_commit = \\\"120s\\\"/", configPath) - - // Update config.toml for db_backend - sed("s/^db_backend =.*/db_backend = \\\""+dbEngine+"\\\"/", configPath) - - // Update app.toml for enabling the API server - sed("/^# Enable defines if the API server should be enabled./{n;s/enable = 
false/enable = true/;}", appPath) - - // Update app.toml for app-db-backend - sed("s/^app\\-db\\-backend =.*/app\\-db\\-backend = \\\""+dbEngine+"\\\"/", appPath) - - // Update client.toml for keyring-backend - sed("s/^keyring\\-backend =.*/keyring\\-backend = \\\"test\\\"/", clientPath) - - log.Printf(ColorYellow + "config files have been updated successfully.") -} diff --git a/cmd/upgrade-assure/update-genesis.go b/cmd/upgrade-assure/update-genesis.go deleted file mode 100644 index 5e4cae32e..000000000 --- a/cmd/upgrade-assure/update-genesis.go +++ /dev/null @@ -1,138 +0,0 @@ -package main - -import ( - "log" - - sdk "github.com/cosmos/cosmos-sdk/types" - authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" -) - -func updateGenesis(validatorBalance, homePath, genesisFilePath string) { - genesis, err := readGenesisFile(genesisFilePath) - if err != nil { - log.Fatalf(ColorRed+"Error reading genesis file: %v", err) - } - - genesisInitFilePath := homePath + "/config/genesis.json" - genesisInit, err := readGenesisFile(genesisInitFilePath) - if err != nil { - log.Fatalf(ColorRed+"Error reading initial genesis file: %v", err) - } - - filterAccountAddresses := []string{ - "elys1gpv36nyuw5a92hehea3jqaadss9smsqscr3lrp", // remove existing account 0 - // "elys173n2866wggue6znwl2vnwx9zqy7nnasjed9ydh", - } - filterBalanceAddresses := []string{ - "elys1gpv36nyuw5a92hehea3jqaadss9smsqscr3lrp", // remove existing account 0 - // "elys173n2866wggue6znwl2vnwx9zqy7nnasjed9ydh", - authtypes.NewModuleAddress("distribution").String(), - authtypes.NewModuleAddress("bonded_tokens_pool").String(), - authtypes.NewModuleAddress("not_bonded_tokens_pool").String(), - authtypes.NewModuleAddress("gov").String(), - } - - var coinsToRemove sdk.Coins - - genesis.AppState.Auth.Accounts = filterAccounts(genesis.AppState.Auth.Accounts, filterAccountAddresses) - genesis.AppState.Bank.Balances, coinsToRemove = filterBalances(genesis.AppState.Bank.Balances, filterBalanceAddresses) - - newValidatorBalance, ok := sdk.NewIntFromString(validatorBalance) - if !ok { - panic(ColorRed + "invalid number") - } - - // update supply - genesis.AppState.Bank.Supply = genesis.AppState.Bank.Supply.Sub(coinsToRemove...) - - // add node 1 supply - genesis.AppState.Bank.Supply = genesis.AppState.Bank.Supply.Add(sdk.NewCoin("uelys", newValidatorBalance)).Add(sdk.NewCoin("ibc/2180E84E20F5679FCC760D8C165B60F42065DEF7F46A72B447CFF1B7DC6C0A65", newValidatorBalance)).Add(sdk.NewCoin("ibc/E2D2F6ADCC68AA3384B2F5DFACCA437923D137C14E86FB8A10207CF3BED0C8D4", newValidatorBalance)).Add(sdk.NewCoin("ibc/B4314D0E670CB43C88A5DCA09F76E5E812BD831CC2FEC6E434C9E5A9D1F57953", newValidatorBalance)) - - // add node 2 supply - genesis.AppState.Bank.Supply = genesis.AppState.Bank.Supply.Add(sdk.NewCoin("uelys", newValidatorBalance)).Add(sdk.NewCoin("ibc/2180E84E20F5679FCC760D8C165B60F42065DEF7F46A72B447CFF1B7DC6C0A65", newValidatorBalance)).Add(sdk.NewCoin("ibc/E2D2F6ADCC68AA3384B2F5DFACCA437923D137C14E86FB8A10207CF3BED0C8D4", newValidatorBalance)).Add(sdk.NewCoin("ibc/B4314D0E670CB43C88A5DCA09F76E5E812BD831CC2FEC6E434C9E5A9D1F57953", newValidatorBalance)) - - // Add new validator account and balance - genesis.AppState.Auth.Accounts = append(genesis.AppState.Auth.Accounts, genesisInit.AppState.Auth.Accounts...) - genesis.AppState.Bank.Balances = append(genesis.AppState.Bank.Balances, genesisInit.AppState.Bank.Balances...) 
- - distrAddr := authtypes.NewModuleAddress("distribution").String() - - addressDenomMap := map[string][]string{ - distrAddr: {"ibc/2180E84E20F5679FCC760D8C165B60F42065DEF7F46A72B447CFF1B7DC6C0A65", "ueden", "uedenb"}, - } - - genesis.AppState.Bank.Balances, coinsToRemove = filterBalancesByDenoms(genesis.AppState.Bank.Balances, addressDenomMap) - - // update supply - genesis.AppState.Bank.Supply = genesis.AppState.Bank.Supply.Sub(coinsToRemove...) - - // Reset staking data - stakingParams := genesis.AppState.Staking.Params - genesis.AppState.Staking = genesisInit.AppState.Staking - genesis.AppState.Staking.Params = stakingParams - - // temporary fix for staking params - genesis.AppState.Staking.Params.BondDenom = "uelys" - - // Reset slashing data - genesis.AppState.Slashing = genesisInit.AppState.Slashing - - // Reset distribution data - genesis.AppState.Distribution = genesisInit.AppState.Distribution - - // set genutil from genesisInit - genesis.AppState.Genutil = genesisInit.AppState.Genutil - - // add localhost as allowed client - genesis.AppState.Ibc.ClientGenesis.Params.AllowedClients = append(genesis.AppState.Ibc.ClientGenesis.Params.AllowedClients, "09-localhost") - - // reset gov as there are broken proposals - genesis.AppState.Gov = genesisInit.AppState.Gov - - // update voting period - votingPeriod := "30s" - minDeposit := sdk.Coins{sdk.NewInt64Coin("uelys", 10000000)} - genesis.AppState.Gov.Params.VotingPeriod = votingPeriod - genesis.AppState.Gov.Params.MaxDepositPeriod = votingPeriod - genesis.AppState.Gov.Params.MinDeposit = minDeposit - // set deprecated settings - genesis.AppState.Gov.VotingParams.VotingPeriod = votingPeriod - genesis.AppState.Gov.DepositParams.MaxDepositPeriod = votingPeriod - genesis.AppState.Gov.DepositParams.MinDeposit = minDeposit - - // update wasm params - // genesis.AppState.Wasm.Params = wasmtypes.DefaultParams() - genesis.AppState.Wasm = genesisInit.AppState.Wasm - - // update clock params - genesis.AppState.Clock.Params.ContractAddresses = []string{ - "elys1nc5tatafv6eyq7llkr2gv50ff9e22mnf70qgjlv737ktmt4eswrqau4f4q", - "elys17p9rzwnnfxcjp32un9ug7yhhzgtkhvl9jfksztgw5uh69wac2pgs98tvuy", - } - genesis.AppState.Clock.Params.ContractGasLimit = "1000000000" - - // update broker address - genesis.AppState.Parameter.Params.BrokerAddress = "elys1nc5tatafv6eyq7llkr2gv50ff9e22mnf70qgjlv737ktmt4eswrqau4f4q" - - // temporary fix for oracle - // genesis.AppState.Oracle.Params = genesisInit.AppState.Oracle.Params - // genesis.AppState.Oracle.PortId = genesisInit.AppState.Oracle.PortId - // genesis.AppState.Oracle.Prices = genesisInit.AppState.Oracle.Prices - // genesis.AppState.Oracle.PriceFeeders = genesisInit.AppState.Oracle.PriceFeeders - // genesis.AppState.Oracle.AssetInfos = genesisInit.AppState.Oracle.AssetInfos - - // update oracle price expiration - genesis.AppState.Oracle.Params.PriceExpiryTime = "31536000" - genesis.AppState.Oracle.Params.LifeTimeInBlocks = "8000000" - - // update stablestake - genesis.AppState.StableStake = genesisInit.AppState.StableStake - - // temporary fix - // genesis.InitialHeight = "0" - - outputFilePath := homePath + "/config/genesis.json" - if err := writeGenesisFile(outputFilePath, genesis); err != nil { - log.Fatalf(ColorRed+"Error writing genesis file: %v", err) - } -} diff --git a/cmd/upgrade-assure/upgrade-assure.go b/cmd/upgrade-assure/upgrade-assure.go deleted file mode 100644 index 63f622128..000000000 --- a/cmd/upgrade-assure/upgrade-assure.go +++ /dev/null @@ -1,301 +0,0 @@ -package main - 
-import ( - "log" - "os" - "time" - - elyscmd "github.com/elys-network/elys/cmd/elysd/cmd" - "github.com/spf13/cobra" -) - -func main() { - var rootCmd = &cobra.Command{ - Use: "upgrade-assure [snapshot_url] [old_binary_url] [new_binary_url] [flags]", - Short: "Upgrade Assure is a tool for running a chain from a snapshot and testing the upgrade process.", - Long: `A tool for running a chain from a snapshot.`, - Args: cobra.ExactArgs(3), // Expect exactly 3 arguments: snapshot URL, old binary URL, new binary URL - Run: func(cmd *cobra.Command, args []string) { - snapshotUrl, oldBinaryUrl, newBinaryUrl := getArgs(args) - // global flags - onlyStartWithNewBinary, skipSnapshot, skipChainInit, skipNodeStart, skipProposal, skipBinary, - skipCreateValidator, skipPrepareValidatorData, skipSubmitProposal, skipUpgradeToNewBinary, skipUnbondValidator, - chainId, keyringBackend, genesisFilePath, broadcastMode, dbEngine, - // timeouts - timeOutToWaitForService, timeOutToWaitForNextBlock, - // node 1 flags - homePath, moniker, validatorKeyName, validatorBalance, validatorSelfDelegation, validatorMnemonic, rpc, p2p, pprof, api, - // node 2 flags - homePath2, moniker2, validatorKeyName2, validatorBalance2, validatorSelfDelegation2, validatorMnemonic2, rpc2, p2p2, pprof2, api2 := getFlags(cmd) - - _ = dbEngine - - timeOutForNextBlock := time.Duration(timeOutToWaitForNextBlock) * time.Minute - // set address prefix - elyscmd.InitSDKConfig() - - // download and run old binary - oldBinaryPath, oldVersion, err := downloadAndRunVersion(oldBinaryUrl, skipBinary) - if err != nil { - log.Fatalf(ColorRed+"Error downloading and running old binary: %v", err) - } - - // print old binary path and version - log.Printf(ColorGreen+"Old binary path: %v and version: %v", oldBinaryPath, oldVersion) - - // download and run new binary - newBinaryPath, newVersion, err := downloadAndRunVersion(newBinaryUrl, skipBinary) - if err != nil { - log.Fatalf(ColorRed+"Error downloading and running new binary: %v", err) - } - - // print new binary path and version - log.Printf(ColorGreen+"New binary path: %v and version: %v", newBinaryPath, newVersion) - - // only start with new binary, skip everything else - if onlyStartWithNewBinary { - // start new binary - newBinaryCmd := start(newBinaryPath, homePath, rpc, p2p, pprof, api, moniker, "\033[32m", "\033[31m") - newBinaryCmd2 := start(newBinaryPath, homePath2, rpc2, p2p2, pprof2, api2, moniker2, "\033[32m", "\033[31m") - - // listen for signals - listenForSignals(newBinaryCmd, newBinaryCmd2) - return - } - - if !skipSnapshot { - // remove home path - removeHome(homePath) - - // init chain - initNode(oldBinaryPath, moniker, chainId, homePath) - - // update config files - updateConfig(homePath, "pebbledb") - - // retrieve the snapshot - retrieveSnapshot(snapshotUrl, homePath) - - // export genesis file - export(oldBinaryPath, homePath, genesisFilePath) - } - - if !skipChainInit { - // remove home paths - removeHome(homePath) - removeHome(homePath2) - - // init nodes - initNode(oldBinaryPath, moniker, chainId, homePath) - initNode(oldBinaryPath, moniker2, chainId, homePath2) - - // update config files to enable api and cors - updateConfig(homePath, "goleveldb") - updateConfig(homePath2, "goleveldb") - - // query node 1 id - node1Id := queryNodeId(oldBinaryPath, homePath) - - // add peers - addPeers(homePath2, p2p, node1Id) - - // add validator keys to node 1 - validatorAddress := addKey(oldBinaryPath, validatorKeyName, validatorMnemonic, homePath, keyringBackend) - validatorAddress2 := addKey(oldBinaryPath, validatorKeyName2, 
validatorMnemonic2, homePath, keyringBackend) - - // add validator keys to node 2 - _ = addKey(oldBinaryPath, validatorKeyName, validatorMnemonic, homePath2, keyringBackend) - _ = addKey(oldBinaryPath, validatorKeyName2, validatorMnemonic2, homePath2, keyringBackend) - - // add genesis accounts - addGenesisAccount(oldBinaryPath, validatorAddress, validatorBalance, homePath) - addGenesisAccount(oldBinaryPath, validatorAddress2, validatorBalance2, homePath) - - // generate genesis tx - genTx(oldBinaryPath, validatorKeyName, validatorSelfDelegation, chainId, homePath, keyringBackend) - - // collect genesis txs - collectGentxs(oldBinaryPath, homePath) - - // validate genesis - validateGenesis(oldBinaryPath, homePath) - - // backup genesis init file - backupGenesisInitFile(homePath) - - // update genesis - updateGenesis(validatorBalance, homePath, genesisFilePath) - } - - if !skipNodeStart { - if !skipCreateValidator { - // start node 1 - oldBinaryCmd := start(oldBinaryPath, homePath, rpc, p2p, pprof, api, moniker, ColorGreen, ColorRed) - - // wait for rpc to start - waitForServiceToStart(rpc, moniker, timeOutToWaitForService) - - // wait for next block - waitForNextBlock(oldBinaryPath, rpc, moniker, timeOutForNextBlock) - - if skipProposal { - // listen for signals - listenForSignals(oldBinaryCmd) - return - } - - // query validator pubkey - validatorPubkey2 := queryValidatorPubkey(oldBinaryPath, homePath2) - - // create validator node 2 - createValidator(oldBinaryPath, validatorKeyName2, validatorSelfDelegation2, moniker2, validatorPubkey2, homePath, keyringBackend, chainId, rpc, broadcastMode) - - // wait for next block - waitForNextBlock(oldBinaryPath, rpc, moniker, timeOutForNextBlock) - - // stop old binary - stop(oldBinaryCmd) - } - - if !skipPrepareValidatorData { - // restore genesis init file - restoreGenesisInitFile(homePath) - - // copy data from node 1 to node 2 - copyDataFromNodeToNode(homePath, homePath2) - - // generate priv_validator_state.json file for node 2 - generatePrivValidatorState(homePath2) - } - - if !skipSubmitProposal { - // start node 1 and 2 - oldBinaryCmd := start(oldBinaryPath, homePath, rpc, p2p, pprof, api, moniker, ColorGreen, ColorRed) - oldBinaryCmd2 := start(oldBinaryPath, homePath2, rpc2, p2p2, pprof2, api2, moniker2, ColorGreen, ColorRed) - - // wait for rpc 1 and 2 to start - waitForServiceToStart(rpc, moniker, timeOutToWaitForService) - waitForServiceToStart(rpc2, moniker2, timeOutToWaitForService) - - // query and calculate upgrade block height - upgradeBlockHeight := queryAndCalcUpgradeBlockHeight(oldBinaryPath, rpc) - - // query next proposal id - proposalId, err := queryNextProposalId(oldBinaryPath, rpc) - if err != nil { - log.Printf(ColorYellow+"Error querying next proposal id: %v", err) - log.Printf(ColorYellow + "Setting proposal id to 1") - proposalId = "1" - } - - // submit upgrade proposal - txHash := submitUpgradeProposal(oldBinaryPath, validatorKeyName, newVersion, upgradeBlockHeight, homePath, keyringBackend, chainId, rpc, broadcastMode) - - waitForTxConfirmation(oldBinaryPath, rpc, txHash, 5*time.Minute) - - // vote on upgrade proposal - txHash = voteOnUpgradeProposal(oldBinaryPath, validatorKeyName, proposalId, homePath, keyringBackend, chainId, rpc, broadcastMode) - - waitForTxConfirmation(oldBinaryPath, rpc, txHash, 5*time.Minute) - - // wait for upgrade block height - waitForBlockHeight(oldBinaryPath, rpc, upgradeBlockHeight) - - // wait 5 seconds - time.Sleep(5 * time.Second) - - // stop old binaries - stop(oldBinaryCmd, 
oldBinaryCmd2) - } - - if !skipUpgradeToNewBinary { - // wait 5 seconds - time.Sleep(5 * time.Second) - - // start new binary - newBinaryCmd := start(newBinaryPath, homePath, rpc, p2p, pprof, api, moniker, "\033[32m", "\033[31m") - newBinaryCmd2 := start(newBinaryPath, homePath2, rpc2, p2p2, pprof2, api2, moniker2, "\033[32m", "\033[31m") - - // wait for node to start - waitForServiceToStart(rpc, moniker, timeOutToWaitForService) - waitForServiceToStart(rpc2, moniker2, timeOutToWaitForService) - - // wait for next block - waitForNextBlock(newBinaryPath, rpc, moniker, timeOutForNextBlock) - waitForNextBlock(newBinaryPath, rpc2, moniker2, timeOutForNextBlock) - - // check if the upgrade was successful - queryUpgradeApplied(newBinaryPath, rpc, newVersion) - queryUpgradeApplied(newBinaryPath, rpc2, newVersion) - - if !skipUnbondValidator { - operatorAddress2 := queryOperatorAddress(newBinaryPath, homePath, keyringBackend, validatorKeyName2) - - // print operator address 2 - log.Printf(ColorGreen+"Operator address 2: %v", operatorAddress2) - - // unbond the second validator's power - unbondValidator(newBinaryPath, validatorKeyName2, operatorAddress2, validatorSelfDelegation2, keyringBackend, chainId, rpc, broadcastMode, homePath) - - // wait for next block - waitForNextBlock(newBinaryPath, rpc, moniker, timeOutForNextBlock) - } - - // stop new binaries - stop(newBinaryCmd, newBinaryCmd2) - } - } - }, - } - - // get HOME environment variable - homeEnv, _ := os.LookupEnv("HOME") - - // global flags - rootCmd.PersistentFlags().Bool(flagOnlyStartWithNewBinary, false, "only start with new binary") - rootCmd.PersistentFlags().Bool(flagSkipSnapshot, false, "skip snapshot retrieval") - rootCmd.PersistentFlags().Bool(flagSkipChainInit, false, "skip chain init") - rootCmd.PersistentFlags().Bool(flagSkipNodeStart, false, "skip node start") - rootCmd.PersistentFlags().Bool(flagSkipProposal, false, "skip proposal") - rootCmd.PersistentFlags().Bool(flagSkipBinary, false, "skip binary download") - rootCmd.PersistentFlags().Bool(flagSkipCreateValidator, false, "skip create validator") - rootCmd.PersistentFlags().Bool(flagSkipPrepareValidatorData, false, "skip prepare validator data") - rootCmd.PersistentFlags().Bool(flagSkipSubmitProposal, false, "skip submit proposal") - rootCmd.PersistentFlags().Bool(flagSkipUpgradeToNewBinary, false, "skip upgrade to new binary") - rootCmd.PersistentFlags().Bool(flagSkipUnbondValidator, false, "skip unbond validator") - rootCmd.PersistentFlags().String(flagChainId, "elystestnet-1", "chain id") - rootCmd.PersistentFlags().String(flagKeyringBackend, "test", "keyring backend") - rootCmd.PersistentFlags().String(flagGenesisFilePath, "/tmp/genesis.json", "genesis file path") - rootCmd.PersistentFlags().String(flagBroadcastMode, "sync", "broadcast mode") - rootCmd.PersistentFlags().String(flagDbEngine, "pebbledb", "database engine to use") - - rootCmd.PersistentFlags().Int(flagTimeOutToWaitForService, 600, "set the maximum timeout (in seconds) to wait for the node to start") - rootCmd.PersistentFlags().Int(flagTimeOutNextBlock, 5, "set the maximum timeout (in minutes) to wait for the next block") - // node 1 flags - rootCmd.PersistentFlags().String(flagHome, homeEnv+"/.elys", "home directory") - rootCmd.PersistentFlags().String(flagMoniker, "alice", "moniker") - rootCmd.PersistentFlags().String(flagValidatorKeyName, "validator", "validator key name") - rootCmd.PersistentFlags().String(flagValidatorBalance, "200000000000000", "validator balance") - 
rootCmd.PersistentFlags().String(flagValidatorSelfDelegation, "50000000000000", "validator self delegation") - rootCmd.PersistentFlags().String(flagValidatorMnemonic, "shrug census ancient uniform sausage own oil boss tool captain ride year conduct welcome siren protect mutual zero funny universe candy gown rack sister", "validator mnemonic") - rootCmd.PersistentFlags().String(flagRpc, "tcp://0.0.0.0:26657", "rpc") - rootCmd.PersistentFlags().String(flagP2p, "tcp://0.0.0.0:26656", "p2p") - rootCmd.PersistentFlags().String(flagPprof, "localhost:6060", "pprof") - rootCmd.PersistentFlags().String(flagApi, "tcp://localhost:1317", "api") - - // node 2 flags - rootCmd.PersistentFlags().String(flagHome2, homeEnv+"/.elys2", "home directory 2") - rootCmd.PersistentFlags().String(flagMoniker2, "bob", "moniker 2") - rootCmd.PersistentFlags().String(flagValidatorKeyName2, "validator-2", "validator key name 2") - rootCmd.PersistentFlags().String(flagValidatorBalance2, "200000000000000", "validator balance 2") - rootCmd.PersistentFlags().String(flagValidatorSelfDelegation2, "1000000", "validator self delegation 2") - rootCmd.PersistentFlags().String(flagValidatorMnemonic2, "august viable pet tone normal below almost blush portion example trick circle pumpkin citizen conduct outdoor universe wolf ankle asthma deliver correct pool juice", "validator mnemonic 2") - rootCmd.PersistentFlags().String(flagRpc2, "tcp://0.0.0.0:26667", "rpc") - rootCmd.PersistentFlags().String(flagP2p2, "tcp://0.0.0.0:26666", "p2p") - rootCmd.PersistentFlags().String(flagPprof2, "localhost:6061", "pprof") - rootCmd.PersistentFlags().String(flagApi2, "tcp://localhost:1318", "api") - - if err := rootCmd.Execute(); err != nil { - log.Fatalf(ColorRed+"Error executing command: %v", err) - } -} diff --git a/cmd/upgrade-assure/validate-genesis.go b/cmd/upgrade-assure/validate-genesis.go deleted file mode 100644 index 9960f9d23..000000000 --- a/cmd/upgrade-assure/validate-genesis.go +++ /dev/null @@ -1,19 +0,0 @@ -package main - -import ( - "log" - "os/exec" -) - -func validateGenesis(cmdPath, homePath string) { - // Command and arguments - args := []string{"validate-genesis", "--home", homePath} - - // Execute the command - if err := exec.Command(cmdPath, args...).Run(); err != nil { - log.Fatalf(ColorRed+"Command execution failed: %v", err) - } - - // If execution reaches here, the command was successful - log.Printf(ColorYellow+"validate genesis with home path %s successfully", homePath) -} diff --git a/cmd/upgrade-assure/vote-on-upgrade-proposal.go b/cmd/upgrade-assure/vote-on-upgrade-proposal.go deleted file mode 100644 index 5e2ae6efe..000000000 --- a/cmd/upgrade-assure/vote-on-upgrade-proposal.go +++ /dev/null @@ -1,41 +0,0 @@ -package main - -import ( - "log" - "os/exec" -) - -func voteOnUpgradeProposal(cmdPath, name, proposalId, homePath, keyringBackend, chainId, node, broadcastMode string) string { - // Command and arguments - args := []string{ - "tx", "gov", "vote", proposalId, "yes", - "--from", name, - "--keyring-backend", keyringBackend, - "--chain-id", chainId, - "--node", node, - "--broadcast-mode", broadcastMode, - "--fees", "100000uelys", - "--gas", "1000000", - "--home", homePath, - "--output", "json", - "--yes", - } - - // Execute the command - output, err := exec.Command(cmdPath, args...).CombinedOutput() - if err != nil { - log.Fatalf(ColorRed+"Command execution failed: %v", err) - } - - // Parse output to find the transaction hash - txHash, err := parseTxHash(output) - if err != nil { - log.Fatalf(ColorRed+"Failed 
to parse transaction hash: %v", err) - } - - // If execution reaches here, the command was successful - log.Printf(ColorYellow+"Voted on upgrade proposal: %s", proposalId) - - // Return the transaction hash - return txHash -} diff --git a/cmd/upgrade-assure/wait-for-block-height.go b/cmd/upgrade-assure/wait-for-block-height.go deleted file mode 100644 index 21b622246..000000000 --- a/cmd/upgrade-assure/wait-for-block-height.go +++ /dev/null @@ -1,30 +0,0 @@ -package main - -import ( - "log" - "strconv" - "time" -) - -func waitForBlockHeight(cmdPath, node, height string) { - targetBlockHeight, err := strconv.Atoi(height) - if err != nil { - log.Fatalf(ColorRed+"Error converting target block height to integer: %v", err) - } - - // Now, wait for the block height - for { - var blockHeightStr string - blockHeightStr, err = queryBlockHeight(cmdPath, node) - if err == nil { - newBlockHeight, err := strconv.Atoi(blockHeightStr) - if err == nil && newBlockHeight >= targetBlockHeight { - break - } - } - log.Println(ColorYellow+"Waiting for block height", height, "...") - time.Sleep(5 * time.Second) // Wait 5 seconds before retrying - } - - log.Printf(ColorYellow+"Block height %d reached", targetBlockHeight) -} diff --git a/cmd/upgrade-assure/wait-for-next-block.go b/cmd/upgrade-assure/wait-for-next-block.go deleted file mode 100644 index 41c000a41..000000000 --- a/cmd/upgrade-assure/wait-for-next-block.go +++ /dev/null @@ -1,54 +0,0 @@ -package main - -import ( - "log" - "strconv" - "time" -) - -func waitForNextBlock(cmdPath, node, moniker string, timeout time.Duration) { - var currentBlockHeight, newBlockHeight int - var err error - - start := time.Now() - - // First, get the current block height - for { - if time.Since(start) > timeout { - log.Fatalf(ColorRed + "[" + moniker + "] Failed to get current block height within the specified timeout") - } - var blockHeightStr string - blockHeightStr, err = queryBlockHeight(cmdPath, node) - if err == nil { - currentBlockHeight, err = strconv.Atoi(blockHeightStr) - if err == nil && currentBlockHeight > 0 { - break - } - } - log.Println(ColorYellow + "[" + moniker + "] Waiting for current block height...") - time.Sleep(5 * time.Second) // Wait 5 seconds before retrying - } - - log.Printf(ColorYellow+"["+moniker+"] Current Block Height: %d", currentBlockHeight) - - start = time.Now() - - // Now, wait for the block height to increase - for { - if time.Since(start) > timeout { - log.Fatalf(ColorRed + "[" + moniker + "] Failed to get new block height within the specified timeout") - } - var blockHeightStr string - blockHeightStr, err = queryBlockHeight(cmdPath, node) - if err == nil { - newBlockHeight, err = strconv.Atoi(blockHeightStr) - if err == nil && newBlockHeight > currentBlockHeight { - break - } - } - log.Println(ColorYellow + "[" + moniker + "] Waiting for next block height...") - time.Sleep(5 * time.Second) // Wait 5 seconds before retrying - } - - log.Printf(ColorYellow+"["+moniker+"] New Block Height: %d", newBlockHeight) -} diff --git a/cmd/upgrade-assure/wait-for-service-to-start.go b/cmd/upgrade-assure/wait-for-service-to-start.go deleted file mode 100644 index c0767cd70..000000000 --- a/cmd/upgrade-assure/wait-for-service-to-start.go +++ /dev/null @@ -1,21 +0,0 @@ -package main - -import ( - "log" - "time" -) - -func waitForServiceToStart(url, name string, timeoutFlag int) { - timeout := time.Duration(timeoutFlag) * time.Second - start := time.Now() - - // Wait for the node to be running, with a timeout - for !isServiceRunning(url) { - if 
time.Since(start) > timeout { - log.Fatalf(ColorRed + "[" + name + "] Service did not start within the specified timeout") - } - log.Println(ColorYellow + "[" + name + "] Waiting for service to start...") - time.Sleep(5 * time.Second) - } - log.Println(ColorYellow + "[" + name + "] Service is running.") -} diff --git a/cmd/upgrade-assure/wait-for-tx-confirmation.go b/cmd/upgrade-assure/wait-for-tx-confirmation.go deleted file mode 100644 index 24ac5ca43..000000000 --- a/cmd/upgrade-assure/wait-for-tx-confirmation.go +++ /dev/null @@ -1,27 +0,0 @@ -package main - -import ( - "log" - "time" -) - -func waitForTxConfirmation(cmdPath, node, txHash string, timeout time.Duration) { - start := time.Now() - for { - if time.Since(start) > timeout { - log.Fatalf(ColorRed + "timeout reached while waiting for tx confirmation") - } - success, err := checkTxStatus(cmdPath, node, txHash) - if err != nil { - log.Printf(ColorRed+"error checking tx status, retrying in 5 seconds: %v", err) - time.Sleep(5 * time.Second) - continue - } - if success { - break - } - log.Printf(ColorYellow+"waiting for tx confirmation %s", txHash) - time.Sleep(5 * time.Second) - } - log.Printf(ColorGreen+"tx %s confirmed", txHash) -} diff --git a/cmd/upgrade-assure/write-genesis-file.go b/cmd/upgrade-assure/write-genesis-file.go deleted file mode 100644 index 752bc01d3..000000000 --- a/cmd/upgrade-assure/write-genesis-file.go +++ /dev/null @@ -1,28 +0,0 @@ -package main - -import ( - "bufio" - "encoding/json" - "fmt" - "os" -) - -func writeGenesisFile(filePath string, genesis Genesis) error { - file, err := os.Create(filePath) - if err != nil { - return fmt.Errorf("error creating output file: %w", err) - } - defer file.Close() - - writer := bufio.NewWriter(file) - defer writer.Flush() - - encoder := json.NewEncoder(writer) - encoder.SetIndent("", " ") // disable for now - - if err := encoder.Encode(genesis); err != nil { - return fmt.Errorf("error encoding JSON: %w", err) - } - - return nil -} diff --git a/cmd/upload-snapshot/main.go b/cmd/upload-snapshot/main.go deleted file mode 100644 index 084e7b5b8..000000000 --- a/cmd/upload-snapshot/main.go +++ /dev/null @@ -1,113 +0,0 @@ -package main - -import ( - "context" - "fmt" - "os" - "path/filepath" - - "github.com/aws/aws-sdk-go-v2/aws" - "github.com/aws/aws-sdk-go-v2/config" - "github.com/aws/aws-sdk-go-v2/credentials" - "github.com/aws/aws-sdk-go-v2/service/s3" - "github.com/vbauerster/mpb/v8" - "github.com/vbauerster/mpb/v8/decor" -) - -func main() { - if len(os.Args) != 2 { - fmt.Println("Usage: ") - os.Exit(1) - } - - // Fetch credentials and configuration from environment variables - accessKey := os.Getenv("R2_ACCESS_KEY") - secretKey := os.Getenv("R2_SECRET_KEY") - s3URL := os.Getenv("R2_ENDPOINT") - bucketName := os.Getenv("R2_BUCKET_NAME") - filePath := os.Args[1] - - // Ensure all required environment variables are set - if accessKey == "" || secretKey == "" || s3URL == "" || bucketName == "" { - fmt.Println("Please set R2_ACCESS_KEY, R2_SECRET_KEY, R2_ENDPOINT, and R2_BUCKET_NAME environment variables") - os.Exit(1) - } - - // Load AWS configuration with credentials - cfg, err := config.LoadDefaultConfig( - context.TODO(), - config.WithCredentialsProvider( - credentials.NewStaticCredentialsProvider(accessKey, secretKey, ""), - ), - config.WithRegion("auto"), // Ensure this region is appropriate or set it via environment variable if needed - config.WithEndpointResolverWithOptions( - aws.EndpointResolverWithOptionsFunc( - func(service, region string, options 
...interface{}) (aws.Endpoint, error) { - return aws.Endpoint{ - URL: s3URL, - }, nil - }, - ), - ), - ) - if err != nil { - fmt.Fprintf(os.Stderr, "failed to load configuration, %v", err) - os.Exit(1) - } - - // Create an S3 client - client := s3.NewFromConfig(cfg) - - // Open the file to upload - file, err := os.Open(filePath) - if err != nil { - fmt.Fprintf(os.Stderr, "failed to open file %q, %v", filePath, err) - os.Exit(1) - } - defer file.Close() - - // Get the file size - fileInfo, err := file.Stat() - if err != nil { - fmt.Fprintf(os.Stderr, "failed to get file stats %q, %v", filePath, err) - os.Exit(1) - } - fileSize := fileInfo.Size() - - // Create a progress bar - p := mpb.New(mpb.WithWidth(60)) - bar := p.AddBar(fileSize, - mpb.PrependDecorators( - decor.Name("Upload progress:"), - decor.Percentage(decor.WC{W: 5}), - ), - mpb.AppendDecorators( - decor.CountersKibiByte("% .2f / % .2f"), - decor.Name(" "), - decor.AverageSpeed(decor.SizeB1024(0), "% .2f", decor.WC{W: 7}), - decor.AverageETA(decor.ET_STYLE_GO, decor.WC{W: 12}), - ), - ) - - // Create a proxy reader - proxyReader := bar.ProxyReader(file) - defer proxyReader.Close() - - // Upload the file - key := filepath.Base(filePath) - _, err = client.PutObject(context.TODO(), &s3.PutObjectInput{ - Bucket: &bucketName, - Key: &key, - Body: proxyReader, - ContentLength: aws.Int64(fileSize), - }) - if err != nil { - fmt.Fprintf(os.Stderr, "failed to upload file %q to bucket %q, %v", filePath, bucketName, err) - os.Exit(1) - } - - // Wait for the bar to complete - p.Wait() - - fmt.Printf("Successfully uploaded %q to %q\n", filePath, bucketName) -}