commit 26b204bd048c2ca15ec5096f679bba8ed51bf68a092f2825a93279d76ca95c1a Author: Mukan Erkin Törük Date: Mon May 11 03:31:37 2026 +0300 feat: fork Ignite CLI v29 as Mukan Ignite — remove cosmos-sdk restrictions diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..b219eb3 --- /dev/null +++ b/.dockerignore @@ -0,0 +1 @@ +.git \ No newline at end of file diff --git a/.firebaserc b/.firebaserc new file mode 100644 index 0000000..8fb2d28 --- /dev/null +++ b/.firebaserc @@ -0,0 +1,14 @@ +{ + "projects": { + "default": "igntservices" + }, + "targets": { + "igntservices": { + "hosting": { + "docs.ignite.com": [ + "igntservices-docs" + ] + } + } + } +} diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..a6b84d3 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +changelog.md merge=union diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..7018045 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,5 @@ +# CODEOWNERS: + +# Primary repo maintainers + +* @ignite/cli-sdk-team diff --git a/.github/ISSUE_TEMPLATE/bug-report.md b/.github/ISSUE_TEMPLATE/bug-report.md new file mode 100644 index 0000000..2d738df --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug-report.md @@ -0,0 +1,82 @@ +--- +name: Ignite CLI Bug Report +about: Create a report to help identify and fix issues +title: "FIX: " +labels: type:bug +assignees: "" +--- + + + +### Context: + + + +#### Describe the Bug: + + + +### Steps to Reproduce: + + + +1. +2. +3. + +### Expected Behavior: + + + +### Actual Behavior: + + + +### Environment Details: + + + +- **OS:** +- **Ignite Version:** Provide the output of the `ignite version` command. +- **Go Version:** Provide the output of the `go version` command. 
+- **Other Relevant Tools:** (e.g., node, npm, or other dependencies, if applicable) + +### Notes: + + diff --git a/.github/ISSUE_TEMPLATE/enhancement.md b/.github/ISSUE_TEMPLATE/enhancement.md new file mode 100644 index 0000000..1c22374 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/enhancement.md @@ -0,0 +1,67 @@ +--- +name: Ignite CLI Enhancement Request +about: Suggest improvements or updates to existing features +title: "UPDATE: " +labels: type:enh +assignees: "" +--- + + + +### Context: + + + +### Proposed Enhancement: + + + +#### Alternatives Considered: + + + +### Desired Outcome: + + + +### Notes: + + diff --git a/.github/ISSUE_TEMPLATE/feature-request.md b/.github/ISSUE_TEMPLATE/feature-request.md new file mode 100644 index 0000000..d7c3e8d --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature-request.md @@ -0,0 +1,73 @@ +--- +name: Ignite CLI Feature Request +about: Suggest a new feature or component +title: "INIT: " +labels: type:feat, request +assignees: "" +--- + + + +### Context: + + + +### Proposed Solution: + + + +#### Alternatives Considered: + + + +### Expected Outcome: + + + +### Notes: + + diff --git a/.github/ISSUE_TEMPLATE/meta.md b/.github/ISSUE_TEMPLATE/meta.md new file mode 100644 index 0000000..5f31244 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/meta.md @@ -0,0 +1,64 @@ +--- +name: Ignite CLI Meta Task +about: Create a larger, multi-step task or initiative +title: "META: " +labels: epic +assignees: "" +--- + + + +### Context: + + + +### Acceptance Criteria: + + + + + +### Dependencies or Related Tasks: + + + +### Notes: + + diff --git a/.github/ISSUE_TEMPLATE/release-tracker.md b/.github/ISSUE_TEMPLATE/release-tracker.md new file mode 100644 index 0000000..50ff66f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/release-tracker.md @@ -0,0 +1,49 @@ +--- +name: Release tracker +about: Create an issue to track release progress + +--- + + + +## QA + +- [ ] Tutorial tests verification +- [ ] Test `serve` on suite of chains + +### Backwards compatibility + + 
 + +### Other testing + +## Migration + + + +## Checklist + + + +- [ ] Update Ignite CLI version (see [#3793](https://github.com/ignite/cli/pull/3793) for example): + - [ ] Rename module version in go.mod to `/vXX` (where `XX` is the new version number). + - [ ] Update plugins go plush, protos and re-generate them + - [ ] Update documentation links (docs/docs) + - [ ] Update GitHub actions, goreleaser and other CI/CD scripts + +## Post-release checklist + +- [ ] Update [`changelog.md`](https://github.com/ignite/cli/blob/main/changelog.md) +- [ ] Update [`readme.md`](https://github.com/ignite/cli/blob/main/readme.md): + - [ ] Version matrix. +- [ ] Update docs site: + - [ ] Add new release tag to [`docs/versioned_docs`](https://github.com/ignite/cli/tree/main/docs/versioned_docs). +- [ ] After changes to docs site are deployed, check [docs.ignite.com/](https://docs.ignite.com/) is updated. + +____ diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md new file mode 100644 index 0000000..11947d6 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md @@ -0,0 +1,12 @@ +Please make sure to check the following for your PR: + +- [ ] This PR complies with the [contributing](../../contributing.md) guidelines. +- [ ] Reviewed "Files changed" and left comments if necessary +- [ ] Included relevant documentation changes. + +Ignite CLI team only: + +- [ ] I have updated the _Unreleased_ section in the changelog.md for my changes. +- [ ] If the templates in `ignite/templates/files` have been changed, make + sure that the change doesn't need to be reflected in the + `ignite/templates/files-*` folders. 
diff --git a/.github/codecov.yml b/.github/codecov.yml new file mode 100644 index 0000000..58b0d49 --- /dev/null +++ b/.github/codecov.yml @@ -0,0 +1,33 @@ +coverage: + precision: 2 + round: down + range: 70...100 + status: + project: + default: + target: auto + threshold: 10% + informational: true + patch: + default: + informational: true + +comment: + layout: "reach, diff, files" + behavior: default + require_changes: true + +codecov: + notify: + wait_for_ci: false + +ignore: + - "*.pb.go" + - "*.pb.gw.go" + - "*.md" + - "*.ts" + - "actions/" + - "assets/" + - "docs/" + - "integration/" + - "scripts/" diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..32a0c9e --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,35 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. +# Please see the documentation for all configuration options: +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: "gomod" + directory: "/" # Location of package manifests + schedule: + interval: "daily" + time: "10:00" + labels: + - "dependencies" + - "component:go" + - "skip-changelog" + open-pull-requests-limit: 0 + + - package-ecosystem: "npm" + directory: "/docs" # Location of package manifests + schedule: + interval: "daily" + time: "10:00" + labels: + - "dependencies" + - "component:js" + - "skip-changelog" + open-pull-requests-limit: 0 + + - package-ecosystem: github-actions + directory: "/" + schedule: + interval: daily + time: "10:00" + open-pull-requests-limit: 0 diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 0000000..222b135 --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,33 @@ +"component:ci": + - "!.github/labeler.yml" + - "!.github/labels.json" + - .github/**/* + +"component:docs": + - assets/**/* + 
- docs/**/* + +"component:tools": + - .actions/**/* + - .scripts/**/* + +"component:packages": + - ignite/pkg/**/* + +"component:cmd": + - ignite/cmd/**/* + +"type:services": + - ignite/services/**/* + +"component:templates": + - ignite/templates/**/* + +"type:internal": + - ignite/config/**/* + - ignite/internal/**/* + - ignite/version/**/* + +"component:configs": + - "!*.md" + - "*" diff --git a/.github/labels.json b/.github/labels.json new file mode 100644 index 0000000..d985d61 --- /dev/null +++ b/.github/labels.json @@ -0,0 +1,72 @@ +[ + { + "name": "bug", + "color": "d73a4a", + "description": "Something isn't working" + }, + { + "name": "bug fix", + "color": "8a1d1c", + "description": "Functionality that fixes a bug" + }, + { + "name": "dependencies", + "color": "5319e7", + "description": "Update to the dependencies" + }, + { + "name": "docs", + "color": "0075ca", + "description": "Improvements or additions to documentation" + }, + { + "name": "duplicate", + "color": "cfd3d7", + "description": "This issue or pull request already exists" + }, + { + "name": "good first issue", + "color": "7057ff", + "description": "Good for newcomers" + }, + { + "name": "help wanted", + "color": "008672", + "description": "Extra attention is needed" + }, + { + "name": "breaking change", + "color": "a7327e", + "description": "Functionality that contains breaking changes" + }, + { + "name": "don't merge", + "color": "b60205", + "description": "Please don't merge this functionality temporarily" + }, + { + "name": "feature", + "color": "ffb703", + "description": "New update to Gno" + }, + { + "name": "hotfix", + "color": "003049", + "description": "Major bug fix that should be merged ASAP" + }, + { + "name": "info needed", + "color": "54eba0", + "description": "More information needed" + }, + { + "name": "question", + "color": "fbca04", + "description": "Questions about Gno" + }, + { + "name": "investigating", + "color": "8c008c", + "description": "This behavior is still being 
tested out" + } +] diff --git a/.github/mergify.yml b/.github/mergify.yml new file mode 100644 index 0000000..4233057 --- /dev/null +++ b/.github/mergify.yml @@ -0,0 +1,33 @@ +queue_rules: + - name: default + queue_conditions: + - "#approved-reviews-by>1" + - base=main + merge_conditions: + - "#approved-reviews-by>1" + merge_method: squash + commit_message_template: | + {{ title }} (#{{ number }}) + {{ body }} + +pull_request_rules: + - name: backport patches to v28.x.y branch + conditions: + - base=main + - label=backport/v28.x.y + actions: + backport: + branches: + - release/v28.x.y + - name: backport patches to v29.x.y branch + conditions: + - base=main + - label=backport/v29.x.y + actions: + backport: + branches: + - release/v29.x.y + - name: refactored queue action rule + conditions: [] + actions: + queue: diff --git a/.github/workflows/auto-author-assign.yml b/.github/workflows/auto-author-assign.yml new file mode 100644 index 0000000..545a73e --- /dev/null +++ b/.github/workflows/auto-author-assign.yml @@ -0,0 +1,18 @@ +name: auto-author-assign + +on: + pull_request_target: + types: [ opened, reopened ] + +permissions: + pull-requests: write + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + assign-author: + runs-on: ubuntu-latest + steps: + - uses: toshimaru/auto-author-assign@v2.1.1 diff --git a/.github/workflows/cl-enforcer.yml b/.github/workflows/cl-enforcer.yml new file mode 100644 index 0000000..3b82c7b --- /dev/null +++ b/.github/workflows/cl-enforcer.yml @@ -0,0 +1,19 @@ +name: Changelog Enforcer +on: + pull_request: + # The specific activity types are listed here to include "labeled" and "unlabeled" + # (which are not included by default for the "pull_request" trigger). + # This is needed to allow skipping enforcement of the changelog in PRs with specific labels, + # as defined in the (optional) "skipLabels" property. 
+ types: [ opened, synchronize, reopened, ready_for_review, labeled, unlabeled ] + +jobs: + changelog: + runs-on: ubuntu-latest + steps: + - uses: dangoslen/changelog-enforcer@v3 + with: + changeLogPath: 'changelog.md' + missingUpdateErrorMessage: 'Please add an entry to the changelog.md file or add the "skip-changelog" label' + skipLabels: 'skip-changelog' + versionPattern: '' \ No newline at end of file diff --git a/.github/workflows/docs-deploy-preview.yml b/.github/workflows/docs-deploy-preview.yml new file mode 100644 index 0000000..45aadd6 --- /dev/null +++ b/.github/workflows/docs-deploy-preview.yml @@ -0,0 +1,46 @@ +name: Docs Deploy Preview + +on: + pull_request: + paths: + - "docs/**" + +jobs: + build_and_deploy: + if: "${{ github.event.pull_request.head.repo.full_name == github.repository }}" + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Cache node_modules + uses: actions/cache@v4 + with: + path: docs/node_modules + key: node_modules-${{ hashFiles('**/yarn.lock') }} + + - name: Setup node + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: yarn + cache-dependency-path: docs/yarn.lock + + - name: Install Dependencies + run: yarn install + working-directory: ./docs + + - name: Build + run: yarn run build + working-directory: ./docs + + - name: Deploy + uses: FirebaseExtended/action-hosting-deploy@v0 + with: + repoToken: "${{ secrets.GITHUB_TOKEN }}" + firebaseServiceAccount: "${{ secrets.FIREBASE_SERVICE_ACCOUNT_IGNTSERVICES }}" + expires: 7d + target: docs.ignite.com + projectId: igntservices + firebaseToolsVersion: v11.12.0 diff --git a/.github/workflows/docs-deploy.yml b/.github/workflows/docs-deploy.yml new file mode 100644 index 0000000..7a95a28 --- /dev/null +++ b/.github/workflows/docs-deploy.yml @@ -0,0 +1,48 @@ +name: Docs Deploy + +on: + push: + branches: + - main + +concurrency: + group: ci-${{ github.ref }}-docs-deploy + cancel-in-progress: true + +jobs: + build_and_deploy: + 
runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Cache node_modules + uses: actions/cache@v4 + with: + path: docs/node_modules + key: node_modules-${{ hashFiles('**/yarn.lock') }} + + - name: Setup node + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: yarn + cache-dependency-path: docs/yarn.lock + + - name: Install Dependencies + run: yarn install + working-directory: ./docs + + - name: Build + run: yarn run build + working-directory: ./docs + + - name: Deploy + uses: FirebaseExtended/action-hosting-deploy@v0 + with: + repoToken: "${{ secrets.GITHUB_TOKEN }}" + firebaseServiceAccount: "${{ secrets.FIREBASE_SERVICE_ACCOUNT_IGNTSERVICES }}" + channelId: live + target: docs.ignite.com + projectId: igntservices diff --git a/.github/workflows/gen-docs-cli.yml b/.github/workflows/gen-docs-cli.yml new file mode 100644 index 0000000..51e0c81 --- /dev/null +++ b/.github/workflows/gen-docs-cli.yml @@ -0,0 +1,34 @@ +name: Generate Docs + +on: + push: + branches: + - main + +jobs: + cli: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-go@v5 + with: + go-version: 'stable' + + - name: Generate CLI Docs + run: ./scripts/gen-cli-docs + + - name: Create Pull Request + id: cpr + uses: peter-evans/create-pull-request@v7 + with: + title: "docs(cli): update generated docs" + commit-message: "docs(cli): update generated docs" + body: "" + branch: feat/gen-cli-docs + + - name: Check outputs + run: | + echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}" + echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}" + diff --git a/.github/workflows/gen-docs-config.yml b/.github/workflows/gen-docs-config.yml new file mode 100644 index 0000000..44d1d57 --- /dev/null +++ b/.github/workflows/gen-docs-config.yml @@ -0,0 +1,38 @@ +name: Generate Config Doc +on: + push: + paths: + - "ignite/config/chain/base/*" + - "ignite/config/chain/v*" + branches: + - main + 
+jobs: + cli: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-go@v5 + with: + go-version: 'stable' + + - name: Generate Config Doc + run: ./scripts/gen-config-doc + + - name: Create Pull Request + id: cpr + uses: peter-evans/create-pull-request@v7 + with: + title: "docs(config): update config doc" + commit-message: "docs(config): update config doc" + body: "" + branch: feat/gen-config-doc + add-paths: | + docs/ + + - name: Check outputs + run: | + echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}" + echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}" + diff --git a/.github/workflows/gen-docs-migration.yml b/.github/workflows/gen-docs-migration.yml new file mode 100644 index 0000000..775e582 --- /dev/null +++ b/.github/workflows/gen-docs-migration.yml @@ -0,0 +1,34 @@ +name: Generate Migration Docs +on: + release: + types: [published] + workflow_dispatch: + +jobs: + cli: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-go@v5 + with: + go-version: "stable" + + - name: Generate Scaffold Migration Docs + run: ./scripts/gen-mig-diffs + + - name: Create Pull Request + id: cpr + uses: peter-evans/create-pull-request@v7 + with: + title: "docs(migration): update generated docs" + commit-message: "docs(migration): update generated docs" + body: "" + branch: feat/gen-migration-docs + add-paths: | + docs/ + + - name: Check outputs + run: | + echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}" + echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}" diff --git a/.github/workflows/gen-docs-version.yml b/.github/workflows/gen-docs-version.yml new file mode 100644 index 0000000..2a25abc --- /dev/null +++ b/.github/workflows/gen-docs-version.yml @@ -0,0 +1,67 @@ +name: Docusaurus add version +on: + release: + types: [published] + +jobs: + gen-docs-version: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: 
actions/checkout@v4 + + - name: Cache node_modules + uses: actions/cache@v4 + with: + path: docs/node_modules + key: node_modules-${{ hashFiles('**/yarn.lock') }} + + - name: Setup node + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: yarn + cache-dependency-path: docs/yarn.lock + + - name: Install Dependencies + run: yarn install + working-directory: ./docs + + - name: Skip beta versions + continue-on-error: false + run: | + VERSION=${{ github.ref_name }} + if [[ $VERSION == *beta* ]]; then + echo "Skipping beta version" + exit 1 + fi + + - name: Generate docusaurus docs for major version + run: | + VERSION=${{ github.ref_name }} + if [[ $VERSION == v0* ]]; then + # If version starts with v0, use the patch version + DOCUSAURUS_VERSION=${VERSION:1} + else + # Otherwise, use the major version + MAJOR_VERSION=${VERSION%%.*} + DOCUSAURUS_VERSION=${MAJOR_VERSION:1} + fi + + jq --arg version "v$DOCUSAURUS_VERSION" 'del(.[] | select(. == $version))' versions.json > versions.json.tmp + mv versions.json.tmp versions.json + rm -rf versioned_docs/version-v$DOCUSAURUS_VERSION + rm -rf versioned_sidebars/version-v$DOCUSAURUS_VERSION-sidebars.json + yarn run docusaurus docs:version v$DOCUSAURUS_VERSION + working-directory: ./docs + + - name: Create Pull Request + uses: peter-evans/create-pull-request@v7 + with: + title: "chore: docusaurus deploy version ${{ github.ref_name }}" + commit-message: "chore(docs): deploy version ${{ github.ref_name }}" + body: "" + base: main + branch: chore/docs/new-version + add-paths: | + docs/ diff --git a/.github/workflows/gh-cleanup-cache.yml b/.github/workflows/gh-cleanup-cache.yml new file mode 100644 index 0000000..04b84a2 --- /dev/null +++ b/.github/workflows/gh-cleanup-cache.yml @@ -0,0 +1,29 @@ +name: cleanup caches by a branch +on: + pull_request: + types: + - closed + +jobs: + cleanup: + runs-on: ubuntu-latest + steps: + - name: Cleanup + run: | + gh extension install actions/gh-actions-cache + + echo "Fetching list of 
cache key" + cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 ) + + ## Setting this to not fail the workflow while deleting cache keys. + set +e + echo "Deleting caches..." + for cacheKey in $cacheKeysForPR + do + gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm + done + echo "Done" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + REPO: ${{ github.repository }} + BRANCH: refs/pull/${{ github.event.pull_request.number }}/merge diff --git a/.github/workflows/go-formatting.yml b/.github/workflows/go-formatting.yml new file mode 100644 index 0000000..d65dae7 --- /dev/null +++ b/.github/workflows/go-formatting.yml @@ -0,0 +1,29 @@ +name: Go formatting +on: + push: + branches: [ main ] + paths: + - '**.go' + +jobs: + go-formatting: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install Go + uses: actions/setup-go@v5 + with: + go-version: 'stable' + + - name: Run make format + run: make format + + - name: Create Pull Request + uses: peter-evans/create-pull-request@v7 + with: + title: "chore: go formatting" + commit-message: "chore: go formatting" + body: "" + branch: chore/go-formatting diff --git a/.github/workflows/janitor.yml b/.github/workflows/janitor.yml new file mode 100644 index 0000000..4b651c4 --- /dev/null +++ b/.github/workflows/janitor.yml @@ -0,0 +1,16 @@ +name: Janitor +# Janitor cleans up previous runs that are not completed for a given workflow +# It cancels Sims and Tests +# Reference the API https://api.github.com/repos/:org/:repo/actions/workflows to find workflow ids +on: + pull_request: + +jobs: + cancel: + name: "Cancel Previous Runs" + runs-on: ubuntu-latest + timeout-minutes: 3 + steps: + - uses: styfle/cancel-workflow-action@0.12.1 + with: + access_token: ${{ github.token }} diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml new file mode 100644 index 0000000..ae8e64a --- /dev/null +++ b/.github/workflows/labeler.yml @@ -0,0 +1,16 
@@ +name: "Pull Request Labeler" +on: + - pull_request_target + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + triage: + permissions: + contents: read + pull-requests: write + runs-on: ubuntu-latest + steps: + - uses: actions/labeler@v4 # keep v4 due to unwanted behavior changes in later versions. diff --git a/.github/workflows/link-checker-config.json b/.github/workflows/link-checker-config.json new file mode 100644 index 0000000..8e5dd4e --- /dev/null +++ b/.github/workflows/link-checker-config.json @@ -0,0 +1,34 @@ +{ + "ignorePatterns": [ + { + "pattern": "^https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/commenting-on-a-pull-request" + }, + { + "pattern": "^https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax" + }, + { + "pattern": "^https://docs.ignite.com/" + }, + { + "pattern": "^http://localhost" + }, + { + "pattern": "^index.md" + }, + { + "pattern": "^https://docs.starport.network" + } + ], + "replacementPatterns": [ + { + "pattern": "^/", + "replacement": "https://docs.ignite.com/" + } + ], + "baseUrl": "https://docs.ignite.com", + "projectBaseUrl": "https://docs.ignite.com", + "timeout": "20s", + "retryOn429": true, + "retryCount": 5, + "fallbackRetryDelay": "30s" +} diff --git a/.github/workflows/link-checker.yml b/.github/workflows/link-checker.yml new file mode 100644 index 0000000..37a36e3 --- /dev/null +++ b/.github/workflows/link-checker.yml @@ -0,0 +1,22 @@ +name: Check links +on: + pull_request: + push: + branches: + - main + - release/* + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + markdown-link-check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: gaurav-nelson/github-action-markdown-link-check@1.0.16 + with: + 
folder-path: "." + use-verbose-mode: "yes" + config-file: ".github/workflows/link-checker-config.json" diff --git a/.github/workflows/proto-checker.yml b/.github/workflows/proto-checker.yml new file mode 100644 index 0000000..c51f9f3 --- /dev/null +++ b/.github/workflows/proto-checker.yml @@ -0,0 +1,30 @@ +name: Protobuf Files + +on: + pull_request: + paths: + - "proto/**" + +permissions: + contents: read + +jobs: + lint: + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - uses: actions/checkout@v4 + - uses: bufbuild/buf-setup-action@v1.50.0 + - uses: bufbuild/buf-lint-action@v1 + with: + input: "proto" + + break-check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: bufbuild/buf-setup-action@v1.50.0 + - uses: bufbuild/buf-breaking-action@v1 + with: + input: "proto" + against: "https://github.com/${{ github.repository }}.git#branch=${{ github.event.pull_request.base.ref }},ref=HEAD~1,subdir=proto" diff --git a/.github/workflows/release-binary.yml b/.github/workflows/release-binary.yml new file mode 100644 index 0000000..0de0220 --- /dev/null +++ b/.github/workflows/release-binary.yml @@ -0,0 +1,36 @@ +name: Release Binaries + +on: + release: + types: [ published ] + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + binary: + runs-on: ubuntu-latest + env: + working-directory: go/src/github.com/ignite/cli + + steps: + - name: Set up Go + uses: actions/setup-go@v5 + with: + go-version: 'stable' + + - name: Checkout + uses: actions/checkout@v4 + with: + path: ${{ env.working-directory }} + fetch-depth: 0 + + - name: Run GoReleaser + uses: goreleaser/goreleaser-action@v6 + with: + workdir: ${{ env.working-directory }} + version: '~> v2' + args: release --clean + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release-docker.yml b/.github/workflows/release-docker.yml new file mode 100644 index 0000000..fe99d2d --- /dev/null +++ 
b/.github/workflows/release-docker.yml @@ -0,0 +1,66 @@ +name: Release Docker Image + +on: + release: + types: [ published ] + schedule: + - cron: "0 0 * * *" # every day at midnight + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + check-latest-run: + runs-on: "ubuntu-latest" + steps: + - uses: octokit/request-action@v2.x + id: check_last_run + with: + route: GET /repos/${{github.repository}}/actions/workflows/release-docker.yml/runs?per_page=1&status=completed + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + outputs: + last_sha: ${{ fromJson(steps.check_last_run.outputs.data).workflow_runs[0].head_sha }} + + docker: + name: Push Docker image to Docker Hub + if: needs.check-latest-run.outputs.last_sha != github.sha + runs-on: ubuntu-latest + needs: [ check-latest-run ] + + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to DockerHub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: ignitehq/cli + # push to ignitehq/cli:latest on every push to master + # push to ignitehq/cli:vx.x.x on every release published + tags: | + type=raw,value=latest + type=semver,pattern=v{{version}} + + - name: Build and push + uses: docker/build-push-action@v6 + with: + push: true + context: . 
+ platforms: linux/amd64,linux/arm64 + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} diff --git a/.github/workflows/release-homebrew.yml b/.github/workflows/release-homebrew.yml new file mode 100644 index 0000000..838ebb1 --- /dev/null +++ b/.github/workflows/release-homebrew.yml @@ -0,0 +1,31 @@ +name: Publish Ignite to HomeBrew + +on: + release: + types: [published] + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + homebrew: + name: Bump Homebrew formula + runs-on: ubuntu-latest + steps: + - name: Extract version + id: extract-version + # Strip a string prefix from the git tag name: + run: | + echo "tag-name=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT + + - uses: mislav/bump-homebrew-formula-action@v3.3 + with: + formula-name: ignite + formula-path: Formula/i/ignite.rb + homebrew-tap: Homebrew/homebrew-core + base-branch: master + download-url: https://github.com/ignite/cli/archive/refs/tags/${{ steps.extract-version.outputs.tag-name }}.tar.gz + env: + # the personal access token should have "repo" & "workflow" scopes + COMMITTER_TOKEN: ${{ secrets.COMMITTER_TOKEN }} diff --git a/.github/workflows/release-nightly.yml b/.github/workflows/release-nightly.yml new file mode 100644 index 0000000..06641f1 --- /dev/null +++ b/.github/workflows/release-nightly.yml @@ -0,0 +1,79 @@ +name: Release nightly + +on: + schedule: + - cron: "0 0 * * *" # every day at midnight + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + check-latest-run: + runs-on: "ubuntu-latest" + steps: + - uses: octokit/request-action@v2.x + id: check_last_run + with: + route: GET /repos/${{github.repository}}/actions/workflows/release-nightly.yml/runs?per_page=1&status=completed + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + outputs: + last_sha: ${{ 
fromJson(steps.check_last_run.outputs.data).workflow_runs[0].head_sha }} + + release-nightly: + if: needs.check-latest-run.outputs.last_sha != github.sha + runs-on: ubuntu-latest + needs: [check-latest-run] + env: + working-directory: go/src/github.com/ignite/cli + + steps: + - uses: actions/checkout@v4 + + - name: Delete the nightly release + uses: dev-drprasad/delete-tag-and-release@v1.1 + with: + tag_name: nightly + delete_release: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Push new nightly tag + uses: mathieudutour/github-tag-action@v6.2 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + custom_tag: nightly + tag_prefix: "" + + - name: Create the new nightly release + uses: ncipollo/release-action@v1 + with: + tag: nightly + name: nightly + body: "Install and move the CLI to your bin directory: `curl https://get.ignite.com/cli@nightly! | bash`" + prerelease: true + + releases-binaries: + if: needs.check-latest-run.outputs.last_sha != github.sha + name: Release Go Binary + needs: [check-latest-run] + runs-on: ubuntu-latest + strategy: + matrix: + goos: [linux, darwin] + goarch: [amd64, arm64] + steps: + - uses: actions/checkout@v4 + - uses: wangyoucao577/go-release-action@v1.53 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + project_path: "./ignite/cmd/ignite" + binary_name: "ignite" + asset_name: ignite-nightly-${{ matrix.goos }}-${{ matrix.goarch }} + release_name: "nightly" + goos: ${{ matrix.goos }} + goarch: ${{ matrix.goarch }} + ldflags: -s -w -X github.com/ignite/cli/v29/ignite/version.Version=nightly + retry: 10 diff --git a/.github/workflows/stats.yaml b/.github/workflows/stats.yaml new file mode 100644 index 0000000..7ddaa3a --- /dev/null +++ b/.github/workflows/stats.yaml @@ -0,0 +1,17 @@ +on: + schedule: + # Run this once per day, towards the end of the day for keeping the most + # recent data point most meaningful (hours are interpreted in UTC). 
+ - cron: "0 23 * * *" + workflow_dispatch: # Allow for running this manually. + +jobs: + j1: + name: Ignite CLI Repository Statistics + runs-on: ubuntu-latest + steps: + - name: run-ghrs + uses: jgehrcke/github-repo-stats@v1.4.2 + with: + ghtoken: ${{ secrets.ghrs_github_api_token }} + diff --git a/.github/workflows/test-cov.yml b/.github/workflows/test-cov.yml new file mode 100644 index 0000000..1989868 --- /dev/null +++ b/.github/workflows/test-cov.yml @@ -0,0 +1,30 @@ +name: Test Coverage + +on: + schedule: + - cron: "0 0 * * *" # every day at midnight + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + ref: main # change in release branches + + - uses: actions/setup-go@v5 + with: + go-version: "stable" + + - run: ./scripts/test-coverage + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v5 + with: + file: ./coverage.txt + fail_ci_if_error: false + verbose: true diff --git a/.github/workflows/test-integration.yml b/.github/workflows/test-integration.yml new file mode 100644 index 0000000..ec7a0ca --- /dev/null +++ b/.github/workflows/test-integration.yml @@ -0,0 +1,79 @@ +name: Integration + +on: + pull_request: + push: + branches: + - main + - release/* + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + pre-test: + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.set-matrix.outputs.matrix }} + steps: + - uses: actions/checkout@v4 + + - name: Finding files and store to output + id: set-matrix + run: echo "matrix=$({ cd integration && find . -type d ! -name testdata -maxdepth 1 -print; } | tail -n +2 | cut -c 3- | jq -R . 
| jq -cs .)" >> $GITHUB_OUTPUT + + - uses: actions/setup-go@v5 + with: + go-version: "stable" + + - name: Download Go modules + run: go mod tidy + + - name: Upload prepared workspace + uses: actions/upload-artifact@v4 + with: + name: workspace + path: . + retention-days: 1 + + integration: + name: test ${{ matrix.test-path }} on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + needs: pre-test + if: fromJSON(needs.pre-test.outputs.matrix)[0] != null + continue-on-error: true + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + test-path: ${{ fromJson(needs.pre-test.outputs.matrix) }} + steps: + - name: Download prepared workspace + uses: actions/download-artifact@v5 + with: + name: workspace + path: . + + - uses: actions/setup-go@v5 + with: + go-version: "stable" + + - name: Run Integration Tests + env: + GOTOOLCHAIN: local+path + GOSUMDB: off + run: go test -v -timeout 120m ./integration/${{ matrix.test-path }} + + status: + runs-on: ubuntu-latest + needs: integration + if: always() + steps: + - name: Update result status + run: | + if [ "${{ needs.integration.result }}" = "failure" ]; then + exit 1 + else + exit 0 + fi diff --git a/.github/workflows/test-lint.yml b/.github/workflows/test-lint.yml new file mode 100644 index 0000000..b8491cc --- /dev/null +++ b/.github/workflows/test-lint.yml @@ -0,0 +1,33 @@ +name: Lint + +on: + pull_request: + push: + branches: + - main + - release/* + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + lint: + name: Lint Go code + runs-on: ubuntu-latest + timeout-minutes: 6 + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-go@v5 + with: + go-version-file: go.mod + cache: false + + - uses: golangci/golangci-lint-action@v6 + with: + version: v1.64.5 + install-mode: goinstall + args: --timeout 10m + github-token: ${{ secrets.github_token }} + skip-save-cache: true diff --git a/.github/workflows/test-semantic.yml 
b/.github/workflows/test-semantic.yml new file mode 100644 index 0000000..ff83a26 --- /dev/null +++ b/.github/workflows/test-semantic.yml @@ -0,0 +1,16 @@ +name: Semantic PR + +on: + pull_request_target: + types: + - opened + - edited + - synchronize + +jobs: + semantic_pr: + runs-on: ubuntu-latest + steps: + - uses: amannn/action-semantic-pull-request@v5 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..98a8686 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,27 @@ +name: Test + +on: + pull_request: + push: + branches: + - main + - release/* + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + test: + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest] + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-go@v5 + with: + go-version: "stable" + + - run: ./scripts/test diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..47250ce --- /dev/null +++ b/.gitignore @@ -0,0 +1,12 @@ +scripts/**/nodetime-* +**/testdata/**/go.sum +**/testdata/go.sum +dist/ +node_modules +.DS_Store +.idea +.vscode +docs/.vuepress/dist +build/ +*coverage.* +*.ign \ No newline at end of file diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 0000000..b1f2ee8 --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,71 @@ +run: + tests: false + +linters: + disable-all: true + enable: + - bodyclose + - copyloopvar + - decorder + - depguard + - dogsled + - dupword + - errcheck + - errchkjson + - errorlint + - exhaustive + - gci + - goconst + - gocritic + - godot + - gofumpt + - revive + - gosec + - gosimple + - govet + - grouper + - ineffassign + - misspell + - nakedret + - nolintlint + - staticcheck + - reassign + - stylecheck + - typecheck + - unconvert + - usetesting + - thelper + - unused + - unparam + - misspell + - forbidigo + +linters-settings: + gci: 
+ custom-order: true + sections: + - standard # Standard section: captures all standard packages. + - default # Default section: contains all imports that could not be matched to another section type. + - prefix(cosmossdk.io) + - prefix(github.com/cosmos/cosmos-sdk) + - prefix(github.com/cosmos/ibc-go) + - prefix(github.com/cometbft/cometbft) + - prefix(github.com/ignite/cli) + forbidigo: + forbid: + - p: ^fmt\.Errorf$ + msg: fmt.Errorf should be replaced by '"github.com/ignite/cli/ignite/pkg/errors"' + depguard: + rules: + main: + deny: + - pkg: "errors" + desc: Should be replaced by '"github.com/ignite/cli/ignite/pkg/errors"' + - pkg: "github.com/pkg/errors" + desc: Should be replaced by '"github.com/ignite/cli/ignite/pkg/errors"' + +issues: + exclude-dirs: + - ignite/ui + max-issues-per-linter: 0 + max-same-issues: 0 diff --git a/.goreleaser.yml b/.goreleaser.yml new file mode 100644 index 0000000..f983f8b --- /dev/null +++ b/.goreleaser.yml @@ -0,0 +1,11 @@ +project_name: ignite +builds: + - main: ./ignite/cmd/ignite + ldflags: + - -s -w -X github.com/ignite/cli/v29/ignite/version.Version={{.Tag}} # v29 matches the module path and the nightly release workflow ldflags + goos: + - linux + - darwin + goarch: + - amd64 + - arm64 diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..1547d02 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,51 @@ +# syntax = docker/dockerfile:1.2 +# WARNING! Use `DOCKER_BUILDKIT=1` with `docker build` to enable --mount feature. + +## prep the base image. +# +FROM golang as base + +RUN apt update && \ + apt-get install -y \ + build-essential \ + ca-certificates \ + curl + +# enable faster module downloading. +ENV GOPROXY=https://proxy.golang.org + +## builder stage. +# +FROM base as builder + +WORKDIR /ignite + +# cache dependencies. +COPY ./go.mod . +COPY ./go.sum . +RUN go mod download + +COPY . . + +RUN --mount=type=cache,target=/root/.cache/go-build go install -v ./... + +## prep the final image. 
+# +FROM base + +RUN useradd -ms /bin/bash tendermint +USER tendermint + +COPY --from=builder /go/bin/ignite /usr/bin + +WORKDIR /apps + +# see docs for exposed ports: +# https://docs.ignite.com/kb/config.html#host +EXPOSE 26657 +EXPOSE 26656 +EXPOSE 6060 +EXPOSE 9090 +EXPOSE 1317 + +ENTRYPOINT ["ignite"] diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..a5cec31 --- /dev/null +++ b/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. 
+ + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. 
+ + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. 
If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. 
For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. 
Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. 
This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. 
+ + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. 
If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. 
If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. 
+ + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. 
For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. 
+ + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. 
The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+ + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..5999ff1 --- /dev/null +++ b/Makefile @@ -0,0 +1,110 @@ +#! 
/usr/bin/make -f + +# Project variables. +PROJECT_NAME = ignite +DATE := $(shell date '+%Y-%m-%dT%H:%M:%S') +HEAD = $(shell git rev-parse HEAD) +LD_FLAGS = +BUILD_FLAGS = -mod=readonly -ldflags='$(LD_FLAGS)' +BUILD_FOLDER = ./dist + +## install: Install de binary. +install: + @echo Installing Ignite CLI... + @go install $(BUILD_FLAGS) ./... + @ignite version + +## build: Build the binary. +build: + @echo Building Ignite CLI... + @-mkdir -p $(BUILD_FOLDER) 2> /dev/null + @go build $(BUILD_FLAGS) -o $(BUILD_FOLDER) ./... + +## mocks: generate mocks +mocks: + @echo Generating mocks + @go install github.com/vektra/mockery/v2 + @go generate ./... + + +## clean: Clean build files. Also runs `go clean` internally. +clean: + @echo Cleaning build cache... + @-rm -rf $(BUILD_FOLDER) 2> /dev/null + @go clean ./... + +.PHONY: install build mocks clean + +## govet: Run go vet. +govet: + @echo Running go vet... + @go vet ./... + +## govulncheck: Run govulncheck +govulncheck: + @echo Running govulncheck... + @go tool golang.org/x/vuln/cmd/govulncheck ./... + +## format: Install and run goimports and gofumpt +format: + @echo Formatting... + @go tool mvdan.cc/gofumpt -w . + @go tool golang.org/x/tools/cmd/goimports -w -local github.com/ignite/cli/v29 . + @go tool github.com/tbruyelle/mdgofmt/cmd/mdgofmt -w docs + +## lint: Run Golang CI Lint. +lint: + @echo Running golangci-lint... + @go tool github.com/golangci/golangci-lint/cmd/golangci-lint run --out-format=tab --issues-exit-code=0 + +lint-fix: + @echo Running golangci-lint... + @go tool github.com/golangci/golangci-lint/cmd/golangci-lint run --fix --out-format=tab --issues-exit-code=0 + +.PHONY: govet format lint + +## proto-all: Format, lint and generate code from proto files using buf. +proto-all: proto-format proto-lint proto-gen format + +## proto-gen: Run buf generate. +proto-gen: + @echo Generating code from proto... 
+ @buf generate --template ./proto/buf.gen.yaml --output ./ + +## proto-format: Run buf format and update files with invalid proto format> +proto-format: + @echo Formatting proto files... + @buf format --write + +## proto-lint: Run buf lint. +proto-lint: + @echo Linting proto files... + @buf lint + +.PHONY: proto-all proto-gen proto-format proto-lint + +## test-unit: Run the unit tests. +test-unit: + @echo Running unit tests... + @go test -race -failfast -v ./ignite/... + +## test-integration: Run the integration tests. +test-integration: install + @echo Running integration tests... + @go test -race -failfast -v -timeout 60m ./integration/... + +## test: Run unit and integration tests. +test: govet govulncheck test-unit test-integration + +.PHONY: test-unit test-integration test + +help: Makefile + @echo + @echo " Choose a command run in "$(PROJECT_NAME)", or just run 'make' for install" + @echo + @sed -n 's/^##//p' $< | column -t -s ':' | sed -e 's/^/ /' + @echo + +.PHONY: help + +.DEFAULT_GOAL := install diff --git a/README.md b/README.md new file mode 100644 index 0000000..73aae1c --- /dev/null +++ b/README.md @@ -0,0 +1,59 @@ +

+ Mukan Ignite +

+ +

+ The sovereign development CLI for the Mukan Network stack, forked from Ignite CLI. +

+ +## Overview + +**Mukan Ignite** is a hard-fork of [Ignite CLI v29](https://github.com/ignite/cli), specifically patched for the Mukan Network sovereign blockchain stack. It removes hardcoded restrictions that prevent building chains with a custom SDK fork (like `mukan-sdk`). + +## What's Different? + +Ignite CLI normally enforces that your chain uses the **exact** `github.com/cosmos/cosmos-sdk` module path. This is an intentional restriction that breaks any project that uses a sovereign SDK fork. + +Mukan Ignite removes these restrictions with two surgical patches: + +### Patch 1 — `ignite/pkg/cosmosver/detect.go` +Extended `CosmosSDKModulePathPattern` to also recognize `mukan-sdk` as a valid Cosmos SDK variant: +```go +// Before (Ignite CLI) +CosmosSDKRepoName = "cosmos-sdk" + +// After (Mukan Ignite) +CosmosSDKRepoName = "cosmos-sdk|mukan-sdk" // also matches git.cw.tr/mukan-network/mukan-sdk +``` + +### Patch 2 — `ignite/version/version.go` +Removed the version gate check that blocked sovereign SDK forks: +```go +// AssertSupportedCosmosSDKVersion — version check disabled for Mukan sovereign stack +func AssertSupportedCosmosSDKVersion(v cosmosver.Version) error { + return nil // All Mukan SDK versions are valid +} +``` + +## Building from Source + +```bash +git clone https://git.cw.tr/mukan-network/mukan-ignite +cd mukan-ignite +make install +``` + +## Usage + +Drop-in replacement for `ignite`. Use `mukan-ignite` wherever you'd use `ignite`: + +```bash +mukan-ignite chain serve --reset-once +mukan-ignite chain build +``` + +## License + +Licensed under the **GNU General Public License v3.0 (GPLv3)**. 
+ +*Original Ignite CLI components remain under their respective Apache 2.0 licenses where applicable.* diff --git a/actions/cli/Dockerfile b/actions/cli/Dockerfile new file mode 100644 index 0000000..47a3686 --- /dev/null +++ b/actions/cli/Dockerfile @@ -0,0 +1,4 @@ +FROM ignitehq/cli:latest + +# Set the github runner user and group id +USER 1001:121 \ No newline at end of file diff --git a/actions/cli/action.yml b/actions/cli/action.yml new file mode 100644 index 0000000..0d50910 --- /dev/null +++ b/actions/cli/action.yml @@ -0,0 +1,5 @@ +name: cli +description: Ignite CLI +runs: + using: docker + image: Dockerfile diff --git a/actions/cli/readme.md b/actions/cli/readme.md new file mode 100644 index 0000000..250199d --- /dev/null +++ b/actions/cli/readme.md @@ -0,0 +1,23 @@ +# Ignite CLI Action + +This action makes the `ignite` CLI available as a Github Action. + +## Quick start + +Add a new workflow to your repo: + +```yml +on: push + +jobs: + help: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Print Help + uses: ignite/cli/actions/cli@main + with: + args: -h +``` diff --git a/actions/release/vars/action.yml b/actions/release/vars/action.yml new file mode 100644 index 0000000..c56bb42 --- /dev/null +++ b/actions/release/vars/action.yml @@ -0,0 +1,50 @@ +name: vars +description: Outputs variables that can be useful while creating a release +outputs: + should_release: + description: Indicates whether a release should be created or not + value: ${{ steps.vars.outputs.should_release }} + is_release_type_latest: + description: Shows if release type is latest (not a v* release) + value: ${{ steps.vars.outputs.is_release_type_latest }} + tag_name: + description: Name of the tag that should be used for release + value: ${{ steps.vars.outputs.tag_name }} + tarball_prefix: + description: A prefix to use in tarball asset names + value: ${{ steps.vars.outputs.tarball_prefix }} +runs: + using: "composite" + steps: + - id: vars + 
run: | + repo_name=${GITHUB_REPOSITORY##*/} + ref_name=${GITHUB_REF##*/} + default_branch=$(git remote show origin | awk '/HEAD branch/ {print $NF}') + + should_release=true + is_release_type_latest=false + tag_name="" + + if [[ $GITHUB_REF == refs/tags/* ]] + then + tag_name=$ref_name + elif [[ $GITHUB_REF == refs/heads/* && $ref_name == $default_branch ]] + then + tag_name=latest + is_release_type_latest=true + else + should_release=false + fi + + echo "should_release=$should_release" >> $GITHUB_OUTPUT + echo "is_release_type_latest=$is_release_type_latest" >> $GITHUB_OUTPUT + echo "tag_name=$tag_name" >> $GITHUB_OUTPUT + echo "tarball_prefix=$repo_name_$tag_name" >> $GITHUB_OUTPUT + shell: bash + - run: | + echo "- should_release: ${{ steps.vars.outputs.should_release }}" + echo "- is_release_type_latest: ${{ steps.vars.outputs.is_release_type_latest }}" + echo "- tag_name: ${{ steps.vars.outputs.tag_name }}" + echo "- tarball_prefix: ${{ steps.vars.outputs.tarball_prefix }}" + shell: bash diff --git a/assets/ignite-cli.png b/assets/ignite-cli.png new file mode 100644 index 0000000..0d6ecdd Binary files /dev/null and b/assets/ignite-cli.png differ diff --git a/buf.yaml b/buf.yaml new file mode 100644 index 0000000..6f93123 --- /dev/null +++ b/buf.yaml @@ -0,0 +1,20 @@ +version: v2 +modules: + - path: proto + name: buf.build/ignitehq/cli +lint: + use: + - STANDARD + - FILE_LOWER_SNAKE_CASE + except: + - COMMENT_FIELD + - RPC_REQUEST_STANDARD_NAME + - RPC_RESPONSE_STANDARD_NAME + - SERVICE_SUFFIX + disallow_comment_ignores: true +breaking: + use: + - FILE + except: + - EXTENSION_NO_DELETE + - FIELD_SAME_DEFAULT diff --git a/changelog.md b/changelog.md new file mode 100644 index 0000000..fbe9efc --- /dev/null +++ b/changelog.md @@ -0,0 +1,1409 @@ +# Changelog + +## Unreleased + +## [`v29.10.0`](https://github.com/ignite/cli/releases/tag/v29.10.0) + +## Features + +- [#4902](https://github.com/ignite/cli/pull/4902) Scaffold migrations. 
+ +### Fixes + +- [#4910](https://github.com/ignite/cli/pull/4910) Resolve qualified and nested RPC request messages. +- [#4909](https://github.com/ignite/cli/pull/4909) Ignore `context.Canceled` errors in Sentry reporting. + +## [`v29.9.2`](https://github.com/ignite/cli/releases/tag/v29.9.2) + +- [#4904](https://github.com/ignite/cli/pull/4904) Add variadic options in `modulecreate.AddModuleToAppConfig`. + +## [`v29.9.1`](https://github.com/ignite/cli/releases/tag/v29.9.1) + +### Changes + +- [#4901](https://github.com/ignite/cli/pull/4901) Enable automatic switching to daemon mode when not a terminal. + +## [`v29.9.0`](https://github.com/ignite/cli/releases/tag/v29.9.0) + +## Features + +- [#4877](https://github.com/ignite/cli/pull/4877) Remove app config and ibc add route placeholders. +- [#3094](https://github.com/ignite/cli/issues/3094) Add support for arrays with custom types. + +### Changes + +- [#4880](https://github.com/ignite/cli/pull/4880) Add possibility to scaffold underscore package. +- [#4878](https://github.com/ignite/cli/pull/4878) Improve the `xast` package readability. +- [#4883](https://github.com/ignite/cli/pull/4883) Remove ibc keys placeholders. +- [#4884](https://github.com/ignite/cli/pull/4884) Remove autocli placeholders. + +### Fixes + +- [#4886](https://github.com/ignite/cli/pull/4886) Fix chain scaffolding checks. +- [#4889](https://github.com/ignite/cli/pull/4889) Plugin data race. + +## [`v29.8.0`](https://github.com/ignite/cli/releases/tag/v29.8.0) + +## Features + +- [#4869](https://github.com/ignite/cli/pull/4869) Improve the httpstatuschecker by injecting clients. + +### Changes + +- [#4874](https://github.com/ignite/cli/pull/4874) Bump `cosmos-sdk` version to `v0.53.6`. +- [#4874](https://github.com/ignite/cli/pull/4874) Do not generate unannotated OpenAPI methods. + +### Fixes + +- [#4735](https://github.com/ignite/cli/issues/4735) Cleanup `xgenny` runner to avoid duplicated generators. 
+- [#4864](https://github.com/ignite/cli/pull/4864) Mismatch for message names. +- [#4874](https://github.com/ignite/cli/pull/4874) Use latest `bytedance/sonic` version to support Go 1.26. + +## [`v29.7.0`](https://github.com/ignite/cli/releases/tag/v29.7.0) + +## Changes + +- [#4861](https://github.com/ignite/cli/pull/4861) Bump CometBFT to `v0.38.21` +- [#4855](https://github.com/ignite/cli/pull/4855) Implement openapi excludes. +- [#4850](https://github.com/ignite/cli/pull/4850) Add default GitHub Actions for linting and testing. +- [#4849](https://github.com/ignite/cli/pull/4849) Bump `cosmos-sdk` version to `v0.53.5` and minimum Go version to `1.25`. + +## [`v29.6.2`](https://github.com/ignite/cli/releases/tag/v29.6.2) + +### Changes + +- [#4838](https://github.com/ignite/cli/pull/4838) Bump minimum Go version to 1.24.1. + +### Fixes + +- [#4833](https://github.com/ignite/cli/pull/4833) IBC consensus state not found. +- [#4844](https://github.com/ignite/cli/pull/4844) Fix tabs in proto impl YAML. + +## [`v29.6.1`](https://github.com/ignite/cli/releases/tag/v29.6.1) + +### Changes + +- [#4830](https://github.com/ignite/cli/pull/4830) Bump `cometbft` version to `v0.38.19` that fixes a [security issue in CometBFT](https://github.com/cometbft/cometbft/security/advisories/GHSA-hrhf-2vcr-ghch). + +## [`v29.6.0`](https://github.com/ignite/cli/releases/tag/v29.6.0) + +### Changes + +- [#4825](https://github.com/ignite/cli/pull/4825) Bump `cosmos-sdk` version to `v0.53.4` and `ibc-go` version to `v10.4.0`. +- [#4828](https://github.com/ignite/cli/pull/4828) Add remove func call in `xast` package. + +## [`v29.5.0`](https://github.com/ignite/cli/releases/tag/v29.5.0) + +### Changes + +- [#4822](https://github.com/ignite/cli/pull/4822) Add more functions in `xast` package and import its debuggability. 
+ +## [`v29.4.2`](https://github.com/ignite/cli/releases/tag/v29.4.2) + +### Changes + +- [#4811](https://github.com/ignite/cli/pull/4811) From this version on, Snapcraft support has been removed. Please migrate to [Homebrew](https://brew.sh/) for a better experience. Homebrew works for both macOS and GNU/Linux. +- [#4816](https://github.com/ignite/cli/pull/4816) Improve announcements UI. + +## [`v29.4.1`](https://github.com/ignite/cli/releases/tag/v29.4.1) + +### Changes + +- [#4805](https://github.com/ignite/cli/pull/4805) Fetch fallback buf token. +- [#4807](https://github.com/ignite/cli/pull/4807) Improve unconfigured path message when building a chain. +- [#4808](https://github.com/ignite/cli/pull/4808) Remove unused packages. + +## [`v29.4.0`](https://github.com/ignite/cli/releases/tag/v29.4.0) + +### Features + +- [#4790](https://github.com/ignite/cli/pull/4790) Remove global vars and struct placeholders. +- [#4797](https://github.com/ignite/cli/pull/4797) Add `xast` function to append new switch clause. +- [#4804](https://github.com/ignite/cli/pull/4804) Add `BroadcastAsync` to `cosmosclient` package. + +### Changes + +- [#4794](https://github.com/ignite/cli/pull/4794) Update `cosmossdk.io/log` version to `v1.6.1` and remove `bytedance/sonic` version override from template. + +### Fixes + +- [#4803](https://github.com/ignite/cli/pull/4803) Fix simulations for custom address prefix. + +## [`v29.3.1`](https://github.com/ignite/cli/releases/tag/v29.3.1) + +### Fixes + +- [#4793](https://github.com/ignite/cli/pull/4793) Use latest `bytedance/sonic` version to support Go 1.25. + +## [`v29.3.0`](https://github.com/ignite/cli/releases/tag/v29.3.0) + +### Features + +- [#4786](https://github.com/ignite/cli/pull/4786) Add all types to the documentation and disclaimer for multiple coin types. + +### Changes + +- [#4780](https://github.com/ignite/cli/pull/4780) Fallback to local generation when possible in `generate ts-client` command. 
+- [#4779](https://github.com/ignite/cli/pull/4779) Do not re-gen openapi spec each time the `ts-client` or the `composables` are generated. +- [#4784](https://github.com/ignite/cli/pull/4784) Remove unused message initialization. + +### Fixes + +- [#4779](https://github.com/ignite/cli/pull/4779) Find proto dir in non conventional repo structure. + +## [`v29.2.0`](https://github.com/ignite/cli/releases/tag/v29.2.0) + +### Features + +- [#4676](https://github.com/ignite/cli/issues/4676) Add Decimal Coin Type. +- [#4765](https://github.com/ignite/cli/pull/4765) Create `scaffold type-list` command. +- [#4770](https://github.com/ignite/cli/pull/4770) Add `--output-file` flag to `chain serve` command to improve running `chain serve` in the background. + +### Changes + +- [#4759](https://github.com/ignite/cli/pull/4759) Remove undocumented RPC address override in services chainer. +- [#4760](https://github.com/ignite/cli/pull/4760) Bump Cosmos SDK to `v0.53.3`. + +### Fixes + +- [#4757](https://github.com/ignite/cli/pull/4757) Always delete temp folder from open api generation. + +## [`v29.1.0`](https://github.com/ignite/cli/releases/tag/v29.1.0) + +### Features + +- [#4728](https://github.com/ignite/cli/pull/4728) Ask before overwriting a `xgenny` file. +- [#4731](https://github.com/ignite/cli/pull/4731) Complete IBC v2 wiring. +- [#4732](https://github.com/ignite/cli/pull/4732) Add `ignite chain modules list` command to list all modules in the chain. + +### Changes + +- [#4717](https://github.com/ignite/cli/pull/4717) Bump Cosmos SDK to `v0.53.2`. +- [#4718](https://github.com/ignite/cli/pull/4718) Bump default Ignite Apps. +- [#4741](https://github.com/ignite/cli/pull/4741) Let `generate openapi` generate external modules OpenAPI spec. +- [#4747](https://github.com/ignite/cli/pull/4747) Improve Ignite UI. +- [#4751](https://github.com/ignite/cli/pull/4751) Improve cache speed and limit duplicate I/O. 
+ +### Fixes + +- [#4691](https://github.com/ignite/cli/pull/4691), [#4706](https://github.com/ignite/cli/pull/4706), [#4725](https://github.com/ignite/cli/pull/4725), [#4737](https://github.com/ignite/cli/pull/4737) Fix ts-client query template and solely Go template for `ts-client` generation. +- [#4742](https://github.com/ignite/cli/pull/4742) Updates Vue composables template for new ts-client and tanstack/vue-query v5 +- [#4744](https://github.com/ignite/cli/pull/4744) Remove `react` frontend generation via `s react` command. Use the [Ignite CCA App](https://github.com/ignite/apps) instead. + +## [`v29.0.0`](https://github.com/ignite/cli/releases/tag/v29.0.0) + +### Features + +- [#4614](https://github.com/ignite/cli/pull/4614) Improve integration tests and add query tests. +- [#4683](https://github.com/ignite/cli/pull/4683) Allow to change default denom via flag. +- [#4687](https://github.com/ignite/cli/pull/4687) Add address type with `scalar` annotations, and add `scalar` type to signer field. + +### Changes + +- [#4689](https://github.com/ignite/cli/pull/4689) Revert `HasGenesis` implementation from retracted `core` v1 to SDK `HasGenesis` interface. +- [#4701](https://github.com/ignite/cli/pull/4701) Improve `ignite doctor` by removing manual migration step. Additionally, remove protoc to buf migrations logic. +- [#4703](https://github.com/ignite/cli/pull/4703) Remove testutil.Register function. +- [#4702](https://github.com/ignite/cli/pull/4702) Improve app detection by checking for inheritance instead of interface implementation. +- [#4707](https://github.com/ignite/cli/pull/4707) Show `buf` version in `ignite version` only when in a go module. +- [#4709](https://github.com/ignite/cli/pull/4709) Remove legacy msgServer support +- [#4710](https://github.com/ignite/cli/pull/4710) Remove the `nullify` pkg from the chain `testutil` + +### Fixes + +- [#4686](https://github.com/ignite/cli/pull/4686) Filter discovered protos to only messages. 
+- [#4694](https://github.com/ignite/cli/issues/4694) Install an app using the `.` as a current path show a wrong app name. + +## [`v29.0.0-rc.1`](https://github.com/ignite/cli/releases/tag/v29.0.0-rc.1) + +### Features + +- [#4509](https://github.com/ignite/cli/pull/4509) Upgrade to Go 1.24. Running `ignite doctor` migrates the scaffolded `tools.go` to the tool directive in the go.mod +- [#4588](https://github.com/ignite/cli/pull/4588) Run `buf format after scaffold proto files. +- [#4603](https://github.com/ignite/cli/issues/4603) Add `GetIgniteInfo` gRPC API for apps. +- [#4657](https://github.com/ignite/cli/pull/4657) Upgrade to Cosmos SDK [v0.53.0](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.53.0). + - Add x/epochs module to default template (except for minimal template). + - Bump minimum compatible Cosmos SDK version to `v0.50.0`. + +### Changes + +- [#4596](https://github.com/ignite/cli/pull/4596) Add default `openapi.yml` when skipping proto gen. +- [#4601](https://github.com/ignite/cli/pull/4601) Add `appregistry` as default plugin +- [#4613](https://github.com/ignite/cli/pull/4613) Improve and simplify prompting logic by bubbletea. +- [#4624](https://github.com/ignite/cli/pull/4624) Fix autocli templates for variadics. +- [#4644](https://github.com/ignite/cli/pull/4644) Improve UI and UX for `testnet multi-node` command. +- [#4645](https://github.com/ignite/cli/pull/4645) Refactor the xast.ModifyFunction to improve the readability. +- [#3393](https://github.com/ignite/cli/issues/3393) Remove xgenny embed walker +- [#4664](https://github.com/ignite/cli/pull/4664) Add verbose flags on `scaffold` and `generate` commands. + - The flag displays the steps Ignite is taking to generate the code. + - The verbosity only applies to the command. For full verbosity use the `IGNT_DEBUG` environment variable instead. 
+ +### Fixes + +- [#4347](https://github.com/ignite/cli/pull/4347) Fix `ts-client` generation +- [#4577](https://github.com/ignite/cli/pull/4577) Add proto version to query path. +- [#4579](https://github.com/ignite/cli/pull/4579) Fix empty params response. +- [#4585](https://github.com/ignite/cli/pull/4585) Fix faucet cmd issue. +- [#4587](https://github.com/ignite/cli/pull/4587) Add missing light clients routes to IBC client keeper. +- [#4595](https://github.com/ignite/cli/pull/4595) Fix wrong InterfaceRegistry for IBC modules. +- [#4609](https://github.com/ignite/cli/pull/4609) Add work dir for relayer integration tests. +- [#4658](https://github.com/ignite/cli/pull/4658) Fix indentation for params scaffolded into a struct. +- [#4582](https://github.com/ignite/cli/issues/4582) Fix xast misplacing comments. +- [#4660](https://github.com/ignite/cli/pull/4660) Fix xast test case indentation. +- [#4667](https://github.com/ignite/cli/pull/4667) Harden `IsSlice` + +## [`v29.0.0-beta.1`](https://github.com/ignite/cli/releases/tag/v29.0.0-beta.1) + +### Features + +- [#3707](https://github.com/ignite/cli/pull/3707) and [#4094](https://github.com/ignite/cli/pull/4094) Add collections support. 
+- [#3977](https://github.com/ignite/cli/pull/3977) Add `chain lint` command to lint the chain's codebase using `golangci-lint` +- [#3770](https://github.com/ignite/cli/pull/3770) Add `scaffold configs` and `scaffold params` commands +- [#4001](https://github.com/ignite/cli/pull/4001) Improve `xgenny` dry run +- [#3967](https://github.com/ignite/cli/issues/3967) Add HD wallet parameters `address index` and `account number` to the chain account config +- [#4004](https://github.com/ignite/cli/pull/4004) Remove all import placeholders using the `xast` pkg +- [#4071](https://github.com/ignite/cli/pull/4071) Support custom proto path +- [#3718](https://github.com/ignite/cli/pull/3718) Add `gen-mig-diffs` tool app to compare scaffold output of two versions of ignite +- [#4100](https://github.com/ignite/cli/pull/4100) Set the `proto-dir` flag only for the `scaffold chain` command and use the proto path from the config +- [#4111](https://github.com/ignite/cli/pull/4111) Remove vuex generation +- [#4113](https://github.com/ignite/cli/pull/4113) Generate chain config documentation automatically +- [#4131](https://github.com/ignite/cli/pull/4131) Support `bytes` as data type in the `scaffold` commands +- [#4300](https://github.com/ignite/cli/pull/4300) Only panics the module in the most top function level +- [#4327](https://github.com/ignite/cli/pull/4327) Use the TxConfig from simState instead create a new one +- [#4326](https://github.com/ignite/cli/pull/4326) Add `buf.build` version to `ignite version` command +- [#4436](https://github.com/ignite/cli/pull/4436) Return tx hash to the faucet API +- [#4437](https://github.com/ignite/cli/pull/4437) Remove module placeholders +- [#4289](https://github.com/ignite/cli/pull/4289), [#4423](https://github.com/ignite/cli/pull/4423), [#4432](https://github.com/ignite/cli/pull/4432), [#4507](https://github.com/ignite/cli/pull/4507), [#4524](https://github.com/ignite/cli/pull/4524) Cosmos SDK v0.52 support and downgrade back to 0.50, 
while keeping latest improvements.
+- [#4480](https://github.com/ignite/cli/pull/4480) Add field max length
+- [#4477](https://github.com/ignite/cli/pull/4477), [#4559](https://github.com/ignite/cli/pull/4559) IBC v10 support
+- [#4166](https://github.com/ignite/cli/issues/4166) Migrate buf config files to v2
+- [#4494](https://github.com/ignite/cli/pull/4494) Automatically migrate the buf configs to v2
+
+### Changes
+
+- [#4094](https://github.com/ignite/cli/pull/4094) Scaffolding a multi-index map using `ignite s map foo bar baz --index foobar,foobaz` is no longer supported. Use one index instead, or use `collections.IndexedMap`.
+- [#4058](https://github.com/ignite/cli/pull/4058) Simplify scaffolded modules by including `ValidateBasic()` logic in message handler.
+- [#4058](https://github.com/ignite/cli/pull/4058) Use `address.Codec` instead of `AccAddressFromBech32`.
+- [#3993](https://github.com/ignite/cli/pull/3993) Oracle scaffolding was deprecated and has been removed
+- [#3962](https://github.com/ignite/cli/pull/3962) Rename all RPC endpoints and autocli commands generated for `map`/`list`/`single` types
+- [#3976](https://github.com/ignite/cli/pull/3976) Remove error checks for Cobra command value get calls
+- [#4002](https://github.com/ignite/cli/pull/4002) Bump buf build
+- [#4008](https://github.com/ignite/cli/pull/4008) Rename `pkg/yaml` to `pkg/xyaml`
+- [#4075](https://github.com/ignite/cli/pull/4075) Use `gopkg.in/yaml.v3` instead of `gopkg.in/yaml.v2`
+- [#4118](https://github.com/ignite/cli/pull/4118) Version scaffolded protos as `v1` to follow SDK structure.
+- [#4167](https://github.com/ignite/cli/pull/4167) Scaffold `int64` instead of `int32` when a field type is `int` +- [#4159](https://github.com/ignite/cli/pull/4159) Enable gci linter +- [#4160](https://github.com/ignite/cli/pull/4160) Enable copyloopvar linter +- [#4162](https://github.com/ignite/cli/pull/4162) Enable errcheck linter +- [#4189](https://github.com/ignite/cli/pull/4189) Deprecate `ignite node` for `ignite connect` app +- [#4290](https://github.com/ignite/cli/pull/4290) Remove ignite ics logic from ignite cli (this functionality will be in the `consumer` app) +- [#4295](https://github.com/ignite/cli/pull/4295) Stop scaffolding `pulsar` files +- [#4317](https://github.com/ignite/cli/pull/4317) Remove xchisel dependency +- [#4361](https://github.com/ignite/cli/pull/4361) Remove unused `KeyPrefix` method +- [#4384](https://github.com/ignite/cli/pull/4384) Compare genesis params into chain genesis tests +- [#4463](https://github.com/ignite/cli/pull/4463) Run `chain simulation` with any simulation test case +- [#4533](https://github.com/ignite/cli/pull/4533) Promote GitHub codespace instead of Gitpod +- [#4549](https://github.com/ignite/cli/pull/4549) Remove unused placeholder vars +- [#4557](https://github.com/ignite/cli/pull/4557) Remove github.com/gookit/color + +### Fixes + +- [#4000](https://github.com/ignite/cli/pull/4000) Run all dry runners before the wet run in the `xgenny` pkg +- [#4091](https://github.com/ignite/cli/pull/4091) Fix race conditions in the plugin logic +- [#4128](https://github.com/ignite/cli/pull/4128) Check for duplicate proto fields in config +- [#4402](https://github.com/ignite/cli/pull/4402) Fix gentx parser into the cosmosutil package +- [#4552](https://github.com/ignite/cli/pull/4552) Avoid direct access to proto field `perms.Account` and `perms.Permissions` +- [#4555](https://github.com/ignite/cli/pull/4555) Fix buf lint issues into the chain code + +## [`v28.11.2`](https://github.com/ignite/cli/releases/tag/v28.11.2) + 
+### Changes
+
+- [#4816](https://github.com/ignite/cli/pull/4816) Improve announcements UI.
+
+## [`v28.11.1`](https://github.com/ignite/cli/releases/tag/v28.11.1)
+
+### Changes
+
+- [#4813](https://github.com/ignite/cli/pull/4813) Fetch fallback buf token.
+
+## [`v28.11.0`](https://github.com/ignite/cli/releases/tag/v28.11.0)
+
+### Changes
+
+- [#4718](https://github.com/ignite/cli/pull/4718) Bump default Ignite Apps.
+- [#4761](https://github.com/ignite/cli/pull/4761) Bump Cosmos SDK to `v0.50.14`.
+
+### Fixes
+
+- [#4686](https://github.com/ignite/cli/pull/4686) Filter discovered protos to only messages.
+- [#4691](https://github.com/ignite/cli/pull/4691), [#4706](https://github.com/ignite/cli/pull/4706), [#4725](https://github.com/ignite/cli/pull/4725), [#4737](https://github.com/ignite/cli/pull/4737) Fix ts-client query template and solely Go template for `ts-client` generation.
+- [#4744](https://github.com/ignite/cli/pull/4744) Remove `react` frontend generation via `s react` command. Use the [Ignite CCA App](https://github.com/ignite/apps) instead.
+
+## [`v28.10.0`](https://github.com/ignite/cli/releases/tag/v28.10.0)
+
+### Features
+
+- [#4638](https://github.com/ignite/cli/pull/4638) Add include feature to the chain config file.
+
+### Changes
+
+- [#4643](https://github.com/ignite/cli/pull/4643) Allow appending arbitrary blocks in `AppendFuncAtLine`.
+
+## [`v28.9.0`](https://github.com/ignite/cli/releases/tag/v28.9.0)
+
+### Features
+
+- [#4639](https://github.com/ignite/cli/pull/4639) Add `xast.ModifyCaller` function.
+- [#4615](https://github.com/ignite/cli/pull/4615) Fetch Ignite announcements from API.
+
+### Changes
+
+- [#4633](https://github.com/ignite/cli/pull/4633) Loosen faucet check when indexer disabled.
+- [#4586](https://github.com/ignite/cli/pull/4586) Remove network as default plugin.
+
+### Fixes
+
+- [#4347](https://github.com/ignite/cli/pull/4347) Fix `ts-client` generation.
+ +## [`v28.8.2`](https://github.com/ignite/cli/releases/tag/v28.8.2) + +### Changes + +- [#4568](https://github.com/ignite/cli/pull/4568) Bump Cosmos SDK to v0.50.13. +- [#4569](https://github.com/ignite/cli/pull/4569) Add flags to set coin type on commands. Add getters for bech32 prefix and coin type. + +## [`v28.8.1`](https://github.com/ignite/cli/releases/tag/v28.8.1) + +### Fixes + +- [#4532](https://github.com/ignite/cli/pull/4532) Fix non working _shortcuts_ in validator home config +- [#4538](https://github.com/ignite/cli/pull/4538) Create a simple spinner for non-terminal interactions +- [#4540](https://github.com/ignite/cli/pull/4540), [#4543](https://github.com/ignite/cli/pull/4543) Skip logs / gibberish when parsing commands outputs + +## [`v28.8.0`](https://github.com/ignite/cli/releases/tag/v28.8.0) + +### Features + +- [#4513](https://github.com/ignite/cli/pull/4513) Allow to pass tx fees to faucet server + +### Changes + +- [#4439](https://github.com/ignite/cli/pull/4439) Simplify Ignite CLI dependencies by removing `moby` and `gorilla` dependencies. +- [#4471](https://github.com/ignite/cli/pull/4471) Bump CometBFT to v0.38.15. +- [#4471](https://github.com/ignite/cli/pull/4471) Bump Ignite & chain minimum Go version to 1.23. +- [#4529](https://github.com/ignite/cli/pull/4531) Bump Cosmos SDK to v0.50.12. 
+
+### Fixes
+
+- [#4474](https://github.com/ignite/cli/pull/4474) Fix issue in `build --release` command
+- [#4479](https://github.com/ignite/cli/pull/4479) Scaffolding a `uint64` type crashes Ignite
+- [#4483](https://github.com/ignite/cli/pull/4483) Fix default flag parser for apps
+
+## [`v28.7.0`](https://github.com/ignite/cli/releases/tag/v28.7.0)
+
+### Features
+
+- [#4457](https://github.com/ignite/cli/pull/4457) Add `skip-build` flag to `chain serve` command to avoid (re)building the chain
+- [#4413](https://github.com/ignite/cli/pull/4413) Add `ignite s chain-registry` command
+
+## [`v28.6.1`](https://github.com/ignite/cli/releases/tag/v28.6.1)
+
+### Changes
+
+- [#4449](https://github.com/ignite/cli/pull/4449) Bump scaffolded chain to Cosmos SDK `v0.50.11`. Previous version has a high security vulnerability.
+
+## [`v28.6.0`](https://github.com/ignite/cli/releases/tag/v28.6.0)
+
+### Features
+
+- [#4377](https://github.com/ignite/cli/pull/4377) Add multi node (validator) testnet
+- [#4362](https://github.com/ignite/cli/pull/4362) Scaffold `Makefile`
+
+### Changes
+
+- [#4376](https://github.com/ignite/cli/pull/4376) Set different chain-id for in place testnet
+
+### Fixes
+
+- [#4422](https://github.com/ignite/cli/pull/4422) Fix typo in simulation template
+
+## [`v28.5.3`](https://github.com/ignite/cli/releases/tag/v28.5.3)
+
+### Changes
+
+- [#4372](https://github.com/ignite/cli/pull/4372) Bump Cosmos SDK to `v0.50.10`
+- [#4357](https://github.com/ignite/cli/pull/4357) Bump chain dependencies (store, ics, log, etc)
+- [#4328](https://github.com/ignite/cli/pull/4328) Send ignite bug report to sentry. Opt out the same way as for usage analytics
+
+## [`v28.5.2`](https://github.com/ignite/cli/releases/tag/v28.5.2)
+
+### Features
+
+- [#4297](https://github.com/ignite/cli/pull/4297) Add in-place testnet creation command for apps.
+
+### Changes
+
+- [#4292](https://github.com/ignite/cli/pull/4292) Bump Cosmos SDK to `v0.50.9`
+- [#4341](https://github.com/ignite/cli/pull/4341) Bump `ibc-go` to `8.5.0`
+- [#4345](https://github.com/ignite/cli/pull/4345) Added survey link
+
+### Fixes
+
+- [#4319](https://github.com/ignite/cli/pull/4319) Remove fee abstraction module from open api code generation
+- [#4309](https://github.com/ignite/cli/pull/4309) Fix chain id for chain simulations
+- [#4322](https://github.com/ignite/cli/pull/4322) Create a message for authenticating buf when generating ts-client
+- [#4323](https://github.com/ignite/cli/pull/4323) Add missing `--config` handling in the `chain` commands
+- [#4350](https://github.com/ignite/cli/pull/4350) Skip upgrade prefix for sim tests
+
+## [`v28.5.1`](https://github.com/ignite/cli/releases/tag/v28.5.1)
+
+### Features
+
+- [#4276](https://github.com/ignite/cli/pull/4276) Add `cosmosclient.CreateTxWithOptions` method to facilitate more custom tx creation
+
+### Changes
+
+- [#4262](https://github.com/ignite/cli/pull/4262) Bring back relayer command
+- [#4269](https://github.com/ignite/cli/pull/4269) Add custom flag parser for extensions
+- [#4270](https://github.com/ignite/cli/pull/4270) Add flags to the extension hooks commands
+- [#4286](https://github.com/ignite/cli/pull/4286) Add missing verbose mode flags
+
+## [`v28.5.0`](https://github.com/ignite/cli/releases/tag/v28.5.0)
+
+### Features
+
+- [#4183](https://github.com/ignite/cli/pull/4183) Set `chain-id` in the client.toml
+- [#4090](https://github.com/ignite/cli/pull/4090) Remove `protoc` pkg and also nodetime helpers `ts-proto` and `sta`
+- [#4076](https://github.com/ignite/cli/pull/4076) Remove the ignite `relayer` and `tools` commands with all ts-relayer logic
+- [#4133](https://github.com/ignite/cli/pull/4133) Improve buf rate limit
+
+### Changes
+
+- [#4095](https://github.com/ignite/cli/pull/4095) Migrate to matomo analytics
+- [#4149](https://github.com/ignite/cli/pull/4149) Bump 
cometbft to `v0.38.7`
+- [#4168](https://github.com/ignite/cli/pull/4168) Bump IBC to `v8.3.1`
+  If you are upgrading manually from `v8.2.0` to `v8.3.1`, add the following to your `ibc.go` file:
+
+  ```diff
+  app.ICAHostKeeper = ...
+  + app.ICAHostKeeper.WithQueryRouter(app.GRPCQueryRouter())
+  app.ICAControllerKeeper = ...
+  ```
+
+- [#4178](https://github.com/ignite/cli/pull/4178) Bump cosmos-sdk to `v0.50.7`
+- [#4194](https://github.com/ignite/cli/pull/4194) Bump client/v2 to `v2.0.0-beta.2`
+  If you are upgrading manually, check out the recommended changes in `root.go` from the above PR.
+- [#4210](https://github.com/ignite/cli/pull/4210) Improve default home wiring
+- [#4077](https://github.com/ignite/cli/pull/4077) Merge the swagger files manually instead of using nodetime `swagger-combine`
+- [#4249](https://github.com/ignite/cli/pull/4249) Prevent creating a chain with number in the name
+- [#4253](https://github.com/ignite/cli/pull/4253) Bump cosmos-sdk to `v0.50.8`
+
+### Fixes
+
+- [#4184](https://github.com/ignite/cli/pull/4184) Set custom `InitChainer` because of manually registered modules
+- [#4198](https://github.com/ignite/cli/pull/4198) Set correct prefix overwriting in `buf.gen.pulsar.yaml`
+- [#4199](https://github.com/ignite/cli/pull/4199) Set and seal SDK global config in `app/config.go`
+- [#4212](https://github.com/ignite/cli/pull/4212) Set default values for extension flag to not crash Ignite
+- [#4216](https://github.com/ignite/cli/pull/4216) Avoid creating duplicated scopedKeepers
+- [#4242](https://github.com/ignite/cli/pull/4242) Use buf build binary from the gobin path
+- [#4250](https://github.com/ignite/cli/pull/4250) Set gas adjustment before calculating
+
+## [`v28.4.0`](https://github.com/ignite/cli/releases/tag/v28.4.0)
+
+### Features
+
+- [#4108](https://github.com/ignite/cli/pull/4108) Add `xast` package (cherry-picked from [#3770](https://github.com/ignite/cli/pull/3770))
+- [#4110](https://github.com/ignite/cli/pull/4110) 
Scaffold a consumer chain with `interchain-security` v5.0.0. +- [#4117](https://github.com/ignite/cli/pull/4117), [#4125](https://github.com/ignite/cli/pull/4125) Support relative path when installing local plugins + +### Changes + +- [#3959](https://github.com/ignite/cli/pull/3959) Remove app name prefix from the `.gitignore` file +- [#4103](https://github.com/ignite/cli/pull/4103) Bump cosmos-sdk to `v0.50.6` + +### Fixes + +- [#3969](https://github.com/ignite/cli/pull/3969) Get first config validator using a getter to avoid index errors +- [#4033](https://github.com/ignite/cli/pull/4033) Fix cobra completion using `fishshell` +- [#4062](https://github.com/ignite/cli/pull/4062) Avoid nil `scopedKeeper` in `TransmitXXX` functions +- [#4086](https://github.com/ignite/cli/pull/4086) Retry to get the IBC balance if it fails the first time +- [#4096](https://github.com/ignite/cli/pull/4096) Add new reserved names module and remove duplicated genesis order +- [#4112](https://github.com/ignite/cli/pull/4112) Remove duplicate SetCmdClientContextHandler +- [#4219](https://github.com/ignite/cli/pull/4219) Remove deprecated `sdk.MustSortJSON` + +## [`v28.3.0`](https://github.com/ignite/cli/releases/tag/v28.3.0) + +### Features + +- [#4019](https://github.com/ignite/cli/pull/4019) Add `skip-proto` flag to `s chain` command +- [#3985](https://github.com/ignite/cli/pull/3985) Make some `cmd` pkg functions public +- [#3956](https://github.com/ignite/cli/pull/3956) Prepare for wasm app +- [#3660](https://github.com/ignite/cli/pull/3660) Add ability to scaffold ICS consumer chain + +### Changes + +- [#4035](https://github.com/ignite/cli/pull/4035) Bump `cometbft` to `v0.38.6` and `ibc-go/v8` to `v8.1.1` +- [#4031](https://github.com/ignite/cli/pull/4031) Bump `cli-plugin-network` to `v0.2.2` due to dependencies issue. 
+- [#4013](https://github.com/ignite/cli/pull/4013) Bump `cosmos-sdk` to `v0.50.5`
+- [#4010](https://github.com/ignite/cli/pull/4010) Use `AppName` instead of `ModuleName` for scaffolding a new App
+- [#3972](https://github.com/ignite/cli/pull/3972) Skip Ignite app loading for some base commands that don't allow apps
+- [#3983](https://github.com/ignite/cli/pull/3983) Bump `cosmos-sdk` to `v0.50.4` and `ibc-go` to `v8.1.0`
+
+### Fixes
+
+- [#4021](https://github.com/ignite/cli/pull/4021) Set correct custom signer in `s list --signer `
+- [#3995](https://github.com/ignite/cli/pull/3995) Fix interface check for ibc modules
+- [#3953](https://github.com/ignite/cli/pull/3953) Fix apps `Stdout` is redirected to `Stderr`
+- [#3963](https://github.com/ignite/cli/pull/3963) Fix breaking issue for app client API when reading app chain info
+
+## [`v28.2.0`](https://github.com/ignite/cli/releases/tag/v28.2.0)
+
+### Features
+
+- [#3924](https://github.com/ignite/cli/pull/3924) Scaffold NFT module by default
+- [#3839](https://github.com/ignite/cli/pull/3839) New structure for app scaffolding
+- [#3835](https://github.com/ignite/cli/pull/3835) Add `--minimal` flag to `scaffold chain` to scaffold a chain with the least amount of sdk modules
+- [#3820](https://github.com/ignite/cli/pull/3820) Add integration tests for IBC chains
+- [#3956](https://github.com/ignite/cli/pull/3956) Prepare for wasm app
+
+### Changes
+
+- [#3899](https://github.com/ignite/cli/pull/3899) Introduce `plugin.Execute` function
+- [#3903](https://github.com/ignite/cli/pull/3903) Don't specify a default build tag and deprecate notion of app version
+
+### Fixes
+
+- [#3905](https://github.com/ignite/cli/pull/3905) Fix `ignite completion`
+- [#3931](https://github.com/ignite/cli/pull/3931) Fix `app update` command and duplicated apps
+
+## [`v28.1.1`](https://github.com/ignite/cli/releases/tag/v28.1.1)
+
+### Fixes
+
+- [#3878](https://github.com/ignite/cli/pull/3878) Support local forks of Cosmos SDK in 
scaffolded chain. +- [#3869](https://github.com/ignite/cli/pull/3869) Fix .git in parent dir +- [#3867](https://github.com/ignite/cli/pull/3867) Fix genesis export for ibc modules. +- [#3850](https://github.com/ignite/cli/pull/3871) Fix app.go file detection in apps scaffolded before v28.0.0 + +### Changes + +- [#3885](https://github.com/ignite/cli/pull/3885) Scaffold chain with Cosmos SDK `v0.50.3` +- [#3877](https://github.com/ignite/cli/pull/3877) Change Ignite App extension to "ign" +- [#3897](https://github.com/ignite/cli/pull/3897) Introduce alternative folder in templates + +## [`v28.1.0`](https://github.com/ignite/cli/releases/tag/v28.1.0) + +### Features + +- [#3786](https://github.com/ignite/cli/pull/3786) Add artifacts for publishing Ignite to FlatHub and Snapcraft +- [#3830](https://github.com/ignite/cli/pull/3830) Remove gRPC info from Ignite Apps errors +- [#3861](https://github.com/ignite/cli/pull/3861) Send to the analytics if the user is using a GitPod + +### Changes + +- [#3822](https://github.com/ignite/cli/pull/3822) Improve default scaffolded AutoCLI config +- [#3838](https://github.com/ignite/cli/pull/3838) Scaffold chain with Cosmos SDK `v0.50.2`, and bump confix and x/upgrade to latest +- [#3829](https://github.com/ignite/cli/pull/3829) Support version prefix for cached values +- [#3723](https://github.com/ignite/cli/pull/3723) Create a wrapper for errors + +### Fixes + +- [#3827](https://github.com/ignite/cli/pull/3827) Change ignite apps to be able to run in any directory +- [#3831](https://github.com/ignite/cli/pull/3831) Correct ignite app gRPC server stop memory issue +- [#3825](https://github.com/ignite/cli/pull/3825) Fix a minor Keplr type-checking bug in TS client +- [#3836](https://github.com/ignite/cli/pull/3836), [#3858](https://github.com/ignite/cli/pull/3858) Add missing IBC commands for scaffolded chain +- [#3833](https://github.com/ignite/cli/pull/3833) Improve Cosmos SDK detection to support SDK forks +- 
[#3849](https://github.com/ignite/cli/pull/3849) Add missing `tx.go` file by default and enable cli if autocli does not exist +- [#3851](https://github.com/ignite/cli/pull/3851) Add missing ibc interfaces to chain client +- [#3860](https://github.com/ignite/cli/pull/3860) Fix analytics event name + +## [`v28.0.0`](https://github.com/ignite/cli/releases/tag/v28.0.0) + +### Features + +- [#3659](https://github.com/ignite/cli/pull/3659) cosmos-sdk `v0.50.x` upgrade +- [#3694](https://github.com/ignite/cli/pull/3694) Query and Tx AutoCLI support +- [#3536](https://github.com/ignite/cli/pull/3536) Change app.go to v2 and add AppWiring feature +- [#3544](https://github.com/ignite/cli/pull/3544) Add bidirectional communication to app (plugin) system +- [#3756](https://github.com/ignite/cli/pull/3756) Add faucet compatibility for latest sdk chains +- [#3476](https://github.com/ignite/cli/pull/3476) Use `buf.build` binary to code generate from proto files +- [#3724](https://github.com/ignite/cli/pull/3724) Add or vendor proto packages from Go dependencies +- [#3561](https://github.com/ignite/cli/pull/3561) Add GetChainInfo method to plugin system API +- [#3626](https://github.com/ignite/cli/pull/3626) Add logging levels to relayer +- [#3614](https://github.com/ignite/cli/pull/3614) feat: use DefaultBaseappOptions for app.New method +- [#3715](https://github.com/ignite/cli/pull/3715) Add test suite for the cli tests + +### Changes + +- [#3793](https://github.com/ignite/cli/pull/3793) Refactor Ignite to follow semantic versioning (prepares v28.0.0). If you are using packages, do not forget to import the `/v28` version of the packages. 
+- [#3529](https://github.com/ignite/cli/pull/3529) Refactor plugin system to use gRPC +- [#3751](https://github.com/ignite/cli/pull/3751) Rename label to skip changelog check +- [#3745](https://github.com/ignite/cli/pull/3745) Set tx fee amount as option +- [#3748](https://github.com/ignite/cli/pull/3748) Change default rpc endpoint to a working one +- [#3621](https://github.com/ignite/cli/pull/3621) Change `pkg/availableport` to allow custom parameters in `Find` function and handle duplicated ports +- [#3810](https://github.com/ignite/cli/pull/3810) Bump network app version to `v0.2.1` +- [#3581](https://github.com/ignite/cli/pull/3581) Bump cometbft and cometbft-db in the template +- [#3522](https://github.com/ignite/cli/pull/3522) Remove indentation from `chain serve` output +- [#3346](https://github.com/ignite/cli/issues/3346) Improve scaffold query --help +- [#3601](https://github.com/ignite/cli/pull/3601) Update ts-relayer version to `0.10.0` +- [#3658](https://github.com/ignite/cli/pull/3658) Rename Marshaler to Codec in EncodingConfig +- [#3653](https://github.com/ignite/cli/pull/3653) Add "app" extension to plugin binaries +- [#3656](https://github.com/ignite/cli/pull/3656) Disable Go toolchain download +- [#3662](https://github.com/ignite/cli/pull/3662) Refactor CLI "plugin" command to "app" +- [#3669](https://github.com/ignite/cli/pull/3669) Rename `plugins` config file to `igniteapps` +- [#3683](https://github.com/ignite/cli/pull/3683) Resolve `--dep auth` as `--dep account` in `scaffold module` +- [#3795](https://github.com/ignite/cli/pull/3795) Bump cometbft to `v0.38.2` +- [#3599](https://github.com/ignite/cli/pull/3599) Add analytics as an option +- [#3670](https://github.com/ignite/cli/pull/3670) Remove binaries + +### Fixes + +- [#3386](https://github.com/ignite/cli/issues/3386) Prevent scaffolding of default module called "ibc" +- [#3592](https://github.com/ignite/cli/pull/3592) Fix `pkg/protoanalysis` to support HTTP rule parameter arguments +- 
[#3598](https://github.com/ignite/cli/pull/3598) Fix consensus param keeper constructor key in `app.go` +- [#3610](https://github.com/ignite/cli/pull/3610) Fix overflow issue of cosmos faucet in `pkg/cosmosfaucet/transfer.go` and `pkg/cosmosfaucet/cosmosfaucet.go` +- [#3618](https://github.com/ignite/cli/pull/3618) Fix TS client generation import path issue +- [#3631](https://github.com/ignite/cli/pull/3631) Fix unnecessary vue import in hooks/composables template +- [#3661](https://github.com/ignite/cli/pull/3661) Change `pkg/cosmosanalysis` to find Cosmos SDK runtime app registered modules +- [#3716](https://github.com/ignite/cli/pull/3716) Fix invalid plugin hook check +- [#3725](https://github.com/ignite/cli/pull/3725) Fix flaky TS client generation issues on linux +- [#3726](https://github.com/ignite/cli/pull/3726) Update TS client dependencies. Bump vue/react template versions +- [#3728](https://github.com/ignite/cli/pull/3728) Fix wrong parser for proto package names +- [#3729](https://github.com/ignite/cli/pull/3729) Fix broken generator due to caching /tmp include folders +- [#3767](https://github.com/ignite/cli/pull/3767) Fix `v0.50` ibc genesis issue +- [#3808](https://github.com/ignite/cli/pull/3808) Correct TS code generation to generate paginated fields + +## [`v0.27.2`](https://github.com/ignite/cli/releases/tag/v0.27.2) + +### Changes + +- [#3701](https://github.com/ignite/cli/pull/3701) Bump `go` version to 1.21 + +## [`v0.27.1`](https://github.com/ignite/cli/releases/tag/v0.27.1) + +### Features + +- [#3505](https://github.com/ignite/cli/pull/3505) Auto migrate dependency tools +- [#3538](https://github.com/ignite/cli/pull/3538) bump sdk to `v0.47.3` and ibc to `v7.1.0` +- [#2736](https://github.com/ignite/cli/issues/2736) Add `--skip-git` flag to skip git repository initialization. 
+- [#3381](https://github.com/ignite/cli/pull/3381) Add `ignite doctor` command
+- [#3446](https://github.com/ignite/cli/pull/3446) Add `gas-adjustment` flag to the cosmos client.
+- [#3439](https://github.com/ignite/cli/pull/3439) Add `--build.tags` flag for `chain serve` and `chain build` commands.
+- [#3524](https://github.com/ignite/cli/pull/3524) Apply auto tools migration to other commands
+- Added compatibility check and auto migration features and interactive guidelines for the latest versions of the SDK
+
+### Changes
+
+- [#3444](https://github.com/ignite/cli/pull/3444) Add support for ICS chains in ts-client generation
+- [#3494](https://github.com/ignite/cli/pull/3494) bump `cosmos-sdk` and `cometbft` versions
+- [#3434](https://github.com/ignite/cli/pull/3434) Detect app wiring implementation
+
+### Fixes
+
+- [#3497](https://github.com/ignite/cli/pull/3497) Use correct bank balance query url in faucet openapi
+- [#3481](https://github.com/ignite/cli/pull/3481) Use correct checksum format in release checksum file
+- [#3470](https://github.com/ignite/cli/pull/3470) Prevent overriding minimum-gas-prices with default value
+- [#3523](https://github.com/ignite/cli/pull/3523) Upgrade Cosmos SDK compatibility check for scaffolded apps
+- [#3441](https://github.com/ignite/cli/pull/3441) Correct wrong client context for cmd query methods
+- [#3487](https://github.com/ignite/cli/pull/3487) Handle ignored error in package `cosmosaccount` `Account.PubKey`
+
+## [`v0.26.1`](https://github.com/ignite/cli/releases/tag/v0.26.1)
+
+### Features
+
+- [#3238](https://github.com/ignite/cli/pull/3238) Add `Sharedhost` plugin option
+- [#3214](https://github.com/ignite/cli/pull/3214) Global plugins config.
+- [#3142](https://github.com/ignite/cli/pull/3142) Add `ignite network request param-change` command.
+- [#3181](https://github.com/ignite/cli/pull/3181) Addition of `add` and `remove` commands for `plugins` +- [#3184](https://github.com/ignite/cli/pull/3184) Separate `plugins.yml` config file. +- [#3038](https://github.com/ignite/cli/pull/3038) Addition of Plugin Hooks in Plugin System +- [#3056](https://github.com/ignite/cli/pull/3056) Add `--genesis-config` flag option to `ignite network chain publish` +- [#2892](https://github.com/ignite/cli/pull/2982/) Add `ignite scaffold react` command. +- [#2892](https://github.com/ignite/cli/pull/2982/) Add `ignite generate composables` command. +- [#2892](https://github.com/ignite/cli/pull/2982/) Add `ignite generate hooks` command. +- [#2955](https://github.com/ignite/cli/pull/2955/) Add `ignite network request add-account` command. +- [#2877](https://github.com/ignite/cli/pull/2877) Plugin system +- [#3060](https://github.com/ignite/cli/pull/3060) Plugin system flag support +- [#3105](https://github.com/ignite/cli/pull/3105) Addition of `ignite plugin describe ` command +- [#2995](https://github.com/ignite/cli/pull/2995/) Add `ignite network request remove-validator` command. +- [#2999](https://github.com/ignite/cli/pull/2999/) Add `ignite network request remove-account` command. +- [#2458](https://github.com/ignite/cli/issues/2458) New `chain serve` command UI. +- [#2992](https://github.com/ignite/cli/issues/2992) Add `ignite chain debug` command. + +### Changes + +- [#3369](https://github.com/ignite/cli/pull/3369) Update `ibc-go` to `v6.1.0`. +- [#3306](https://github.com/ignite/cli/pull/3306) Move network command into a plugin +- [#3305](https://github.com/ignite/cli/pull/3305) Bump Cosmos SDK version to `v0.46.7`. +- [#3068](https://github.com/ignite/cli/pull/3068) Add configs to generated TS code for working with JS projects +- [#3071](https://github.com/ignite/cli/pull/3071) Refactor `ignite/templates` package. 
+- [#2892](https://github.com/ignite/cli/pull/2982/) `ignite scaffold vue` and `ignite scaffold react` use v0.4.2 templates +- [#2892](https://github.com/ignite/cli/pull/2982/) `removeSigner()` method added to generated `ts-client` +- [#3035](https://github.com/ignite/cli/pull/3035) Bump Cosmos SDK to `v0.46.4`. +- [#3037](https://github.com/ignite/cli/pull/3037) Bump `ibc-go` to `v5.0.1`. +- [#2957](https://github.com/ignite/cli/pull/2957) Change generate commands to print the path to the generated code. +- [#2981](https://github.com/ignite/cli/issues/2981) Change CLI to also search chain binary in Go binary path. +- [#2958](https://github.com/ignite/cli/pull/2958) Support absolute paths for client code generation config paths. +- [#2993](https://github.com/ignite/cli/pull/2993) Hide `ignite scaffold band` command and deprecate functionality. +- [#2986](https://github.com/ignite/cli/issues/2986) Remove `--proto-all-modules` flag because it is now the default behaviour. +- [#2986](https://github.com/ignite/cli/issues/2986) Remove automatic Vue code scaffolding from `scaffold chain` command. +- [#2986](https://github.com/ignite/cli/issues/2986) Add `--generate-clients` to `chain serve` command for optional client code (re)generation. +- [#2998](https://github.com/ignite/cli/pull/2998) Hide `ignite generate dart` command and remove functionality. +- [#2991](https://github.com/ignite/cli/pull/2991) Hide `ignite scaffold flutter` command and remove functionality. +- [#2944](https://github.com/ignite/cli/pull/2944) Add a new event "update" status option to `pkg/cliui`. +- [#3030](https://github.com/ignite/cli/issues/3030) Remove colon syntax from module scaffolding `--dep` flag. +- [#3025](https://github.com/ignite/cli/issues/3025) Improve config version error handling. +- [#3084](https://github.com/ignite/cli/pull/3084) Add Ignite Chain documentation. +- [#3109](https://github.com/ignite/cli/pull/3109) Refactor scaffolding for proto files to not rely on placeholders. 
+- [#3106](https://github.com/ignite/cli/pull/3106) Add zoom image plugin.
+- [#3194](https://github.com/ignite/cli/issues/3194) Move config validators check to validate only when required.
+- [#3183](https://github.com/ignite/cli/pull/3183/) Make config optional for init phase.
+- [#3224](https://github.com/ignite/cli/pull/3224) Remove `grpc_*` prefix from query files in scaffolded chains
+- [#3229](https://github.com/ignite/cli/pull/3229) Rename `campaign` to `project` in ignite network set of commands
+- [#3122](https://github.com/ignite/cli/issues/3122) Change `generate ts-client` to ignore the cache by default.
+- [#3244](https://github.com/ignite/cli/pull/3244) Update `actions.yml` for resolving deprecation message
+- [#3337](https://github.com/ignite/cli/pull/3337) Remove `pkg/openapiconsole` import from scaffold template.
+- [#3337](https://github.com/ignite/cli/pull/3337) Register `nodeservice` gRPC in `app.go` template.
+- [#3455](https://github.com/ignite/cli/pull/3455) Bump `cosmos-sdk` to `v0.47.1`
+- [#3434](https://github.com/ignite/cli/pull/3434) Detect app wiring implementation.
+- [#3445](https://github.com/ignite/cli/pull/3445) refactor: replace `github.com/ghodss/yaml` with `sigs.k8s.io/yaml`
+
+### Breaking Changes
+
+- [#3033](https://github.com/ignite/cli/pull/3033) Remove Cosmos SDK Launchpad version support.
+
+### Fixes
+
+- [#3114](https://github.com/ignite/cli/pull/3114) Fix out of gas issue when approving many requests
+- [#3068](https://github.com/ignite/cli/pull/3068) Fix REST codegen method casing bug
+- [#3031](https://github.com/ignite/cli/pull/3031) Move keeper hooks to after all keepers initialized in `app.go` template.
+- [#3098](https://github.com/ignite/cli/issues/3098) Fix config upgrade issue that left config empty on error.
+- [#3129](https://github.com/ignite/cli/issues/3129) Remove redundant `keyring-backend` config option.
+- [#3187](https://github.com/ignite/cli/issues/3187) Change prompt text to fit within 80 characters width. +- [#3203](https://github.com/ignite/cli/issues/3203) Fix relayer to work with multiple paths. +- [#3320](https://github.com/ignite/cli/pull/3320) Allow `id` and `creator` as names when scaffolding a type. +- [#3327](https://github.com/ignite/cli/issues/3327) Scaffolding messages with same name leads to aliasing. +- [#3383](https://github.com/ignite/cli/pull/3383) State error and info are now displayed when using serve UI. +- [#3379](https://github.com/ignite/cli/issues/3379) Fix `ignite docs` issue by disabling mouse support. +- [#3435](https://github.com/ignite/cli/issues/3435) Fix wrong client context for cmd query methods. + +## [`v0.25.2`](https://github.com/ignite/cli/releases/tag/v0.25.1) + +### Changes + +- [#3145](https://github.com/ignite/cli/pull/3145) Security fix upgrading Cosmos SDK to `v0.46.6` + +## [`v0.25.1`](https://github.com/ignite/cli/releases/tag/v0.25.1) + +### Changes + +- [#2968](https://github.com/ignite/cli/pull/2968) Dragonberry security fix upgrading Cosmos SDK to `v0.46.3` + +## [`v0.25.0`](https://github.com/ignite/cli/releases/tag/v0.25.0) + +### Features + +- Add `pkg/cosmostxcollector` package with support to query and save TXs and events. +- Add `ignite network coordinator` command set. +- Add `ignite network validator` command set. +- Deprecate `cosmoscmd` pkg and add cmd templates for scaffolding. +- Add generated TS client test support to integration tests. + +### Changes + +- Updated `pkg/cosmosanalysis` to discover the list of app modules when defined in variables or functions. +- Improve genesis parser for `network` commands +- Integration tests build their own ignite binary. +- Updated `pkg/cosmosanalysis` to discover the list of app modules when defined in variables. 
+- Switch to broadcast mode sync in `cosmosclient` +- Updated `nodetime`: `ts-proto` to `v1.123.0`, `protobufjs` to `v7.1.1`, `swagger-typescript-api` to `v9.2.0` +- Switched codegen client to use `axios` instead of `fetch` +- Added `useKeplr()` and `useSigner()` methods to TS client. Allowed query-only instantiation. +- `nodetime` built with `vercel/pkg@5.6.0` +- Change CLI to use an events bus to print to stdout. +- Move generated proto files to `proto/{appname}/{module}` +- Update `pkg/cosmosanalysis` to detect when proto RPC services are using pagination. +- Add `--peer-address` flag to `network chain join` command. +- Change nightly tag format +- Add cosmos-sdk version in `version` command +- [#2935](https://github.com/ignite/cli/pull/2935) Update `gobuffalo/plush` templating tool to `v4` + +### Fixes + +- Fix ICA controller wiring. +- Change vuex generation to use a default TS client path. +- Fix cli action org in templates. +- Seal the capability keeper in the `app.go` template. +- Change faucet to allow CORS preflight requests. +- Fix config file migration to void leaving end of file content chunks. +- Change session print loop to block until all events are handled. +- Handle "No records were found in keyring" message when checking keys. +- [#2941](https://github.com/ignite/cli/issues/2941) Fix session to use the same spinner referece. +- [#2922](https://github.com/ignite/cli/pull/2922) Network commands check for latest config version before building the chain binary. + +## [`v0.24.1`](https://github.com/ignite/cli/releases/tag/v0.24.1) + +### Features + +- Upgraded Cosmos SDK to `v0.46.2`. 
+ +## [`v0.24.0`](https://github.com/ignite/cli/releases/tag/v0.24.0) + +### Features + +- Upgraded Cosmos SDK to `v0.46.0` and IBC to `v5` in CLI and scaffolding templates +- Change chain init to check that no gentx are present in the initial genesis +- Add `network rewards release` command +- Add "make mocks" target to Makefile +- Add `--skip-proto` flag to `build`, `init` and `serve` commands to build the chain without building proto files +- Add `node query tx` command to query a transaction in any chain. +- Add `node query bank` command to query an account's bank balance in any chain. +- Add `node tx bank send` command to send funds from one account to another in any chain. +- Add migration system for the config file to allow config versioning +- Add `node tx bank send` command to send funds from one account to another in any chain. +- Implement `network profile` command +- Add `generate ts-client` command to generate a stand-alone modular TypeScript client. + +### Changes + +- Add changelog merge strategy in `.gitattributes` to avoid conflicts. +- Refactor `templates/app` to remove `monitoringp` module from the default template +- Updated keyring dependency to match Cosmos SDK +- Speed up the integration tests +- Refactor ignite network and fix genesis generation bug +- Make Go dependency verification optional during build by adding the `--check-dependencies` flag + so Ignite CLI can work in a Go workspace context. 
+- Temporary SPN address change for nightly +- Rename `simapp.go.plush` simulation file template to `helpers.go.plush` +- Remove campaign creation from the `network chain publish` command +- Optimized JavaScript generator to use a single typescript API generator binary +- Improve documentation and add support for protocol buffers and Go modules syntax +- Add inline documentation for CLI commands +- Change `cmd/account` to skip passphrase prompt when importing from mnemonic +- Add nodejs version in the output of ignite version +- Removed `handler.go` from scaffolded module template +- Migrated to `cosmossdk.io` packages for and `math` +- Vuex stores from the `generate vuex` command use the new TypeScript client +- Upgraded frontend Vue template to v0.3.10 + +### Fixes + +- Improved error handling for crypto wrapper functions +- Fix `pkg/cosmosclient` to call the faucet prior to creating the tx. +- Test and refactor `pkg/comosclient`. +- Change templates to add missing call to `RegisterMsgServer` in the default module's template to match what's specified + in the docs +- Fix cosmoscmd appID parameter value to sign a transaction correctly +- Fix `scaffold query` command to use `GetClientQueryContext` instead of `GetClientTxContext` +- Fix flaky integration tests issue that failed with "text file busy" +- Fix default chain ID for publish +- Replace `os.Rename` with `xos.Rename` +- Fix CLI reference generation to add `ignite completion` documentation +- Remove usage of deprecated `io/ioutil` package + +## [`v0.23.0`](https://github.com/ignite/cli/releases/tag/v0.23.0) + +### Features + +- Apps can now use generics + +### Fixes + +- Fix `pkg/cosmosanalysis` to support apps with generics +- Remove `ignite-hq/cli` from dependency list in scaffolded chains + +### Changes + +- Change `pkg/cosmosgen` to allow importing IBC proto files +- Improve docs for Docker related commands +- Improve and fix documentation issues in developer tutorials +- Add migration docs for v0.22.2 +- 
Improve `go mod download` error report in `pkg/cosmosgen` + +## [`v0.22.2`](https://github.com/ignite/cli/releases/tag/v0.22.2) + +### Features + +- Enable Darwin ARM 64 target for chain binary releases in CI templates + +### Changes + +- Rename `ignite-hq` to `ignite` + +## [`v0.22.1`](https://github.com/ignite/cli/releases/tag/v0.22.1) + +### Fixes + +- Fix IBC module scaffolding interface in templates + +## [`v0.22.0`](https://github.com/ignite/cli/releases/tag/v0.22.0) + +### Features + +- Optimized the build system. The `chain serve`, `chain build`, `chain generate` commands and other variants are way + faster now +- Upgraded CLI and templates to use IBC v3 + +### Fixes + +- Add a fix in code generation to avoid user's NodeJS configs to break TS client generation routine + +## [`v0.21.2`](https://github.com/ignite/cli/releases/tag/v0.21.2) + +### Fixes + +- Set min. gas to zero when running `chain` command set + +## [`v0.21.1`](https://github.com/ignite/cli/releases/tag/v0.21.1) + +### Features + +- Add compatibility to run chains built with Cosmos-SDK `v0.46.0-alpha1` and above +- Scaffold chains now will have `auth` module enabled by default + +### Fixes + +- Fixed shell completion generation +- Make sure proto package names are valid when using simple app names + +## [`v0.21.0`](https://github.com/ignite/cli/releases/tag/v0.21.0) + +### Features + +- Support simple app names when scaffolding chains. 
e.g.: `ignite scaffold chain mars` +- Ask confirmation when scaffolding over changes that are not committed yet + +## [`v0.20.4`](https://github.com/ignite/cli/releases/tag/v0.20.4) + +### Fixes + +- Use `protoc` binary compiled in an older version of macOS AMD64 for backwards compatibility in code generation + +## [`v0.20.3`](https://github.com/ignite/cli/releases/tag/v0.20.3) + +### Fixes + +- Use the latest version of CLI in templates to fix Linux ARM support _(It's now possible to develop chains in Linux ARM + machines and since the chain depends on the CLI in its `go.mod`, it needs to use the latest version that support ARM + targets)_ + +## [`v0.20.2`](https://github.com/ignite/cli/releases/tag/v0.20.2) + +### Fixes + +- Use `unsafe-reset-all` cmd under `tendermint` cmd for chains that use `=> v0.45.3` version of Cosmos SDK + +## [`v0.20.1`](https://github.com/ignite/cli/releases/tag/v0.20.1) + +### Features + +- Release the CLI with Linux ARM and native M1 binaries + +## [`v0.20.0`](https://github.com/ignite/cli/releases/tag/v0.20.0) + +Our new name is **Ignite CLI**! + +**IMPORTANT!** This upgrade renames `starport` command to `ignite`. From now on, use `ignite` command to access the CLI. + +### Features + +- Upgraded Cosmos SDK version to `v0.45.2` +- Added support for in memory backend in `pkg/cosmosclient` package +- Improved our tutorials and documentation + +## [`v0.19.5`](https://github.com/ignite/cli/pull/2158/commits) + +### Features + +- Enable client code and Vuex code generation for query only modules as well. +- Upgraded the Vue template to `v0.3.5`. + +### Fixes + +- Fixed snake case in code generation. +- Fixed plugin installations for Go =>v1.18. + +### Changes + +- Dropped transpilation of TS to JS. Code generation now only produces TS files. + +## `v0.19.4` + +### Features + +- Upgraded Vue template to `v0.3.0`. 
+ +## `v0.19.3` + +### Features + +- Upgraded Flutter template to `v2.0.3` + +## [`v0.19.2`](https://github.com/ignite/cli/milestone/14) + +### Fixes + +- Fixed race condition during faucet transfer +- Fixed account sequence mismatch issue on faucet and relayer +- Fixed templates for IBC code scaffolding + +### Features + +- Upgraded blockchain templates to use IBC v2.0.2 + +### Breaking Changes + +- Deprecated the Starport Modules [tendermint/spm](https://github.com/tendermint/spm) repo and moved the contents to the + Ignite CLI repo [`ignite/pkg/`](https://github.com/ignite/cli/tree/main/ignite/pkg/) + in [PR 1971](https://github.com/ignite/cli/pull/1971/files) + + Updates are required if your chain uses these packages: + - `spm/ibckeeper` is now `pkg/cosmosibckeeper` + - `spm/cosmoscmd` is now `pkg/cosmoscmd` + - `spm/openapiconsole` is now `pkg/openapiconsole` + - `testutil/sample` is now `cosmostestutil/sample` + +- Updated the faucet HTTP API schema. See API changes + in [fix: improve faucet reliability #1974](https://github.com/ignite/cli/pull/1974/files#diff-0e157f4f60d6fbd95e695764df176c8978d85f1df61475fbfa30edef62fe35cd) + +## `v0.19.1` + +### Fixes + +- Enabled the `scaffold flutter` command + +## `v0.19.0` + +### Features + +- `starport scaffold` commands support `ints`, `uints`, `strings`, `coin`, `coins` as field types (#1579) +- Added simulation testing with `simapp` to the default template (#1731) +- Added `starport generate dart` to generate a Dart client from protocol buffer files +- Added `starport scaffold flutter` to scaffold a Flutter mobile app template +- Parameters can be specified with a new `--params` flag when scaffolding modules (#1716) +- Simulations can be run with `starport chain simulate` +- Set `cointype` for accounts in `config.yml` (#1663) + +### Fixes + +- Allow using a `creator` field when scaffolding a model with a `--no-message` flag (#1730) +- Improved error handling when generating code (#1907) +- Ensure account has funds 
after faucet transfer when using `cosmosclient` (#1846) +- Move from `io/ioutil` to `io` and `os` package (refactoring) (#1746) + +## `v0.18.0` + +### Breaking Changes + +- Starport v0.18 comes with Cosmos SDK v0.44 that introduced changes that are not compatible with chains that were + scaffolded with Starport versions lower than v0.18. After upgrading from Starport v0.17.3 to Starport v0.18, you must + update the default blockchain template to use blockchains that were scaffolded with earlier versions. + See [Migration](https://docs.ignite.com/migration). + +### Features + +- Scaffold commands allow using previously scaffolded types as fields +- Added `--signer` flag to `message`, `list`, `map`, and `single` scaffolding to allow customizing the name of the + signer of the message +- Added `--index` flag to `scaffold map` to provide a custom list of indices +- Added `scaffold type` to scaffold a protocol buffer definition of a type +- Automatically check for new Starport versions +- Added `starport tools completions` to generate CLI completions +- Added `starport account` commands to manage accounts (key pairs) +- `starport version` now prints detailed information about OS, Go version, and more +- Modules are scaffolded with genesis validation tests +- Types are scaffolded with tests for `ValidateBasic` methods +- `cosmosclient` has been refactored and can be used as a library for interacting with Cosmos SDK chains +- `starport relayer` uses `starport account` +- Added `--path` flag for all `scaffold`, `generate` and `chain` commands +- Added `--output` flag to the `build` command +- Configure port of gRPC web in `config.yml` with the `host.grpc-web` property +- Added `build.main` field to `config.yml` for apps to specify the path of the chain's main package. This property is + required to be set only when an app contains multiple main packages. 
+ +### Fixes + +- Scaffolding a message no longer prevents scaffolding a map, list, or single that has the same type name when using + the `--no-message` flag +- Generate Go code from proto files only from default directories or directories specified in `config.yml` +- Fixed faucet token transfer calculation +- Removed `creator` field for types scaffolded with the `--no-message` flag +- Encode the count value in the store with `BigEndian` + +## `v0.17.3` + +### Fixes + +- oracle: add a specific BandChain pkg version to avoid Cosmos SDK version conflicts + +## `v0.17.2` + +### Features + +- `client.toml` is initialized and used by node's CLI, can be configured through `config.yml` with the `init.client` + property +- Support serving Cosmos SDK `v0.43.x` based chains + +## `v0.17.1` + +### Fixes + +- Set visibility to `public` on Gitpod's port 7575 to enable peer discovery for SPN +- Fixed GitHub action that releases blockchain node's binary +- Fixed an error in chain scaffolding due to "unknown revision" +- Fixed an error in `starport chain serve` by limiting the scope where proto files are searched for + +## `v0.17` + +### Features + +- Added GitHub action that automatically builds and releases a binary +- The `--release` flag for the `build` command adds the ability to release binaries in a tarball with a checksum file. +- Added the flag `--no-module` to the command `starport app` to prevent scaffolding a default module when creating a new + app +- Added `--dep` flag to specify module dependency when scaffolding a module +- Added support for multiple naming conventions for component names and field names +- Print created and modified files when scaffolding a new component +- Added `starport generate` namespace with commands to generate Go, Vuex and OpenAPI +- Added `starport chain init` command to initialize a chain without starting a node +- Scaffold a type that contains a single instance in the store +- Introduced `starport tools` command for advanced users. 
Existing `starport relayer lowlevel *` commands are also moved + under `tools` +- Added `faucet.rate_limit_window` property to `config.yml` +- Simplified the `cmd` package in the template +- Added `starport scaffold band` oracle query scaffolding +- Updated TypeScript relayer to 0.2.0 +- Added customizable gas limits for the relayer + +### Fixes + +- Use snake case for generated files +- Prevent using incorrect module name +- Fixed permissions issue when using Starport in Docker +- Ignore hidden directories when building a chain +- Fix error when scaffolding an IBC module in non-Starport chains + +## `v0.16.2` + +### Fix + +- Prevent indirect Buf dependency + +## `v0.16.1` + +### Features + +- Ensure that CLI operates fine even if the installation directory (bin) of Go programs is not configured properly + +## `v0.16.0` + +### Features + +- The new `join` flag adds the ability to pass a `--genesis` file and `--peers` address list + with `starport network chain join` +- The new `show` flag adds the ability to show `--genesis` and `--peers` list with `starport network chain show` +- `protoc` is now bundled with Ignite CLI. You don't need to install it anymore. +- Starport is now published automatically on the Docker Hub +- `starport relayer` `configure` and `connect` commands now use + the [confio/ts-relayer](https://github.com/confio/ts-relayer) under the hood. 
Also, checkout the + new `starport relayer lowlevel` command +- An OpenAPI spec for your chain is now automatically generated with `serve` and `build` commands: a console is + available at `localhost:1317` and spec at `localhost:1317/static/openapi.yml` by default for the newly scaffolded + chains +- Keplr extension is supported on web apps created with Starport +- Added tests to the scaffold +- Improved reliability of scaffolding by detecting placeholders +- Added ability to scaffold modules in chains not created with Starport +- Added the ability to scaffold Cosmos SDK queries +- IBC relayer support is available on web apps created with Starport +- New types without CRUD operations can be added with the `--no-message` flag in the `type` command +- New packet without messages can be added with the `--no-message` flag in the `packet` command +- Added `docs` command to read Starport documentation on the CLI +- Published documentation on +- Added `mnemonic` property to account in the `accounts` list to generate a key from a mnemonic + +### Fixes + +- `starport network chain join` hanging issue when creating an account +- Error when scaffolding a chain with an underscore in the repo name (thanks @bensooraj!) + +### Changes + +- `starport serve` no longer starts the web app in the `vue` directory (use `npm` to start it manually) +- Default scaffold no longer includes legacy REST API endpoints (thanks @bensooraj!) +- Removed support for Cosmos SDK v0.39 Launchpad + +## `v0.15.0` + +### Features + +- IBC module scaffolding +- IBC packet scaffolding with acknowledgements +- JavaScript and Vuex client code generation for Cosmos SDK and custom modules +- Standalone relayer with `configure` and `connect` commands +- Advanced relayer options for configuring ports and versions +- Scaffold now follows `MsgServer` convention +- Message scaffolding +- Added `starport type ... 
--indexed` to scaffold indexed types +- Custom config file support with `starport serve -c custom.yml` +- Detailed terminal output for created accounts: name, address, mnemonic +- Added spinners to indicate progress for long-running commands +- Updated to Cosmos SDK v0.42.1 + +### Changes + +- Replaced `packr` with Go 1.16 `embed` +- Renamed `servers` top-level property to `host` + +## `v0.14.0` + +### Features + +- Chain state persistence between `starport serve` launches +- Integrated Stargate app's `scripts/protocgen` into Starport as a native feature. Running `starport build/serve` will + automatically take care of building proto files without a need of script in the app's source code. +- Integrated third-party proto-files used by Cosmos SDK modules into Ignite CLI +- Added ability to customize binary name with `build.binary` in `config.yml` +- Added ability to change path to home directory with `.home` in `config.yml` +- Added ability to add accounts by `address` with in `config.yml` +- Added faucet functionality available on port 4500 and configurable with `faucet` in `config.yml` +- Added `starport faucet [address] [coins]` command +- Updated scaffold to Cosmos SDK v0.41.0 +- Distroless multiplatform docker containers for starport that can be used for `starport serve` +- UI containers for chains scaffolded with Starport +- Use SOS-lite and Docker instead of systemD +- Arch PKGBUILD in `scripts` + +### Fixes + +- Support for CosmWasm on Stargate +- Bug with dashes in GitHub username breaking proto package name +- Bug with custom address prefix +- use docker buildx as a single command with multiple platforms to make multi-manifest work properly + +## `v0.13.0` + +### Features + +- Added `starport network` commands for launching blockchains +- Added proxy (Chisel) to support launching blockchains from Gitpod +- Upgraded the template (Stargate) to Cosmos SDK v0.40.0-rc3 +- Added a gRPC-Web proxy that is available under +- Added chain id configurability by 
recognizing `chain_id` from `genesis` section of `config.yml`. +- Added `config/app.toml` and `config/config.toml` configurability for appd under new `init.app` and `init.config` + sections of `config.yml` +- Point to Stargate as default SDK version for scaffolding +- Covered CRUD operations for Stargate scaffolding +- Added docs on gopath to build from source directions +- Arch Linux Based Raspberry Pi development environment +- Calculate the necessary gas for sending transactions to SPN + +### Fixes + +- Routing REST API endpoints of querier on Stargate +- Evaluate `--address-prefix` option when scaffolding for Stargate +- Use a deterministic method to generate scaffolded type IDs +- Modify scaffolded type's creator type from address to string +- Copy built starport arm64 binary from tendermintdevelopment/starport:arm64 for device images +- Added git to amd64 docker image +- Comment out Gaia's seeds in the systemd unit template for downstream chains + +## `v0.12.0` + +### Features + +- Added GitHub CLI to gitpod environment for greater ease of use +- Added `starport build` command to build and install app binaries +- Improved the first-time experience for readers of the Starport readme and parts of the Starport Handbook +- Added `starport module create` command to scaffold custom modules +- Raspberry Pi now installs, builds, and serves the Vue UI +- Improved documentation for Raspberry Pi Device Images +- Added IBC and some other modules +- Added an option to configure server addresses under `servers` section in `config.yml` + +### Fixes + +- `--address-prefix` will always be translated to lowercase while scaffolding with `app` command +- HTTP API: accept strings in JSON and cast them to int and bool +- Update @tendermint/vue to `v0.1.7` +- Removed "Starport Pi" +- Removed Makefile from Downstream Pi +- Fixed Downstream Pi image GitHub Action +- Prevent duplicated fields with `type` command +- Fixed handling of protobuf profiler: prof_laddr -> pprof_laddr +- Fix 
an error where a Stargate `serve` cmd doesn't start
It was previously moved as a property to `config.yml`
+ +## Our standards + +Examples of behavior that contributes to a positive environment for our +community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +* Focusing on what is best not just for us as individuals, but for the + overall community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery and sexual attention or + advances of any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email + address, without their explicit permission +* Other conduct that could reasonably be considered inappropriate in a + professional setting + +## Enforcement responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting by using an official social media account, or acting as an appointed +representative at an online or offline event. 
+ +## Enforcement + +Report instances of abusive, harassing, or otherwise unacceptable behavior to the community leaders responsible for +enforcement at +. + +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement guidelines + +Community leaders will follow these community impact guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community impact**: A violation through a single incident or series +of actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This restriction +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or +permanent ban. + +### 3. Temporary ban + +**Community impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. 
Permanent ban + +**Community impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within +the community. + +## Attribution + +This Code of Conduct is adapted from +the [Contributor Covenant](https://www.contributor-covenant.org/version/2/0/code_of_conduct.html). + +Community impact guidelines are inspired by [Mozilla's code of conduct +enforcement ladder](https://github.com/mozilla/diversity). + +See the [FAQ](https://www.contributor-covenant.org/faq) for answers to common questions about this code of +conduct. [Translations](https://www.contributor-covenant.org/translations) are also available. diff --git a/contributing.md b/contributing.md new file mode 100644 index 0000000..a0caa45 --- /dev/null +++ b/contributing.md @@ -0,0 +1,306 @@ +# Contributing Guidelines + +Before submitting a PR to the Ignite CLI repository, please review and follow these guidelines to ensure consistency and smooth collaboration across the project. + +If you have suggestions or want to propose changes to these guidelines, start a new [Discussion topic](https://github.com/ignite/cli/discussions/new) to gather feedback. + +To contribute to docs and tutorials, see [Contributing to Ignite CLI Docs](https://docs.ignite.com/contributing). + +We appreciate your contribution! 
+ +* [Contributing Guidelines](#contributing-guidelines) + * [Providing Feedback](#providing-feedback) + * [Opening Pull Requests (PRs)](#opening-pull-requests-prs) + * [Choosing a Good PR Title](#choosing-a-good-pr-title) + * [Reviewing Your Own Code](#reviewing-your-own-code) + * [Commit Guidelines \& Title Conventions](#commit-guidelines--title-conventions) + * [Do Not Rebase After Opening a PR](#do-not-rebase-after-opening-a-pr) + * [Contributing to Documentation](#contributing-to-documentation) + * [Ask for Help](#ask-for-help) + * [Prioritizing Issues with Milestones](#prioritizing-issues-with-milestones) + * [Issue Title Conventions and Labeling](#issue-title-conventions-and-labeling) + * [Go Code Style Guidelines](#go-code-style-guidelines) + * [Foundation: Uber Go Style Guide](#foundation-uber-go-style-guide) + * [Package Structure](#package-structure) + * [Utility Packages](#utility-packages) + * [Code Organization](#code-organization) + * [Type Definitions](#type-definitions) + * [Encapsulation](#encapsulation) + * [Comments](#comments) + +## Providing Feedback + +* Before opening an issue, search for [existing open and closed issues](https://github.com/ignite/cli/issues) to check if your question has already been addressed. If a relevant issue exists, consider commenting on it instead of opening a duplicate issue. +* For feedback, questions, or suggestions, open a [Discussion topic](https://github.com/ignite/cli/discussions/new) to share your thoughts. Providing detailed information, such as use cases and links, will make the discussion more productive and actionable. + +* For quick questions or informal feedback, join the **#🛠️ build-chains** channel in the official [Ignite Discord](https://discord.com/invite/ignitecli). + +## Opening Pull Requests (PRs) + +Please review relevant issues and discussions before opening a PR to ensure alignment with ongoing work. + +### Choosing a Good PR Title + +* Keep PR titles concise (fewer than 60 characters). 
+* Follow [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) guidelines for structuring your titles. For example: `feat(services/chain)`, `fix(scaffolding)`, `docs(migration)`. +* Your PR title should reflect the purpose of the changes and follow a consistent format. + +### Reviewing Your Own Code + +* Manually test your changes before submitting a PR or adding new commits. +* Ensure all CI checks pass before requesting a review. Your PR should show **All checks have passed** with a green checkmark. + +### Commit Guidelines & Title Conventions + +* **Standardized Issue Prefixes:** + Issue titles should begin with one of the following standardized prefixes, depending on the type of action being taken: + + * **`FIX:`** for resolving bugs or problems within existing features. + * **`INIT:`** for creating new components, features, or initiatives. + * **`UPDATE:`** for making improvements or modifications to existing functionality. + * **`META:`** for larger, multi-step initiatives that consist of multiple tasks (e.g., epics). + + **Examples:** + + * `FIX: Resolve crash during chain initialization` + * `INIT: Add staking module to example chain` + * `UPDATE: Improve performance of block synchronization` + * `META: Overhaul user permissions system` + +* **Why Standardized Prefixes?** + The use of standardized prefixes ensures that the focus is on what needs to be done, making the task clear and actionable. This approach avoids redundancy with Conventional Commits, which are used for PR titles and commit messages to capture the purpose of the change. By separating the action (described by the prefix) from the nature of the issue (captured by labels), we reduce duplication and improve clarity. For example, if the issue is labeled `type:bug`, there’s no need to state "bug" in the title—the `FIX:` prefix already implies that the task involves resolving a bug. 
+ +* **Labels for Characteristics:** + Labels are used to classify the characteristics, elements, and descriptors of the issue or initiative. Labels help clarify the type of issue, the component involved, and its priority or status, without cluttering the title. Here are some examples: + + * **Type:** Describes the nature of the issue. + + * `type:bug` – Something isn't working. + * `type:feat` – A new feature to be implemented. + * `type:refactor` – Refactoring code without adding features. + + * **Component:** Specifies the part of the system the issue is related to. + + * `component:scaffold` – Related to scaffolding configuration or logic. + * `component:frontend` – Related to frontend components. + * `component:network` – Related to networking features or configurations. + + * **Status:** Indicates the current status of the issue or PR. + * `status:needs-triage` – Needs to be reviewed and prioritized. + * `status:blocked` – Cannot proceed until the blocking matter is resolved. + * `status:help wanted` – Additional input or attention is needed. + +### Do Not Rebase After Opening a PR + +* Avoid rebasing commits once a PR is open for review. Instead, add additional commits as needed. +* Force pushes are acceptable only when the PR is in draft mode and hasn't been reviewed yet. + +PRs will be squashed into a single commit when merged, so don't worry about having too many commits during the review process. The final PR title will be used as the commit message. + +## Contributing to Documentation + +Changes to the Ignite CLI codebase often require updates to the corresponding documentation. Please ensure that you update relevant documentation when making code changes. + +* For changes to the [Developer Guide](https://docs.ignite.com/guide) and tutorials, update content in the `/docs/docs/02-guide` folder. +* For changes to the [Ignite CLI Reference](https://docs.ignite.com/references/cli), update the `./ignite/cmd` package where the command is defined. 
Do not edit auto-generated docs under `docs/docs/08-references/01-cli.md`. + +### Ask for Help + +If you start a PR but cannot complete it for any reason, don’t hesitate to ask for help. Another contributor can take over and finish the work. + +## Prioritizing Issues with Milestones + +We use Git Flow as our branch strategy, with each MAJOR release linked to a milestone. Core maintainers manage the prioritization of issues on the project board to ensure that the most critical work is addressed first. + +* **Priority Labels (P0-P3):** + Issues are classified based on their urgency and impact, which helps guide the team’s focus during each release cycle: + + * **P0:** Urgent ("drop everything"); requires immediate attention and resolution. These issues take precedence over all other work. + * **P1:** High priority ("important matter"); important and should be addressed promptly, though not as immediately critical as P0 issues. + * **P2:** Medium priority ("sometime soon"); should be addressed but can be scheduled after P0 and P1 issues are resolved. + * **P3:** Low priority ("nice to have"); nice to have but can be deferred or addressed as time permits. + +* **Milestones and Workflow:** + Each milestone represents a MAJOR release. Issues are assigned to milestones based on their priority and relevance to the release goals. The project board is used to track and manage the progress of these issues. This structured workflow ensures that urgent tasks (P0) are addressed immediately, while lower-priority tasks (P3) are handled as resources allow. + +* **Next Milestone:** + The **Next** milestone is used for issues or features that are not tied to a specific release but are still relevant to the project’s roadmap. These issues will be addressed when higher-priority work has been completed, or as part of future planning. 
+ +## Issue Title Conventions and Labeling + +To maintain consistency across issues and PRs, follow these guidelines for issue titles: + +* **Standardized Prefixes:** Begin with one of the standardized prefixes: + + * `FIX:` for resolving bugs. + * `INIT:` for new components or projects. + * `UPDATE:` for improving or modifying existing features. + * `META:` for meta tasks involving multiple sub-tasks or actions. + +* **Labels for Characteristics:** Use labels to classify the nature of the issue, such as its type, component, or status. Labels help describe the various elements of the issue or task, making it easier to manage and prioritize. + +By combining standardized prefixes with well-organized labels, we maintain clarity, avoid redundancy, and ensure that issues and PRs are properly categorized and actionable. + +## Go Code Style Guidelines + +All Ignite repositories should follow the same Go code style guidelines to ensure consistency and maintainability across the codebase. This document outlines the coding style guidelines for Go code in this project. + +### Foundation: Uber Go Style Guide + +Our code style is based on the [Uber Go Style Guide](https://github.com/uber-go/guide/blob/master/style.md) with additional project-specific requirements outlined below. The Uber guide should be considered the baseline for all code style decisions unless explicitly overridden in this document. 
+ +Key aspects from the Uber Go Style Guide that we emphasize: + +* Error handling best practices +* Package naming conventions +* Interface design principles +* Consistent formatting and naming + +### Package Structure + +#### Utility Packages + +* **Avoid generic package names like `utils`** + + Instead, use specific names that describe the functionality: + + ``` + ❌ ignite/pkg/utils // Too generic + ✅ ignite/pkg/cosmosutil // Good: describes the domain + ✅ ignite/pkg/cosmosanalysis // Good: describes the purpose + ``` + +* **Keep utility functions separate from application-specific code** + + Don't add utility functions to application-specific packages like `services`, `cmd`, or `integration`. + Place all utility-like packages under the `pkg` directory: + + ``` + ❌ ignite/cmd/util.go // Don't put utilities in app-specific packages + ✅ ignite/pkg/cosmosmetrics/metrics.go // Good: utilities in dedicated packages + ``` + + This helps reduce the size of application-specific packages and makes the code easier to maintain. 
+ +### Code Organization + +#### Type Definitions + +* **Group related types, variables, and constants together** + + Only group definitions when they are logically related: + + ```go + // ❌ Bad: Single block with unrelated definitions + const ( + MaxRetries = 3 + DefaultTimeout = 30 + LogFileName = "app.log" + DatabaseName = "mydb" + ) + + // ✅ Good: Related constants grouped together + // retry related constants + const ( + MaxRetries = 3 + DefaultTimeout = 30 + ) + + // file related constants + const ( + LogFileName = "app.log" + DatabaseName = "mydb" + ) + ``` + +#### Encapsulation + +* **Use optional functions with getters for restricted field access** + + To prevent direct manipulation of fields, use getters and optional function parameters: + + ```go + // ❌ Bad: Exposed fields can be manipulated directly + type Metrics struct { + Count int + LastUpdated time.Time + } + + // ✅ Good: Fields are private with getter methods + type Metrics struct { + count int + lastUpdated time.Time + } + + // GetCount returns the current count + func (m *Metrics) GetCount() int { + return m.count + } + + // GetLastUpdated returns when metrics were last updated + func (m *Metrics) GetLastUpdated() time.Time { + return m.lastUpdated + } + + // Optional function pattern + type MetricsOption func(*Metrics) + + // WithInitialCount sets the initial count + func WithInitialCount(count int) MetricsOption { + return func(m *Metrics) { + m.count = count + } + } + + // NewMetrics creates a new metrics instance with options + func NewMetrics(opts ...MetricsOption) *Metrics { + m := &Metrics{ + count: 0, + lastUpdated: time.Now(), + } + + for _, opt := range opts { + opt(m) + } + + return m + } + ``` + + Example usage of the above pattern: + + ```go + // Create with defaults + metrics := NewMetrics() + + // Or with options + metrics := NewMetrics( + WithInitialCount(10), + ) + + // Access via getter + count := metrics.GetCount() + ``` + +### Comments + +* Comments should be lowercase, 
except for Go docs +* Go docs should always start with the function name, capitalize the first letter, and end with a period + + ```go + // ❌ Bad: incorrect godoc format + // this function increments the counter + func IncrementCounter(count int) int { + return count + 1 + } + + // ✅ Good: proper godoc format + // IncrementCounter adds one to the provided count and returns the result. + func IncrementCounter(count int) int { + return count + 1 + } + + // ✅ Good: regular comment is lowercase + // increment the counter by one + count++ + ``` diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 0000000..67e47a5 --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1,20 @@ +# Dependencies +/node_modules + +# Production +/build + +# Generated files +.docusaurus +.cache-loader + +# Misc +.DS_Store +.env.local +.env.development.local +.env.test.local +.env.production.local + +npm-debug.log* +yarn-debug.log* +yarn-error.log* diff --git a/docs/babel.config.js b/docs/babel.config.js new file mode 100644 index 0000000..2650af9 --- /dev/null +++ b/docs/babel.config.js @@ -0,0 +1,3 @@ +module.exports = { + presets: [require.resolve('@docusaurus/core/lib/babel/preset')], +}; diff --git a/docs/docs.go b/docs/docs.go new file mode 100644 index 0000000..36d5a82 --- /dev/null +++ b/docs/docs.go @@ -0,0 +1,8 @@ +package docs + +import "embed" + +// Docs are IGNITE® CLI docs. +// +//go:embed docs +var Docs embed.FS diff --git a/docs/docs/01-welcome/01-index.md b/docs/docs/01-welcome/01-index.md new file mode 100644 index 0000000..12a9057 --- /dev/null +++ b/docs/docs/01-welcome/01-index.md @@ -0,0 +1,52 @@ +--- +slug: /welcome +--- + +import ProjectsTable from '@site/src/components/ProjectsTable'; + +# Introduction to IGNITE® CLI: Your Gateway to Blockchain Innovation + +[IGNITE® CLI](https://github.com/ignite/cli) is a powerful tool that simplifies the journey of building, testing, and launching diverse blockchain applications. 
Developed on top of the [Cosmos SDK](https://docs.cosmos.network), the leading framework for blockchain technology, IGNITE® CLI is pivotal in streamlining the development process. It enables developers to focus on the unique aspects of their projects, from DeFi and NFTs to supply chain solutions and smart contracts. +Beyond these, IGNITE® has been instrumental in a wide array of blockchain applications, ranging from VPNs and gaming platforms to blogs, oracle systems, and innovative consensus mechanisms. This demonstrates its versatility in supporting a broad spectrum of blockchain-based solutions. + +## Key Features of IGNITE® CLI + +- **Simplified Blockchain Development:** IGNITE® CLI, leveraging Cosmos SDK, makes building sovereign application-specific blockchains intuitive and efficient. +- **Comprehensive Scaffolding:** Easily scaffold modules, messages, CRUD operations, IBC packets, and more, expediting the development of complex functionalities. +- **Development with Live Reloading:** Start and test your blockchain node with real-time updates, enhancing your development workflow. +- **Frontend Flexibility:** Utilize pre-built templates for Vue.js, React, Typescript or Go, catering to diverse frontend development needs. +- **Inter-Blockchain Communication (IBC):** Seamlessly connect and interact with other blockchains using an integrated IBC relayer, a key feature of the Cosmos SDK. +- **CometBFT Integration:** Built with the CometBFT consensus engine (formerly Tendermint), ensuring robust consensus mechanisms in your blockchain solutions. +- **Cross-Domain Applications:** IGNITE® is perfectly suited for developing a diverse array of use cases across various sectors. These include DeFi, NFTs, supply chain management, smart contracts (both EVM and WASM), and decentralized exchanges (DEXes). 
+ +## Embracing the Cosmos Ecosystem + +IGNITE® CLI is your entry point into the vibrant Cosmos ecosystem, a hub of innovation where you can explore a range of applications, from wallets and explorers to smart contracts and DEXes, all powered by CometBFT and the Cosmos SDK. +This ecosystem is home to over [$100 billion worth of blockchain projects](https://cosmos.network/ecosystem/tokens/), showcasing the scalability and versatility of the technologies at play. + +## Projects using Tendermint and Cosmos SDK + +Many projects already showcase the Tendermint BFT consensus engine and the Cosmos SDK. Explore +the [Cosmos ecosystem](https://cosmos.network/ecosystem/apps) to discover a wide variety of apps, blockchains, wallets, +and explorers that are built in the Cosmos ecosystem. + +## Projects building with IGNITE® CLI + + diff --git a/docs/docs/01-welcome/02-install.md b/docs/docs/01-welcome/02-install.md new file mode 100644 index 0000000..d337ff9 --- /dev/null +++ b/docs/docs/01-welcome/02-install.md @@ -0,0 +1,119 @@ +--- +sidebar_position: 1 +description: Steps to install IGNITE® CLI on your local computer. +--- + +# Install IGNITE® CLI + +You can run [IGNITE® CLI](https://github.com/ignite/cli) in a web-based IDE or you can install IGNITE® CLI on your local computer. + +## Prerequisites + +Be sure you have met the prerequisites before you install and use IGNITE® CLI. + +### Operating systems + +IGNITE® CLI is supported for the following operating systems: + +- GNU/Linux +- macOS +- Windows Subsystem for Linux (WSL) + +### Go + +IGNITE® CLI is written in the Go programming language. 
To use IGNITE® CLI on a local system:

- Install [Go](https://golang.org/doc/install) (**version 1.24.1** or higher)
- Ensure the Go environment variables are [set properly](https://golang.org/doc/gopath_code#GOPATH) on your system

## Verify your IGNITE® CLI version

To verify the version of IGNITE® CLI you have installed, run the following command:

```bash
ignite version
```

## Installing IGNITE® CLI

To install the latest version of IGNITE®, use [Homebrew](https://formulae.brew.sh/formula/ignite) on macOS and GNU/Linux:

```sh
brew install ignite
```

### Install manually

Alternatively, you can install the latest version of the `ignite` binary using the following command:

```bash
curl https://get.ignite.com/cli! | bash
```

This command invokes `curl` to download the installation script and pipes the output to `bash` to perform the
installation. The `ignite` binary is installed in `/usr/local/bin`.

IGNITE® CLI installation requires write permission to the `/usr/local/bin/` directory. If the installation fails because
you do not have write permission to `/usr/local/bin/`, run the following command:

```bash
curl https://get.ignite.com/cli | bash
```

Then run this command to move the `ignite` executable to `/usr/local/bin/`:

```bash
sudo mv ignite /usr/local/bin/
```

On some machines, a permissions error occurs:

```bash
mv: rename ./ignite to /usr/local/bin/ignite: Permission denied
============
Error: mv failed
```

In this case, use sudo before `curl` and before `bash`:

```bash
sudo curl https://get.ignite.com/cli | sudo bash
```

To learn more or customize the installation process, see the [installer docs](https://github.com/ignite/installer) on
GitHub.

## Upgrading your IGNITE® CLI installation {#upgrade}

Before you install a new version of IGNITE® CLI, remove all existing IGNITE® CLI installations.

To remove the current IGNITE® CLI installation:

1. 
On your terminal window, press `Ctrl+C` to stop the chain that you started with `ignite chain serve`. +2. Remove the IGNITE® CLI binary with `rm $(which ignite)`. + Depending on your user permissions, run the command with or without `sudo`. +3. Repeat this step until all `ignite` installations are removed from your system. + +After all existing IGNITE® CLI installations are removed, follow the [Installing IGNITE® CLI](#installing-ignite-cli) +instructions. + +For details on version features and changes, see +the [changelog.md](https://github.com/ignite/cli/blob/main/changelog.md) +in the repo. + +## Build from source + +To experiment with the source code, you can build from source: + +```bash +git clone https://github.com/ignite/cli --depth=1 +cd cli && make install +``` + +## Summary + +- Verify the prerequisites. +- To set up a local development environment, install IGNITE® CLI locally on your computer. +- Install IGNITE® CLI by fetching the binary using cURL or by building from source. +- The latest version is installed by default. You can install previous versions of the precompiled `ignite` binary. +- Stop the chain and remove existing versions before installing a new version. diff --git a/docs/docs/01-welcome/_category_.json b/docs/docs/01-welcome/_category_.json new file mode 100644 index 0000000..ac625fc --- /dev/null +++ b/docs/docs/01-welcome/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Welcome", + "link": null +} \ No newline at end of file diff --git a/docs/docs/02-guide/02-introduction.md b/docs/docs/02-guide/02-introduction.md new file mode 100644 index 0000000..c79cef6 --- /dev/null +++ b/docs/docs/02-guide/02-introduction.md @@ -0,0 +1,154 @@ +--- +sidebar_position: 2 +--- + +# Introduction + +In this guide, we will be using IGNITE® CLI to create a new blockchain. IGNITE® +CLI is a command line interface that allows users to quickly and easily create +blockchain networks. 
By using IGNITE® CLI, we can quickly create a new blockchain +without having to manually set up all the necessary components. + +Once we have created our blockchain with IGNITE® CLI, we will take a look at the +directory structure and files that were created. This will give us an +understanding of how the blockchain is organized and how the different +components of the blockchain interact with each other. + +By the end of this guide, you will have a basic understanding of how to use +IGNITE® CLI to create a new blockchain, and you will have a high-level +understanding of the directory structure and files that make up a blockchain. +This knowledge will be useful as you continue to explore the world of blockchain +development. + +If you are looking for more tutorials and hands-on experience, check out our tutorials website: + +[IGNITE® Tutorials](https://tutorials.ignite.com) + +## Creating a new blockchain + +To create a new blockchain project with IGNITE®, you will need to run the +following command: + +``` +ignite scaffold chain example +``` + +The `ignite scaffold chain` command will create a new blockchain in a new +directory `example`. + +The new blockchain is built using the Cosmos SDK framework and imports several +standard modules to provide a range of functionality. These modules include +`staking`, which enables a delegated Proof-of-Stake consensus mechanism, `bank` +for facilitating fungible token transfers between accounts, and `gov` for +on-chain governance. In addition to these modules, the blockchain also imports +other modules from the Cosmos SDK framework. + +The `example` directory contains the generated files and directories that make +up the structure of a Cosmos SDK blockchain. This directory includes files for +the chain's configuration, application logic, and tests, among others. It +provides a starting point for developers to quickly set up a new Cosmos SDK +blockchain and build their desired functionality on top of it. 
+ +By default, IGNITE® creates a new empty custom module with the same name as the +blockchain being created (in this case, `example`) in the `x/` directory. This +module doesn't have any functionality by itself, but can serve as a starting +point for building out the features of your application. If you don't want to +create this module, you can use the `--no-module` flag to skip it. + +## Directory structure + +In order to understand what the IGNITE® CLI has generated for your project, you +can inspect the contents of the `example/` directory. + +The `app/` directory contains the files that connect the different parts of the +blockchain together. The most important file in this directory is `app.go`, +which includes the type definition of the blockchain and functions for creating +and initializing it. This file is responsible for wiring together the various +components of the blockchain and defining how they will interact with each +other. + +The `cmd/` directory contains the main package responsible for the command-line +interface (CLI) of the compiled binary. This package defines the commands that +can be run from the CLI and how they should be executed. It is an important part +of the blockchain project as it provides a way for developers and users to +interact with the blockchain and perform various tasks, such as querying the +blockchain state or sending transactions. + +The `docs/` directory is used for storing project documentation. By default, +this directory includes an OpenAPI specification file, which is a +machine-readable format for defining the API of a software project. The OpenAPI +specification can be used to automatically generate human-readable documentation +for the project, as well as provide a way for other tools and services to +interact with the API. The `docs/` directory can be used to store any additional +documentation that is relevant to the project. 
+ +The `proto/` directory contains protocol buffer files, which are used to +describe the data structure of the blockchain. Protocol buffers are a language- +and platform-neutral mechanism for serializing structured data, and are often +used in the development of distributed systems, such as blockchain networks. The +protocol buffer files in the `proto/` directory define the data structures and +messages that are used by the blockchain, and are used to generate code for +various programming languages that can be used to interact with the blockchain. +In the context of the Cosmos SDK, protocol buffer files are used to define the +specific types of data that can be sent and received by the blockchain, as well +as the specific RPC endpoints that can be used to access the blockchain's +functionality. + +The `testutil/` directory contains helper functions that are used for testing. +These functions provide a convenient way to perform common tasks that are needed +when writing tests for the blockchain, such as creating test accounts, +generating transactions, and checking the state of the blockchain. By using the +helper functions in the `testutil/` directory, developers can write tests more +quickly and efficiently, and can ensure that their tests are comprehensive and +effective. + +The `x/` directory contains custom Cosmos SDK modules that have been added to +the blockchain. Standard Cosmos SDK modules are pre-built components that +provide common functionality for Cosmos SDK-based blockchains, such as support +for staking and governance. Custom modules, on the other hand, are modules that +have been developed specifically for the blockchain project and provide +project-specific functionality. + +The `config.yml` file is a configuration file that can be used to customize the +blockchain during development. This file includes settings that control various +aspects of the blockchain, such as the network's ID, account balances, and the +node parameters. 
+ +The `.github` directory contains a GitHub Actions workflow that can be used to +automatically build and release a blockchain binary. GitHub Actions is a tool +that allows developers to automate their software development workflows, +including building, testing, and deploying their projects. The workflow in the +`.github` directory is used to automate the process of building the blockchain +binary and releasing it, which can save time and effort for developers. + +The `readme.md` file is a readme file that provides an overview of the +blockchain project. This file typically includes information such as the +project's name and purpose, as well as instructions on how to build and run the +blockchain. By reading the `readme.md` file, developers and users can quickly +understand the purpose and capabilities of the blockchain project and get +started using it. + +## Starting a blockchain node + +To start a blockchain node in development, you can run the following command: + +``` +ignite chain serve +``` + +The `ignite chain serve` command is used to start a blockchain node in +development mode. It first compiles and installs the binary using the +`ignite chain build` command, then initializes the blockchain's data directory +for a single validator using the `ignite chain init` command. After that, it +starts the node locally and enables automatic code reloading so that changes to +the code can be reflected in the running blockchain without having to restart +the node. This allows for faster development and testing of the blockchain. + +**Congratulations!** 🥳 You have successfully created a brand-new Cosmos blockchain +using the IGNITE® CLI. This blockchain uses the delegated proof of stake (DPoS) +consensus algorithm, and comes with a set of standard modules for token +transfers, governance, and inflation. Now that you have a basic understanding of +your Cosmos blockchain, it's time to start building custom functionality. 
In the +following tutorials, you will learn how to build custom modules and add new +features to your blockchain, allowing you to create a unique and powerful +decentralized application. diff --git a/docs/docs/02-guide/03-hello-world.md b/docs/docs/02-guide/03-hello-world.md new file mode 100644 index 0000000..41b4bea --- /dev/null +++ b/docs/docs/02-guide/03-hello-world.md @@ -0,0 +1,96 @@ +--- +description: Build your first blockchain and your first Cosmos SDK query. +title: Hello World +--- + +# "Hello world!" Blockchain Tutorial with IGNITE® CLI + +**Introduction** + +In this tutorial, you'll build a simple blockchain using IGNITE® CLI that responds to a custom query with `Hello %s!`, where `%s` is a name passed in the query. +This will enhance your understanding of creating custom queries in a Cosmos SDK blockchain. + +## Setup and Scaffold + +1. **Create a New Blockchain:** + + ```bash + ignite scaffold chain hello + ``` + +2. **Navigate to the Blockchain Directory:** + + ```bash + cd hello + ``` + +## Adding a Custom Query + +- **Scaffold the Query:** + +```bash +ignite scaffold query say-hello name --response name +``` + +This command generates code for a new query, `say-hello`, which accepts a name, an input, and returns it in the response. + +- **Understanding the Scaffolded Code:** + + - `proto/hello/hello/query.proto`: Defines the request and response structure. + - `x/hello/module/autocli.go`: Contains commands for the query, using [AutoCLI](../08-references/04-glossary.md#autocli). + - `x/hello/keeper/query_say_hello.go`: Houses the logic for the query response. + +## Customizing the Query Response + +In the Cosmos SDK, queries are requests for information from the blockchain, used to access data like the ledger's current state or transaction details. While the SDK offers several built-in query methods, developers can also craft custom queries for specific data retrieval or complex operations. 
+ +- **Modify `query_say_hello.go`:** + +Update the `SayHello` function in `x/hello/keeper/query_say_hello.go` to return a personalized greeting query. + +```go title="x/hello/keeper/query_say_hello.go" +package keeper + +import ( + "context" + "fmt" + + "hello/x/hello/types" + + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func (q queryServer) SayHello(ctx context.Context, req *types.QuerySayHelloRequest) (*types.QuerySayHelloResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + // TODO: Process the query + + // Custom Response + return &types.QuerySayHelloResponse{Name: fmt.Sprintf("Hello %s!", req.Name)}, nil +} +``` + +## Running the Blockchain + +1. **Start the Blockchain:** + +```bash +ignite chain serve +``` + +2. **Test the Query:** + +Use the command-line interface to submit a query. + +``` +hellod q hello say-hello world +``` + +Expect a response: `Hello world!` + +## Conclusion + +Congratulations! 🎉 You've successfully created a blockchain module with a custom query using IGNITE® CLI. Through this tutorial, you've learned how to scaffold a chain, add a custom query, and modify the logic for personalized responses. This experience illustrates the power of IGNITE® CLI in streamlining blockchain development and the importance of understanding the underlying code for customization. diff --git a/docs/docs/02-guide/04-ibc.md b/docs/docs/02-guide/04-ibc.md new file mode 100644 index 0000000..c12166b --- /dev/null +++ b/docs/docs/02-guide/04-ibc.md @@ -0,0 +1,669 @@ +--- +sidebar_position: 7 +description: Build an understanding of how to create and send packets across blockchains and navigate between blockchains. +title: "Inter-Blockchain Communication: Basics" +--- + +# Inter-Blockchain Communication: Basics + +The Inter-Blockchain Communication protocol (IBC) is an important part of the +Cosmos SDK ecosystem. 
The Hello World tutorial is a time-honored tradition in
computer programming. This tutorial builds an understanding of how to create and
send packets across blockchains. This foundational knowledge helps you navigate
between blockchains with the Cosmos SDK.

**You will learn how to**

- Use IBC to create and send packets between blockchains.
- Navigate between blockchains using the Cosmos SDK and the IGNITE® CLI Relayer.
- Create a basic blog post and save the post on another blockchain.

## What is IBC?

The Inter-Blockchain Communication protocol (IBC) allows blockchains to talk to
each other. IBC handles transport across different sovereign blockchains. This
end-to-end, connection-oriented, stateful protocol provides reliable, ordered,
and authenticated communication between heterogeneous blockchains.

The [IBC protocol in the Cosmos
SDK](https://ibc.cosmos.network/main/ibc/overview) is the standard for the
interaction between two blockchains. The IBC module interface defines how packets
and messages are constructed to be interpreted by the sending and the receiving
blockchain.

The IBC relayer lets you connect between sets of IBC-enabled chains. This
tutorial teaches you how to create two blockchains and then start and use the
relayer with IGNITE® CLI to connect two blockchains.

This tutorial covers essentials like modules, IBC packets, relayer, and the
lifecycle of packets routed through IBC.

## Create a blockchain

Create a blockchain app with a blog module to write posts on other blockchains
that contain the Hello World message. For this tutorial, you can write posts for
the Cosmos SDK universe that contain Hello Mars, Hello Cosmos, and Hello Earth
messages.

For this simple example, create an app that contains a blog module that has a
post transaction with title and text.

After you define the logic, run two blockchains that have this module installed.

- The chains can send posts between each other using IBC. 

+
+- On the sending chain, save the `acknowledged` and `timed out` posts.
+
+After the transaction is acknowledged by the receiving chain, you know that the
+post is saved on both blockchains.
+
+- The sending chain has the additional data `postID`.
+
+- Sent posts that are acknowledged and timed out contain the title and the
+  target chain of the post. These identifiers are visible on the parameter
+  `chain`.
+
+The following chart shows the lifecycle of a packet that travels through IBC.
+
+![The Lifecycle of an IBC packet](./images/packet_sendpost.png)
+
+## Build your blockchain app
+
+Use IGNITE® CLI to scaffold the blockchain app and the blog module.
+
+### Build a new blockchain
+
+To scaffold a new blockchain named `planet`:
+
+```bash
+ignite scaffold chain planet --no-module
+cd planet
+```
+
+A new directory named `planet` is created in your home directory. The `planet`
+directory contains a working blockchain app.
+
+### Scaffold the blog module inside your blockchain
+
+Next, use IGNITE® CLI to scaffold a blog module with IBC capabilities. The blog
+module contains the logic for creating blog posts and routing them through IBC
+to the second blockchain.
+
+To scaffold a module named `blog`:
+
+```bash
+ignite scaffold module blog --ibc
+```
+
+A new directory with the code for an IBC module is created in `planet/x/blog`.
+Modules scaffolded with the `--ibc` flag include all the logic for the
+scaffolded IBC module.
+
+### Generate CRUD actions for types
+
+Next, create the CRUD actions for the blog module types.
+
+Use the `ignite scaffold list` command to scaffold the boilerplate code for the
+create, read, update, and delete (CRUD) actions. 

+
+These `ignite scaffold list` commands create CRUD code for the following
+transactions:
+
+- Creating blog posts
+
+```bash
+ignite scaffold list post title content creator --no-message --module blog
+```
+
+- Processing acknowledgments for sent posts
+
+```bash
+ignite scaffold list sentPost postID:uint title chain creator --no-message --module blog
+```
+
+- Managing post timeouts
+
+```bash
+ignite scaffold list timeoutPost title chain creator --no-message --module blog
+```
+
+The scaffolded code includes proto files for defining data structures, messages,
+message handlers, keepers for modifying the state, and CLI commands.
+
+### IGNITE® CLI Scaffold List Command Overview
+
+```
+ignite scaffold list [typeName] [field1] [field2] ... [flags]
+```
+
+The first argument of the `ignite scaffold list [typeName]` command specifies
+the name of the type being created. For the blog app, you created `post`,
+`sentPost`, and `timeoutPost` types.
+
+The next arguments define the fields that are associated with the type. For the
+blog app, you created `title`, `content`, `postID`, and `chain` fields.
+
+The `--module` flag defines which module the new transaction type is added to.
+This optional flag lets you manage multiple modules within your IGNITE® CLI app.
+When the flag is not present, the type is scaffolded in the module that matches
+the name of the repo.
+
+When a new type is scaffolded, the default behavior is to scaffold messages that
+can be sent by users for CRUD operations. The `--no-message` flag disables this
+feature. Disable the messages option for the app since you want the posts to be
+created upon reception of IBC packets and not directly created from a user's
+messages.
+
+### Scaffold a sendable and interpretable IBC packet
+
+You must generate code for a packet that contains the title and the content of
+the blog post.
+
+The `ignite packet` command creates the logic for an IBC packet that can be sent
+to another blockchain. 

+
+- The `title` and `content` are stored on the target chain.
+
+- The `postID` is acknowledged on the sending chain.
+
+To scaffold a sendable and interpretable IBC packet:
+
+```bash
+ignite scaffold packet ibcPost title content --ack postID:uint --module blog
+```
+
+Notice the fields in the `ibcPost` packet match the fields in the `post` type
+that you created earlier.
+
+- The `--ack` flag defines which identifier is returned to the sending
+  blockchain.
+
+- The `--module` flag specifies to create the packet in a particular IBC module.
+
+The `ignite packet` command also scaffolds the CLI command that is capable of
+sending an IBC packet:
+
+```bash
+planetd tx blog send-ibcPost [portID] [channelID] [title] [content]
+```
+
+## Modify the source code
+
+After you create the types and transactions, you must manually insert the logic
+to manage updates in the database. Modify the source code to save the data as
+specified earlier in this tutorial.
+
+### Add creator to the blog post packet
+
+Start with the proto file that defines the structure of the IBC packet.
+
+To identify the creator of the post in the receiving blockchain, add the
+`creator` field inside the packet. This field was not specified directly in the
+command because it would automatically become a parameter in the `SendIbcPost`
+CLI command.
+
+```protobuf title="proto/planet/blog/v1/packet.proto"
+message IbcPostPacketData {
+  string title = 1;
+  string content = 2;
+  // highlight-next-line
+  string creator = 3;
+}
+```
+
+To make sure the receiving chain has content on the creator of a blog post, add
+the `msg.Creator` value to the IBC `packet`.
+
+- The content of the `sender` of the message is automatically included in
+  `SendIbcPost` message.
+- The sender is verified as the signer of the message, so you can add the
+  `msg.Sender` as the creator to the new packet before it is sent over IBC. 
+ +```go title="x/blog/keeper/msg_server_ibc_post.go" +package keeper + +func (k msgServer) SendIbcPost(goCtx context.Context, msg *types.MsgSendIbcPost) (*types.MsgSendIbcPostResponse, error) { + // validate incoming message + if _, err := k.addressCodec.StringToBytes(msg.Creator); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidAddress, fmt.Sprintf("invalid address: %s", err)) + } + + if msg.Port == "" { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "invalid packet port") + } + + if msg.ChannelID == "" { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "invalid packet channel") + } + + if msg.TimeoutTimestamp == 0 { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "invalid packet timeout") + } + + // TODO: logic before transmitting the packet + + // Construct the packet + var packet types.IbcPostPacketData + + packet.Title = msg.Title + packet.Content = msg.Content + // highlight-next-line + packet.Creator = msg.Creator + + // Transmit the packet + ctx := sdk.UnwrapSDKContext(goCtx) + _, err := k.TransmitIbcPostPacket( + ctx, + packet, + msg.Port, + msg.ChannelID, + clienttypes.ZeroHeight(), + msg.TimeoutTimestamp, + ) + if err != nil { + return nil, err + } + + return &types.MsgSendIbcPostResponse{}, nil +} +``` + +### Receive the post + +The methods for primary transaction logic are in the `x/blog/keeper/ibc_post.go` +file. Use these methods to manage IBC packets: + +- `TransmitIbcPostPacket` is called manually to send the packet over IBC. This + method also defines the logic before the packet is sent over IBC to another + blockchain app. +- `OnRecvIbcPostPacket` hook is automatically called when a packet is received + on the chain. This method defines the packet reception logic. +- `OnAcknowledgementIbcPostPacket` hook is called when a sent packet is + acknowledged on the source chain. This method defines the logic when the + packet has been received. 

+- `OnTimeoutIbcPostPacket` hook is called when a sent packet times out. This
+  method defines the logic when the packet is not received on the target chain.
+
+You must modify the source code to add the logic inside those functions so that
+the data tables are modified accordingly.
+
+On reception of the post message, create a new post with the title and the
+content on the receiving chain.
+
+To identify the blockchain app that a message is originating from and who
+created the message, use an identifier in the following format:
+
+`<portID>-<channelID>-<sender>`
+
+Finally, the IGNITE® CLI-generated AppendPost function returns the ID of the new
+appended post. You can return this value to the source chain through
+acknowledgment.
+
+Append the type instance as `PostId` on receiving the packet:
+
+- The context `ctx` is an [immutable data
+  structure](https://docs.cosmos.network/main/core/context#go-context-package)
+  that has header data from the transaction. See [how the context is
+  initiated](https://github.com/cosmos/cosmos-sdk/blob/main/types/context.go#L71)
+- The identifier format that you defined earlier
+- The `title` is the Title of the blog post
+- The `content` is the Content of the blog post
+
+Then modify the `OnRecvIbcPostPacket` keeper function with the following code:
+
+```go title="x/blog/keeper/ibc_post.go"
+package keeper
+
+func (k Keeper) OnRecvIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData) (packetAck types.IbcPostPacketAck, err error) {
+	packetAck.PostId, err = k.PostSeq.Next(ctx)
+	if err != nil {
+		return packetAck, err
+	}
+	return packetAck, k.Post.Set(ctx, packetAck.PostId, types.Post{Title: data.Title, Content: data.Content})
+}
+```
+
+### Receive the post acknowledgement
+
+On the sending blockchain, store a `sentPost` so you know that the post has been
+received on the target chain.
+
+Store the title and the target to identify the post. 
+ +When a packet is scaffolded, the default type for the received acknowledgment +data is a type that identifies if the packet treatment has failed. The +`Acknowledgement_Error` type is set if `OnRecvIbcPostPacket` returns an error +from the packet. + +```go title="x/blog/keeper/ibc_post.go" +package keeper + +import transfertypes "github.com/cosmos/ibc-go/v10/modules/apps/transfer/types" + +func (k Keeper) OnAcknowledgementIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + // We will not treat acknowledgment error in this tutorial + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.IbcPostPacketAck + if err := k.cdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + seq, err := k.SentPostSeq.Next(ctx) + if err != nil { + return err + } + + return k.SentPost.Set(ctx, seq, + types.SentPost{ + PostId: packetAck.PostId, + Title: data.Title, + Chain: packet.DestinationPort + "-" + packet.DestinationChannel, + }, + ) + default: + return errors.New("the counter-party module does not implement the correct acknowledgment format") + } +} +``` + +### Store information about the timed-out packet + +Store posts that have not been received by target chains in `timeoutPost` +posts. This logic follows the same format as `sentPost`. 
+ +```go title="x/blog/keeper/ibc_post.go" +func (k Keeper) OnTimeoutIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData) error { + seq, err := k.TimeoutPostSeq.Next(ctx) + if err != nil { + return err + } + + return k.TimeoutPost.Set(ctx, seq, + types.TimeoutPost{ + Title: data.Title, + Chain: packet.DestinationPort + "-" + packet.DestinationChannel, + }, + ) +} +``` + +This last step completes the basic `blog` module setup. The blockchain is now +ready! + +## Use the IBC modules + +You can now spin up the blockchain and send a blog post from one blockchain app +to the other. Multiple terminal windows are required to complete these next +steps. + +### Test the IBC modules + +To test the IBC module, start two blockchain networks on the same machine. Both +blockchains use the same source code. Each blockchain has a unique chain ID. + +One blockchain is named `earth` and the other blockchain is named `mars`. + +The `earth.yml` and `mars.yml` files are required in the project directory: + +```yaml title="earth.yml" +version: 1 +validation: sovereign +build: + proto: + path: proto +accounts: +- name: alice + coins: + - 1000token + - 100000000stake +- name: bob + coins: + - 500token + - 100000000stake +faucet: + name: bob + coins: + - 5token + - 100000stake + host: 0.0.0.0:4500 +genesis: + chain_id: earth +validators: +- name: alice + bonded: 100000000stake + home: $HOME/.earth +``` + +```yaml title="mars.yml" +version: 1 +validation: sovereign +build: + proto: + path: proto +accounts: +- name: alice + coins: + - 1000token + - 1000000000stake +- name: bob + coins: + - 500token + - 100000000stake +faucet: + name: bob + coins: + - 5token + - 100000stake + host: :4501 +genesis: + chain_id: mars +validators: +- name: alice + bonded: 100000000stake + app: + api: + address: :1318 + grpc: + address: :9092 + grpc-web: + address: :9093 + config: + p2p: + laddr: :26658 + rpc: + laddr: :26659 + pprof_laddr: :6061 + home: $HOME/.mars +``` + +Open 
a terminal window and run the following command to start the `earth`
+blockchain:
+
+```bash
+ignite chain serve -c earth.yml
+```
+
+Open a different terminal window and run the following command to start the
+`mars` blockchain:
+
+```bash
+ignite chain serve -c mars.yml
+```
+
+### Configure and start the relayer
+
+First, add the Hermes relayer app.
+
+```bash
+ignite app install -g github.com/ignite/apps/hermes
+```
+
+If you previously used the relayer, follow these steps to remove existing relayer
+and IGNITE® CLI configurations:
+
+- Stop your blockchains and delete previous configuration files:
+
+```bash
+ignite relayer hermes clear binaries
+ignite relayer hermes clear configs
+```
+
+If no relayer configurations exist, these commands return `no matches
+found` and no action is taken.
+
+Then, configure the relayer:
+
+```bash
+ignite relayer hermes configure \
+"earth" "http://localhost:26657" "http://localhost:9090" \
+"mars" "http://localhost:26659" "http://localhost:9092" \
+--chain-a-faucet "http://0.0.0.0:4500" \
+--chain-b-faucet "http://0.0.0.0:4501" \
+--chain-a-port-id "blog" \
+--chain-b-port-id "blog" \
+--channel-version "blog-1"
+```
+
+When prompted, press Enter to accept the default values for `Chain A Account` and
+`Chain B Account`.
+
+The output looks like:
+
+```
+Hermes config created at /Users/danilopantani/.ignite/relayer/hermes/earth_mars
+? Chain earth doesn't have a default Hermes key. Type your mnemonic to continue or type enter to generate a new one: (optional)
+New mnemonic generated: danger plate flavor twist chimney myself sketch assist copy expand core tattoo ignore ensure quote mean forum carbon enroll gadget immense grab early maze
+Chain earth key created
+Chain earth relayer wallet: cosmos1jk6wmyl880j6t9vw6umy9v8ex0yhrfwgx0vv2d
+New balance from faucet: 100000stake,5token
+? Chain mars doesn't have a default Hermes key. 
Type your mnemonic to continue or type enter to generate a new one: (optional) +New mnemonic generated: invest box icon session lens demise purse link boss dwarf give minimum jazz eye vocal seven sunset coach express want ask version anger ranch +Chain mars key created +Chain mars relayer wallet: cosmos1x9kt37c0sutanaqwy9gxpvq5990yt0qnpqntmp +New balance from faucet: 100000stake,5token +Client '07-tendermint-0' created (earth -> mars) +Client 07-tendermint-0' created (mars -> earth) +Connection 'earth (connection-0) <-> mars (connection-0)' created +Channel 'earth (channel-0) <-> mars (channel-0)' created +``` + +Now start the relayer: + +```bash +ignite relayer hermes start "earth" "mars" +``` + +### Send packets + +You can now send packets and verify the received posts: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Hello" "Hello Mars, I'm Alice from Earth" --from alice --chain-id earth --home ~/.earth +``` + +To verify that the post has been received on Mars: + +```bash +planetd q blog list-post --node tcp://localhost:26659 +``` + +The packet has been received: + +```yaml +Post: + - content: Hello Mars, I'm Alice from Earth + creator: blog-channel-0-cosmos1aew8dk9cs3uzzgeldatgzvm5ca2k4m98xhy20x + id: "0" + title: Hello +pagination: + next_key: null + total: "1" +``` + +To check if the packet has been acknowledged on Earth: + +```bash +planetd q blog list-sent-post +``` + +Output: + +```yaml +SentPost: + - chain: blog-channel-0 + creator: cosmos1aew8dk9cs3uzzgeldatgzvm5ca2k4m98xhy20x + id: "0" + postID: "0" + title: Hello +pagination: + next_key: null + total: "1" +``` + +To test timeout, set the timeout time of a packet to 1 nanosecond, verify that +the packet is timed out, and check the timed-out posts: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Sorry" "Sorry Mars, you will never see this post" --from alice --chain-id earth --home ~/.earth --packet-timeout-timestamp 1 +``` + +Check the timed-out posts: + +```bash +planetd q blog 
list-timeout-post +``` + +Results: + +```yaml +TimeoutPost: + - chain: blog-channel-0 + creator: cosmos1fhpcsxn0g8uask73xpcgwxlfxtuunn3ey5ptjv + id: "0" + title: Sorry +pagination: + next_key: null + total: "2" +``` + +You can also send a post from Mars: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Hello" "Hello Earth, I'm Alice from Mars" --from alice --chain-id mars --home ~/.mars --node tcp://localhost:26659 +``` + +List post on Earth: + +```bash +planetd q blog list-post +``` + +Results: + +```yaml +Post: + - content: Hello Earth, I'm Alice from Mars + creator: blog-channel-0-cosmos1xtpx43l826348s59au24p22pxg6q248638q2tf + id: "0" + title: Hello +pagination: + next_key: null + total: "1" +``` + +## Congratulations 🎉 + +By completing this tutorial, you've learned to use the Inter-Blockchain +Communication protocol (IBC). + +Here's what you accomplished in this tutorial: + +- Built two Hello blockchain apps as IBC modules +- Modified the generated code to add CRUD action logic +- Configured and used the IGNITE® CLI relayer to connect two blockchains with + each other +- Transferred IBC packets from one blockchain to another diff --git a/docs/docs/02-guide/05-debug.md b/docs/docs/02-guide/05-debug.md new file mode 100644 index 0000000..545086f --- /dev/null +++ b/docs/docs/02-guide/05-debug.md @@ -0,0 +1,208 @@ +--- +description: Debugging your Cosmos SDK blockchain +--- + +# Debugging a chain + +IGNITE® chain debug command can help you find issues during development. It uses +[Delve](https://github.com/go-delve/delve) debugger which enables you to +interact with your blockchain app by controlling the execution of the process, +evaluating variables, and providing information of thread / goroutine state, CPU +register state and more. + +## Debug Command + +The debug command requires that the blockchain app binary is build with +debugging support by removing optimizations and inlining. 
A debug binary is
+built by default by the `ignite chain serve` command or can optionally be
+created using the `--debug` flag when running `ignite chain init` or `ignite
+chain build` sub-commands.
+
+To start a debugging session in the terminal run:
+
+```
+ignite chain debug
+```
+
+The command runs your blockchain app in the background, attaches to it and
+launches a terminal debugger shell:
+
+```
+Type 'help' for list of commands.
+(dlv)
+```
+
+At this point the blockchain app blocks execution, so you can set one or more
+breakpoints before continuing execution.
+
+Use the
+[break](https://github.com/go-delve/delve/blob/master/Documentation/cli/README.md#break)
+(alias `b`) command to set any number of breakpoints using, for example the
+`<filename>:<line>` notation:
+
+```
+(dlv) break x/hello/keeper/query_say_hello.go:13
+```
+
+This command adds a breakpoint to the `x/hello/keeper/query_say_hello.go`
+file at line 13.
+
+Once all breakpoints are set resume blockchain execution using the
+[continue](https://github.com/go-delve/delve/blob/master/Documentation/cli/README.md#continue)
+(alias `c`) command:
+
+```
+(dlv) continue
+```
+
+The debugger will launch the shell and stop blockchain execution again when a
+breakpoint is triggered.
+
+Within the debugger shell use the `quit` (alias `q`) or `exit` commands to stop
+the blockchain app and exit the debugger.
+
+## Debug Server
+
+A debug server can optionally be started in cases where the default terminal
+client is not desirable. When the server starts it first runs the blockchain
+app, attaches to it and finally waits for a client connection. The default
+server address is *tcp://127.0.0.1:30500* and it accepts both JSON-RPC or DAP
+client connections. 
+ +To start a debug server use the following flag: + +``` +ignite chain debug --server +``` + +To start a debug server with a custom address use the following flags: + +``` +ignite chain debug --server --server-address 127.0.0.1:30500 +``` + +The debug server stops automatically when the client connection is closed. + +## Debugging Clients + +### Gdlv: Multiplatform Delve UI + +[Gdlv](https://github.com/aarzilli/gdlv) is a graphical frontend to Delve for +Linux, Windows and macOS. + +Using it as debugging client is straightforward as it doesn't require any +configuration. Once the debug server is running and listening for client +requests connect to it by running: + +``` +gdlv connect 127.0.0.1:30500 +``` + +Setting breakpoints and continuing execution is done in the same way as Delve, +by using the `break` and `continue` commands. + +### Visual Studio Code + +Using [Visual Studio Code](https://code.visualstudio.com/) as debugging client +requires an initial configuration to allow it to connect to the debug server. + +Make sure that the [Go](https://code.visualstudio.com/docs/languages/go) +extension is installed. + +VS Code debugging is configured using the `launch.json` file which is usually +located inside the `.vscode` folder in your workspace. + +You can use the following launch configuration to set up VS Code as debugging +client: + +```json title=launch.json +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Connect to Debug Server", + "type": "go", + "request": "attach", + "mode": "remote", + "remotePath": "${workspaceFolder}", + "port": 30500, + "host": "127.0.0.1" + } + ] +} +``` + +Alternatively it's possible to create a custom `launch.json` file from the "Run +and Debug" panel. When prompted choose the Go debugger option labeled "Go: +Connect to Server" and enter the debug host address and then the port number. 
+ +## Example: Debugging a Blockchain App + +In this short example we will be using IGNITE® CLI to create a new blockchain and +a query to be able to trigger a debugging breakpoint when the query is called. + +Create a new blockchain: + +``` +ignite scaffold chain hello +``` + +Scaffold a new query in the `hello` directory: + +``` +ignite scaffold query say-hello name --response name +``` + +The next step initializes the blockchain's data directory and compiles a debug +binary: + +``` +ignite chain init --debug +``` + +Once the initialization finishes launch the debugger shell: + +``` +ignite chain debug +``` + +Within the debugger shell create a breakpoint that will be triggered when the +`SayHello` function is called and then continue execution: + +``` +(dlv) break x/hello/keeper/query_say_hello.go:12 +(dlv) continue +``` + +From a different terminal use the `hellod` binary to call the query: + +``` +hellod query hello say-hello bob +``` + +A debugger shell will be launched when the breakpoint is triggered: + +``` + 7: "google.golang.org/grpc/codes" + 8: "google.golang.org/grpc/status" + 9: "hello/x/hello/types" + 10: ) + 11: +=> 12: func (k Keeper) SayHello(ctx context.Context, req *types.QuerySayHelloRequest) (*types.QuerySayHelloResponse, error) { + 13: if req == nil { + 14: return nil, status.Error(codes.InvalidArgument, "invalid request") + 15: } + 16: +``` + +From then on you can use Delve commands like `next` (alias `n`) or `print` +(alias `p`) to control execution and print values. For example, to print the +*name* argument value use the `print` command followed by "req.Name": + +``` +(dlv) print req.Name +"bob" +``` + +Finally, use `quit` (alias `q`) to stop the blockchain app and finish the +debugging session. 
diff --git a/docs/docs/02-guide/06-docker.md b/docs/docs/02-guide/06-docker.md new file mode 100644 index 0000000..0f47d37 --- /dev/null +++ b/docs/docs/02-guide/06-docker.md @@ -0,0 +1,142 @@ +--- +description: Run IGNITE® CLI using a Docker container. +--- + +# Running inside a Docker container + +You can run IGNITE® CLI inside a Docker container without installing the IGNITE® +CLI binary directly on your machine. + +Running IGNITE® CLI in Docker can be useful for various reasons; isolating your +test environment, running IGNITE® CLI on an unsupported operating system, or +experimenting with a different version of IGNITE® CLI without installing it. + +Docker containers are like virtual machines because they provide an isolated +environment to programs that runs inside them. In this case, you can run IGNITE® +CLI in an isolated environment. + +Experimentation and file system impact is limited to the Docker instance. The +host machine is not impacted by changes to the container. + +## Prerequisites + +Docker must be installed. See [Get Started with +Docker](https://www.docker.com/get-started). + +## IGNITE® CLI Commands in Docker + +After you scaffold and start a chain in your Docker container, all IGNITE® CLI +commands are available. Just type the commands after `docker run -ti +ignite/cli`. For example: + +```bash +docker run -ti ignitehq/cli -h +docker run -ti ignitehq/cli scaffold chain planet +docker run -ti ignitehq/cli chain serve +``` + +## Scaffolding a chain + +When Docker is installed, you can build a blockchain with a single command. + +IGNITE® CLI, and the chains you serve with IGNITE® CLI, persist some files. When +using the CLI binary directly, those files are located in `$HOME/.ignite` and +`$HOME/.cache`, but in the context of Docker it's better to use a directory +different from `$HOME`, so we use `$HOME/sdh`. This folder should be created +manually prior to the docker commands below, or else Docker creates it with the +root user. 
+ +```bash +mkdir $HOME/sdh +``` + +To scaffold a blockchain `planet` in the `/apps` directory in the container, run +this command in a terminal window: + +```bash +docker run -ti -v $HOME/sdh:/home/tendermint -v $PWD:/apps ignitehq/cli:0.25.2 scaffold chain planet +``` + +Be patient, this command takes a minute or two to run because it does everything +for you: + +- Creates a container that runs from the `ignitehq/cli:0.25.2` image. +- Executes the IGNITE® CLI binary inside the image. +- `-v $HOME/sdh:/home/tendermint` maps the `$HOME/sdh` directory in your local + computer (the host machine) to the home directory `/home/tendermint` inside + the container. +- `-v $PWD:/apps` maps the current directory in the terminal window on the host + machine to the `/apps` directory in the container. You can optionally specify + an absolute path instead of `$PWD`. + + Using `-w` and `-v` together provides file persistence on the host machine. + The application source code on the Docker container is mirrored to the file + system of the host machine. + + **Note:** The directory name for the `-w` and `-v` flags can be a name other + than `/app`, but the same directory must be specified for both flags. If you + omit `-w` and `-v`, the changes are made in the container only and are lost + when that container is shut down. + +## Starting a blockchain + +To start the blockchain node in the Docker container you just created, run this +command: + +```bash +docker run -ti -v $HOME/sdh:/home/tendermint -v $PWD:/apps -p 1317:1317 -p 26657:26657 ignitehq/cli:0.25.2 chain serve -p planet +``` + +This command does the following: + +- `-v $HOME/sdh:/home/tendermint` maps the `$HOME/sdh` directory in your local + computer (the host machine) to the home directory `/home/tendermint` inside + the container. +- `-v $PWD:/apps` persists the scaffolded app in the container to the host + machine at current working directory. 
+- `serve -p planet` specifies to use the `planet` directory that contains the + source code of the blockchain. +- `-p 1317:1317` maps the API server port (cosmos-sdk) to the host machine to + forward port 1317 listening inside the container to port 1317 on the host + machine. +- `-p 26657:26657` maps RPC server port 26657 (tendermint) on the host machine + to port 26657 in Docker. +- After the blockchain is started, open `http://localhost:26657` to see the + Tendermint API. +- The `-v` flag specifies for the container to access the application's source + code from the host machine, so it can build and run it. + +## Versioning + +You can specify which version of IGNITE® CLI to install and run in your Docker +container. + +### Latest version + +- By default, `ignite/cli` resolves to `ignite/cli:latest`. +- The `latest` image tag is always the latest stable [IGNITE® CLI + release](https://github.com/ignite/cli/releases). + +For example, if latest release is +[v0.25.2](https://github.com/ignite/cli/releases/tag/v0.25.2), the `latest` tag +points to the `0.25.2` tag. + +### Specific version + +You can specify to use a specific version of IGNITE® CLI. All available tags are +in the [ignite/cli +image](https://hub.docker.com/r/ignitehq/cli/tags?page=1&ordering=last_updated) on +Docker Hub. + +For example: + +- Use `ignitehq/cli:0.25.2` (without the `v` prefix) to use version `0.25.2`. +- Use `ignitehq/cli` to use the latest version. +- Use `ignitehq/cli:main` to use the `main` branch, so you can experiment with + the upcoming version. + +To get the latest image, run `docker pull`. + +```bash +docker pull ignitehq/cli:main +``` diff --git a/docs/docs/02-guide/07-simapp.md b/docs/docs/02-guide/07-simapp.md new file mode 100644 index 0000000..9d87f97 --- /dev/null +++ b/docs/docs/02-guide/07-simapp.md @@ -0,0 +1,103 @@ +--- +sidebar_position: 10 +description: Test different scenarios for your chain. 

+---
+
+# Chain simulation
+
+The IGNITE® CLI chain simulator can help you run your chain with randomized
+inputs, so you can perform fuzz testing and also benchmark tests for your
+chain, simulating the messages, blocks, and accounts. You can scaffold a
+template to perform simulation testing in each module, along with boilerplate
+simulation methods for each scaffolded message.
+
+## Module simulation
+
+Every new module that is scaffolded with IGNITE® CLI implements the Cosmos SDK
+[Module
+Simulation](https://docs.cosmos.network/main/building-modules/simulator).
+
+- Each new message creates a file with the simulation methods required for the
+  tests.
+- Scaffolding a `CRUD` type like a `list` or `map` creates a simulation file
+  with `create`, `update`, and `delete` simulation methods in the
+  `x/<module>/simulation` folder and registers these methods in
+  `x/<module>/module_simulation.go`.
+- Scaffolding a single message creates an empty simulation method to be
+  implemented by the user.
+
+We recommend that you maintain the simulation methods with each new modification
+to the message keeper methods.
+
+Every simulation is weighted because the sender of the operation is assigned
+randomly. The weight defines how often the simulation calls the message.
+
+For better randomizations, you can define a random seed. The simulation with the
+same random seed is deterministic with the same output.
+
+## Scaffold a simulation
+
+To create a new chain:
+
+```
+ignite scaffold chain mars
+```
+
+Review the empty `x/mars/simulation` folder and the
+`x/mars/module_simulation.go` file to see that a simulation is not registered.
+
+Now, scaffold a new message:
+
+```
+ignite scaffold list user address balance:uint state
+```
+
+A new file `x/mars/simulation/user.go` is created and is registered with the
+weight in the `x/mars/module_simulation.go` file.
+
+Be sure to define the proper simulation weight with a minimum weight of 0 and a
+maximum weight of 100. 
Scaffolding a module with params automatically adds the module in the
`module_simulation.go` file:

```
ignite s module earth --params channel:string,minLaunch:uint,maxLaunch:int
```

After the parameters are scaffolded, change the
`x//module_simulation.go` file to set the random parameters in the
`RandomizedParams` method. The simulation will change the params randomly
each time the function is called.
Each field is an instance of a collection type, such as `collections.Map`, `collections.Item`, or `collections.KeySet`.
exists { + // value exists +} +``` + +### Removing State + +To remove values from state, use the `Remove` method: + +```go +err := k.Counters.Remove(ctx, "my-counter") +if err != nil { + // handle error +} +``` + +## Implementing Business Logic in Messages + +Messages in Cosmos SDK modules modify state based on user transactions. Here's how to implement business logic in a message handler using collections: + +```go +func (k msgServer) CreateProfile(ctx context.Context, msg *types.MsgCreateProfile) (*types.MsgCreateProfileResponse, error) { + // validate message + if err := msg.ValidateBasic(); err != nil { + return nil, err + } + + // parse sender address + senderBz, err := k.addressCodec.StringToBytes(msg.Creator) + if err != nil { + return nil, err + } + sender := sdk.AccAddress(senderBz) + + // check if profile already exists + exists, err := k.Profiles.Has(ctx, sender) + if err != nil { + return nil, err + } + if exists { + return nil, sdkerrors.Wrap(types.ErrProfileExists, "profile already exists") + } + + // create new profile + sdkCtx := sdk.UnwrapSDKContext(ctx) + profile := types.Profile{ + Name: msg.Name, + Bio: msg.Bio, + CreatedAt: sdkCtx.BlockTime().Unix(), + } + + // store the profile + err = k.Profiles.Set(ctx, sender, profile) + if err != nil { + return nil, err + } + + // increment profile counter + counter, err := k.Counters.Get(ctx, "profiles") + if err != nil && !errors.Is(err, collections.ErrNotFound) { + return nil, err + } + // set the counter (adding 1) + err = k.Counters.Set(ctx, "profiles", counter+1) + if err != nil { + return nil, err + } + + return &types.MsgCreateProfileResponse{}, nil +} +``` + +## Implementing Queries + +Queries allow users to read state without modifying it. 
    addressBz, err := q.k.addressCodec.StringToBytes(req.Address)
err = k.Counters.Walk(ctx, new(collections.Range[string]).StartInclusive(startKey).EndInclusive(endKey), func(key string, value uint64) (bool, error) {
Use
your chain's binary to manage accounts from "config.yml".
For example, if your +blockchain is called "mychain", use "mychaind keys" to manage keys for the +chain. + + +**Options** + +``` + -h, --help help for account + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite account create](#ignite-account-create) - Create a new account +* [ignite account delete](#ignite-account-delete) - Delete an account by name +* [ignite account export](#ignite-account-export) - Export an account as a private key +* [ignite account import](#ignite-account-import) - Import an account by using a mnemonic or a private key +* [ignite account list](#ignite-account-list) - Show a list of all accounts +* [ignite account show](#ignite-account-show) - Show detailed information about a particular account + + +## ignite account create + +Create a new account + +``` +ignite account create [name] [flags] +``` + +**Options** + +``` + --coin-type uint32 coin type to use for the account (default 118) + -h, --help help for create +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account delete + +Delete an account by name + +``` +ignite account delete [name] [flags] +``` + +**Options** + +``` + -h, --help help for delete +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite 
account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account export + +Export an account as a private key + +``` +ignite account export [name] [flags] +``` + +**Options** + +``` + -h, --help help for export + --non-interactive do not enter into interactive mode + --passphrase string passphrase to encrypt the exported key + --path string path to export private key. default: ./key_[name] +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account import + +Import an account by using a mnemonic or a private key + +``` +ignite account import [name] [flags] +``` + +**Options** + +``` + --coin-type uint32 coin type to use for the account (default 118) + -h, --help help for import + --non-interactive do not enter into interactive mode + --passphrase string passphrase to decrypt the imported key (ignored when secret is a mnemonic) + --secret string Your mnemonic or path to your private key (use interactive mode instead to securely pass your mnemonic) +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account list + +Show a list of all accounts + +``` +ignite account list [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + -h, --help help for list +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + 
--keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account show + +Show detailed information about a particular account + +``` +ignite account show [name] [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite app + +Create and manage Ignite Apps + +**Options** + +``` + -h, --help help for app +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite app describe](#ignite-app-describe) - Print information about installed apps +* [ignite app install](#ignite-app-install) - Install app +* [ignite app list](#ignite-app-list) - List installed apps +* [ignite app scaffold](#ignite-app-scaffold) - Scaffold a new Ignite App +* [ignite app uninstall](#ignite-app-uninstall) - Uninstall app +* [ignite app update](#ignite-app-update) - Update app + + +## ignite app describe + +Print information about installed apps + +**Synopsis** + +Print information about an installed Ignite App commands and hooks. + +``` +ignite app describe [path] [flags] +``` + +**Examples** + +``` +ignite app describe github.com/org/my-app/ +``` + +**Options** + +``` + -h, --help help for describe +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app install + +Install app + +**Synopsis** + +Installs an Ignite App. 
+ +Respects key value pairs declared after the app path to be added to the generated configuration definition. + +``` +ignite app install [path] [key=value]... [flags] +``` + +**Examples** + +``` +ignite app install github.com/org/my-app/ foo=bar baz=qux +``` + +**Options** + +``` + -g, --global use global plugins configuration ($HOME/.ignite/apps/igniteapps.yml) + -h, --help help for install +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app list + +List installed apps + +**Synopsis** + +Prints status and information of all installed Ignite Apps. + +``` +ignite app list [flags] +``` + +**Options** + +``` + -h, --help help for list +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app scaffold + +Scaffold a new Ignite App + +**Synopsis** + +Scaffolds a new Ignite App in the current directory. + +A git repository will be created with the given module name, unless the current directory is already a git repository. + +``` +ignite app scaffold [name] [flags] +``` + +**Examples** + +``` +ignite app scaffold github.com/org/my-app/ +``` + +**Options** + +``` + -h, --help help for scaffold +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app uninstall + +Uninstall app + +**Synopsis** + +Uninstalls an Ignite App specified by path. + +``` +ignite app uninstall [path] [flags] +``` + +**Examples** + +``` +ignite app uninstall github.com/org/my-app/ +``` + +**Options** + +``` + -g, --global use global plugins configuration ($HOME/.ignite/apps/igniteapps.yml) + -h, --help help for uninstall +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app update + +Update app + +**Synopsis** + +Updates an Ignite App specified by path. + +If no path is specified all declared apps are updated. 
Commands in this namespace let you build, initialize, and start your
blockchain node locally for development purposes.
+ +The "serve" command builds, initializes, and starts your blockchain locally with +a single validator node for development purposes. "serve" also watches the +source code directory for file changes and intelligently +re-builds/initializes/starts the chain, essentially providing "code-reloading". +The "serve" command is meant only for development purposes, not production. + +To distinguish between production and development consider the following. + +In production, blockchains often run the same software on many validator nodes +that are run by different people and entities. To launch a blockchain in +production, the validator entities coordinate the launch process to start their +nodes simultaneously. + +During development, a blockchain can be started locally on a single validator +node. This convenient process lets you restart a chain quickly and iterate +faster. Starting a chain on a single node in development is similar to starting +a traditional web application on a local server. + +The "faucet" command lets you send tokens to an address from the "faucet" +account defined in "config.yml". Alternatively, you can use the chain's binary +to send token from any other account that exists on chain. + +The "simulate" command helps you start a simulation testing process for your +chain. 
Afterwards, Ignite installs dependencies specified in the go.mod file.
By default the "main" +package is located in "cmd/{app}d" directory, where "{app}" is the name of the +scaffolded project and "d" stands for daemon. If your project contains more +than one "main" package, specify the path to the one that Ignite should compile +in config.yml: + + build: + main: custom/path/to/main + +By default the binary name will match the top-level module name (specified in +go.mod) with a suffix "d". This can be customized in config.yml: + + build: + binary: mychaind + +You can also specify custom linker flags: + + build: + ldflags: + - "-X main.Version=development" + - "-X main.Date=01/05/2022T19:54" + +To build binaries for a release, use the --release flag. The binaries for one or +more specified release targets are built in a "release/" directory in the +project's source directory. Specify the release targets with GOOS:GOARCH build +tags. If the optional --release.targets is not specified, a binary is created +for your current environment. + + ignite chain build --release -t linux:amd64 -t darwin:amd64 -t darwin:arm64 + + +``` +ignite chain build [flags] +``` + +**Options** + +``` + --build.tags strings parameters to build the chain binary + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --debug build a debug binary + -h, --help help for build + -o, --output string binary output path + -p, --path string path of the app (default ".") + --release build for a release + --release.prefix string tarball prefix for each release target. Available only with --release flag + -t, --release.targets strings release targets. 
Available only with --release flag + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain debug + +Launch a debugger for a blockchain app + +**Synopsis** + +The debug command starts a debug server and launches a debugger. + +Ignite uses the Delve debugger by default. Delve enables you to interact with +your program by controlling the execution of the process, evaluating variables, +and providing information of thread / goroutine state, CPU register state and +more. + +A debug server can optionally be started in cases where default terminal client +is not desirable. When the server starts it first runs the blockchain app, +attaches to it and finally waits for a client connection. It accepts both +JSON-RPC or DAP client connections. + +To start a debug server use the following flag: + + ignite chain debug --server + +To start a debug server with a custom address use the following flags: + + ignite chain debug --server --server-address 127.0.0.1:30500 + +The debug server stops automatically when the client connection is closed. 
+ + +``` +ignite chain debug [flags] +``` + +**Options** + +``` + -h, --help help for debug + -p, --path string path of the app (default ".") + --server start a debug server + --server-address string debug server address (default "127.0.0.1:30500") +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain faucet + +Send coins to an account + +``` +ignite chain faucet [address] [coin<,...>] [flags] +``` + +**Options** + +``` + -h, --help help for faucet + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain init + +Initialize your chain + +**Synopsis** + +The init command compiles and installs the binary (like "ignite chain build") +and uses that binary to initialize the blockchain's data directory for one +validator. To learn how the build process works, refer to "ignite chain build +--help". + +By default, the data directory will be initialized in $HOME/.mychain, where +"mychain" is the name of the project. To set a custom data directory use the +--home flag or set the value in config.yml: + + validators: + - name: alice + bonded: '100000000stake' + home: "~/.customdir" + +The data directory contains three files in the "config" directory: app.toml, +config.toml, client.toml. These files let you customize the behavior of your +blockchain node and the client executable. 
When a chain is re-initialized the +data directory can be reset. To make some values in these files persistent, set +them in config.yml: + + validators: + - name: alice + bonded: '100000000stake' + app: + minimum-gas-prices: "0.025stake" + config: + consensus: + timeout_commit: "5s" + timeout_propose: "5s" + client: + output: "json" + +The configuration above changes the minimum gas price of the validator (by +default the gas price is set to 0 to allow "free" transactions), sets the block +time to 5s, and changes the output format to JSON. To see what kind of values +this configuration accepts see the generated TOML files in the data directory. + +As part of the initialization process Ignite creates on-chain accounts with +token balances. By default, config.yml has two accounts in the top-level +"accounts" property. You can add more accounts and change their token balances. +Refer to config.yml guide to see which values you can set. + +One of these accounts is a validator account and the amount of self-delegated +tokens can be set in the top-level "validator" property. + +One of the most important components of an initialized chain is the genesis +file, the 0th block of the chain. The genesis file is stored in the data +directory "config" subdirectory and contains the initial state of the chain, +including consensus and module parameters. You can customize the values of the +genesis in config.yml: + + genesis: + app_state: + staking: + params: + bond_denom: "foo" + +The example above changes the staking token to "foo". If you change the staking +denom, make sure the validator account has the right tokens. + +The init command is meant to be used ONLY FOR DEVELOPMENT PURPOSES. Under the +hood it runs commands like "appd init", "appd add-genesis-account", "appd +gentx", and "appd collect-gentx". For production, you may want to run these +commands manually to ensure a production-level node initialization. 
+ + +``` +ignite chain init [flags] +``` + +**Options** + +``` + --build.tags strings parameters to build the chain binary + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --debug build a debug binary + -h, --help help for init + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain lint + +Lint codebase using golangci-lint + +**Synopsis** + +The lint command runs the golangci-lint tool to lint the codebase. + +``` +ignite chain lint [flags] +``` + +**Options** + +``` + -h, --help help for lint +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain modules + +Manage modules + +**Synopsis** + +The modules command allows you to manage modules in the codebase. 
+ +**Options** + +``` + -h, --help help for modules +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node +* [ignite chain modules list](#ignite-chain-modules-list) - List all Cosmos SDK modules in the app + + +## ignite chain modules list + +List all Cosmos SDK modules in the app + +**Synopsis** + +The list command lists all modules in the app. + +``` +ignite chain modules list [flags] +``` + +**Options** + +``` + -h, --help help for list +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain modules](#ignite-chain-modules) - Manage modules + + +## ignite chain serve + +Start a blockchain node in development + +**Synopsis** + +The serve command compiles and installs the binary (like "ignite chain build"), +uses that binary to initialize the blockchain's data directory for one validator +(like "ignite chain init"), and starts the node locally for development purposes +with automatic code reloading. + +Automatic code reloading means Ignite starts watching the project directory. +Whenever a file change is detected, Ignite automatically rebuilds, reinitializes +and restarts the node. + +Whenever possible Ignite will try to keep the current state of the chain by +exporting and importing the genesis file. 
+ +To force Ignite to start from a clean slate even if a genesis file exists, use +the following flag: + + ignite chain serve --reset-once + +To force Ignite to reset the state every time the source code is modified, use +the following flag: + + ignite chain serve --force-reset + +With Ignite it's possible to start more than one blockchain from the same source +code using different config files. This is handy if you're building +inter-blockchain functionality and, for example, want to try sending packets +from one blockchain to another. To start a node using a specific config file: + + ignite chain serve --config mars.yml + +The serve command is meant to be used ONLY FOR DEVELOPMENT PURPOSES. Under the +hood, it runs "appd start", where "appd" is the name of your chain's binary. For +production, you may want to run "appd start" manually. + + +``` +ignite chain serve [flags] +``` + +**Options** + +``` + --build.tags strings parameters to build the chain binary + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + -f, --force-reset force reset of the app state on start and every source change + --generate-clients generate code for the configured clients on reset or source code change + -h, --help help for serve + --home string directory where the blockchain node is initialized + -o, --output-file string output file logging the chain output (no UI, no stdin, listens for SIGTERM, implies --yes) (default: stdout) + -p, --path string path of the app (default ".") + --quit-on-fail quit program if the app fails to start + -r, --reset-once reset the app state once on init + --skip-build skip initial build of the app (uses local binary) + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive 
yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain simulate + +Run simulation testing for the blockchain + +**Synopsis** + +Run simulation testing for the blockchain. It sends many randomized-input messages of each module to a simulated node. + +``` +ignite chain simulate [flags] +``` + +**Options** + +``` + --blockSize int operations per block (default 30) + --exportParamsHeight int height to which export the randomly generated params + --exportParamsPath string custom file path to save the exported params JSON + --exportStatePath string custom file path to save the exported app state JSON + --exportStatsPath string custom file path to save the exported simulation statistics JSON + --genesis string custom simulation genesis file; cannot be used with params file + --genesisTime int override genesis UNIX time instead of using a random UNIX time + -h, --help help for simulate + --initialBlockHeight int initial block to start the simulation (default 1) + --lean lean simulation log output + --numBlocks int number of new blocks to simulate from the initial block height (default 200) + --params string custom simulation params file which overrides any random params; cannot be used with genesis + --seed int simulation random seed (default 42) + --simName string name of the simulation to run (default "TestFullAppSimulation") +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite completion + +Generates shell completion script. 
+ +``` +ignite completion [bash|zsh|fish|powershell] [flags] +``` + +**Options** + +``` + -h, --help help for completion +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + + +## ignite docs + +Show Ignite CLI docs + +``` +ignite docs [flags] +``` + +**Options** + +``` + -h, --help help for docs +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + + +## ignite generate + +Generate clients, API docs from source code + +**Synopsis** + +Generate clients, API docs from source code. + +Such as compiling protocol buffer files into Go or implement particular +functionality, for example, generating an OpenAPI spec. + +Produced source code can be regenerated by running a command again and is not +meant to be edited by hand. + + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -h, --help help for generate + -p, --path string path of the app (default ".") + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite generate composables](#ignite-generate-composables) - TypeScript frontend client and Vue 3 composables +* [ignite generate openapi](#ignite-generate-openapi) - OpenAPI spec for your chain +* [ignite generate proto-go](#ignite-generate-proto-go) - Compile protocol buffer files to Go source code required by Cosmos SDK +* [ignite generate ts-client](#ignite-generate-ts-client) - TypeScript frontend client + + +## ignite generate composables + +TypeScript frontend client and Vue 3 composables + +``` +ignite generate composables [flags] +``` + +**Options** + +``` + -h, --help help for composables + -o, --output string Vue 3 composables output path + -y, --yes answers interactive 
yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate openapi + +OpenAPI spec for your chain + +``` +ignite generate openapi [flags] +``` + +**Options** + +``` + --exclude strings List of proto files or directories to exclude from the OpenAPI spec generation + -h, --help help for openapi + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate proto-go + +Compile protocol buffer files to Go source code required by Cosmos SDK + +``` +ignite generate proto-go [flags] +``` + +**Options** + +``` + -h, --help help for proto-go + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate ts-client + +TypeScript frontend client + +**Synopsis** + +Generate a framework agnostic TypeScript client for your blockchain project. + +By default the TypeScript client is generated in the "ts-client/" directory. 
You +can customize the output directory in config.yml: + + client: + typescript: + path: new-path + +Output can also be customized by using a flag: + + ignite generate ts-client --output new-path + +TypeScript client code can be automatically regenerated on reset or source code +changes when the blockchain is started with a flag: + + ignite chain serve --generate-clients + + +``` +ignite generate ts-client [flags] +``` + +**Options** + +``` + --disable-cache disable build cache + -h, --help help for ts-client + -o, --output string TypeScript client output path + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite relayer + +Connect blockchains with an IBC relayer + +``` +ignite relayer [flags] +``` + +**Options** + +``` + -h, --help help for relayer +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + + +## ignite scaffold + +Create a new blockchain, module, message, query, and more + +**Synopsis** + +Scaffolding is a quick way to generate code for major pieces of your +application. + +For details on each scaffolding target (chain, module, message, etc.) run the +corresponding command with a "--help" flag, for example, "ignite scaffold chain +--help". + +The Ignite team strongly recommends committing the code to a version control +system before running scaffolding commands. This will make it easier to see the +changes to the source code as well as undo the command if you've decided to roll +back the changes. 

The blockchain you create with the chain scaffolding command uses the modular
Cosmos SDK framework and imports many standard modules for functionality like
proof of stake, token transfer, inter-blockchain connectivity, governance, and
more. Custom functionality is implemented in modules located by convention in
the "x/" directory. By default, your blockchain comes with an empty custom
module. Use the module scaffolding command to create an additional module.

An empty custom module doesn't do much, it's basically a container for logic
that is responsible for processing transactions and changing the application
state. Cosmos SDK blockchains work by processing user-submitted signed
transactions, which contain one or more messages. A message contains data that
describes a state transition. A module can be responsible for handling any
number of messages.

A message scaffolding command will generate the code for handling a new type of
Cosmos SDK message. Message fields describe the state transition that the
message is intended to produce if processed without errors.

Scaffolding messages is useful to create individual "actions" that your module
can perform. Sometimes, however, you want your blockchain to have the
functionality to create, read, update and delete (CRUD) instances of a
particular type. Depending on how you want to store the data there are three
commands that scaffold CRUD functionality for a type: list, map, and single.
These commands create four messages (one for each CRUD action), and the logic to
add, delete, and fetch the data from the store. If you want to scaffold only the
logic, for example, you've decided to scaffold messages separately, you can do
that as well with the "--no-message" flag.

Reading data from a blockchain happens with the help of queries. Similar to how
you can scaffold messages to write data, you can scaffold queries to read the
data back from your blockchain application.
+ +You can also scaffold a type, which just produces a new protocol buffer file +with a proto message description. Note that proto messages produce (and +correspond with) Go types whereas Cosmos SDK messages correspond to proto "rpc" +in the "Msg" service. + +If you're building an application with custom IBC logic, you might need to +scaffold IBC packets. An IBC packet represents the data sent from one blockchain +to another. You can only scaffold IBC packets in IBC-enabled modules scaffolded +with an "--ibc" flag. Note that the default module is not IBC-enabled. + + +**Options** + +``` + -h, --help help for scaffold + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite scaffold chain](#ignite-scaffold-chain) - New Cosmos SDK blockchain +* [ignite scaffold chain-registry](#ignite-scaffold-chain-registry) - Configs for the chain registry +* [ignite scaffold configs](#ignite-scaffold-configs) - Configs for a custom Cosmos SDK module +* [ignite scaffold list](#ignite-scaffold-list) - CRUD for data stored as an array +* [ignite scaffold map](#ignite-scaffold-map) - CRUD for data stored as key-value pairs +* [ignite scaffold message](#ignite-scaffold-message) - Message to perform state transition on the blockchain +* [ignite scaffold migration](#ignite-scaffold-migration) - Module migration boilerplate +* [ignite scaffold module](#ignite-scaffold-module) - Custom Cosmos SDK module +* [ignite scaffold packet](#ignite-scaffold-packet) - Message for sending an IBC packet +* [ignite scaffold params](#ignite-scaffold-params) - Parameters for a custom Cosmos SDK module +* [ignite scaffold query](#ignite-scaffold-query) - Query for fetching data from a blockchain +* [ignite scaffold single](#ignite-scaffold-single) - CRUD for data stored in a single location +* [ignite scaffold type](#ignite-scaffold-type) - Type definition +* [ignite scaffold 
type-list](#ignite-scaffold-type-list) - List scaffold types +* [ignite scaffold vue](#ignite-scaffold-vue) - Vue 3 web app template + + +## ignite scaffold chain + +New Cosmos SDK blockchain + +**Synopsis** + +Create a new application-specific Cosmos SDK blockchain. + +For example, the following command will create a blockchain called "hello" in +the "hello/" directory: + + ignite scaffold chain hello + +A project name can be a simple name or a URL. The name will be used as the Go +module path for the project. Examples of project names: + + ignite scaffold chain foo + ignite scaffold chain foo/bar + ignite scaffold chain example.org/foo + ignite scaffold chain github.com/username/foo + +A new directory with source code files will be created in the current directory. +To use a different path use the "--path" flag. + +Most of the logic of your blockchain is written in custom modules. Each module +effectively encapsulates an independent piece of functionality. Following the +Cosmos SDK convention, custom modules are stored inside the "x/" directory. By +default, Ignite creates a module with a name that matches the name of the +project. To create a blockchain without a default module use the "--no-module" +flag. Additional modules can be added after a project is created with "ignite +scaffold module" command. + +Account addresses on Cosmos SDK-based blockchains have string prefixes. For +example, the Cosmos Hub blockchain uses the default "cosmos" prefix, so that +addresses look like this: "cosmos12fjzdtqfrrve7zyg9sv8j25azw2ua6tvu07ypf". To +use a custom address prefix use the "--address-prefix" flag. For example: + + ignite scaffold chain foo --address-prefix bar + +By default when compiling a blockchain's source code Ignite creates a cache to +speed up the build process. To clear the cache when building a blockchain use +the "--clear-cache" flag. It is very unlikely you will ever need to use this +flag. 

The blockchain is using the Cosmos SDK modular blockchain framework. Learn more
about Cosmos SDK on https://docs.cosmos.network


```
ignite scaffold chain [name] [flags]
```

**Options**

```
      --address-prefix string    account address prefix (default "cosmos")
      --clear-cache              clear the build cache (advanced)
      --coin-type uint32         coin type to use for the account (default 118)
      --default-denom string     default staking denom (default "stake")
  -h, --help                     help for chain
      --minimal                  create a minimal blockchain (with the minimum required Cosmos SDK modules)
      --module-configs strings   add module configs
      --no-module                create a project without a default module
      --params strings           add default module parameters
  -p, --path string              create a project in a specific path
      --proto-dir string         chain proto directory (default "proto")
      --skip-git                 skip Git repository initialization
      --skip-proto               skip proto generation
```

**Options inherited from parent commands**

```
  -v, --verbose   verbose output
```

**SEE ALSO**

* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more


## ignite scaffold chain-registry

Configs for the chain registry

**Synopsis**

Scaffold the chain registry chain.json and assets.json files.

The chain registry is a GitHub repo, hosted at https://github.com/cosmos/chain-registry, that
contains the chain.json and assets.json files of most of the chains in the Cosmos ecosystem.
It is good practice, when creating a new chain, and about to launch a testnet or mainnet, to
publish the chain's metadata in the chain registry.
+ +Read more about the chain.json at https://github.com/cosmos/chain-registry?tab=readme-ov-file#chainjson +Read more about the assets.json at https://github.com/cosmos/chain-registry?tab=readme-ov-file#assetlists + +``` +ignite scaffold chain-registry [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for chain-registry + -p, --path string path of the app (default ".") + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold configs + +Configs for a custom Cosmos SDK module + +**Synopsis** + +Scaffold a new config for a Cosmos SDK module. + +A Cosmos SDK module can have configurations. An example of a config is "address prefix" of the +"auth" module. A config can be scaffolded into a module using the "--module-configs" into +the scaffold module command or using the "scaffold configs" command. By default +configs are of type "string", but you can specify a type for each config. For example: + + ignite scaffold configs foo baz:uint bar:bool + +Refer to Cosmos SDK documentation to learn more about modules, dependencies and +configs. + + +``` +ignite scaffold configs [configs]... 
[flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for configs + --module string module to add the query into (default: app's main module) + -p, --path string path of the app (default ".") + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold list + +CRUD for data stored as an array + +**Synopsis** + +The "list" scaffolding command is used to generate files that implement the +logic for storing and interacting with data stored as a list in the blockchain +state. + +The command accepts a NAME argument that will be used as the name of a new type +of data. It also accepts a list of FIELDs that describe the type. + +The interaction with the data follows the create, read, updated, and delete +(CRUD) pattern. For each type three Cosmos SDK messages are defined for writing +data to the blockchain: MsgCreate{Name}, MsgUpdate{Name}, MsgDelete{Name}. For +reading data two queries are defined: {Name} and {Name}All. The type, messages, +and queries are defined in the "proto/" directory as protocol buffer messages. +Messages and queries are mounted in the "Msg" and "Query" services respectively. + +When messages are handled, the appropriate keeper methods are called. By +convention, the methods are defined in +"x/{moduleName}/keeper/msg_server_{name}.go". Helpful methods for getting, +setting, removing, and appending are defined in the same "keeper" package in +"{name}.go". + +The "list" command essentially allows you to define a new type of data and +provides the logic to create, read, update, and delete instances of the type. 
+For example, let's review a command that generates the code to handle a list of +posts and each post has "title" and "body" fields: + + ignite scaffold list post title body + +This provides you with a "Post" type, MsgCreatePost, MsgUpdatePost, +MsgDeletePost and two queries: Post and PostAll. The compiled CLI, let's say the +binary is "blogd" and the module is "blog", has commands to query the chain (see +"blogd q blog") and broadcast transactions with the messages above (see "blogd +tx blog"). + +The code generated with the list command is meant to be edited and tailored to +your application needs. Consider the code to be a "skeleton" for the actual +business logic you will implement next. + +By default, all fields are assumed to be strings. If you want a field of a +different type, you can specify it after a colon ":". The following types are +supported: string, bool, int, uint, coin, array.string, array.int, array.uint, +array.coin. An example of using field types: + + ignite scaffold list pool amount:coin tags:array.string height:int + +For detailed type information use ignite scaffold type --help + +"Index" indicates whether the type can be used as an index in +"ignite scaffold map". + +Ignite also supports custom types: + + ignite scaffold list product-details name desc + ignite scaffold list product price:coin details:ProductDetails + +In the example above the "ProductDetails" type was defined first, and then used +as a custom type for the "details" field. + +Your chain will accept custom types in JSON-notation: + + exampled tx example create-product 100coin '{"name": "x", "desc": "y"}' --from alice + +By default the code will be scaffolded in the module that matches your project's +name. If you have several modules in your project, you might want to specify a +different module: + + ignite scaffold list post title body --module blog + +By default, each message comes with a "creator" field that represents the +address of the transaction signer. 
You can customize the name of this field with +a flag: + + ignite scaffold list post title body --signer author + +It's possible to scaffold just the getter/setter logic without the CRUD +messages. This is useful when you want the methods to handle a type, but would +like to scaffold messages manually. Use a flag to skip message scaffolding: + + ignite scaffold list post title body --no-message + +The "creator" field is not generated if a list is scaffolded with the +"--no-message" flag. + + +``` +ignite scaffold list NAME [field]... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for list + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold map + +CRUD for data stored as key-value pairs + +**Synopsis** + +The "map" scaffolding command is used to generate files that implement the logic +for storing and interacting with data stored as key-value pairs (or a +dictionary) in the blockchain state. + +The "map" command is very similar to "ignite scaffold list" with the main +difference in how values are indexed. With "list" values are indexed by an +incrementing integer, whereas "map" values are indexed by a user-provided value +(or multiple values). + +Let's use the same blog post example: + + ignite scaffold map post title body:string + +This command scaffolds a "Post" type and CRUD functionality to create, read, +updated, and delete posts. 
However, when creating a new post with your chain's +binary (or by submitting a transaction through the chain's API) you will be +required to provide an "index": + + blogd tx blog create-post [index] [title] [body] + blogd tx blog create-post hello "My first post" "This is the body" + +This command will create a post and store it in the blockchain's state under the +"hello" index. You will be able to fetch back the value of the post by querying +for the "hello" key. + + blogd q blog show-post hello + +By default, the index is called "index", to customize the index, use the "--index" flag. + +Since the behavior of "list" and "map" scaffolding is very similar, you can use +the "--no-message", "--module", "--signer" flags as well as the colon syntax for +custom types. + +For detailed type information use ignite scaffold type --help + + +``` +ignite scaffold map NAME [field]... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for map + --index string field that index the value (default "index") + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold message + +Message to perform state transition on the blockchain + +**Synopsis** + +Message scaffolding is useful for quickly adding functionality to your +blockchain to handle specific Cosmos SDK messages. + +Messages are objects whose end goal is to trigger state transitions on the +blockchain. 
A message is a container for fields of data that affect how the
blockchain's state will change. You can think of messages as "actions" that a
user can perform.

For example, the bank module has a "Send" message for token transfers between
accounts. The send message has three fields: from address (sender), to address
(recipient), and a token amount. When this message is successfully processed,
the token amount will be deducted from the sender's account and added to the
recipient's account.

Ignite's message scaffolding lets you create new types of messages and add them
to your chain. For example:

  ignite scaffold message add-pool amount:coins denom active:bool --module dex

The command above will create a new message MsgAddPool with three fields: amount
(in tokens), denom (a string), and active (a boolean). The message will be added
to the "dex" module.

For detailed type information use ignite scaffold type --help

By default, the message is defined as a proto message in the
"proto/{app}/{module}/tx.proto" and registered in the "Msg" service. A CLI command to
create and broadcast a transaction with MsgAddPool is created in the module's
"cli" package. Additionally, Ignite scaffolds a message constructor and the code
to satisfy the sdk.Msg interface and register the message in the module.

Most importantly in the "keeper" package Ignite scaffolds an "AddPool" function.
Inside this function, you can implement message handling logic.

When successfully processed a message can return data. Use the --response flag to
specify response fields and their types. For example:

  ignite scaffold message create-post title body --response id:int,title

The command above will scaffold MsgCreatePost which returns both an ID (an
integer) and a title (a string).

Message scaffolding follows the same rules as "ignite scaffold list/map/single" and
supports fields with standard and custom types. See "ignite scaffold list --help"
for details.


```
ignite scaffold message [name] [field1:type1] [field2:type2] ... [flags]
```

**Options**

```
      --clear-cache       clear the build cache (advanced)
  -d, --desc string       description of the command
  -h, --help              help for message
      --module string     module to add the message into. Default: app's main module
      --no-simulation     disable CRUD simulation scaffolding
  -p, --path string       path of the app (default ".")
  -r, --response strings  response fields
      --signer string     label for the message signer (default: creator)
  -y, --yes               answers interactive yes/no questions with yes
```

**Options inherited from parent commands**

```
  -v, --verbose   verbose output
```

**SEE ALSO**

* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more


## ignite scaffold migration

Module migration boilerplate

**Synopsis**

Scaffold no-op migration boilerplate for an existing Cosmos SDK module.

This command creates a new migration file in "x/{moduleName}/migrations/vN/",
increments the module consensus version, and registers the new migration handler
inside "x/{moduleName}/module/module.go".

```
ignite scaffold migration [module] [flags]
```

**Options**

```
  -h, --help          help for migration
  -p, --path string   path of the app (default ".")
  -y, --yes           answers interactive yes/no questions with yes
```

**Options inherited from parent commands**

```
  -v, --verbose   verbose output
```

**SEE ALSO**

* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more


## ignite scaffold module

Custom Cosmos SDK module

**Synopsis**

Scaffold a new Cosmos SDK module.

Cosmos SDK is a modular framework and each independent piece of functionality is
implemented in a separate module. By default your blockchain imports a set of
standard Cosmos SDK modules. To implement custom functionality of your
blockchain, scaffold a module and implement the logic of your application.

This command does the following:

* Creates a directory with module's protocol buffer files in "proto/"
* Creates a directory with module's boilerplate Go code in "x/"
* Imports the newly created module by modifying "app/app.go"

This command will proceed with module scaffolding even if "app/app.go" doesn't
have the required default placeholders. If the placeholders are missing, you
will need to modify "app/app.go" manually to import the module. If you want the
command to fail if it can't import the module, use the "--require-registration"
flag.

To scaffold an IBC-enabled module use the "--ibc" flag. An IBC-enabled module is
like a regular module with the addition of IBC-specific logic and placeholders
to scaffold IBC packets with "ignite scaffold packet".

A module can depend on one or more other modules and import their keeper
methods. To scaffold a module with a dependency use the "--dep" flag.

For example, your new custom module "foo" might have functionality that requires
sending tokens between accounts. The method for sending tokens is defined in
the "bank" module's keeper. You can scaffold a "foo" module with the dependency
on "bank" with the following command:

  ignite scaffold module foo --dep bank

You can then define which methods you want to import from the "bank" keeper in
"expected_keepers.go".

You can also scaffold a module with a list of dependencies that can include both
standard and custom modules (provided they exist):

  ignite scaffold module bar --dep foo,mint,account,FeeGrant

Note: the "--dep" flag doesn't install third-party modules into your
application, it just generates extra code that specifies which existing modules
your new custom module depends on.

A Cosmos SDK module can have parameters (or "params"). Params are values that
can be set at the genesis of the blockchain and can be modified while the
blockchain is running.
An example of a param is "Inflation rate change" of the +"mint" module. A module can be scaffolded with params using the "--params" flag +that accepts a list of param names. By default params are of type "string", but +you can specify a type for each param. For example: + + ignite scaffold module foo --params baz:uint,bar:bool + +Refer to Cosmos SDK documentation to learn more about modules, dependencies and +params. + + +``` +ignite scaffold module [name] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --dep strings add a dependency on another module + -h, --help help for module + --ibc add IBC functionality + --module-configs strings add module configs + --ordering string channel ordering of the IBC module [none|ordered|unordered] (default "none") + --params strings add module parameters + -p, --path string path of the app (default ".") + --require-registration fail if module can't be registered + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold packet + +Message for sending an IBC packet + +**Synopsis** + +Scaffold an IBC packet in a specific IBC-enabled Cosmos SDK module + +``` +ignite scaffold packet [packetName] [field1] [field2] ... --module [moduleName] [flags] +``` + +**Options** + +``` + --ack strings custom acknowledgment type (field1,field2,...) 
+ --clear-cache clear the build cache (advanced)
+ -h, --help help for packet
+ --module string IBC Module to add the packet into
+ --no-message disable send message scaffolding
+ -p, --path string path of the app (default ".")
+ --signer string label for the message signer (default: creator)
+ -y, --yes answers interactive yes/no questions with yes
+```
+
+**Options inherited from parent commands**
+
+```
+ -v, --verbose verbose output
+```
+
+**SEE ALSO**
+
+* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more
+
+
+## ignite scaffold params
+
+Parameters for a custom Cosmos SDK module
+
+**Synopsis**
+
+Scaffold a new parameter for a Cosmos SDK module.
+
+A Cosmos SDK module can have parameters (or "params"). Params are values that
+can be set at the genesis of the blockchain and can be modified while the
+blockchain is running. An example of a param is "Inflation rate change" of the
+"mint" module. A param can be scaffolded into a module using the "--params" flag
+of the scaffold module command or using the "scaffold params" command. By default
+params are of type "string", but you can specify a type for each param. For example:
+
+ ignite scaffold params foo baz:uint bar:bool
+
+Refer to Cosmos SDK documentation to learn more about modules, dependencies and
+params.
+
+
+```
+ignite scaffold params [param]... [flags]
+```
+
+**Options**
+
+```
+ --clear-cache clear the build cache (advanced)
+ -h, --help help for params
+ --module string module to add the query into. 
Default: app's main module + -p, --path string path of the app (default ".") + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold query + +Query for fetching data from a blockchain + +**Synopsis** + +Query for fetching data from a blockchain. + +For detailed type information use ignite scaffold type --help. + +``` +ignite scaffold query [name] [field1:type1] [field2:type2] ... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -d, --desc string description of the CLI to broadcast a tx with the message + -h, --help help for query + --module string module to add the query into. Default: app's main module + --paginated define if the request can be paginated + -p, --path string path of the app (default ".") + -r, --response strings response fields + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold single + +CRUD for data stored in a single location + +**Synopsis** + +CRUD for data stored in a single location. + +For detailed type information use ignite scaffold type --help. + +``` +ignite scaffold single NAME [field:type]... 
[flags] +``` + +**Examples** + +``` + ignite scaffold single todo-single title:string done:bool +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for single + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold type + +Type definition + +**Synopsis** + +Type information + +Types Usage +address use ':address' to scaffold string types (eg: cosmos1abcdefghijklmnopqrstuvwxyz0123456). +array.coin use ':array.coin' to scaffold sdk.Coins types (eg: 20stake). Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations. +array.dec.coin use ':array.dec.coin' to scaffold sdk.DecCoins types (eg: 20000002stake). Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations. +array.int use ':array.int' to scaffold []int64 types (eg: 5,4,3,2,1). +array.string use ':array.string' to scaffold []string types (eg: abc,xyz). +array.uint use ':array.uint' to scaffold []uint64 types (eg: 13,26,31,40). +bool use ':bool' to scaffold bool types (eg: true). +bytes use ':bytes' to scaffold []byte types (eg: 3,2,3,5). +coin use ':coin' to scaffold sdk.Coin types (eg: 10token). +coins use ':array.coin' to scaffold sdk.Coins types (eg: 20stake). Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations. +custom use the custom type to scaffold already created chain types. 
+dec.coin use ':dec.coin' to scaffold sdk.DecCoin types (eg: 100001token). +dec.coins use ':array.dec.coin' to scaffold sdk.DecCoins types (eg: 20000002stake). Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations. +int use ':int' to scaffold int64 types (eg: 111). +int64 use ':int' to scaffold int64 types (eg: 111). +ints use ':array.int' to scaffold []int64 types (eg: 5,4,3,2,1). +string use ':string' to scaffold string types (eg: xyz). +strings use ':array.string' to scaffold []string types (eg: abc,xyz). +uint use ':uint' to scaffold uint64 types (eg: 111). +uint64 use ':uint' to scaffold uint64 types (eg: 111). +uints use ':array.uint' to scaffold []uint64 types (eg: 13,26,31,40). + +Field Usage: + - fieldName + - fieldName:fieldType + +If no :fieldType, default (string) is used + + + +``` +ignite scaffold type NAME [field:type] ... [flags] +``` + +**Examples** + +``` + ignite scaffold type todo-item priority:int desc:string tags:array.string done:bool +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for type + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold type-list + +List scaffold types + +**Synopsis** + +List all available scaffold types + +``` +ignite scaffold type-list [flags] +``` + +**Options** + +``` + -h, --help help for type-list +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** 
+ +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold vue + +Vue 3 web app template + +``` +ignite scaffold vue [flags] +``` + +**Options** + +``` + -h, --help help for vue + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite testnet + +Simulate and manage test networks + +**Synopsis** + +Comprehensive toolset for managing and simulating blockchain test networks. It allows users to either run a test network in place using mainnet data or set up a multi-node environment for more complex testing scenarios. Additionally, it includes a subcommand for simulating the chain, which is useful for fuzz testing and other testing-related tasks. + +**Options** + +``` + -h, --help help for testnet +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite testnet in-place](#ignite-testnet-in-place) - Create and start a testnet from current local net state +* [ignite testnet multi-node](#ignite-testnet-multi-node) - Initialize and provide multi-node on/off functionality +* [ignite testnet simulate](#ignite-testnet-simulate) - Run simulation testing for the blockchain + + +## ignite testnet in-place + +Create and start a testnet from current local net state + +**Synopsis** + +Testnet in-place command is used to create and start a testnet from current local net state(including mainnet). +After using this command in the repo containing the config.yml file, the network will start. +We can create a testnet from the local network state and mint additional coins for the desired accounts from the config.yml file. 
+ +``` +ignite testnet in-place [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --coin-type uint32 coin type to use for the account (default 118) + -h, --help help for in-place + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite testnet](#ignite-testnet) - Simulate and manage test networks + + +## ignite testnet multi-node + +Initialize and provide multi-node on/off functionality + +**Synopsis** + +Initialize the test network with the number of nodes and bonded from the config.yml file:: + ... + validators: + - name: alice + bonded: 100000000stake + - name: validator1 + bonded: 100000000stake + - name: validator2 + bonded: 200000000stake + - name: validator3 + bonded: 300000000stake + + + The "multi-node" command allows developers to easily set up, initialize, and manage multiple nodes for a + testnet environment. This command provides full flexibility in enabling or disabling each node as desired, + making it a powerful tool for simulating a multi-node blockchain network during development. 
+ + Usage: + ignite testnet multi-node [flags] + + + +``` +ignite testnet multi-node [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + -h, --help help for multi-node + --home string directory where the blockchain node is initialized + --node-dir-prefix string prefix of dir node (default "validator") + -p, --path string path of the app (default ".") + -r, --reset-once reset the app state once on init + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite testnet](#ignite-testnet) - Simulate and manage test networks + + +## ignite testnet simulate + +Run simulation testing for the blockchain + +**Synopsis** + +Run simulation testing for the blockchain. It sends many randomized-input messages of each module to a simulated node. + +``` +ignite testnet simulate [flags] +``` + +**Options** + +``` + --blockSize int operations per block (default 30) + --exportParamsHeight int height to which export the randomly generated params + --exportParamsPath string custom file path to save the exported params JSON + --exportStatePath string custom file path to save the exported app state JSON + --exportStatsPath string custom file path to save the exported simulation statistics JSON + --genesis string custom simulation genesis file; cannot be used with params file + --genesisTime int override genesis UNIX time instead of using a random UNIX time + -h, --help help for simulate + --initialBlockHeight int initial block to start the simulation (default 1) + --lean lean simulation log output + --numBlocks int number of new blocks to simulate from the initial block height (default 200) + --params string custom simulation params file which overrides any random params; cannot be used with genesis + --seed int simulation random seed (default 42) + --simName string name of the simulation to run 
(default "TestFullAppSimulation")
+```
+
+**SEE ALSO**
+
+* [ignite testnet](#ignite-testnet) - Simulate and manage test networks
+
+
+## ignite version
+
+Print the current build information
+
+```
+ignite version [flags]
+```
+
+**Options**
+
+```
+ -h, --help help for version
+```
+
+**SEE ALSO**
+
+* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain
+
+# Scaffold Type
+
+Ignite provides a set of scaffold types that can be used to generate code for your application.
+These types are used in the `ignite scaffold` command.
+
+## Available Scaffold Types
+
+| Type | Usage |
+| --- | --- |
+| address | use ':address' to scaffold string types (eg: cosmos1abcdefghijklmnopqrstuvwxyz0123456). |
+| array.coin | use ':array.coin' to scaffold sdk.Coins types (eg: 20stake). Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations. |
+| array.dec.coin | use ':array.dec.coin' to scaffold sdk.DecCoins types (eg: 20000002stake). Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations. |
+| array.int | use ':array.int' to scaffold []int64 types (eg: 5,4,3,2,1). |
+| array.string | use ':array.string' to scaffold []string types (eg: abc,xyz). |
+| array.uint | use ':array.uint' to scaffold []uint64 types (eg: 13,26,31,40). |
+| bool | use ':bool' to scaffold bool types (eg: true). |
+| bytes | use ':bytes' to scaffold []byte types (eg: 3,2,3,5). |
+| coin | use ':coin' to scaffold sdk.Coin types (eg: 10token). |
+| coins | use ':array.coin' to scaffold sdk.Coins types (eg: 20stake). Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations. |
+| custom | use the custom type to scaffold already created chain types. |
+| dec.coin | use ':dec.coin' to scaffold sdk.DecCoin types (eg: 100001token). 
| +| dec.coins | use ':array.dec.coin' to scaffold sdk.DecCoins types (eg: 20000002stake). Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations. | +| int | use ':int' to scaffold int64 types (eg: 111). | +| int64 | use ':int' to scaffold int64 types (eg: 111). | +| ints | use ':array.int' to scaffold []int64 types (eg: 5,4,3,2,1). | +| string | use ':string' to scaffold string types (eg: xyz). | +| strings | use ':array.string' to scaffold []string types (eg: abc,xyz). | +| uint | use ':uint' to scaffold uint64 types (eg: 111). | +| uint64 | use ':uint' to scaffold uint64 types (eg: 111). | +| uints | use ':array.uint' to scaffold []uint64 types (eg: 13,26,31,40). | + + +Field Usage: + + - fieldName + - fieldName:fieldType + + +If no :fieldType, default (string) is used diff --git a/docs/docs/03-CLI-Commands/_category_.json b/docs/docs/03-CLI-Commands/_category_.json new file mode 100644 index 0000000..b549261 --- /dev/null +++ b/docs/docs/03-CLI-Commands/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "CLI Commands", + "link": null + } \ No newline at end of file diff --git a/docs/docs/04-clients/01-go-client.md b/docs/docs/04-clients/01-go-client.md new file mode 100644 index 0000000..f592fcb --- /dev/null +++ b/docs/docs/04-clients/01-go-client.md @@ -0,0 +1,298 @@ +--- +description: Blockchain client in Go +title: Go client +--- + +# A client in the Go programming language + +In this tutorial, we will show you how to create a standalone Go program that +serves as a client for a blockchain. We will use the IGNITE® CLI to set up a +standard blockchain. To communicate with the blockchain, we will utilize the +`cosmosclient` package, which provides an easy-to-use interface for interacting +with the blockchain. You will learn how to use the `cosmosclient` package to +send transactions and query the blockchain. 
By the end of this tutorial, you +will have a good understanding of how to build a client for a blockchain using +Go and the `cosmosclient` package. + +## Create a blockchain + +To create a blockchain using the IGNITE® CLI, use the following command: + +``` +ignite scaffold chain blog +``` + +This will create a new Cosmos SDK blockchain called "blog". + +Once the blockchain has been created, you can generate code for a "blog" model +that will enable you to perform create, read, update, and delete (CRUD) +operations on blog posts. To do this, you can use the following command: + +``` +cd blog +ignite scaffold list post title body +``` + +This will generate the necessary code for the "blog" model, including functions +for creating, reading, updating, and deleting blog posts. With this code in +place, you can now use your blockchain to perform CRUD operations on blog posts. +You can use the generated code to create new blog posts, retrieve existing ones, +update their content, and delete them as needed. This will give you a fully +functional Cosmos SDK blockchain with the ability to manage blog posts. + +Start your blockchain node with the following command: + +``` +ignite chain serve +``` + +## Creating a blockchain client + +Create a new directory called `blogclient` on the same level as `blog` +directory. As the name suggests, `blogclient` will contain a standalone Go +program that acts as a client to your `blog` blockchain. + +```bash +mkdir blogclient +``` + +This command will create a new directory called `blogclient` in your current +location. If you type `ls` in your terminal window, you should see both the +`blog` and `blogclient` directories listed. + +To initialize a new Go package inside the `blogclient` directory, you can use +the following command: + +``` +cd blogclient +go mod init blogclient +``` + +This will create a `go.mod` file in the `blogclient` directory, which contains +information about the package and the Go version being used. 
+
+To import dependencies for your package, you can add the following code to the
+`go.mod` file:
+
+```text title="blogclient/go.mod"
+module blogclient
+
+go 1.24.1
+
+require (
+ blog v0.0.0-00010101000000-000000000000
+ github.com/ignite/cli/v29 v29.0.0
+)
+
+replace blog => ../blog
+```
+
+Your package will import two dependencies:
+
+* `blog`, which contains `types` of messages and a query client
+* `ignite` for the `cosmosclient` package
+
+The `replace` directive uses the package from the local `blog` directory and is
+specified as a relative path to the `blogclient` directory.
+
+Cosmos SDK uses a custom version of the `protobuf` package, so use the `replace`
+directive to make sure the correct version of the `protobuf` package is used.
+
+Finally, install dependencies for your `blogclient`:
+
+```bash
+go mod tidy
+```
+
+### Main logic of the client in `main.go`
+
+Create a `main.go` file inside the `blogclient` directory and add the following
+code:
+
+```go title="blogclient/main.go"
+package main
+
+import (
+ "context"
+ "fmt"
+ "log"
+
+ // Importing the general purpose Cosmos blockchain client
+ "github.com/ignite/cli/v29/ignite/pkg/cosmosclient"
+
+ // Importing the types package of your blog blockchain
+ "blog/x/blog/types"
+)
+
+func main() {
+ ctx := context.Background()
+ addressPrefix := "cosmos"
+
+ // Create a Cosmos client instance
+ client, err := cosmosclient.New(ctx, cosmosclient.WithAddressPrefix(addressPrefix))
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Account `alice` was initialized during `ignite chain serve`
+ accountName := "alice"
+
+ // Get account from the keyring
+ account, err := client.Account(accountName)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ addr, err := account.Address(addressPrefix)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Define a message to create a post
+ msg := &types.MsgCreatePost{
+ Creator: addr,
+ Title: "Hello!",
+ Body: "This is the first post",
+ }
+
+ // Broadcast a transaction from account `alice` with the message
+ // to create a post store response in txResp
+ txResp, 
err := client.BroadcastTx(ctx, account, msg) + if err != nil { + log.Fatal(err) + } + + // Print response from broadcasting a transaction + fmt.Print("MsgCreatePost:\n\n") + fmt.Println(txResp) + + // Instantiate a query client for your `blog` blockchain + queryClient := types.NewQueryClient(client.Context()) + + // Query the blockchain using the client's `PostAll` method + // to get all posts store all posts in queryResp + queryResp, err := queryClient.PostAll(ctx, &types.QueryAllPostRequest{}) + if err != nil { + log.Fatal(err) + } + + // Print response from querying all the posts + fmt.Print("\n\nAll posts:\n\n") + fmt.Println(queryResp) +} +``` + +The code above creates a standalone Go program that acts as a client to the +`blog` blockchain. It begins by importing the required packages, including the +general purpose Cosmos blockchain client and the `types` package of the `blog` +blockchain. + +In the `main` function, the code creates a Cosmos client instance and sets the +address prefix to "cosmos". It then retrieves an account named `"alice"` from +the keyring and gets the address of the account using the address prefix. + +Next, the code defines a message to create a blog post with the title "Hello!" +and body "This is the first post". It then broadcasts a transaction from the +account "alice" with the message to create the post, and stores the response in +the variable `txResp`. + +The code then instantiates a query client for the blog blockchain and uses it to +query the blockchain to retrieve all the posts. It stores the response in the +variable `queryResp` and prints it to the console. + +Finally, the code prints the response from broadcasting the transaction to the +console. This allows the user to see the results of creating and querying a blog +post on the `blog` blockchain using the client. 
+ +To find out more about the `cosmosclient` package, you can refer to the Go +package documentation for +[`cosmosclient`](https://pkg.go.dev/github.com/ignite/cli/ignite/pkg/cosmosclient). +This documentation provides information on how to use the `Client` type with +`Options` and `KeyringBackend`. + +## Run the blockchain and the client + +Make sure your blog blockchain is still running with `ignite chain serve`. + +Run the blockchain client: + +```bash +go run main.go +``` + +If the command is successful, the results of running the command will be printed +to the terminal. The output may include some warnings, which can be ignored. + +```yml +MsgCreatePost: + +code: 0 +codespace: "" +data: 12220A202F626C6F672E626C6F672E4D7367437265617465506F7374526573706F6E7365 +events: +- attributes: + - index: true + key: ZmVl + value: null + - index: true + key: ZmVlX3BheWVy + value: Y29zbW9zMWR6ZW13NzZ3enQ3cDBnajd3MzQyN2E0eHg3MjRkejAzd3hnOGhk + type: tx +- attributes: + - index: true + key: YWNjX3NlcQ== + value: Y29zbW9zMWR6ZW13NzZ3enQ3cDBnajd3MzQyN2E0eHg3MjRkejAzd3hnOGhkLzE= + type: tx +- attributes: + - index: true + key: c2lnbmF0dXJl + value: UWZncUJCUFQvaWxWVzJwNUJNTngzcDlvRzVpSXp0elhXdE9yMHcwVE00OEtlSkRqR0FEdU9VNjJiY1ZRNVkxTHdEbXNuYUlsTmc3VE9uMnJ2ZWRHSlE9PQ== + type: tx +- attributes: + - index: true + key: YWN0aW9u + value: L2Jsb2cuYmxvZy5Nc2dDcmVhdGVQb3N0 + type: message +gas_used: "52085" +gas_wanted: "300000" +height: "20" +info: "" +logs: +- events: + - attributes: + - key: action + value: /blog.blog.MsgCreatePost + type: message + log: "" + msg_index: 0 +raw_log: '[{"msg_index":0,"events":[{"type":"message","attributes":[{"key":"action","value":"/blog.blog.MsgCreatePost"}]}]}]' +timestamp: "" +tx: null +txhash: 4F53B75C18254F96EF159821DDD665E965DBB576A5AC2B94CE863EB62E33156A + +All posts: + +Post: pagination: +``` + +As you can see the client has successfully broadcasted a transaction and queried +the chain for blog posts. 
+
+Please note that some values in the output on your terminal (like transaction
+hash and block height) might be different from the output above.
+
+You can confirm the new post by using the `blogd q blog list-post` command:
+
+```yaml
+Post:
+- body: This is the first post
+ creator: cosmos1dzemw76wzt7p0gj7w3427a4xx724dz03wxg8hd
+ id: "0"
+ title: Hello!
+pagination:
+ next_key: null
+ total: "0"
+```
+
+Great job! You have successfully completed the process of creating a Go client
+for your Cosmos SDK blockchain, submitting a transaction, and querying the
+chain.
diff --git a/docs/docs/04-clients/02-typescript.md b/docs/docs/04-clients/02-typescript.md
new file mode 100644
index 0000000..7c57d52
--- /dev/null
+++ b/docs/docs/04-clients/02-typescript.md
@@ -0,0 +1,441 @@
+---
+description: Information about the generated TypeScript client code.
+---
+
+# TypeScript library
+
+IGNITE® offers powerful functionality for generating client-side code for your
+blockchain. Think of this as a one-click client SDK generation tailored
+specifically for your blockchain.
+
+See `ignite generate ts-client --help` to learn more on how to use TypeScript code generation.
+
+## Starting a node
+
+Create a new blockchain with `ignite scaffold chain`. You can use an existing
+blockchain project if you have one, instead.
+ +``` +ignite scaffold chain example +``` + +For testing purposes add a new account to `config.yml` with a mnemonic: + +```yml title="config.yml" +accounts: + - name: frank + coins: ["1000token", "100000000stake"] + mnemonic: play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint +``` + +Run a command to generate TypeScript clients for both standard and custom Cosmos +SDK modules: + +``` +ignite generate ts-client --clear-cache +``` + +:::tip +In order to not rely on the remote `buf.build` service, you can install the +`protoc-gen-ts_proto` binary locally and IGNITE® will use it instead of the remote plugin. + +```sh +npm install -g ts-proto +``` + +Learn more at +::: + +Run a command to start your blockchain node: + +``` +ignite chain serve -r +``` + +## Setting up a TypeScript frontend client + +The best way to get started building with the TypeScript client is by using +[Vite](https://vitejs.dev). Vite provides boilerplate code for +vanilla TS projects as well as React, Vue, Lit, Svelte and Preact frameworks. +You can find additional information at the [Vite Getting Started +guide](https://vitejs.dev/guide). + +You will also need to [polyfill](https://developer.mozilla.org/en-US/docs/Glossary/Polyfill) the client's dependencies. The following is an +example of setting up a vanilla TS project with the necessary polyfills: + +```bash +npm create vite@latest my-frontend-app -- --template vanilla-ts +cd my-frontend-app +npm install --save-dev @esbuild-plugins/node-globals-polyfill @rollup/plugin-node-resolve +``` + +You must then create the necessary `vite.config.ts` file. 
+ +```typescript title="my-frontend-app/vite.config.ts" +import { nodeResolve } from "@rollup/plugin-node-resolve"; +import { NodeGlobalsPolyfillPlugin } from "@esbuild-plugins/node-globals-polyfill"; +import { defineConfig } from "vite"; + +export default defineConfig({ + plugins: [nodeResolve()], + + optimizeDeps: { + esbuildOptions: { + define: { + global: "globalThis", + }, + plugins: [ + NodeGlobalsPolyfillPlugin({ + buffer: true, + }), + ], + }, + }, +}); +``` + +You are then ready to use the generated client code inside this project directly +or by publishing the client and installing it like any other `npm` package. + +After the chain starts, you will see Frank's address is +`cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7`. We'll be using Frank's account +for querying data and broadcasting transactions in the next section. + +## Querying + +The code generated in `ts-client` comes with a `package.json` file ready to +publish which you can modify to suit your needs. To use`ts-client` install the +required dependencies: + +``` +cd ts-client +npm install +``` + +The client is based on a modular architecture where you can configure a client +class to support the modules you need and instantiate it. + +By default, the generated client exports a client class that includes all the +Cosmos SDK, custom and 3rd party modules in use in your project. + +To instantiate the client you need to provide environment information (endpoints +and chain prefix). For querying that's all you need: + +```typescript title="my-frontend-app/src/main.ts" +import { Client } from "../../ts-client"; + +const client = new Client( + { + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos", + } +); +``` + +The example above uses `ts-client` from a local directory. If you have published +your `ts-client` on `npm` replace `../../ts-client` with a package name. 
+
+The resulting client instance contains namespaces for each module, each with a
+`query` and `tx` namespace containing the module's relevant querying and
+transacting methods with full type and auto-completion support.
+
+To query for a balance of an address:
+
+```typescript
+const balances = await client.CosmosBankV1Beta1.query.queryAllBalances(
+ 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7'
+);
+```
+
+## Broadcasting a transaction
+
+Add signing capabilities to the client by creating a wallet from a mnemonic
+(we're using Frank's mnemonic added to `config.yml` earlier) and passing it
+as an optional argument to `Client()`. The wallet implements the CosmJS
+`OfflineSigner` interface.
+
+```typescript title="my-frontend-app/src/main.ts"
+import { Client } from "../../ts-client";
+// highlight-start
+import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing";
+
+const mnemonic =
+ "play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint";
+const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic);
+// highlight-end
+
+const client = new Client(
+ {
+ apiURL: "http://localhost:1317",
+ rpcURL: "http://localhost:26657",
+ prefix: "cosmos",
+ },
+ // highlight-next-line
+ wallet
+);
+```
+
+Broadcasting a transaction:
+
+```typescript title="my-frontend-app/src/main.ts"
+const tx_result = await client.CosmosBankV1Beta1.tx.sendMsgSend({
+ value: {
+ amount: [
+ {
+ amount: '200',
+ denom: 'token',
+ },
+ ],
+ fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7',
+ toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc',
+ },
+ fee: {
+ amount: [{ amount: '0', denom: 'stake' }],
+ gas: '200000',
+ },
+ memo: '',
+})
+```
+
+## Broadcasting a transaction with a custom message
+
+If your chain already has custom messages defined, you can use those. If not,
+we'll be using IGNITE®'s scaffolded code as an example. 
Create a post with CRUD +messages: + +``` +ignite scaffold list post title body +``` + +After adding messages to your chain you may need to re-generate the TypeScript +client: + +``` +ignite generate ts-client --clear-cache +``` + +Broadcast a transaction containing the custom `MsgCreatePost`: + +```typescript title="my-frontend-app/src/main.ts" +import { Client } from "../../ts-client"; +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; + +const mnemonic = + "play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint"; +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic); + +const client = new Client( + { + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos", + }, + wallet +); +// highlight-start +const tx_result = await client.ExampleExample.tx.sendMsgCreatePost({ + value: { + title: 'foo', + body: 'bar', + creator: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + }, + fee: { + amount: [{ amount: '0', denom: 'stake' }], + gas: '200000', + }, + memo: '', +}) +// highlight-end +``` + +## Lightweight client + +If you prefer, you can construct a lighter client using only the modules you are +interested in by importing the generic client class and expanding it with the +modules you need: + +```typescript title="my-frontend-app/src/main.ts" +// highlight-start +import { IgniteClient } from '../../ts-client/client' +import { Module as CosmosBankV1Beta1 } from '../../ts-client/cosmos.bank.v1beta1' +import { Module as CosmosStakingV1Beta1 } from '../../ts-client/cosmos.staking.v1beta1' +// highlight-end +import { DirectSecp256k1HdWallet } from '@cosmjs/proto-signing' + +const mnemonic = + 'play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint' +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic) +// 
highlight-next-line +const Client = IgniteClient.plugin([CosmosBankV1Beta1, CosmosStakingV1Beta1]) + +const client = new Client( + { + apiURL: 'http://localhost:1317', + rpcURL: 'http://localhost:26657', + prefix: 'cosmos', + }, + wallet, +) +``` + +## Broadcasting a multi-message transaction + +You can also construct TX messages separately and send them in a single TX using +a global signing client like so: + +```typescript title="my-frontend-app/src/main.ts" +const msg1 = await client.CosmosBankV1Beta1.tx.msgSend({ + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc', + }, +}) + +const msg2 = await client.CosmosBankV1Beta1.tx.msgSend({ + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc', + }, +}) + +const tx_result = await client.signAndBroadcast( + [msg1, msg2], + { + amount: [{ amount: '0', denom: 'stake' }], + gas: '200000', + }, + '', +) +``` + +Finally, for additional ease-of-use, apart from the modular client mentioned +above, each generated module is usable on its own in a stripped-down way by +exposing a separate txClient and queryClient. 
+ +```typescript title="my-frontend-app/src/main.ts" +import { txClient } from '../../ts-client/cosmos.bank.v1beta1' +import { DirectSecp256k1HdWallet } from '@cosmjs/proto-signing' + +const mnemonic = + 'play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint' +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic) + +const client = txClient({ + signer: wallet, + prefix: 'cosmos', + addr: 'http://localhost:26657', +}) + +const tx_result = await client.sendMsgSend({ + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc', + }, + fee: { + amount: [{ amount: '0', denom: 'stake' }], + gas: '200000', + }, + memo: '', +}) +``` + +## Usage with Keplr + +Normally, Keplr provides a wallet object implementing the `OfflineSigner` +interface, so you can simply replace the `wallet` argument in client +instantiation with `window.keplr.getOfflineSigner(chainId)`. However, Keplr +requires information about your chain, like chain ID, denoms, fees, etc. +[`experimentalSuggestChain()`](https://docs.keplr.app/api/guide/suggest-chain) is +a method Keplr provides to pass this information to the Keplr extension. + +The generated client makes this easier by offering a `useKeplr()` method that +automatically discovers the chain information and sets it up for you. 
Thus, you
can instantiate the client without a wallet and then call `useKeplr()` to enable
transacting via Keplr like so:

```typescript title="my-frontend-app/src/main.ts"
import { Client } from '../../ts-client';

const client = new Client({
    apiURL: "http://localhost:1317",
    rpcURL: "http://localhost:26657",
    prefix: "cosmos"
  }
);
await client.useKeplr();
```

`useKeplr()` optionally accepts an object argument that contains one or more of
the same keys as the `ChainInfo` type argument of `experimentalSuggestChain()`,
allowing you to override the auto-discovered values.

For example, the default chain name and token precision (which are not recorded
on-chain) are set to `<chain ID> Network` and `0`, while the ticker for the
denom is set to the denom name in uppercase. If you want to override these, you
can do something like:

```typescript title="my-frontend-app/src/main.ts"
import { Client } from '../../ts-client';

const client = new Client({
    apiURL: "http://localhost:1317",
    rpcURL: "http://localhost:26657",
    prefix: "cosmos"
  }
);
await client.useKeplr({
  chainName: 'My Great Chain',
  stakeCurrency: {
    coinDenom: 'TOKEN',
    coinMinimalDenom: 'utoken',
    coinDecimals: 6,
  },
})
```

## Wallet switching

The client also allows you to switch out the wallet for a different one on an
already instantiated client like so:

```typescript
import { Client } from '../../ts-client';
import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing";

const mnemonic =
  'play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint'
const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic);

const client = new Client({
    apiURL: "http://localhost:1317",
    rpcURL: "http://localhost:26657",
    prefix: "cosmos"
  }
);
await client.useKeplr();

// broadcast transactions using the Keplr wallet

client.useSigner(wallet);

// 
broadcast transactions using the CosmJS wallet +``` diff --git a/docs/docs/04-clients/03-vue.md b/docs/docs/04-clients/03-vue.md new file mode 100644 index 0000000..ead1c00 --- /dev/null +++ b/docs/docs/04-clients/03-vue.md @@ -0,0 +1,181 @@ +# Vue frontend + +:::warning +The Vue frontend is being reworked and is not yet stable. +In the meantime, refer to the [IGNITE® CCA App](https://ignite.com/marketplace/cca). +::: + +Welcome to this tutorial on using IGNITE® to develop a web application for your +blockchain with Vue 3. IGNITE® is a tool that simplifies the process of building +a blockchain application by providing a set of templates and generators that can +be used to get up and running quickly. + +One of the features of IGNITE® is its support for [Vue 3](https://vuejs.org/), a +popular JavaScript framework for building user interfaces. In this tutorial, you +will learn how to use IGNITE® to create a new blockchain and scaffold a Vue +frontend template. This will give you a basic foundation for your web +application and make it easier to get started building out the rest of your +application. + +Once you have your blockchain and Vue template set up, the next step is to +generate an API client. This will allow you to easily interact with your +blockchain from your web application, enabling you to retrieve data and make +transactions. By the end of this tutorial, you will have a fully functional web +application that is connected to your own blockchain. + +Prerequisites: + +* [Node.js](https://nodejs.org/en/) +* [Keplr](https://www.keplr.app/) Chrome extension + +## Create a blockchain and a Vue app + +Create a new blockchain project: + +``` +ignite scaffold chain example +``` + +To create a Vue frontend template, go to the `example` directory and run the +following command: + +``` +ignite scaffold vue +``` + +This will create a new Vue project in the `vue` directory. 
This project can be +used with any blockchain, but it depends on an API client to interact with the +blockchain. To generate an API client, run the following command in the +`example` directory: + +``` +ignite generate composables +``` + +This command generates two directories: + +* `ts-client`: a framework-agnostic TypeScript client that can be used to + interact with your blockchain. You can learn more about how to use this client + in the [TypeScript client tutorial](/clients/typescript). +* `vue/src/composables`: a collection of Vue 3 + [composables](https://vuejs.org/guide/reusability/composables.html) that wrap + the TypeScript client and make it easier to interact with your blockchain from + your Vue application. + +## Set up Keplr and an account + +Open your browser with the Keplr wallet extension installed. Follow [the +instructions](https://keplr.crunch.help/en/getting-started/creating-a-new-keplr-account) +to create a new account or use an existing one. Make sure to save the mnemonic +phrase as you will need it in the next step. + +Do not use a mnemonic phrase that is associated with an account that holds +assets you care about. If you do, you risk losing those assets. It's a good +practice to create a new account for development purposes. + +Add the account you're using in Keplr to your blockchain's `config.yml` file: + +```yml +accounts: + - name: alice + coins: [20000token, 200000000stake] + - name: bob + coins: [10000token, 100000000stake] + # highlight-start + - name: frank + coins: [10000token, 100000000stake] + mnemonic: struggle since inmate safe logic kite tag web win stay security wonder + # highlight-end +``` + +Replace the `struggle since...` mnemonic with the one you saved in the previous +step. + +Adding an account with a mnemonic to the config file will tell IGNITE® CLI to add +the account to the blockchain when you start it. This is useful for development +purposes, but you should not do this in production. 
+ +## Start a blockchain and a Vue app + +In the `example` directory run the following command to start your blockchain: + +```bash +ignite chain serve +``` + +To start your Vue application, go to the `vue` directory and run the following +command in a separate terminal window: + +:::note +Make sure you have [pnpm](https://pnpm.io/) installed. +::: + +```bash +pnpm install && pnpm dev +``` + +It is recommended to run `pnpm install` before starting your app with `pnpm dev` to ensure that all dependencies are installed (including the ones that the API client has, see `vue/postinstall.js`). + +Open your browser and navigate to +[http://localhost:5173/](http://localhost:5173/). + +![Web app](/img/web-1.png) + +Press "Connect wallet", enter your password into Keplr and press "Approve" to +add your blockchain to Keplr. + + + +Make sure to select the account you're using for development purposes and the +"Example Network" in Keplr's blockchain dropdown. You should see a list of +assets in your Vue app. + +![Web app](/img/web-5.png) + +Congratulations! You have successfully created a client-side Vue application and +connected it to your blockchain. You can modify the source code of your Vue +application to build out the rest of your project. + +## Setting the address prefix + +It is necessary to set the correct address prefix in order for the Vue app to +properly interact with a Cosmos chain. The address prefix is used to identify +the chain that the app is connected to, and must match the prefix used by the +chain. + +By default, IGNITE® creates a chain with the `cosmos` prefix. If you have +created your chain with `ignite scaffold chain ... --address-prefix foo` or +manually changed the prefix in the source code of the chain, you need to set the +prefix in the Vue app. + +There are two ways to set the address prefix in a Vue app. 
+ +### Using an environment variable + +You can set the `VITE_ADDRESS_PREFIX` environment variable to the correct +address prefix for your chain. This will override the default prefix used by the +app. + +To set the `VITE_ADDRESS_PREFIX` environment variable, you can use the following +command: + +```bash +export VITE_ADDRESS_PREFIX=your-prefix +``` + +Replace `your-prefix` with the actual address prefix for your chain. + +### Setting address prefix in the code + +Alternatively, you can manually set the correct address prefix by replacing the +fallback value of the `prefix` variable in the file `./vue/src/env.ts`. + +To do this, open the file `./vue/src/env.ts` and find the following line: + +```ts title="./vue/src/env.ts" +const prefix = process.env.VITE_ADDRESS_PREFIX || 'your-prefix'; +``` + +Replace `your-prefix` with the actual address prefix for your chain. + +Save the file and restart the Vue app to apply the changes. diff --git a/docs/docs/04-clients/_category_.json b/docs/docs/04-clients/_category_.json new file mode 100644 index 0000000..4d95374 --- /dev/null +++ b/docs/docs/04-clients/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Create an Interface", + "link": null +} \ No newline at end of file diff --git a/docs/docs/05-contributing/01-docs.md b/docs/docs/05-contributing/01-docs.md new file mode 100644 index 0000000..b0b722d --- /dev/null +++ b/docs/docs/05-contributing/01-docs.md @@ -0,0 +1,105 @@ +--- +sidebar_position: 1 +slug: /contributing +--- + +# Improving documentation + +Thank you for visiting our repository and considering making contributions. We +appreciate your interest in helping us to create and maintain awesome tutorials +and documentation. + +## Using this repo + +Review existing [IGNITE® CLI issues](https://github.com/ignite/cli/issues) to see +if your question has already been asked and answered. + +- To provide feedback, file an issue and provide generous details to help us + understand how we can make it better. 
+- To provide a fix, make a direct contribution. If you're not a member or + maintainer, fork the repo and then submit a pull request (PR) from your forked + repo to the `main` branch. +- Start by creating a draft pull request. Create your draft PR early, even if + your work is just beginning or incomplete. Your draft PR indicates to the + community that you're working on something and provides a space for + conversations early in the development process. Merging is blocked for `Draft` + PRs, so they provide a safe place to experiment and invite comments. + +## Reviewing technical content PRs + +Some of the best content contributions come during the PR review cycles. Follow +best practices for technical content PR reviews just like you do for code +reviews. + +- For in-line suggestions, use the [GitHub suggesting + feature](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/commenting-on-a-pull-request) + . +- The PR owner can merge in your suggested commits one at a time or in batch + (preferred). +- When you are providing a more granular extensive review that results in more + than 20 in-line suggestions, go ahead and check out the branch and make the + changes yourself. + +## Writing and contributing + +We welcome contributions to the docs and tutorials. + +Our technical content follows the [Google developer documentation style +guide](https://developers.google.com/style). 
Highlights to help you get started: + +- [Highlights](https://developers.google.com/style/highlights) +- [Word list](https://developers.google.com/style/word-list) +- [Style and tone](https://developers.google.com/style/tone) +- [Writing for a global + audience](https://developers.google.com/style/translation) +- [Cross-references](https://developers.google.com/style/cross-references) +- [Present tense](https://developers.google.com/style/tense) + +The Google guidelines include more material than is listed here and are used as +a guide that enables easy decision-making about proposed content changes. + +Other useful resources: + +- [Google Technical Writing Courses](https://developers.google.com/tech-writing) +- [GitHub Guides Mastering + Markdown](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax) + +## Where can I find the tutorials and docs? + +Technical content includes knowledge base articles and interactive tutorials. + +- The IGNITE® CLI Developer Tutorials content is in the `docs/guide` folder. +- The Knowledge Base content is in the `docs/kb` folder. +- Upgrade information is in the `docs/migration` folder. + +Note: The CLI docs are auto-generated and do not support doc updates. + +Locations and folders for other content can vary. Explore the self-describing +folders for the content that you are interested in. Some articles and tutorials +reside in a single Markdown file while sub-folders might be present for other +tutorials. + +As always, work-in-progress content might be happening in other locations and +repos. + +## Who works on the tutorials? + +The IGNITE® product team developers are focused on building IGNITE® CLI and +improving the developer experience. The IGNITE® Ecosystem Development team owns +the technical content and tutorials and manages developer onboarding. 
+ +Meet the [people behind IGNITE® CLI and our +contributors](https://github.com/ignite/cli/graphs/contributors). + +## Viewing docs builds + +Use a preview to see what your changes will look like in production before the +updated pages are published. + +- While a PR is in draft mode, you can rely on using the preview feature in + Markdown. +- After the PR moves from **Draft** to **Ready for review**, the CI status + checks generate a deployment preview. This preview stays up to date as you + continue to work and commit new changes to the same branch. A `Docs Deploy + Preview / build_and_deploy (pull_request)` preview on a GitHub actions URL is + unique for that PR. diff --git a/docs/docs/05-contributing/_category_.json b/docs/docs/05-contributing/_category_.json new file mode 100644 index 0000000..5077538 --- /dev/null +++ b/docs/docs/05-contributing/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Contribute to IGNITE®", + "link": null +} \ No newline at end of file diff --git a/docs/docs/06-migration/_category_.json b/docs/docs/06-migration/_category_.json new file mode 100644 index 0000000..9460d57 --- /dev/null +++ b/docs/docs/06-migration/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Migration", + "link": null +} \ No newline at end of file diff --git a/docs/docs/06-migration/readme.md b/docs/docs/06-migration/readme.md new file mode 100644 index 0000000..2c9851a --- /dev/null +++ b/docs/docs/06-migration/readme.md @@ -0,0 +1,74 @@ +--- +sidebar_position: 0 +--- + +# Migration Guides + +Welcome to the section on upgrading to a newer version of IGNITE® CLI! If you're +looking to update to the latest version, you'll want to start by checking the +documentation to see if there are any special considerations or instructions you +need to follow. + +If there is no documentation for the latest version of IGNITE® CLI, it's +generally safe to assume that there were no breaking changes, and you can +proceed with using the latest version with your project. 
+ +## Create your own Migration Guide + +The `gen-mig-diffs` tool helps developers manage and visualize code changes across multiple major versions of IGNITE®. With each major upgrade, the codebase might undergo significant changes, making it challenging for developers to track these differences after several updates. The `gen-mig-diffs` tool simplifies this process by scaffolding blockchains with both the old and new versions and displaying the differences. + +It is located in the [IGNITE® CLI GitHub repository](https://github.com/ignite/cli/tree/main/ignite/internal/tools/gen-mig-diffs) +directory and has been made into a standalone project. + +To set up this tool in your development environment: + +```shell +gen-mig-diffs [flags] +``` + +This tool generates migration diff files for each of IGNITE®'s scaffold commands. It compares two specified versions of IGNITE® and provides a clear, organized view of the changes. + +## How to Get Started + +1. Clone the IGNITE® CLI repository: + +```shell +git clone https://github.com/ignite/cli.git --depth=1 && \ +cd cli/ignite/internal/tools/gen-mig-diffs +``` + +2. Install and show usage: + +```shell +go install . && gen-mig-diffs -h +``` + +### Example Migration + +As an example, to generate migration diffs between versions 0.27.2 and 28.3.0, use the following command: + +```shell +gen-mig-diffs --output temp/migration --from v0.27.2 --to v28.3.0 +``` + +This command scaffolds blockchains with the specified versions and shows the differences, making it easier for developers to understand and apply necessary changes when upgrading their projects. 
+ +## Usage + +```bash +This tool is used to generate migration diff files for each of ignites scaffold commands + +Usage: + gen-mig-diffs [flags] + +Flags: + -f, --from string Version of IGNITE® or path to IGNITE® source code to generate the diff from + -h, --help help for gen-mig-diffs + -o, --output string Output directory to save the migration document (default "docs/docs/06-migration") + --repo-output string Output path to clone the IGNITE® repository + -s, --repo-source string Path to IGNITE® source code repository. Set the source automatically set the cleanup to false + --repo-url string Git URL for the IGNITE® repository (default "https://github.com/ignite/cli.git") + --scaffold-cache string Path to cache directory + --scaffold-output string Output path to clone the IGNITE® repository + -t, --to string Version of IGNITE® or path to IGNITE® source code to generate the diff to +``` diff --git a/docs/docs/06-migration/v0.18.md b/docs/docs/06-migration/v0.18.md new file mode 100644 index 0000000..556d973 --- /dev/null +++ b/docs/docs/06-migration/v0.18.md @@ -0,0 +1,458 @@ +--- +sidebar_position: 999 +title: v0.18.0 +description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.18, changes are required to use IGNITE® CLI v0.18. +--- + +# Upgrading a Blockchain to use IGNITE® CLI v0.18 + +IGNITE® CLI v0.18 comes with Cosmos SDK v0.44. This version of Cosmos SDK introduced changes that are not compatible with +chains that were scaffolded with IGNITE® CLI versions lower than v0.18. + +**Important:** After upgrading from IGNITE® CLI v0.17.3 to IGNITE® CLI v0.18, you must update the default blockchain +template to use blockchains that were scaffolded with earlier versions. + +These instructions are written for a blockchain that was scaffolded with the following command: + +``` +ignite scaffold chain github.com/username/mars +``` + +If you used a different module path, replace `username` and `mars` with the correct values for your blockchain. 
+ +## Blockchain + +For each file listed, make the required changes to the source code of the blockchain template. + +### go.mod + +``` +module github.com/username/mars + +go 1.16 + +require ( + github.com/cosmos/cosmos-sdk v0.44.0 + github.com/cosmos/ibc-go v1.2.0 + github.com/gogo/protobuf v1.3.3 + github.com/google/go-cmp v0.5.6 // indirect + github.com/gorilla/mux v1.8.0 + github.com/grpc-ecosystem/grpc-gateway v1.16.0 + github.com/spf13/cast v1.3.1 + github.com/spf13/cobra v1.1.3 + github.com/stretchr/testify v1.7.0 + github.com/tendermint/spm v0.1.6 + github.com/tendermint/tendermint v0.34.13 + github.com/tendermint/tm-db v0.6.4 + google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83 + google.golang.org/grpc v1.40.0 +) + +replace ( + github.com/99designs/keyring => github.com/cosmos/keyring v1.1.7-0.20210622111912-ef00f8ac3d76 + github.com/gogo/protobuf => github.com/regen-network/protobuf v1.3.3-alpha.regen.1 + google.golang.org/grpc => google.golang.org/grpc v1.33.2 +) +``` + +### app/app.go + +```go +package app + +import ( + //... 
+ // Add the following packages: + "github.com/cosmos/cosmos-sdk/x/feegrant" + feegrantkeeper "github.com/cosmos/cosmos-sdk/x/feegrant/keeper" + feegrantmodule "github.com/cosmos/cosmos-sdk/x/feegrant/module" + + "github.com/cosmos/ibc-go/modules/apps/transfer" + ibctransferkeeper "github.com/cosmos/ibc-go/modules/apps/transfer/keeper" + ibctransfertypes "github.com/cosmos/ibc-go/modules/apps/transfer/types" + ibc "github.com/cosmos/ibc-go/modules/core" + ibcclient "github.com/cosmos/ibc-go/modules/core/02-client" + ibcporttypes "github.com/cosmos/ibc-go/modules/core/05-port/types" + ibchost "github.com/cosmos/ibc-go/modules/core/24-host" + ibckeeper "github.com/cosmos/ibc-go/modules/core/keeper" + // Remove the following packages: + // transfer "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer" + // ibctransferkeeper "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer/keeper" + // ibctransfertypes "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer/types" + // ibc "github.com/cosmos/cosmos-sdk/x/ibc/core" + // ibcclient "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client" + // porttypes "github.com/cosmos/cosmos-sdk/x/ibc/core/05-port/types" + // ibchost "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" + // ibckeeper "github.com/cosmos/cosmos-sdk/x/ibc/core/keeper" +) + +var ( + //... + ModuleBasics = module.NewBasicManager( + //... + slashing.AppModuleBasic{}, + // Add feegrantmodule.AppModuleBasic{}, + feegrantmodule.AppModuleBasic{}, // <-- + ibc.AppModuleBasic{}, + //... + ) + //... +) + +type App struct { + //... + // Replace codec.Marshaler with codec.Codec + appCodec codec.Codec // <-- + // Add FeeGrantKeeper + FeeGrantKeeper feegrantkeeper.Keeper // <-- +} + +func New( /*...*/ ) { + //bApp.SetAppVersion(version.Version) + bApp.SetVersion(version.Version) // <-- + + keys := sdk.NewKVStoreKeys( + //... + upgradetypes.StoreKey, + // Add feegrant.StoreKey + feegrant.StoreKey, // <-- + evidencetypes.StoreKey, + //... 
+ ) + + app.FeeGrantKeeper = feegrantkeeper.NewKeeper(appCodec, keys[feegrant.StoreKey], app.AccountKeeper) // <-- + // Add app.BaseApp as the last argument to upgradekeeper.NewKeeper + app.UpgradeKeeper = upgradekeeper.NewKeeper(skipUpgradeHeights, keys[upgradetypes.StoreKey], appCodec, homePath, app.BaseApp) + + app.IBCKeeper = ibckeeper.NewKeeper( + // Add app.UpgradeKeeper + appCodec, keys[ibchost.StoreKey], app.GetSubspace(ibchost.ModuleName), app.StakingKeeper, app.UpgradeKeeper, scopedIBCKeeper, + ) + + govRouter.AddRoute(govtypes.RouterKey, govtypes.ProposalHandler). + //... + // Replace NewClientUpdateProposalHandler with NewClientProposalHandler + AddRoute(ibchost.RouterKey, ibcclient.NewClientProposalHandler(app.IBCKeeper.ClientKeeper)) + + // Replace porttypes with ibcporttypes + ibcRouter := ibcporttypes.NewRouter() + + app.mm.SetOrderBeginBlockers( + upgradetypes.ModuleName, + // Add capabilitytypes.ModuleName, + capabilitytypes.ModuleName, + minttypes.ModuleName, + //... + // Add feegrant.ModuleName, + feegrant.ModuleName, + ) + + // Add app.appCodec as an argument to module.NewConfigurator: + app.mm.RegisterServices(module.NewConfigurator(app.appCodec, app.MsgServiceRouter(), app.GRPCQueryRouter())) + + // Replace: + // app.SetAnteHandler( + // ante.NewAnteHandler( + // app.AccountKeeper, app.BankKeeper, ante.DefaultSigVerificationGasConsumer, + // encodingConfig.TxConfig.SignModeHandler(), + // ), + // ) + + // With the following: + anteHandler, err := ante.NewAnteHandler( + ante.HandlerOptions{ + AccountKeeper: app.AccountKeeper, + BankKeeper: app.BankKeeper, + SignModeHandler: encodingConfig.TxConfig.SignModeHandler(), + FeegrantKeeper: app.FeeGrantKeeper, + SigGasConsumer: ante.DefaultSigVerificationGasConsumer, + }, + ) + if err != nil { + panic(err) + } + app.SetAnteHandler(anteHandler) + + // Remove the following: + // ctx := app.BaseApp.NewUncachedContext(true, tmproto.Header{}) + // app.CapabilityKeeper.InitializeAndSeal(ctx) +} + +func 
(app *App) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci.ResponseInitChain { + var genesisState GenesisState + if err := tmjson.Unmarshal(req.AppStateBytes, &genesisState); err != nil { + panic(err) + } + // Add the following: + app.UpgradeKeeper.SetModuleVersionMap(ctx, app.mm.GetVersionMap()) + return app.mm.InitGenesis(ctx, app.appCodec, genesisState) +} + +// Replace Marshaler with Codec +func (app *App) AppCodec() codec.Codec { + return app.appCodec +} + +// Replace BinaryMarshaler with BinaryCodec +func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey sdk.StoreKey) paramskeeper.Keeper { + //... +} +``` + +### app/genesis.go + +```go +// Replace codec.JSONMarshaler with codec.JSONCodec +func NewDefaultGenesisState(cdc codec.JSONCodec) GenesisState { + // ... +} +``` + +### testutil/keeper/mars.go + +Add the following code: + +```go +package keeper + +import ( + "testing" + + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/store" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/stretchr/testify/require" + "github.com/tendermint/tendermint/libs/log" + tmproto "github.com/tendermint/tendermint/proto/tendermint/types" + tmdb "github.com/tendermint/tm-db" + "github.com/username/mars/x/mars/keeper" + "github.com/username/mars/x/mars/types" +) + +func MarsKeeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + + db := tmdb.NewMemDB() + stateStore := store.NewCommitMultiStore(db) + stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + require.NoError(t, stateStore.LoadLatestVersion()) + + registry := codectypes.NewInterfaceRegistry() + k := keeper.NewKeeper( + codec.NewProtoCodec(registry), 
+ storeKey, + memStoreKey, + ) + + ctx := sdk.NewContext(stateStore, tmproto.Header{}, false, log.NewNopLogger()) + return k, ctx +} +``` + +If `mars` is an IBC-enabled module, add the following code, instead: + +```go +package keeper + +import ( + "testing" + + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/store" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + capabilitykeeper "github.com/cosmos/cosmos-sdk/x/capability/keeper" + typesparams "github.com/cosmos/cosmos-sdk/x/params/types" + ibckeeper "github.com/cosmos/ibc-go/modules/core/keeper" + "github.com/stretchr/testify/require" + "github.com/tendermint/tendermint/libs/log" + tmproto "github.com/tendermint/tendermint/proto/tendermint/types" + tmdb "github.com/tendermint/tm-db" + "github.com/username/test/x/mars/keeper" + "github.com/username/test/x/mars/types" +) + +func MarsKeeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + logger := log.NewNopLogger() + + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + + db := tmdb.NewMemDB() + stateStore := store.NewCommitMultiStore(db) + stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + require.NoError(t, stateStore.LoadLatestVersion()) + + registry := codectypes.NewInterfaceRegistry() + appCodec := codec.NewProtoCodec(registry) + capabilityKeeper := capabilitykeeper.NewKeeper(appCodec, storeKey, memStoreKey) + + amino := codec.NewLegacyAmino() + ss := typesparams.NewSubspace(appCodec, + amino, + storeKey, + memStoreKey, + "MarsSubSpace", + ) + IBCKeeper := ibckeeper.NewKeeper( + appCodec, + storeKey, + ss, + nil, + nil, + capabilityKeeper.ScopeToModule("MarsIBCKeeper"), + ) + + k := keeper.NewKeeper( + codec.NewProtoCodec(registry), + storeKey, + memStoreKey, + IBCKeeper.ChannelKeeper, + 
&IBCKeeper.PortKeeper, + capabilityKeeper.ScopeToModule("MarsScopedKeeper"), + ) + + ctx := sdk.NewContext(stateStore, tmproto.Header{}, false, logger) + return k, ctx +} +``` + +### testutil/network/network.go + +```go +func DefaultConfig() network.Config { + // ... + return network.Config{ + // ... + // Add sdk.DefaultPowerReduction + AccountTokens: sdk.TokensFromConsensusPower(1000, sdk.DefaultPowerReduction), + StakingTokens: sdk.TokensFromConsensusPower(500, sdk.DefaultPowerReduction), + BondedTokens: sdk.TokensFromConsensusPower(100, sdk.DefaultPowerReduction), + // ... + } +} +``` + +### testutil/sample/sample.go + +Add the following code: + +```go +package sample + +import ( + "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" + sdk "github.com/cosmos/cosmos-sdk/types" +) + +// AccAddress returns a sample account address +func AccAddress() string { + pk := ed25519.GenPrivKey().PubKey() + addr := pk.Address() + return sdk.AccAddress(addr).String() +} +``` + +### BandChain Support + +If your module includes integration with BandChain, added manually or scaffolded with `ignite scaffold band`, upgrade +the `github.com/bandprotocol/bandchain-packet` package to `v0.0.2` in `go.mod`. + +## Module + +### x/mars/keeper/keeper.go + +```go +package keeper + +// ... + +type ( + Keeper struct { + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec + //... + } +) + +func NewKeeper( + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec, + // ... +) *Keeper { + // ... +} +``` + +### x/mars/keeper/msg_server_test.go + +```go +package keeper_test + +import ( + //... 
+ // Add the following: + keepertest "github.com/username/mars/testutil/keeper" + "github.com/username/mars/x/mars/keeper" +) + +func setupMsgServer(t testing.TB) (types.MsgServer, context.Context) { + // Replace + // keeper, ctx := setupKeeper(t) + // return NewMsgServerImpl(*keeper), sdk.WrapSDKContext(ctx) + + // With the following: + k, ctx := keepertest.MarsKeeper(t) + return keeper.NewMsgServerImpl(*k), sdk.WrapSDKContext(ctx) +} +``` + +### x/mars/module.go + +```go +package mars + +type AppModuleBasic struct { + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec +} + +// Replace Marshaler with BinaryCodec +func NewAppModuleBasic(cdc codec.BinaryCodec) AppModuleBasic { + return AppModuleBasic{cdc: cdc} +} + +// Replace JSONMarshaler with JSONCodec +func (AppModuleBasic) DefaultGenesis(cdc codec.JSONCodec) json.RawMessage { + return cdc.MustMarshalJSON(types.DefaultGenesis()) +} + +// Replace JSONMarshaler with JSONCodec +func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncodingConfig, bz json.RawMessage) error { + //... +} + +// Replace codec.Marshaller with codec.Codec +func NewAppModule(cdc codec.Codec, keeper keeper.Keeper) AppModule { + //... +} + +// Replace JSONMarshaler with JSONCodec +func (am AppModule) InitGenesis(ctx sdk.Context, cdc codec.JSONCodec, gs json.RawMessage) []abci.ValidatorUpdate { + //... +} + +// Replace JSONMarshaler with JSONCodec +func (am AppModule) ExportGenesis(ctx sdk.Context, cdc codec.JSONCodec) json.RawMessage { + //... +} + +// Add the following +func (AppModule) ConsensusVersion() uint64 { return 2 } +``` diff --git a/docs/docs/06-migration/v0.19.2.md b/docs/docs/06-migration/v0.19.2.md new file mode 100644 index 0000000..294ec85 --- /dev/null +++ b/docs/docs/06-migration/v0.19.2.md @@ -0,0 +1,26 @@ +--- +sidebar_position: 998 +title: v0.19.2 +description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.19.2, changes are required to use IGNITE® CLI v0.19.2. 
+---
+
+# Upgrading a blockchain to use IGNITE® CLI v0.19.2
+
+IGNITE® CLI v0.19.2 comes with IBC v2.0.2.
+
+With IGNITE® CLI v0.19.2, the contents of the deprecated IGNITE® CLI Modules `tendermint/spm` repo are moved to the
+official IGNITE® CLI repo which introduces breaking changes.
+
+To migrate your chain that was scaffolded with IGNITE® CLI versions lower than v0.19.2:
+
+1. IBC upgrade: Use
+   the [IBC migration documents](https://github.com/cosmos/ibc-go/blob/v6.2.0/docs/migrations/v1-to-v2.md)
+
+2. In your chain's `go.mod` file, remove `tendermint/spm` and add the v0.19.2 version of `tendermint/starport`. If your
+   chain uses these packages, change the import paths as shown:
+
+   - `github.com/tendermint/spm/ibckeeper` moved to `github.com/tendermint/starport/starport/pkg/cosmosibckeeper`
+   - `github.com/tendermint/spm/cosmoscmd` moved to `github.com/tendermint/starport/starport/pkg/cosmoscmd`
+   - `github.com/tendermint/spm/openapiconsole` moved to `github.com/tendermint/starport/starport/pkg/openapiconsole`
+   - `github.com/tendermint/spm/testutil/sample` moved
+     to `github.com/tendermint/starport/starport/pkg/cosmostestutil/sample`
diff --git a/docs/docs/06-migration/v0.20.0.md b/docs/docs/06-migration/v0.20.0.md
new file mode 100644
index 0000000..d46c902
--- /dev/null
+++ b/docs/docs/06-migration/v0.20.0.md
@@ -0,0 +1,12 @@
+---
+sidebar_position: 997
+title: v0.20.0
+description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.20.0, changes are required to use IGNITE® CLI v0.20.0.
+---
+
+# Upgrading a blockchain to use IGNITE® CLI v0.20.0
+
+1. Upgrade your Cosmos SDK version to [v0.45.3](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.45.3).
+
+2. Update your `SetOrderBeginBlockers` and `SetOrderEndBlockers` in your `app/app.go` to explicitly add entries for all
+   the modules you use in your chain.
diff --git a/docs/docs/06-migration/v0.22.0.md b/docs/docs/06-migration/v0.22.0.md
new file mode 100644
index 0000000..b2ce2f8
--- /dev/null
+++ b/docs/docs/06-migration/v0.22.0.md
@@ -0,0 +1,36 @@
+---
+sidebar_position: 996
+title: v0.22.0
+description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.22.0, changes are required to use IGNITE® CLI v0.22.0.
+---
+
+# Upgrading a blockchain to use IGNITE® CLI v0.22.0
+
+IGNITE® CLI v0.22.2 changed the GitHub username from "ignite-hq" to "ignite", which means the imports must be fixed to
+reflect this change.
+
+1. In your `go.mod` file find the require line for IGNITE® CLI that starts with `github.com/ignite-hq/cli` and is
+   followed by a version.
+   It looks something like `github.com/ignite-hq/cli v0.22.0`, and replace it by `github.com/ignite/cli v0.22.2`.
+
+2. Make a bulk find and replace in the import statements for `github.com/ignite-hq/cli` to be replaced
+   by `github.com/ignite/cli`.
+
+3. Finally, run `go mod tidy` and ensure there's no mention of `ignite-hq/cli` in your `go.sum` file.
+
+This update includes an upgrade to the `ibc-go` packages. Please make the corresponding changes:
+
+1. Upgrade your IBC version to [v3](https://github.com/cosmos/ibc-go/releases/tag/v3.0.0).
+
+   1. Search for `github.com/cosmos/ibc-go/v2` in the import statements of your `.go` files and replace `v2` in the end
+      with `v3`
+
+   2. Open your `app.go`,
+
+   - Update your transfer keeper by adding another `app.IBCKeeper.ChannelKeeper` as an argument
+     after `app.IBCKeeper.ChannelKeeper`
+
+   - Define `var transferIBCModule = transfer.NewIBCModule(app.TransferKeeper)` in your `New()` func, and update
+     your existing IBC router to use it: `ibcRouter.AddRoute(ibctransfertypes.ModuleName, transferIBCModule)`
+
+   3. 
Open your `go.mod` and change the IBC line with `github.com/cosmos/ibc-go/v3 v3.0.0`
diff --git a/docs/docs/06-migration/v0.24.0.md b/docs/docs/06-migration/v0.24.0.md
new file mode 100644
index 0000000..85e42ee
--- /dev/null
+++ b/docs/docs/06-migration/v0.24.0.md
@@ -0,0 +1,330 @@
+---
+sidebar_position: 995
+title: v0.24.0
+description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.24, changes are required to use IGNITE® CLI v0.24.0.
+---
+
+## Cosmos SDK v0.46 upgrade notes
+
+### Update dependencies
+
+Cosmos SDK v0.46 is compatible with the latest version of IBC Go v5. If you have a chain that is using an older version,
+update the dependencies in your project.
+
+Throughout the code you might see the following dependencies:
+
+```go
+package pkg_name
+
+import (
+	"github.com/cosmos/ibc-go/v3/..."
+)
+```
+
+Where `v3` is the version of IBC Go and `...` are different IBC Go packages.
+
+To upgrade the version to `v5`, a global find-and-replace should work. Replace `cosmos/ibc-go/v3` (or whichever version
+you're using) with `cosmos/ibc-go/v5` only in `*.go` files (to exclude unwanted changes to files like `go.sum`).
+
+### Module keeper
+
+Add an import:
+
+```go
+// x/{moduleName}/keeper/keeper.go
+
+package keeper
+
+// ...
+
+import (
+	//...
+	storetypes "github.com/cosmos/cosmos-sdk/store/types"
+)
+```
+
+In the `Keeper` struct replace `sdk.StoreKey` with `storetypes.StoreKey`:
+
+```go
+// x/{moduleName}/keeper/keeper.go
+
+package keeper
+
+// ...
+
+type (
+	Keeper struct {
+		cdc        codec.BinaryCodec
+		storeKey   storetypes.StoreKey
+		memKey     storetypes.StoreKey
+		paramstore paramtypes.Subspace
+	}
+)
+```
+
+In the argument list of the `NewKeeper` function definition:
+
+```go
+package keeper
+
+// ...
+
+// x/{moduleName}/keeper/keeper.go
+
+func NewKeeper(
+	//...
+ memKey storetypes.StoreKey, +) +``` + +Store type aliases have been removed from the Cosmos SDK `types` package and now have to be imported from `store/types`, +instead. + +In the `testutil/keeper/{moduleName}.go` replace `types.StoreKey` with `storetypes.StoreKey` and `types.MemStoreKey` +with `storetypes.MemStoreKey`. + +```go +// testutil/keeper/{moduleName}.go + +package keeper + +// ... + +func {moduleName}Keeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(storetypes.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(storetypes.MemStoreKey) + //... +} +``` + +### Testutil network package + +Add the `require` package for testing and `pruningtypes` and remove `storetypes`: + +```go +// testutil/network/network.go + +package network + +// ... + +import ( + pruningtypes "github.com/cosmos/cosmos-sdk/pruning/types" + "github.com/stretchr/testify/require" + // storetypes "github.com/cosmos/cosmos-sdk/store/types" <-- remove this line +) +``` + +In the `DefaultConfig` function replace `storetypes.NewPruningOptionsFromString` +with `pruningtypes.NewPruningOptionsFromString` + +```go +// testutil/network/network.go + +package network + +// ... + +func DefaultConfig() network.Config { + //... + return network.Config{ + AppConstructor: func(val network.Validator) servertypes.Application { + return app.New( + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.AppConfig.Pruning)), + //... + ) + }, + //... + } +} +``` + +The `New` function in the Cosmos SDK `testutil/network` package now +accepts [three arguments](https://github.com/cosmos/cosmos-sdk/blob/v0.46.0/testutil/network/network.go#L206) instead of +two. + +In the `New` function add `t.TempDir()` as the second argument to `network.New()` and test that no error is thrown +with `require.NoError(t, err)`: + +```go +// testutil/network/network.go + +package network + +// ... + +func New(t *testing.T, configs ...network.Config) *network.Network { + //... 
+ net, err := network.New(t, t.TempDir(), cfg) + require.NoError(t, err) + //... +} +``` + +### Testutil keeper package + +In the `{moduleName}Keeper` function make the following replacements: + +- `storetypes.StoreKey` → `types.StoreKey` +- `storetypes.MemStoreKey` → `types.MemStoreKey` +- `sdk.StoreTypeIAVL` → `storetypes.StoreTypeIAVL` +- `sdk.StoreTypeMemory` → `storetypes.StoreTypeMemory` + +```go +// testutil/keeper/{moduleName}.go + +package keeper + +// ... + +func {moduleName}Keeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + //... + stateStore.MountStoreWithDB(storeKey, storetypes.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, storetypes.StoreTypeMemory, nil) + //... +} +``` + +### IBC modules + +If you have IBC-enabled modules (for example, added with `ignite scaffold module ... --ibc` or created manually), make +the following changes to the source code. + +Cosmos SDK expects IBC modules +to [implement the `IBCModule` interface](https://ibc.cosmos.network/main/ibc/apps/ibcmodule/). Create a `IBCModule` +type that embeds the module's keeper and a method that returns a new `IBCModule`. Methods in this file will be defined +on this type. + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +type IBCModule struct { + keeper keeper.Keeper +} + +func NewIBCModule(k keeper.Keeper) IBCModule { + return IBCModule{ + keeper: k, + } +} +``` + +Replace receivers for all methods in this file from `(am AppModule)` to `(im IBCModule)`. Replace all instances of `am.` +with `im.` to fix the errors. + +`OnChanOpenInit` now returns to values: a `string` and an `error`: + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +func (im IBCModule) OnChanOpenInit( /*...*/ ) (string, error) +``` + +Ensure that all return statements (five, in the default template) in `OnChanOpenInit` return two values. 
For example: + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +func (im IBCModule) OnChanOpenInit( /*...*/ ) (string, error) { + //... + return "", errorsmod.Wrapf(porttypes.ErrInvalidPort, "invalid port: %s, expected %s", portID, boundPort) + //... +} +``` + +Error acknowledgments returned from Transfer `OnRecvPacket` now include a deterministic ABCI code and error message. +Remove the `.Error()` call: + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +func (im IBCModule) OnRecvPacket( /*...*/ ) { + //... + if err := modulePacketData.Unmarshal(modulePacket.GetData()); err != nil { + // return channeltypes.NewErrorAcknowledgement(errorsmod.Wrapf(sdkerrors.ErrUnknownRequest, "cannot unmarshal packet data: %s", err.Error()).Error()) + return channeltypes.NewErrorAcknowledgement(errorsmod.Wrapf(sdkerrors.ErrUnknownRequest, "cannot unmarshal packet data: %s", err.Error())) + } + + // ... + + // Dispatch packet + switch packet := modulePacketData.Packet.(type) { + // ... + default: + // errMsg := fmt.Sprintf("unrecognized %s packet type: %T", types.ModuleName, packet) + // return channeltypes.NewErrorAcknowledgement(errMsg) + err := fmt.Errorf("unrecognized %s packet type: %T", types.ModuleName, packet) + return channeltypes.NewErrorAcknowledgement(err) + } +} +``` + +After switching to using both `AppModule` and `IBCModule`, modifying the following line: + +```go +// x/{moduleName}/module.go + +package module_name + +// ... + +var ( + //... + _ porttypes.IBCModule = IBCModule{} // instead of "= AppModule{}" +) +``` + +### Main + +The `Execute` function in Cosmos SDK `server/cmd` package now +accepts [three arguments](https://github.com/cosmos/cosmos-sdk/blob/v0.46.0/server/cmd/execute.go#L20) instead of two. + +```go +// cmd/{{projectName}}d/main.go + +package projectNamed + +// ... + +func main() { + //... 
+ if err := svrcmd.Execute(rootCmd, "", app.DefaultNodeHome); err != nil { + os.Exit(1) + } +} +``` + +### Handler + +Cosmos SDK v0.46 no longer needs a `NewHandler` function that was used to handle messages and call appropriate keeper +methods based on message types. Feel free to remove `x/{moduleName}/handler.go` file. + +Since there is no `NewHandler` now, modify the deprecated `Route` function to return `sdk.Route{}`: + +```go +// x/{moduleName}/module.go + +package module_name + +// ... + +func (am AppModule) Route() sdk.Route { return sdk.Route{} } +``` diff --git a/docs/docs/06-migration/v0.25.0.md b/docs/docs/06-migration/v0.25.0.md new file mode 100644 index 0000000..c739d18 --- /dev/null +++ b/docs/docs/06-migration/v0.25.0.md @@ -0,0 +1,1187 @@ +--- +sidebar_position: 994 +title: v0.25.0 +description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.25.0. changes are required to use IGNITE® CLI v0.25.0. +--- + +## Protobuf directory migration + +`v0.25.0` changes the location of scaffolded `.proto` files. Previously, `.proto` files were located in `./proto/{moduleName}/`, +where `moduleName` is the same name of the Cosmos SDK module found in `./x/{moduleName}/`. This new version of `ignite` +modifies the scaffolded protobuf files so that they are now generated in `./proto/{appName}/{moduleName}`. + +The only change that is needed to be made is to create an `{appName}` folder in the `proto` directory, and then place the +sub-directories within it. 
An example below demonstrates this change:
+
+### Previous Directory Structure
+
+This example shows a chain that was generated using `ignite` with `v0.24.0` using the following command:
+
+```bash
+ignite s chain github.com/cosmos/planet --no-module
+ignite s module mars
+```
+
+```bash
+├── app
+├── cmd
+├── docs
+├── proto
+│   ├── mars
+├── x
+│   ├── mars
+├── README.md
+├── config.yml
+├── go.mod
+├── go.sum
+└── .gitignore
+```
+
+### `v0.25.0` Directory Structure
+
+This example shows a chain that was generated using `ignite` with `v0.25.0` using the following command:
+
+```bash
+ignite s chain github.com/cosmos/planet --no-module
+ignite s module mars
+```
+
+```bash
+├── app
+├── cmd
+├── docs
+├── proto
+│   ├── planet
+│   │   ├── mars
+├── x
+│   ├── mars
+├── README.md
+├── config.yml
+├── go.mod
+├── go.sum
+└── .gitignore
+```
+
+The only difference is the additional directory `planet` which is the name of the application. The name of the app can
+be verified by checking the package in the `go.mod` file. In this example, the package is `github.com/cosmos/planet`
+where `planet` is the app name.
+
+ ---
+
+## Removing `cosmoscmd`
+
+`v0.25.0` removes the `cosmoscmd` package from scaffolded chains. This package provided utility for creating
+commands and starting up their application. The `cosmoscmd` package is now deprecated, and it is suggested that chains
+implement this functionality in their codebase so they can be more easily upgraded and customized.
+
+The main functionality of `cosmoscmd` will be moved to the `app` package of your chain. Some imports in these
+examples contain the sample string, `{ModulePath}`. Replace this string with the Go module path of your blockchain.
+For example, if your blockchain module path is `github.com/planet/mars`, `{ModulePath}/app/params` would become
+`github.com/planet/mars/app/params`.
+ +#### Migration in `app` package + +To begin, create a new file, `./app/params/encoding.go`, containing the following code: + +```go +package params + +import ( + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/codec/types" +) + +// EncodingConfig specifies the concrete encoding types to use for a given app. +// This is provided for compatibility between protobuf and amino implementations. +type EncodingConfig struct { + InterfaceRegistry types.InterfaceRegistry + Marshaler codec.Codec + TxConfig client.TxConfig + Amino *codec.LegacyAmino +} +``` + +Next, create a new file, `./app/encoding.go`, containing the following code: + +```go +package app + +import ( + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/std" + "github.com/cosmos/cosmos-sdk/x/auth/tx" + + "{ModulePath}/app/params" +) + +// makeEncodingConfig creates an EncodingConfig for an amino based test configuration. 
+func makeEncodingConfig() params.EncodingConfig { + amino := codec.NewLegacyAmino() + interfaceRegistry := types.NewInterfaceRegistry() + marshaler := codec.NewProtoCodec(interfaceRegistry) + txCfg := tx.NewTxConfig(marshaler, tx.DefaultSignModes) + + return params.EncodingConfig{ + InterfaceRegistry: interfaceRegistry, + Marshaler: marshaler, + TxConfig: txCfg, + Amino: amino, + } +} + +// MakeEncodingConfig creates an EncodingConfig for testing +func MakeEncodingConfig() params.EncodingConfig { + encodingConfig := makeEncodingConfig() + std.RegisterLegacyAminoCodec(encodingConfig.Amino) + std.RegisterInterfaces(encodingConfig.InterfaceRegistry) + ModuleBasics.RegisterLegacyAminoCodec(encodingConfig.Amino) + ModuleBasics.RegisterInterfaces(encodingConfig.InterfaceRegistry) + return encodingConfig +} +``` + +Next, modify `./app/simulation_test.go` so that it looks like the following: + +```go +package app_test + +import ( + "os" + "testing" + "time" + + "github.com/cosmos/cosmos-sdk/simapp" + simulationtypes "github.com/cosmos/cosmos-sdk/types/simulation" + "github.com/cosmos/cosmos-sdk/x/simulation" + "github.com/stretchr/testify/require" + abci "github.com/tendermint/tendermint/abci/types" + tmproto "github.com/tendermint/tendermint/proto/tendermint/types" + tmtypes "github.com/tendermint/tendermint/types" + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" + + // highlight-next-line + "{ModulePath}/app" +) + +// remove-start +type SimApp interface { + cosmoscmd.App + GetBaseApp() *baseapp.BaseApp + AppCodec() codec.Codec + SimulationManager() *module.SimulationManager + ModuleAccountAddrs() map[string]bool + Name() string + LegacyAmino() *codec.LegacyAmino + BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) + abci.ResponseBeginBlock + EndBlocker(ctx sdk.Context, req abci.RequestEndBlock) + abci.ResponseEndBlock + InitChainer(ctx sdk.Context, req abci.RequestInitChain) + abci.ResponseInitChain +} + +// remove-end + +// ... 
+ +// BenchmarkSimulation run the chain simulation +// Running using starport command: +// `starport chain simulate -v --numBlocks 200 --blockSize 50` +// Running as go benchmark test: +// `go test -benchmem -run=^$ -bench ^BenchmarkSimulation ./app -NumBlocks=200 -BlockSize 50 -Commit=true -Verbose=true -Enabled=true` +func BenchmarkSimulation(b *testing.B) { + + // ... + + // remove-next-line + encoding := cosmoscmd.MakeEncodingConfig(app.ModuleBasics) + // highlight-next-line + encoding := app.MakeEncodingConfig() + + app := app.New( + logger, + db, + nil, + true, + map[int64]bool{}, + app.DefaultNodeHome, + 0, + encoding, + simapp.EmptyAppOptions{}, + ) + + // remove-start + simApp, ok := app.(SimApp) + require.True(b, ok, "can't use simapp") + // remove-end + + // Run randomized simulations + _, simParams, simErr := simulation.SimulateFromSeed( + b, + os.Stdout, + // highlight-next-line + app.BaseApp, + // highlight-next-line + simapp.AppStateFn(app.AppCodec(), app.SimulationManager()), + simulationtypes.RandomAccounts, + // highlight-next-line + simapp.SimulationOperations(app, app.AppCodec(), config), + // highlight-next-line + app.ModuleAccountAddrs(), + config, + // highlight-next-line + app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + // highlight-next-line + err = simapp.CheckExportSimulation(app, config, simParams) + require.NoError(b, err) + require.NoError(b, simErr) + + // ... +} +``` + +The main changes here are that the `SimApp` interface has been removed and is being replaced with `app`. + +The final modification in the `app` package is in `app/app.go`: + +```go +package app + +import ( + // ... + + // this line is used by starport scaffolding # stargate/app/moduleImport + + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" + + // highlight-start + appparams "{ModulePath}/app/params" + "{ModulePath}/docs" + // highlight-end +) + +// ... 
+ +var ( + // remove-next-line + _ cosmoscmd.App = (*App)(nil) + _ servertypes.Application = (*App)(nil) + _ simapp.App = (*App)(nil) +) + +// ... + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + // highlight-next-line + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), + // highlight-next-line +) *App { + appCodec := encodingConfig.Marshaler + cdc := encodingConfig.Amino + interfaceRegistry := encodingConfig.InterfaceRegistry + + bApp := baseapp.NewBaseApp( + Name, + logger, + db, + encodingConfig.TxConfig.TxDecoder(), + baseAppOptions..., + ) + + // ... + +} + +// ... + +// Name returns the name of the App +func (app *App) Name() string { return app.BaseApp.Name() } + +// remove-start +// GetBaseApp returns the base app of the application +func (app App) GetBaseApp() *baseapp.BaseApp { return app.BaseApp } + +// remove-end + +// BeginBlocker application updates every begin block +func (app *App) BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) abci.ResponseBeginBlock { + return app.mm.BeginBlock(ctx, req) +} + +// ... +``` + +Again, here we are removing the use of `cosmoscmd` and replacing it with `app`. + +#### Migration in `cmd` package + +Some imports in these +examples contain the sample string, `{binaryNamePrefix}d`. Replace this string with the binary name of your blockchain. +For example, if your blockchain module path is `github.com/planet/mars`, `./cmd/{binaryNamePrefix}d/cmd/` would be +become `./cmd/marsd/cmd/`. 
+ +First, create the new file `./cmd/{binaryNamePrefix}d/cmd/config.go` with the following code: + +```go +package cmd + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + + "{ModulePath}/app" +) + +func initSDKConfig() { + // Set prefixes + accountPubKeyPrefix := app.AccountAddressPrefix + "pub" + validatorAddressPrefix := app.AccountAddressPrefix + "valoper" + validatorPubKeyPrefix := app.AccountAddressPrefix + "valoperpub" + consNodeAddressPrefix := app.AccountAddressPrefix + "valcons" + consNodePubKeyPrefix := app.AccountAddressPrefix + "valconspub" + + // Set and seal config + config := sdk.GetConfig() + config.SetBech32PrefixForAccount(app.AccountAddressPrefix, accountPubKeyPrefix) + config.SetBech32PrefixForValidator(validatorAddressPrefix, validatorPubKeyPrefix) + config.SetBech32PrefixForConsensusNode(consNodeAddressPrefix, consNodePubKeyPrefix) + config.Seal() +} +``` + +Next, create the new file `./cmd/{binaryNamePrefix}d/cmd/genaccounts.go` with the following code: + +```go +package cmd + +import ( + "bufio" + "encoding/json" + "errors" + "fmt" + + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/crypto/keyring" + "github.com/cosmos/cosmos-sdk/server" + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + authvesting "github.com/cosmos/cosmos-sdk/x/auth/vesting/types" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/cosmos/cosmos-sdk/x/genutil" + genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" + "github.com/spf13/cobra" +) + +const ( + flagVestingStart = "vesting-start-time" + flagVestingEnd = "vesting-end-time" + flagVestingAmt = "vesting-amount" +) + +// AddGenesisAccountCmd returns add-genesis-account cobra Command. 
+func AddGenesisAccountCmd(defaultNodeHome string) *cobra.Command { + cmd := &cobra.Command{ + Use: "add-genesis-account [address_or_key_name] [coin][,[coin]]", + Short: "Add a genesis account to genesis.json", + Long: `Add a genesis account to genesis.json. The provided account must specify +the account address or key name and a list of initial coins. If a key name is given, +the address will be looked up in the local Keybase. The list of initial tokens must +contain valid denominations. Accounts may optionally be supplied with vesting parameters. +`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + clientCtx := client.GetClientContextFromCmd(cmd) + cdc := clientCtx.Codec + + serverCtx := server.GetServerContextFromCmd(cmd) + config := serverCtx.Config + + config.SetRoot(clientCtx.HomeDir) + + coins, err := sdk.ParseCoinsNormalized(args[1]) + if err != nil { + return fmt.Errorf("failed to parse coins: %w", err) + } + + addr, err := sdk.AccAddressFromBech32(args[0]) + if err != nil { + inBuf := bufio.NewReader(cmd.InOrStdin()) + keyringBackend, err := cmd.Flags().GetString(flags.FlagKeyringBackend) + if err != nil { + return err + } + + // attempt to lookup address from Keybase if no address was provided + kb, err := keyring.New(sdk.KeyringServiceName(), keyringBackend, clientCtx.HomeDir, inBuf, cdc) + if err != nil { + return err + } + + info, err := kb.Key(args[0]) + if err != nil { + return fmt.Errorf("failed to get address from Keybase: %w", err) + } + + addr, err = info.GetAddress() + if err != nil { + return fmt.Errorf("failed to get address from Keybase: %w", err) + } + } + + vestingStart, err := cmd.Flags().GetInt64(flagVestingStart) + if err != nil { + return err + } + vestingEnd, err := cmd.Flags().GetInt64(flagVestingEnd) + if err != nil { + return err + } + vestingAmtStr, err := cmd.Flags().GetString(flagVestingAmt) + if err != nil { + return err + } + + vestingAmt, err := sdk.ParseCoinsNormalized(vestingAmtStr) + if 
err != nil { + return fmt.Errorf("failed to parse vesting amount: %w", err) + } + + // create concrete account type based on input parameters + var genAccount authtypes.GenesisAccount + + balances := banktypes.Balance{Address: addr.String(), Coins: coins.Sort()} + baseAccount := authtypes.NewBaseAccount(addr, nil, 0, 0) + + if !vestingAmt.IsZero() { + baseVestingAccount := authvesting.NewBaseVestingAccount(baseAccount, vestingAmt.Sort(), vestingEnd) + + if (balances.Coins.IsZero() && !baseVestingAccount.OriginalVesting.IsZero()) || + baseVestingAccount.OriginalVesting.IsAnyGT(balances.Coins) { + return errors.New("vesting amount cannot be greater than total amount") + } + + switch { + case vestingStart != 0 && vestingEnd != 0: + genAccount = authvesting.NewContinuousVestingAccountRaw(baseVestingAccount, vestingStart) + + case vestingEnd != 0: + genAccount = authvesting.NewDelayedVestingAccountRaw(baseVestingAccount) + + default: + return errors.New("invalid vesting parameters; must supply start and end time or end time") + } + } else { + genAccount = baseAccount + } + + if err := genAccount.Validate(); err != nil { + return fmt.Errorf("failed to validate new genesis account: %w", err) + } + + genFile := config.GenesisFile() + appState, genDoc, err := genutiltypes.GenesisStateFromGenFile(genFile) + if err != nil { + return fmt.Errorf("failed to unmarshal genesis state: %w", err) + } + + authGenState := authtypes.GetGenesisStateFromAppState(cdc, appState) + + accs, err := authtypes.UnpackAccounts(authGenState.Accounts) + if err != nil { + return fmt.Errorf("failed to get accounts from any: %w", err) + } + + if accs.Contains(addr) { + return fmt.Errorf("cannot add account at existing address %s", addr) + } + + // Add the new account to the set of genesis accounts and sanitize the + // accounts afterwards. 
+ accs = append(accs, genAccount) + accs = authtypes.SanitizeGenesisAccounts(accs) + + genAccs, err := authtypes.PackAccounts(accs) + if err != nil { + return fmt.Errorf("failed to convert accounts into any's: %w", err) + } + authGenState.Accounts = genAccs + + authGenStateBz, err := cdc.MarshalJSON(&authGenState) + if err != nil { + return fmt.Errorf("failed to marshal auth genesis state: %w", err) + } + + appState[authtypes.ModuleName] = authGenStateBz + + bankGenState := banktypes.GetGenesisStateFromAppState(cdc, appState) + bankGenState.Balances = append(bankGenState.Balances, balances) + bankGenState.Balances = banktypes.SanitizeGenesisBalances(bankGenState.Balances) + + bankGenStateBz, err := cdc.MarshalJSON(bankGenState) + if err != nil { + return fmt.Errorf("failed to marshal bank genesis state: %w", err) + } + + appState[banktypes.ModuleName] = bankGenStateBz + + appStateJSON, err := json.Marshal(appState) + if err != nil { + return fmt.Errorf("failed to marshal application genesis state: %w", err) + } + + genDoc.AppState = appStateJSON + return genutil.ExportGenesisFile(genDoc, genFile) + }, + } + + cmd.Flags().String(flags.FlagKeyringBackend, flags.DefaultKeyringBackend, "Select keyring's backend (os|file|kwallet|pass|test)") + cmd.Flags().String(flags.FlagHome, defaultNodeHome, "The application home directory") + cmd.Flags().String(flagVestingAmt, "", "amount of coins for vesting accounts") + cmd.Flags().Int64(flagVestingStart, 0, "schedule start time (unix epoch) for vesting accounts") + cmd.Flags().Int64(flagVestingEnd, 0, "schedule end time (unix epoch) for vesting accounts") + flags.AddQueryFlagsToCmd(cmd) + + return cmd +} +``` + +This command allows one to generate new accounts: `appd add-genesis-account`. 
+ +Next, create the new file `./cmd/{binaryNamePrefix}d/cmd/root.go` with the following code: + +```go +package cmd + +import ( + "errors" + "io" + "os" + "path/filepath" + "strings" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/config" + "github.com/cosmos/cosmos-sdk/client/debug" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/client/keys" + "github.com/cosmos/cosmos-sdk/client/rpc" + "github.com/cosmos/cosmos-sdk/server" + serverconfig "github.com/cosmos/cosmos-sdk/server/config" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + "github.com/cosmos/cosmos-sdk/snapshots" + snapshottypes "github.com/cosmos/cosmos-sdk/snapshots/types" + "github.com/cosmos/cosmos-sdk/store" + sdk "github.com/cosmos/cosmos-sdk/types" + authcmd "github.com/cosmos/cosmos-sdk/x/auth/client/cli" + "github.com/cosmos/cosmos-sdk/x/auth/types" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/cosmos/cosmos-sdk/x/crisis" + genutilcli "github.com/cosmos/cosmos-sdk/x/genutil/client/cli" + "github.com/ignite/cli/ignite/services/network" + "github.com/spf13/cast" + "github.com/spf13/cobra" + "github.com/spf13/pflag" + tmcfg "github.com/tendermint/tendermint/config" + tmcli "github.com/tendermint/tendermint/libs/cli" + "github.com/tendermint/tendermint/libs/log" + dbm "github.com/tendermint/tm-db" + // this line is used by starport scaffolding # root/moduleImport + + "{ModulePath}/app" + appparams "{ModulePath}/app/params" +) + +// NewRootCmd creates a new root command for a Cosmos SDK application +func NewRootCmd() (*cobra.Command, appparams.EncodingConfig) { + encodingConfig := app.MakeEncodingConfig() + initClientCtx := client.Context{}. + WithCodec(encodingConfig.Marshaler). + WithInterfaceRegistry(encodingConfig.InterfaceRegistry). + WithTxConfig(encodingConfig.TxConfig). + WithLegacyAmino(encodingConfig.Amino). + WithInput(os.Stdin). 
+ WithAccountRetriever(types.AccountRetriever{}). + WithHomeDir(app.DefaultNodeHome). + WithViper("") + + rootCmd := &cobra.Command{ + Use: app.Name + "d", + Short: "Stargate CosmosHub App", + PersistentPreRunE: func(cmd *cobra.Command, _ []string) error { + // set the default command outputs + cmd.SetOut(cmd.OutOrStdout()) + cmd.SetErr(cmd.ErrOrStderr()) + initClientCtx, err := client.ReadPersistentCommandFlags(initClientCtx, cmd.Flags()) + if err != nil { + return err + } + initClientCtx, err = config.ReadFromClientConfig(initClientCtx) + if err != nil { + return err + } + + if err := client.SetCmdClientContextHandler(initClientCtx, cmd); err != nil { + return err + } + + customAppTemplate, customAppConfig := initAppConfig() + customTMConfig := initTendermintConfig() + return server.InterceptConfigsPreRunHandler( + cmd, customAppTemplate, customAppConfig, customTMConfig, + ) + }, + } + + initRootCmd(rootCmd, encodingConfig) + overwriteFlagDefaults(rootCmd, map[string]string{ + flags.FlagChainID: strings.ReplaceAll(app.Name, "-", ""), + flags.FlagKeyringBackend: "test", + }) + + return rootCmd, encodingConfig +} + +// initTendermintConfig helps to override default Tendermint Config values. +// return tmcfg.DefaultConfig if no custom configuration is required for the application. 
+func initTendermintConfig() *tmcfg.Config { + cfg := tmcfg.DefaultConfig() + return cfg +} + +func initRootCmd( + rootCmd *cobra.Command, + encodingConfig appparams.EncodingConfig, +) { + // Set config + initSDKConfig() + + rootCmd.AddCommand( + genutilcli.InitCmd(app.ModuleBasics, app.DefaultNodeHome), + genutilcli.CollectGenTxsCmd(banktypes.GenesisBalancesIterator{}, app.DefaultNodeHome), + genutilcli.MigrateGenesisCmd(), + genutilcli.GenTxCmd( + app.ModuleBasics, + encodingConfig.TxConfig, + banktypes.GenesisBalancesIterator{}, + app.DefaultNodeHome, + ), + genutilcli.ValidateGenesisCmd(app.ModuleBasics), + AddGenesisAccountCmd(app.DefaultNodeHome), + tmcli.NewCompletionCmd(rootCmd, true), + debug.Cmd(), + config.Cmd(), + // this line is used by starport scaffolding # root/commands + ) + + a := appCreator{ + encodingConfig, + } + + // add server commands + server.AddCommands( + rootCmd, + app.DefaultNodeHome, + a.newApp, + a.appExport, + addModuleInitFlags, + ) + + // add keybase, auxiliary RPC, query, and tx child commands + rootCmd.AddCommand( + rpc.StatusCommand(), + queryCommand(), + txCommand(), + keys.Commands(app.DefaultNodeHome), + ) +} + +// queryCommand returns the sub-command to send queries to the app +func queryCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "query", + Aliases: []string{"q"}, + Short: "Querying subcommands", + DisableFlagParsing: true, + SuggestionsMinimumDistance: 2, + RunE: client.ValidateCmd, + } + + cmd.AddCommand( + authcmd.GetAccountCmd(), + rpc.ValidatorCommand(), + rpc.BlockCommand(), + authcmd.QueryTxsByEventsCmd(), + authcmd.QueryTxCmd(), + ) + + app.ModuleBasics.AddQueryCommands(cmd) + cmd.PersistentFlags().String(flags.FlagChainID, "", "The network chain ID") + + return cmd +} + +// txCommand returns the sub-command to send transactions to the app +func txCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "tx", + Short: "Transactions subcommands", + DisableFlagParsing: true, + 
SuggestionsMinimumDistance: 2, + RunE: client.ValidateCmd, + } + + cmd.AddCommand( + authcmd.GetSignCommand(), + authcmd.GetSignBatchCommand(), + authcmd.GetMultiSignCommand(), + authcmd.GetValidateSignaturesCommand(), + flags.LineBreak, + authcmd.GetBroadcastCommand(), + authcmd.GetEncodeCommand(), + authcmd.GetDecodeCommand(), + ) + + app.ModuleBasics.AddTxCommands(cmd) + cmd.PersistentFlags().String(flags.FlagChainID, "", "The network chain ID") + + return cmd +} + +func addModuleInitFlags(startCmd *cobra.Command) { + crisis.AddModuleInitFlags(startCmd) + // this line is used by starport scaffolding # root/arguments +} + +func overwriteFlagDefaults(c *cobra.Command, defaults map[string]string) { + set := func(s *pflag.FlagSet, key, val string) { + if f := s.Lookup(key); f != nil { + f.DefValue = val + f.Value.Set(val) + } + } + for key, val := range defaults { + set(c.Flags(), key, val) + set(c.PersistentFlags(), key, val) + } + for _, c := range c.Commands() { + overwriteFlagDefaults(c, defaults) + } +} + +type appCreator struct { + encodingConfig appparams.EncodingConfig +} + +// newApp creates a new Cosmos SDK app +func (a appCreator) newApp( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + appOpts servertypes.AppOptions, +) servertypes.Application { + var cache sdk.MultiStorePersistentCache + + if cast.ToBool(appOpts.Get(server.FlagInterBlockCache)) { + cache = store.NewCommitKVStoreCacheManager() + } + + skipUpgradeHeights := make(map[int64]bool) + for _, h := range cast.ToIntSlice(appOpts.Get(server.FlagUnsafeSkipUpgrades)) { + skipUpgradeHeights[int64(h)] = true + } + + pruningOpts, err := server.GetPruningOptionsFromFlags(appOpts) + if err != nil { + panic(err) + } + + snapshotDir := filepath.Join(cast.ToString(appOpts.Get(flags.FlagHome)), "data", "snapshots") + snapshotDB, err := dbm.NewDB("metadata", dbm.GoLevelDBBackend, snapshotDir) + if err != nil { + panic(err) + } + snapshotStore, err := snapshots.NewStore(snapshotDB, snapshotDir) + if 
err != nil { + panic(err) + } + + snapshotOptions := snapshottypes.NewSnapshotOptions( + cast.ToUint64(appOpts.Get(server.FlagStateSyncSnapshotInterval)), + cast.ToUint32(appOpts.Get(server.FlagStateSyncSnapshotKeepRecent)), + ) + + return app.New( + logger, + db, + traceStore, + true, + skipUpgradeHeights, + cast.ToString(appOpts.Get(flags.FlagHome)), + cast.ToUint(appOpts.Get(server.FlagInvCheckPeriod)), + a.encodingConfig, + appOpts, + baseapp.SetPruning(pruningOpts), + baseapp.SetMinGasPrices(cast.ToString(appOpts.Get(server.FlagMinGasPrices))), + baseapp.SetMinRetainBlocks(cast.ToUint64(appOpts.Get(server.FlagMinRetainBlocks))), + baseapp.SetHaltHeight(cast.ToUint64(appOpts.Get(server.FlagHaltHeight))), + baseapp.SetHaltTime(cast.ToUint64(appOpts.Get(server.FlagHaltTime))), + baseapp.SetInterBlockCache(cache), + baseapp.SetTrace(cast.ToBool(appOpts.Get(server.FlagTrace))), + baseapp.SetIndexEvents(cast.ToStringSlice(appOpts.Get(server.FlagIndexEvents))), + baseapp.SetSnapshot(snapshotStore, snapshotOptions), + ) +} + +// appExport creates a new simapp (optionally at a given height) +func (a appCreator) appExport( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + height int64, + forZeroHeight bool, + jailAllowedAddrs []string, + appOpts servertypes.AppOptions, +) (servertypes.ExportedApp, error) { + homePath, ok := appOpts.Get(flags.FlagHome).(string) + if !ok || homePath == "" { + return servertypes.ExportedApp{}, errors.New("application home not set") + } + + app := app.New( + logger, + db, + traceStore, + height == -1, // -1: no height provided + map[int64]bool{}, + homePath, + uint(1), + a.encodingConfig, + appOpts, + ) + + if height != -1 { + if err := app.LoadHeight(height); err != nil { + return servertypes.ExportedApp{}, err + } + } + + return app.ExportAppStateAndValidators(forZeroHeight, jailAllowedAddrs) +} + +// initAppConfig helps to override default appConfig template and configs. 
+// return "", nil if no custom configuration is required for the application. +func initAppConfig() (string, interface{}) { + // The following code snippet is just for reference. + + // WASMConfig defines configuration for the wasm module. + type WASMConfig struct { + // This is the maximum sdk gas (wasm and storage) that we allow for any x/wasm "smart" queries + QueryGasLimit uint64 `mapstructure:"query_gas_limit"` + + // Address defines the gRPC-web server to listen on + LruSize uint64 `mapstructure:"lru_size"` + } + + type CustomAppConfig struct { + serverconfig.Config + + WASM WASMConfig `mapstructure:"wasm"` + } + + // Optionally allow the chain developer to overwrite the SDK's default + // server config. + srvCfg := serverconfig.DefaultConfig() + // The SDK's default minimum gas price is set to "" (empty value) inside + // app.toml. If left empty by validators, the node will halt on startup. + // However, the chain developer can set a default app.toml value for their + // validators here. + // + // In summary: + // - if you leave srvCfg.MinGasPrices = "", all validators MUST tweak their + // own app.toml config, + // - if you set srvCfg.MinGasPrices non-empty, validators CAN tweak their + // own app.toml to override, or use this default value. + // + // In simapp, we set the min gas prices to 0. 
+ srvCfg.MinGasPrices = "0stake" + + customAppConfig := CustomAppConfig{ + Config: *srvCfg, + WASM: WASMConfig{ + LruSize: 1, + QueryGasLimit: 300000, + }, + } + + customAppTemplate := serverconfig.DefaultConfigTemplate + ` +[wasm] +# This is the maximum sdk gas (wasm and storage) that we allow for any x/wasm "smart" queries +query_gas_limit = 300000 +# This is the number of wasm vm instances we keep cached in memory for speed-up +# Warning: this is currently unstable and may lead to crashes, best to keep for 0 unless testing locally +lru_size = 0` + + return customAppTemplate, customAppConfig +} +``` + +Finally, modify `./cmd/{binaryNamePrefix}d/main.go` to include the new changes: + +```go +package main + +import ( + "os" + + "github.com/cosmos/cosmos-sdk/server" + svrcmd "github.com/cosmos/cosmos-sdk/server/cmd" + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" + + "{ModulePath}/app" + "{ModulePath}/cmd/{BinaryNamePrefix}d/cmd" +) + +func main() { + // highlight-start + rootCmd, _ := cmd.NewRootCmd() + if err := svrcmd.Execute(rootCmd, "", app.DefaultNodeHome); err != nil { + switch e := err.(type) { + case server.ErrorCode: + os.Exit(e.Code) + + default: + os.Exit(1) + } + } + // highlight-end +} +``` + +#### Migration in `testutil` package + +Modify `./testutil/network/network.go` to include the new changes: + + +```go +package network + +import ( + "fmt" + "testing" + "time" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/crypto/hd" + "github.com/cosmos/cosmos-sdk/crypto/keyring" + pruningtypes "github.com/cosmos/cosmos-sdk/pruning/types" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + "github.com/cosmos/cosmos-sdk/simapp" + "github.com/cosmos/cosmos-sdk/testutil/network" + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + "github.com/stretchr/testify/require" + tmrand "github.com/tendermint/tendermint/libs/rand" + tmdb "github.com/tendermint/tm-db" + + 
// highlight-next-line + "{ModulePath}/app" + + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" +) + +// ... + +// DefaultConfig will initialize config for the network with custom application, +// genesis and single validator. All other parameters are inherited from cosmos-sdk/testutil/network.DefaultConfig +func DefaultConfig() network.Config { + // highlight-next-line + encoding := app.MakeEncodingConfig() + // remove-next-line + encoding := cosmoscmd.MakeEncodingConfig(app.ModuleBasics) + return network.Config{ + Codec: encoding.Marshaler, + TxConfig: encoding.TxConfig, + LegacyAmino: encoding.Amino, + InterfaceRegistry: encoding.InterfaceRegistry, + AccountRetriever: authtypes.AccountRetriever{}, + AppConstructor: func(val network.Validator) servertypes.Application { + return app.New( + val.Ctx.Logger, tmdb.NewMemDB(), nil, true, map[int64]bool{}, val.Ctx.Config.RootDir, 0, + encoding, + simapp.EmptyAppOptions{}, + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.AppConfig.Pruning)), + baseapp.SetMinGasPrices(val.AppConfig.MinGasPrices), + ) + }, + GenesisState: app.ModuleBasics.DefaultGenesis(encoding.Marshaler), + TimeoutCommit: 2 * time.Second, + ChainID: "chain-" + tmrand.NewRand().Str(6), + NumValidators: 1, + BondDenom: sdk.DefaultBondDenom, + MinGasPrices: fmt.Sprintf("0.000006%s", sdk.DefaultBondDenom), + AccountTokens: sdk.TokensFromConsensusPower(1000, sdk.DefaultPowerReduction), + StakingTokens: sdk.TokensFromConsensusPower(500, sdk.DefaultPowerReduction), + BondedTokens: sdk.TokensFromConsensusPower(100, sdk.DefaultPowerReduction), + PruningStrategy: pruningtypes.PruningOptionNothing, + CleanupDir: true, + SigningAlgo: string(hd.Secp256k1Type), + KeyringOptions: []keyring.Option{}, + } +} +``` + + --- + +## Fix ICA controller keeper wiring + +Related issue: https://github.com/ignite/cli/issues/2867 + +Apply the following changes to `app/app.go` file : + +```go +package app + +import ( + + // highlight-start + 
icacontrollerkeeper "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts/controller/keeper" + icacontrollertypes "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts/controller/types" + // highlight-end + // ... +) + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + + // ... + + keys := sdk.NewKVStoreKeys( + authtypes.StoreKey, authz.ModuleName, banktypes.StoreKey, + stakingtypes.StoreKey, + minttypes.StoreKey, distrtypes.StoreKey, slashingtypes.StoreKey, + govtypes.StoreKey, + paramstypes.StoreKey, ibchost.StoreKey, upgradetypes.StoreKey, + feegrant.StoreKey, evidencetypes.StoreKey, + ibctransfertypes.StoreKey, icahosttypes.StoreKey, + capabilitytypes.StoreKey, group.StoreKey, + // highlight-next-line + icacontrollertypes.StoreKey, + yourchainmoduletypes.StoreKey, + // this line is used by starport scaffolding # stargate/app/storeKey + ) + + // ... + + // remove-next-line + icaModule := ica.NewAppModule(nil, &app.ICAHostKeeper) + // highlight-start + icaControllerKeeper := icacontrollerkeeper.NewKeeper( + appCodec, keys[icacontrollertypes.StoreKey], + app.GetSubspace(icacontrollertypes.SubModuleName), + app.IBCKeeper.ChannelKeeper, // may be replaced with middleware such as ics29 fee + app.IBCKeeper.ChannelKeeper, &app.IBCKeeper.PortKeeper, + scopedICAControllerKeeper, app.MsgServiceRouter(), + ) + icaModule := ica.NewAppModule(&icaControllerKeeper, &app.ICAHostKeeper) + // highlight-end + icaHostIBCModule := icahost.NewIBCModule(app.ICAHostKeeper) + + // ... +} + +// ... 
+ +// initParamsKeeper init params keeper and its subspaces +func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey storetypes.StoreKey) paramskeeper.Keeper { + paramsKeeper := paramskeeper.NewKeeper(appCodec, legacyAmino, key, tkey) + + paramsKeeper.Subspace(authtypes.ModuleName) + paramsKeeper.Subspace(banktypes.ModuleName) + paramsKeeper.Subspace(stakingtypes.ModuleName) + paramsKeeper.Subspace(minttypes.ModuleName) + paramsKeeper.Subspace(distrtypes.ModuleName) + paramsKeeper.Subspace(slashingtypes.ModuleName) + paramsKeeper.Subspace(govtypes.ModuleName).WithKeyTable(govv1.ParamKeyTable()) + paramsKeeper.Subspace(crisistypes.ModuleName) + paramsKeeper.Subspace(ibctransfertypes.ModuleName) + paramsKeeper.Subspace(ibchost.ModuleName) + // highlight-next-line + paramsKeeper.Subspace(icacontrollertypes.SubModuleName) + paramsKeeper.Subspace(icahosttypes.SubModuleName) + paramsKeeper.Subspace(mychainmoduletypes.ModuleName) + // this line is used by starport scaffolding # stargate/app/paramSubspace + + return paramsKeeper +} +``` + + --- + +## Fix capability keeper not sealed + +Related issue: https://github.com/ignite/cli/issues/1921 + +Apply the following change to `app/app.go` file : + +```go +package app + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + + // ... + + // this line is used by starport scaffolding # stargate/app/keeperDefinition + + // highlight-start + // Sealing prevents other modules from creating scoped sub-keepers + app.CapabilityKeeper.Seal() + // highlight-end + + // Create static IBC router, add transfer route, then set and seal it + + // ... 
+
+}
+```
diff --git a/docs/docs/06-migration/v0.25.1.md b/docs/docs/06-migration/v0.25.1.md
new file mode 100644
index 0000000..b231522
--- /dev/null
+++ b/docs/docs/06-migration/v0.25.1.md
@@ -0,0 +1,67 @@
+---
+sidebar_position: 993
+title: v0.25.1
+description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.25.1, changes are required to use IGNITE® CLI v0.25.1.
+---
+
+## Dragonberry fix
+
+`v0.25.1` contains the Dragonberry fix, update your `go.mod` as follows:
+
+```sh
+require (
+    // remove-next-line
+    github.com/ignite/cli v0.24.0
+    // highlight-next-line
+    github.com/ignite/cli v0.25.1
+)
+
+// highlight-next-line
+replace github.com/confio/ics23/go => github.com/cosmos/cosmos-sdk/ics23/go v0.8.0
+```
+
+Then run:
+
+```
+$ go mod tidy
+```
+
+As a result, you should see `cosmos-sdk` and `ibc-go` upgraded as well.
+
+Finally, apply the following change to `app/app.go`:
+
+```go
+package app
+
+// New returns a reference to an initialized blockchain app
+func New(
+	logger log.Logger,
+	db dbm.DB,
+	traceStore io.Writer,
+	loadLatest bool,
+	skipUpgradeHeights map[int64]bool,
+	homePath string,
+	invCheckPeriod uint,
+	encodingConfig appparams.EncodingConfig,
+	appOpts servertypes.AppOptions,
+	baseAppOptions ...func(*baseapp.BaseApp),
+) *App {
+
+	// ...
+
+	app.ICAHostKeeper = icahostkeeper.NewKeeper(
+		appCodec, keys[icahosttypes.StoreKey],
+		app.GetSubspace(icahosttypes.SubModuleName),
+		app.IBCKeeper.ChannelKeeper,
+		// highlight-next-line
+		app.IBCKeeper.ChannelKeeper,
+		&app.IBCKeeper.PortKeeper,
+		app.AccountKeeper,
+		scopedICAHostKeeper,
+		app.MsgServiceRouter(),
+	)
+
+	// ...
+
+}
+```
diff --git a/docs/docs/06-migration/v0.26.0.md b/docs/docs/06-migration/v0.26.0.md
new file mode 100644
index 0000000..3a7301b
--- /dev/null
+++ b/docs/docs/06-migration/v0.26.0.md
@@ -0,0 +1,263 @@
+---
+sidebar_position: 992
+title: v0.26.0
+description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.26.0. 
changes are required to use IGNITE® CLI v0.26.0. +--- + +IGNITE® CLI `v0.26.0` is fully compatible with chains that are compatible with `v0.25.1`. Please follow the existing +migration guides if your chain is not upgraded to `v0.25.1` support. + +## Go Version + +Chains that are newly scaffolded with IGNITE® CLI `v0.26.0` now require `go 1.19` in their `go.mod` files. It is +recommended that chains scaffolded with an older version of IGNITE® CLI also bump their required `go` version and update +their tooling to the latest version. + +## ibc-go v6 + +Chains that are newly scaffolded with IGNITE® CLI `v0.26.0` now use `ibc-go/v6` for ibc functionality. It is not +necessary, but recommended to upgrade to the newest version of `ibc-go`. Most migrations can be done by following the +`ibc-go` [migration guide](https://github.com/cosmos/ibc-go/blob/v6.2.0/docs/migrations/v5-to-v6.md), but there are some +specific changes that will need to be followed for IGNITE® scaffolded chains. + +### Removing `cosmosibckeeper` + +IGNITE® CLI `v0.26.0` has deprecated [pkg/cosmosibckeeper](https://github.com/ignite/cli/tree/v0.26.0/ignite/pkg/cosmosibckeeper). +This package contained interfaces for ibc-related keepers. Newly scaffolded chains now include the interface files in their +`./x/{moduleName}/types` directory in a new `expected_ibc_keeper.go` file. To migrate, create the following file for +each module: + +```go title="x/{moduleName}/types/expected_ibc_keeper.go" +package types + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + clienttypes "github.com/cosmos/ibc-go/v6/modules/core/02-client/types" + channeltypes "github.com/cosmos/ibc-go/v6/modules/core/04-channel/types" +) + +// ChannelKeeper defines the expected IBC channel keeper. 
+type ChannelKeeper interface { + GetChannel(ctx sdk.Context, portID, channelID string) (channeltypes.Channel, bool) + GetNextSequenceSend(ctx sdk.Context, portID, channelID string) (uint64, bool) + SendPacket( + ctx sdk.Context, + channelCap *capabilitytypes.Capability, + sourcePort string, + sourceChannel string, + timeoutHeight clienttypes.Height, + timeoutTimestamp uint64, + data []byte, + ) (uint64, error) + ChanCloseInit(ctx sdk.Context, portID, channelID string, chanCap *capabilitytypes.Capability) error +} + +// PortKeeper defines the expected IBC port keeper. +type PortKeeper interface { + BindPort(ctx sdk.Context, portID string) *capabilitytypes.Capability +} + +// ScopedKeeper defines the expected IBC scoped keeper. +type ScopedKeeper interface { + GetCapability(ctx sdk.Context, name string) (*capabilitytypes.Capability, bool) + AuthenticateCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) bool + ClaimCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) error +} +``` + +Next, make the following updates to each `x/{moduleName}/keeper/keeper.go` file for each ibc-enabled +module in your project: + +```go title="x/{moduleName}/keeper/keeper.go" +package keeper + +import ( + "fmt" + + // remove-start + "blogibc/x/testibc/types" + "github.com/cosmos/cosmos-sdk/codec" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" + "github.com/ignite/cli/ignite/pkg/cosmosibckeeper" + "github.com/tendermint/tendermint/libs/log" + // remove-end + // highlight-start + "github.com/cosmos/cosmos-sdk/codec" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" + channeltypes 
"github.com/cosmos/ibc-go/v6/modules/core/04-channel/types"
+	host "github.com/cosmos/ibc-go/v6/modules/core/24-host"
+	"github.com/cosmos/ibc-go/v6/modules/core/exported"
+	"github.com/tendermint/tendermint/libs/log"
+
+	"{appName}/x/{moduleName}/types"
+	// highlight-end
+)
+
+type (
+	Keeper struct {
+		// remove-next-line
+		*cosmosibckeeper.Keeper
+		cdc        codec.BinaryCodec
+		storeKey   storetypes.StoreKey
+		memKey     storetypes.StoreKey
+		paramstore paramtypes.Subspace
+
+		// highlight-start
+		channelKeeper types.ChannelKeeper
+		portKeeper    types.PortKeeper
+		scopedKeeper  exported.ScopedKeeper
+		// highlight-end
+	}
+)
+
+func NewKeeper(
+	cdc codec.BinaryCodec,
+	storeKey,
+	memKey storetypes.StoreKey,
+	ps paramtypes.Subspace,
+	// highlight-start
+	channelKeeper types.ChannelKeeper,
+	portKeeper types.PortKeeper,
+	scopedKeeper types.ScopedKeeper,
+	// highlight-end
+) *Keeper {
+	// set KeyTable if it has not already been set
+	if !ps.HasKeyTable() {
+		ps = ps.WithKeyTable(types.ParamKeyTable())
+	}
+
+	return &Keeper{
+		// remove-start
+		Keeper: cosmosibckeeper.NewKeeper(
+			types.PortKey,
+			storeKey,
+			channelKeeper,
+			portKeeper,
+			scopedKeeper,
+		),
+		// remove-end
+		cdc:        cdc,
+		storeKey:   storeKey,
+		memKey:     memKey,
+		paramstore: ps,
+		// highlight-start
+		channelKeeper: channelKeeper,
+		portKeeper:    portKeeper,
+		scopedKeeper:  scopedKeeper,
+		// highlight-end
+	}
+}
+
+// highlight-start
+// ----------------------------------------------------------------------------
+// IBC Keeper Logic
+// ----------------------------------------------------------------------------
+
+// ChanCloseInit defines a wrapper function for the channel Keeper's function. 
+func (k Keeper) ChanCloseInit(ctx sdk.Context, portID, channelID string) error {
+	capName := host.ChannelCapabilityPath(portID, channelID)
+	chanCap, ok := k.scopedKeeper.GetCapability(ctx, capName)
+	if !ok {
+		return sdkerrors.Wrapf(channeltypes.ErrChannelCapabilityNotFound, "could not retrieve channel capability at: %s", capName)
+	}
+	return k.channelKeeper.ChanCloseInit(ctx, portID, channelID, chanCap)
+}
+
+// IsBound checks if the IBC app module is already bound to the desired port
+func (k Keeper) IsBound(ctx sdk.Context, portID string) bool {
+	_, ok := k.scopedKeeper.GetCapability(ctx, host.PortPath(portID))
+	return ok
+}
+
+// BindPort defines a wrapper function for the port Keeper's function in
+// order to expose it to module's InitGenesis function
+func (k Keeper) BindPort(ctx sdk.Context, portID string) error {
+	cap := k.portKeeper.BindPort(ctx, portID)
+	return k.ClaimCapability(ctx, cap, host.PortPath(portID))
+}
+
+// GetPort returns the portID for the IBC app module. Used in ExportGenesis
+func (k Keeper) GetPort(ctx sdk.Context) string {
+	store := ctx.KVStore(k.storeKey)
+	return string(store.Get(types.PortKey))
+}
+
+// SetPort sets the portID for the IBC app module. 
Used in InitGenesis
+func (k Keeper) SetPort(ctx sdk.Context, portID string) {
+	store := ctx.KVStore(k.storeKey)
+	store.Set(types.PortKey, []byte(portID))
+}
+
+// AuthenticateCapability wraps the scopedKeeper's AuthenticateCapability function
+func (k Keeper) AuthenticateCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) bool {
+	return k.scopedKeeper.AuthenticateCapability(ctx, cap, name)
+}
+
+// ClaimCapability allows the IBC app module to claim a capability that core IBC
+// passes to it
+func (k Keeper) ClaimCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) error {
+	return k.scopedKeeper.ClaimCapability(ctx, cap, name)
+}
+
+// highlight-end
+
+func (k Keeper) Logger(ctx sdk.Context) log.Logger {
+	return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName))
+}
+```
+
+### Remaining migration
+
+After all uses of `cosmosibckeeper` have been removed, you can follow any remaining steps in the `ibc-go` [migration guide](https://github.com/cosmos/ibc-go/blob/v6.2.0/docs/migrations/v5-to-v6.md).
+
+## Scaffolded Release Workflow
+
+The `develop` branch of the CLI has been deprecated. To continue using the release workflow that uses the CLI to
+automatically build and release your chain's binaries, replace `develop` with `main` in the following lines:
+
+```yaml title=".github/workflows/release.yml"
+...
+ +jobs: + might_release: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Prepare Release Variables + id: vars + // highlight-next-line + uses: ignite/cli/actions/release/vars@main + - name: Issue Release Assets + // highlight-next-line + uses: ignite/cli/actions/cli@main + if: ${{ steps.vars.outputs.should_release == 'true' }} + with: + args: chain build --release --release.prefix ${{ steps.vars.outputs.tarball_prefix }} -t linux:amd64 -t darwin:amd64 -t darwin:arm64 + - name: Delete the "latest" Release + uses: dev-drprasad/delete-tag-and-release@v0.2.0 + if: ${{ steps.vars.outputs.is_release_type_latest == 'true' }} + with: + tag_name: ${{ steps.vars.outputs.tag_name }} + delete_release: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Publish the Release + uses: softprops/action-gh-release@v1 + if: ${{ steps.vars.outputs.should_release == 'true' }} + with: + tag_name: ${{ steps.vars.outputs.tag_name }} + files: release/* + prerelease: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} +``` diff --git a/docs/docs/06-migration/v0.27.1.md b/docs/docs/06-migration/v0.27.1.md new file mode 100644 index 0000000..f2bf48b --- /dev/null +++ b/docs/docs/06-migration/v0.27.1.md @@ -0,0 +1,1208 @@ +--- +sidebar_position: 991 +title: v0.27.1 +description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.27.0. changes are required to use IGNITE® CLI v0.27.1. 
+---
+
+## Cosmos SDK v0.47.3 upgrade notes
+
+### Imports
+
+To use the new Cosmos SDK, make sure you update `go.mod` dependencies:
+
+```text title="go.mod"
+go 1.20
+
+require (
+    // remove-start
+    github.com/cosmos/cosmos-sdk v0.46.7
+    github.com/tendermint/tendermint v0.34.24
+    github.com/tendermint/tm-db v0.6.7
+    github.com/cosmos/ibc-go/v6 v6.1.0
+    github.com/gogo/protobuf v1.3.3
+    github.com/regen-network/cosmos-proto v0.3.1
+    // remove-end
+    // highlight-start
+    cosmossdk.io/api v0.3.1
+    github.com/cosmos/cosmos-sdk v0.47.3
+    github.com/cometbft/cometbft v0.37.1
+    github.com/cometbft/cometbft-db v0.7.0
+    github.com/cosmos/ibc-go/v7 v7.1.0
+    github.com/cosmos/gogoproto v1.4.7
+    // highlight-end
+
+    // ...
+)
+
+replace (
+    // remove-start
+    github.com/confio/ics23/go => github.com/cosmos/cosmos-sdk/ics23/go v0.8.0
+    github.com/gogo/protobuf => github.com/regen-network/protobuf v1.3.3-alpha.regen.1
+    // remove-end
+    // highlight-next-line
+    github.com/syndtr/goleveldb => github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7
+)
+```
+
+The Cosmos SDK has migrated to CometBFT as its default consensus engine which requires
+changes in your app imports:
+
+1. Replace `github.com/tendermint/tendermint` by `github.com/cometbft/cometbft`
+2. Replace `github.com/tendermint/tm-db` by `github.com/cometbft/cometbft-db`
+3. Verify `github.com/tendermint/tendermint` is not an indirect or direct dependency
+
+The SDK has also migrated from `gogo/protobuf` to `cosmos/gogoproto`. This means you must
+replace all `github.com/gogo/protobuf` imports with `github.com/cosmos/gogoproto`. This change
+might introduce breaking changes to your proto layout. Follow the official
+[Cosmos migration guide](https://docs.cosmos.network/main/migrations/upgrading#gogoproto-import-paths)
+to make sure you are using the correct layout.
+
+You might need to replace the following imports:
+
+1. 
Replace `github.com/cosmos/cosmos-sdk/simapp` by `cosmossdk.io/simapp` + +### App changes + +Applications scaffolded with older version of IGNITE® CLI would require the following changes +to some of the app files: + +```text title="app/app.go" +import ( + //... + + // remove-next-line + tmjson "github.com/tendermint/tendermint/libs/json" + // highlight-next-line + "encoding/json" + + // highlight-start + autocliv1 "cosmossdk.io/api/cosmos/autocli/v1" + reflectionv1 "cosmossdk.io/api/cosmos/reflection/v1" + "github.com/cosmos/cosmos-sdk/runtime" + runtimeservices "github.com/cosmos/cosmos-sdk/runtime/services" + "github.com/cosmos/cosmos-sdk/x/consensus" + consensusparamkeeper "github.com/cosmos/cosmos-sdk/x/consensus/keeper" + consensusparamtypes "github.com/cosmos/cosmos-sdk/x/consensus/types" + // highlight-end +) + +func getGovProposalHandlers() []govclient.ProposalHandler { + // ... + govProposalHandlers = append(govProposalHandlers, + paramsclient.ProposalHandler, + // remove-next-line + distrclient.ProposalHandler, + upgradeclient.LegacyProposalHandler, + // ... + ) + + return govProposalHandlers +} + +var ( + // ... + + ModuleBasics = module.NewBasicManager( + auth.AppModuleBasic{}, + authzmodule.AppModuleBasic{}, + // remove-next-line + genutil.AppModuleBasic{}, + // highlight-next-line + genutil.NewAppModuleBasic(genutiltypes.DefaultMessageValidator), + bank.AppModuleBasic{}, + // ... + vesting.AppModuleBasic{}, + // highlight-next-line + consensus.AppModuleBasic{}, + //... + ) +) + +var ( + // highlight-next-line + _ runtime.AppI = (*App)(nil) + _ servertypes.Application = (*App)(nil) + // remove-next-line + _ simapp.App = (*App)(nil) +) + +type App struct { + *baseapp.BaseApp + + cdc *codec.LegacyAmino + appCodec codec.Codec + interfaceRegistry types.InterfaceRegistry + // highlight-next-line + txConfig client.TxConfig + + invCheckPeriod uint + + // ... 
+ // remove-start + StakingKeeper stakingkeeper.Keeper + CrisisKeeper crisiskeeper.Keeper + UpgradeKeeper upgradekeeper.Keeper + // remove-end + // highlight-start + StakingKeeper *stakingkeeper.Keeper + CrisisKeeper *crisiskeeper.Keeper + UpgradeKeeper *upgradekeeper.Keeper + // highlight-end + // ... + FeeGrantKeeper feegrantkeeper.Keeper + GroupKeeper groupkeeper.Keeper + // highlight-next-line + ConsensusParamsKeeper consensusparamkeeper.Keeper + + // ... +} + +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + appCodec := encodingConfig.Marshaler + cdc := encodingConfig.Amino + interfaceRegistry := encodingConfig.InterfaceRegistry + // highlight-next-line + txConfig := encodingConfig.TxConfig + + // ... + + bApp.SetCommitMultiStoreTracer(traceStore) + bApp.SetVersion(version.Version) + bApp.SetInterfaceRegistry(interfaceRegistry) + // highlight-next-line + bApp.SetTxEncoder(txConfig.TxEncoder()) + + keys := sdk.NewKVStoreKeys( + // ... + banktypes.StoreKey, + stakingtypes.StoreKey, + // highlight-next-line + crisistypes.StoreKey, + // ... + group.StoreKey, + icacontrollertypes.StoreKey, + // highlight-next-line + consensusparamtypes.StoreKey, + // ... + ) + + // ... + + app := &App{ + // ... + interfaceRegistry: interfaceRegistry, + // highlight-next-line + txConfig: txConfig, + invCheckPeriod: invCheckPeriod, + // ... + } + + // ... 
+
+	// set the BaseApp's parameter store
+	// remove-next-line
+	bApp.SetParamStore(app.ParamsKeeper.Subspace(baseapp.Paramspace).WithKeyTable(paramstypes.ConsensusParamsKeyTable()))
+	// highlight-start
+	app.ConsensusParamsKeeper = consensusparamkeeper.NewKeeper(appCodec, keys[consensusparamtypes.StoreKey], authtypes.NewModuleAddress(govtypes.ModuleName).String())
+	bApp.SetParamStore(&app.ConsensusParamsKeeper)
+	// highlight-end
+
+	// ...
+
+	app.AccountKeeper = authkeeper.NewAccountKeeper(
+		appCodec,
+		keys[authtypes.StoreKey],
+		// remove-next-line
+		app.GetSubspace(authtypes.ModuleName),
+		authtypes.ProtoBaseAccount,
+		maccPerms,
+		sdk.Bech32PrefixAccAddr,
+		// highlight-next-line
+		authtypes.NewModuleAddress(govtypes.ModuleName).String(),
+	)
+
+	app.BankKeeper = bankkeeper.NewBaseKeeper(
+		appCodec,
+		keys[banktypes.StoreKey],
+		app.AccountKeeper,
+		// remove-next-line
+		app.GetSubspace(banktypes.ModuleName),
+		app.BlockedModuleAccountAddrs(),
+		// highlight-next-line
+		authtypes.NewModuleAddress(govtypes.ModuleName).String(),
+	)
+
+	app.StakingKeeper = stakingkeeper.NewKeeper(
+		appCodec,
+		keys[stakingtypes.StoreKey],
+		app.AccountKeeper,
+		app.BankKeeper,
+		// remove-next-line
+		app.GetSubspace(stakingtypes.ModuleName),
+		// highlight-next-line
+		authtypes.NewModuleAddress(govtypes.ModuleName).String(),
+	)
+
+	app.MintKeeper = mintkeeper.NewKeeper(
+		appCodec,
+		keys[minttypes.StoreKey],
+		// remove-next-line
+		app.GetSubspace(minttypes.ModuleName),
+		// remove-next-line
+		&app.StakingKeeper,
+		// highlight-next-line
+		app.StakingKeeper,
+		app.AccountKeeper,
+		app.BankKeeper,
+		authtypes.FeeCollectorName,
+		// highlight-next-line
+		authtypes.NewModuleAddress(govtypes.ModuleName).String(),
+	)
+
+	app.DistrKeeper = distrkeeper.NewKeeper(
+		appCodec,
+		keys[distrtypes.StoreKey],
+		// remove-next-line
+		app.GetSubspace(distrtypes.ModuleName),
+		app.AccountKeeper,
+		app.BankKeeper,
+		// remove-next-line
+		&app.StakingKeeper,
+		// highlight-next-line
+		
app.StakingKeeper, + authtypes.FeeCollectorName, + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + app.SlashingKeeper = slashingkeeper.NewKeeper( + appCodec, + // highlight-next-line + cdc, + keys[slashingtypes.StoreKey], + // remove-next-line + &app.StakingKeeper, + // highlight-next-line + app.StakingKeeper, + // remove-next-line + app.GetSubspace(slashingtypes.ModuleName), + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + app.CrisisKeeper = crisiskeeper.NewKeeper( + // remove-next-line + app.GetSubspace(crisistypes.ModuleName), + // highlight-start + appCodec, + keys[crisistypes.StoreKey], + // highlight-end + invCheckPeriod, + app.BankKeeper, + authtypes.FeeCollectorName, + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + // ... + + // Create evidence Keeper for to register the IBC light client misbehaviour evidence route + evidenceKeeper := evidencekeeper.NewKeeper( + appCodec, + keys[evidencetypes.StoreKey], + // remove-next-line + &app.StakingKeeper, + // highlight-next-line + app.StakingKeeper, + app.SlashingKeeper, + ) + // If evidence needs to be handled for the app, set routes in router here and seal + app.EvidenceKeeper = *evidenceKeeper + + // highlight-start + govConfig := govtypes.DefaultConfig() + govKeeper := govkeeper.NewKeeper( + appCodec, + keys[govtypes.StoreKey], + app.AccountKeeper, + app.BankKeeper, + app.StakingKeeper, + app.MsgServiceRouter(), + govConfig, + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + // highlight-end + + govRouter := govv1beta1.NewRouter() + govRouter. + AddRoute(govtypes.RouterKey, govv1beta1.ProposalHandler). + AddRoute(paramproposal.RouterKey, params.NewParamChangeProposalHandler(app.ParamsKeeper)). + // remove-next-line + AddRoute(distrtypes.RouterKey, distr.NewCommunityPoolSpendProposalHandler(app.DistrKeeper)). 
+ AddRoute(upgradetypes.RouterKey, upgrade.NewSoftwareUpgradeProposalHandler(app.UpgradeKeeper)). + AddRoute(ibcclienttypes.RouterKey, ibcclient.NewClientProposalHandler(app.IBCKeeper.ClientKeeper)) + // highlight-next-line + govKeeper.SetLegacyRouter(govRouter) + + // remove-start + govConfig := govtypes.DefaultConfig() + app.GovKeeper = govkeeper.NewKeeper( + appCodec, + keys[govtypes.StoreKey], + app.GetSubspace(govtypes.ModuleName), + app.AccountKeeper, + app.BankKeeper, + &app.StakingKeeper, + govRouter, + app.MsgServiceRouter(), + govConfig, + ) + // remove-end + // highlight-start + app.GovKeeper = *govKeeper.SetHooks( + govtypes.NewMultiGovHooks( + // register the governance hooks + ), + ) + // highlight-end + + // ... + + // remove-start + app.GovKeeper.SetHooks( + govtypes.NewMultiGovHooks( + // insert governance hooks receivers here + ), + ) + // remove-end + + // ... + + app.mm = module.NewManager( + genutil.NewAppModule( + app.AccountKeeper, + app.StakingKeeper, + app.BaseApp.DeliverTx, + encodingConfig.TxConfig, + ), + // remove-next-line + auth.NewAppModule(appCodec, app.AccountKeeper, nil), + // highlight-next-line + auth.NewAppModule(appCodec, app.AccountKeeper, authsims.RandomGenesisAccounts, app.GetSubspace(authtypes.ModuleName)), + authzmodule.NewAppModule(appCodec, app.AuthzKeeper, app.AccountKeeper, app.BankKeeper, app.interfaceRegistry), + vesting.NewAppModule(app.AccountKeeper, app.BankKeeper), + // remove-start + bank.NewAppModule(appCodec, app.BankKeeper, app.AccountKeeper), + capability.NewAppModule(appCodec, *app.CapabilityKeeper), + // remove-end + // highlight-start + bank.NewAppModule(appCodec, app.BankKeeper, app.AccountKeeper, app.GetSubspace(banktypes.ModuleName)), + capability.NewAppModule(appCodec, *app.CapabilityKeeper, false), + // highlight-end + feegrantmodule.NewAppModule(appCodec, app.AccountKeeper, app.BankKeeper, app.FeeGrantKeeper, app.interfaceRegistry), + groupmodule.NewAppModule(appCodec, app.GroupKeeper, 
app.AccountKeeper, app.BankKeeper, app.interfaceRegistry), + // remove-start + crisis.NewAppModule(&app.CrisisKeeper, skipGenesisInvariants), + gov.NewAppModule(appCodec, app.GovKeeper, app.AccountKeeper, app.BankKeeper), + mint.NewAppModule(appCodec, app.MintKeeper, app.AccountKeeper, minttypes.DefaultInflationCalculationFn), + slashing.NewAppModule(appCodec, app.SlashingKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper), + distr.NewAppModule(appCodec, app.DistrKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper), + staking.NewAppModule(appCodec, app.StakingKeeper, app.AccountKeeper, app.BankKeeper), + // remove-end + // highlight-start + crisis.NewAppModule(app.CrisisKeeper, skipGenesisInvariants, app.GetSubspace(crisistypes.ModuleName)), + gov.NewAppModule(appCodec, &app.GovKeeper, app.AccountKeeper, app.BankKeeper, app.GetSubspace(govtypes.ModuleName)), + mint.NewAppModule(appCodec, app.MintKeeper, app.AccountKeeper, nil, app.GetSubspace(minttypes.ModuleName)), + slashing.NewAppModule(appCodec, app.SlashingKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper, app.GetSubspace(slashingtypes.ModuleName)), + distr.NewAppModule(appCodec, app.DistrKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper, app.GetSubspace(distrtypes.ModuleName)), + staking.NewAppModule(appCodec, app.StakingKeeper, app.AccountKeeper, app.BankKeeper, app.GetSubspace(stakingtypes.ModuleName)), + // highlight-end + upgrade.NewAppModule(app.UpgradeKeeper), + evidence.NewAppModule(app.EvidenceKeeper), + // highlight-next-line + consensus.NewAppModule(appCodec, app.ConsensusParamsKeeper), + ibc.NewAppModule(app.IBCKeeper), + params.NewAppModule(app.ParamsKeeper), + transferModule, + icaModule, + // this line is used by starport scaffolding # stargate/app/appModule + + ) + + app.mm.SetOrderBeginBlockers( + // ... + paramstypes.ModuleName, + vestingtypes.ModuleName, + // highlight-next-line + consensusparamtypes.ModuleName, + // ... 
+ ) + + app.mm.SetOrderEndBlockers( + // ... + paramstypes.ModuleName, + upgradetypes.ModuleName, + vestingtypes.ModuleName, + // highlight-next-line + consensusparamtypes.ModuleName, + // ... + ) + + // remove-next-line + app.mm.SetOrderInitGenesis( + // highlight-next-line + genesisModuleOrder := []string{ + // ... + paramstypes.ModuleName, + upgradetypes.ModuleName, + vestingtypes.ModuleName, + // highlight-next-line + consensusparamtypes.ModuleName, + // ... + // remove-next-line + ) + // highlight-start + } + app.mm.SetOrderInitGenesis(genesisModuleOrder...) + app.mm.SetOrderExportGenesis(genesisModuleOrder...) + // highlight-end + + // remove-start + app.mm.RegisterInvariants(&app.CrisisKeeper) + app.mm.RegisterRoutes(app.Router(), app.QueryRouter(), encodingConfig.Amino) + // remove-end + // highlight-next-line + app.mm.RegisterInvariants(app.CrisisKeeper) + + app.configurator = module.NewConfigurator(app.appCodec, app.MsgServiceRouter(), app.GRPCQueryRouter()) + app.mm.RegisterServices(app.configurator) + + // highlight-start + autocliv1.RegisterQueryServer(app.GRPCQueryRouter(), runtimeservices.NewAutoCLIQueryService(app.mm.Modules)) + reflectionSvc, err := runtimeservices.NewReflectionService() + if err != nil { + panic(err) + } + reflectionv1.RegisterReflectionServiceServer(app.GRPCQueryRouter(), reflectionSvc) + // highlight-end + + // create the simulation manager and define the order of the modules for deterministic simulations + // remove-start + app.sm = module.NewSimulationManager( + // ... + ) + // remove-end + // highlight-start + overrideModules := map[string]module.AppModuleSimulation{ + authtypes.ModuleName: auth.NewAppModule(app.appCodec, app.AccountKeeper, authsims.RandomGenesisAccounts, app.GetSubspace(authtypes.ModuleName)), + } + app.sm = module.NewSimulationManagerFromAppModules(app.mm.Modules, overrideModules) + // highlight-end + app.sm.RegisterStoreDecoders() + + // ... 
+ + // remove-start + app.SetInitChainer(app.InitChainer) + app.SetBeginBlocker(app.BeginBlocker) + // remove-end + + // ... +} + +func (app *App) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci.ResponseInitChain { + var genesisState GenesisState + // remove-next-line + if err := tmjson.Unmarshal(req.AppStateBytes, &genesisState); err != nil { + // highlight-next-line + if err := json.Unmarshal(req.AppStateBytes, &genesisState); err != nil { + panic(err) + } + // ... +} + +// remove-start +// GetMaccPerms returns a copy of the module account permissions +func GetMaccPerms() map[string][]string { + dupMaccPerms := make(map[string][]string) + for k, v := range maccPerms { + dupMaccPerms[k] = v + } + return dupMaccPerms +} +// remove-end + +// highlight-start +// TxConfig returns App's TxConfig. +func (app *App) TxConfig() client.TxConfig { + return app.txConfig +} + +// Configurator get app configurator +func (app *App) Configurator() module.Configurator { + return app.configurator +} + +// ModuleManager returns the app ModuleManager +func (app *App) ModuleManager() *module.Manager { + return app.mm +} +// highlight-end +``` + +```text title="app/simulation_test.go" +import ( + // ... 
+ // remove-start + "cosmossdk.io/simapp" + tmtypes "github.com/tendermint/tendermint/types" + // remove-end + // highlight-start + "encoding/json" + "fmt" + "math/rand" + "runtime/debug" + "strings" + + dbm "github.com/cometbft/cometbft-db" + "github.com/cometbft/cometbft/libs/log" + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/server" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims" + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + authzkeeper "github.com/cosmos/cosmos-sdk/x/authz/keeper" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + distrtypes "github.com/cosmos/cosmos-sdk/x/distribution/types" + evidencetypes "github.com/cosmos/cosmos-sdk/x/evidence/types" + govtypes "github.com/cosmos/cosmos-sdk/x/gov/types" + minttypes "github.com/cosmos/cosmos-sdk/x/mint/types" + paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" + simcli "github.com/cosmos/cosmos-sdk/x/simulation/client/cli" + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + // highlight-end +) + +// highlight-start +type storeKeysPrefixes struct { + A storetypes.StoreKey + B storetypes.StoreKey + Prefixes [][]byte +} +// highlight-end + +// Get flags every time the simulator is run +func init() { + // remove-next-line + simapp.GetSimulatorFlags() + // highlight-next-line + simcli.GetSimulatorFlags() +} + +// remove-start +var defaultConsensusParams = &abci.ConsensusParams{ + Block: &abci.BlockParams{ + MaxBytes: 200000, + MaxGas: 2000000, + }, + Evidence: &tmproto.EvidenceParams{ + MaxAgeNumBlocks: 302400, + MaxAgeDuration: 504 * time.Hour, // 3 weeks is the max duration + MaxBytes: 10000, + }, + Validator: &tmproto.ValidatorParams{ + PubKeyTypes: []string{ + 
tmtypes.ABCIPubKeyTypeEd25519, + }, + }, +} +// remove-end +// highlight-start +func fauxMerkleModeOpt(bapp *baseapp.BaseApp) { + bapp.SetFauxMerkleMode() +} +// highlight-end + +func BenchmarkSimulation(b *testing.B) { + // remove-start + simapp.FlagEnabledValue = true + simapp.FlagCommitValue = true + + config, db, dir, logger, _, err := simapp.SetupSimulation("goleveldb-app-sim", "Simulation") + // remove-end + // highlight-start + simcli.FlagSeedValue = time.Now().Unix() + simcli.FlagVerboseValue = true + simcli.FlagCommitValue = true + simcli.FlagEnabledValue = true + + config := simcli.NewConfigFromFlags() + config.ChainID = "mars-simapp" + db, dir, logger, _, err := simtestutil.SetupSimulation( + config, + "leveldb-bApp-sim", + "Simulation", + simcli.FlagVerboseValue, + simcli.FlagEnabledValue, + ) + // highlight-end + + require.NoError(b, err, "simulation setup failed") + + b.Cleanup(func() { + // remove-start + db.Close() + err = os.RemoveAll(dir) + require.NoError(b, err) + // remove-end + // highlight-start + require.NoError(b, db.Close()) + require.NoError(b, os.RemoveAll(dir)) + // highlight-end + }) + + + // remove-next-line + encoding := app.MakeEncodingConfig() + // highlight-start + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = app.DefaultNodeHome + appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue + // highlight-end + + // remove-next-line + app := app.New( + // highlight-next-line + bApp := app.New( + logger, + db, + nil, + true, + map[int64]bool{}, + app.DefaultNodeHome, + 0, + // remove-start + encoding, + simapp.EmptyAppOptions{}, + // remove-end + // highlight-start + app.MakeEncodingConfig(), + appOptions, + baseapp.SetChainID(config.ChainID), + // highlight-end + ) + // highlight-next-line + require.Equal(b, app.Name, bApp.Name()) + + _, simParams, simErr := simulation.SimulateFromSeed( + b, + os.Stdout, + // remove-start + app.BaseApp, + simapp.AppStateFn(app.AppCodec(), 
app.SimulationManager()), + simulationtypes.RandomAccounts, + simapp.SimulationOperations(app, app.AppCodec(), config), + app.ModuleAccountAddrs(), + config, + app.AppCodec(), + // remove-end + // highlight-start + bApp.BaseApp, + simtestutil.AppStateFn( + bApp.AppCodec(), + bApp.SimulationManager(), + app.NewDefaultGenesisState(bApp.AppCodec()), + ), + simulationtypes.RandomAccounts, + simtestutil.SimulationOperations(bApp, bApp.AppCodec(), config), + bApp.ModuleAccountAddrs(), + config, + bApp.AppCodec(), + // highlight-end + ) + + // remove-next-line + err = simapp.CheckExportSimulation(app, config, simParams) + // highlight-next-line + err = simtestutil.CheckExportSimulation(bApp, config, simParams) + require.NoError(b, err) + require.NoError(b, simErr) + + if config.Commit { + // remove-next-line + simapp.PrintStats(db) + // highlight-next-line + simtestutil.PrintStats(db) + } +} +``` + +```text title="x/{{moduleName}}/module_simulation.go" +import ( + // ... + // remove-next-line + simappparams "cosmossdk.io/simapp/params" +) + +var ( + // ... + // remove-next-line + _ = simappparams.StakePerAccount + // highlight-next-line + _ = rand.Rand{} +) + +// remove-start +func (am AppModule) RandomizedParams(_ *rand.Rand) []simtypes.ParamChange { + // ... +} +// remove-end +// highlight-start +// ProposalMsgs returns msgs used for governance proposals for simulations. 
+func (am AppModule) ProposalMsgs(simState module.SimulationState) []simtypes.WeightedProposalMsg {
+	return []simtypes.WeightedProposalMsg{
+		// this line is used by starport scaffolding # simapp/module/OpMsg
+	}
+}
+// highlight-end
+```
+
+### Deprecations
+
+The app module might contain some legacy methods that are deprecated and can be removed:
+
+```text title="x/{{moduleName}}/module.go"
+// remove-start
+// Deprecated: use RegisterServices
+func (am AppModule) Route() sdk.Route { return sdk.Route{} }
+
+// Deprecated: use RegisterServices
+func (AppModule) QuerierRoute() string { return types.RouterKey }
+
+// Deprecated: use RegisterServices
+func (am AppModule) LegacyQuerierHandler(_ *codec.LegacyAmino) sdk.Querier {
+	return nil
+}
+// remove-end
+```
+
+### Other required changes
+
+Changes required to the network test util:
+
+```text title="testutil/network/network.go"
+import (
+	// ...
+
+	// remove-start
+	"github.com/cosmos/cosmos-sdk/simapp"
+	pruningtypes "github.com/cosmos/cosmos-sdk/pruning/types"
+	// remove-end
+	// highlight-start
+	pruningtypes "github.com/cosmos/cosmos-sdk/store/pruning/types"
+	simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims"
+	// highlight-end
+)
+
+func New(t *testing.T, configs ...Config) *Network {
+	// ...
+
+	net, err := network.New(t, t.TempDir(), cfg)
+	require.NoError(t, err)
+	// highlight-start
+	_, err = net.WaitForHeight(1)
+	require.NoError(t, err)
+	// highlight-end
+
+	// ...
+}
+
+func DefaultConfig() network.Config {
+	// remove-next-line
+	encoding := app.MakeEncodingConfig()
+	// highlight-start
+	var (
+		encoding = app.MakeEncodingConfig()
+		chainID  = "chain-" + tmrand.NewRand().Str(6)
+	)
+	// highlight-end
+
+	return network.Config{
+		// ... 
+ // remove-next-line + AppConstructor: func(val network.Validator) servertypes.Application { + // highlight-next-line + AppConstructor: func(val network.ValidatorI) servertypes.Application { + return app.New( + // remove-next-line + val.Ctx.Logger, + // highlight-next-line + val.GetCtx().Logger, + tmdb.NewMemDB(), + nil, + true, + map[int64]bool{}, + // remove-next-line + val.Ctx.Config.RootDir, + // highlight-next-line + val.GetCtx().Config.RootDir, + 0, + encoding, + // remove-start + simapp.EmptyAppOptions{}, + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.AppConfig.Pruning)), + baseapp.SetMinGasPrices(val.AppConfig.MinGasPrices), + // remove-end + // highlight-start + simtestutil.EmptyAppOptions{}, + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.GetAppConfig().Pruning)), + baseapp.SetMinGasPrices(val.GetAppConfig().MinGasPrices), + baseapp.SetChainID(chainID), + // highlight-end + ) + }, + // ... + // remove-next-line + ChainID: "chain-" + tmrand.NewRand().Str(6), + // highlight-next-line + ChainID: chainID, + // ... + } +} +``` + +Update the collect genesis transactions command and add the new message validator argument: + +```text title="cmd/{{binaryNamePrefix}}d/cmd/root.go" +import ( + // ... + + // highlight-start + tmtypes "github.com/cometbft/cometbft/types" + "github.com/cosmos/cosmos-sdk/x/genutil" + genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" + // highlight-end +) + +func initRootCmd(rootCmd *cobra.Command, encodingConfig params.EncodingConfig) { + // ... + + // highlight-next-line + gentxModule := app.ModuleBasics[genutiltypes.ModuleName].(genutil.AppModuleBasic) + rootCmd.AddCommand( + // ... + // remove-next-line + genutilcli.CollectGenTxsCmd(banktypes.GenesisBalancesIterator{}, app.DefaultHome), + // highlight-next-line + genutilcli.CollectGenTxsCmd(banktypes.GenesisBalancesIterator{}, app.DefaultNodeHome, gentxModule.GenTxValidator), + // ... + ) + + // ... 
+}
+
+func (a appCreator) newApp(
+	logger log.Logger,
+	db dbm.DB,
+	traceStore io.Writer,
+	appOpts servertypes.AppOptions,
+) servertypes.Application {
+	// ...
+
+	pruningOpts, err := server.GetPruningOptionsFromFlags(appOpts)
+	if err != nil {
+		panic(err)
+	}
+
+	// highlight-start
+	homeDir := cast.ToString(appOpts.Get(flags.FlagHome))
+	chainID := cast.ToString(appOpts.Get(flags.FlagChainID))
+	if chainID == "" {
+		// fallback to genesis chain-id
+		appGenesis, err := tmtypes.GenesisDocFromFile(filepath.Join(homeDir, "config", "genesis.json"))
+		if err != nil {
+			panic(err)
+		}
+
+		chainID = appGenesis.ChainID
+	}
+	// highlight-end
+
+	// ...
+
+	return app.New(
+		// ...
+		baseapp.SetPruning(pruningOpts),
+		baseapp.SetMinGasPrices(cast.ToString(appOpts.Get(server.FlagMinGasPrices))),
+		// remove-next-line
+		baseapp.SetMinRetainBlocks(cast.ToUint64(appOpts.Get(server.FlagMinRetainBlocks))),
+		baseapp.SetHaltHeight(cast.ToUint64(appOpts.Get(server.FlagHaltHeight))),
+		baseapp.SetHaltTime(cast.ToUint64(appOpts.Get(server.FlagHaltTime))),
+		// highlight-next-line
+		baseapp.SetMinRetainBlocks(cast.ToUint64(appOpts.Get(server.FlagMinRetainBlocks))),
+		// ...
+		baseapp.SetIAVLDisableFastNode(cast.ToBool(appOpts.Get(server.FlagDisableIAVLFastNode))),
+		// highlight-next-line
+		baseapp.SetChainID(chainID),
+	)
+}
+
+func (a appCreator) appExport(
+	logger log.Logger,
+	db dbm.DB,
+	traceStore io.Writer,
+	height int64,
+	forZeroHeight bool,
+	jailAllowedAddrs []string,
+	appOpts servertypes.AppOptions,
+	// highlight-next-line
+	modulesToExport []string,
+) (servertypes.ExportedApp, error) {
+	// ... 
+
+	// remove-next-line
+	return app.ExportAppStateAndValidators(forZeroHeight, jailAllowedAddrs)
+	// highlight-next-line
+	return app.ExportAppStateAndValidators(forZeroHeight, jailAllowedAddrs, modulesToExport)
+}
+```
+
+Add the new extra argument to `ExportAppStateAndValidators`:
+
+```text title="app/export.go"
+func (app *App) ExportAppStateAndValidators(
+	forZeroHeight bool,
+	jailAllowedAddrs []string,
+	// highlight-next-line
+	modulesToExport []string,
+) (servertypes.ExportedApp, error) {
+	// ...
+
+	// remove-next-line
+	genState := app.mm.ExportGenesis(ctx, app.appCodec)
+	// highlight-next-line
+	genState := app.mm.ExportGenesisForModules(ctx, app.appCodec, modulesToExport)
+	appState, err := json.MarshalIndent(genState, "", " ")
+	if err != nil {
+		return servertypes.ExportedApp{}, err
+	}
+
+	// ...
+}
+```
+
+### Migration
+
+You can also follow other Cosmos SDK migration steps in their [upgrade guide](https://docs.cosmos.network/main/migrations/upgrading#v047x).
+Especially the [parameter migration](https://docs.cosmos.network/main/migrations/upgrading#xconsensus) which
+is required if you want to run the updated version keeping your current app state.
+
+## Query commands
+
+Query commands context initialization should be changed to:
+
+```text title="x/{moduleName}/client/cli/query_{typeName}.go"
+RunE: func(cmd *cobra.Command, args []string) (err error) {
+	// remove-next-line
+	clientCtx := client.GetClientContextFromCmd(cmd)
+	// highlight-start
+	clientCtx, err := client.GetClientQueryContext(cmd)
+	if err != nil {
+		return err
+	}
+	// highlight-end
+
+	// ...
+}
+```
+
+
+## ibc-go v7
+
+Chains that are newly scaffolded with IGNITE® CLI `v0.27.1` now use `ibc-go/v7` for IBC functionality. It is
+required to upgrade to the newest version of `ibc-go`.
+
+Applications scaffolded with older versions of IGNITE® CLI require the following changes to the app file:
+
+```text title="app/app.go"
+import (
+	// ... 
+ // remove-start + ica "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts" + icacontrollerkeeper "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/controller/keeper" + icacontrollertypes "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/controller/types" + icahost "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/host" + icahostkeeper "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/host/keeper" + icahosttypes "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/host/types" + icatypes "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/types" + "github.com/cosmos/ibc-go/v6/modules/apps/transfer" + ibctransferkeeper "github.com/cosmos/ibc-go/v6/modules/apps/transfer/keeper" + ibctransfertypes "github.com/cosmos/ibc-go/v6/modules/apps/transfer/types" + ibc "github.com/cosmos/ibc-go/v6/modules/core" + ibcclient "github.com/cosmos/ibc-go/v6/modules/core/02-client" + ibcclientclient "github.com/cosmos/ibc-go/v6/modules/core/02-client/client" + ibcclienttypes "github.com/cosmos/ibc-go/v6/modules/core/02-client/types" + ibcporttypes "github.com/cosmos/ibc-go/v6/modules/core/05-port/types" + ibchost "github.com/cosmos/ibc-go/v6/modules/core/24-host" + ibckeeper "github.com/cosmos/ibc-go/v6/modules/core/keeper" + // remove-end + // highlight-start + ica "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts" + icacontrollerkeeper "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/controller/keeper" + icacontrollertypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/controller/types" + icahost "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/host" + icahostkeeper "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/host/keeper" + icahosttypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/host/types" + icatypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/types" + 
"github.com/cosmos/ibc-go/v7/modules/apps/transfer" + ibctransferkeeper "github.com/cosmos/ibc-go/v7/modules/apps/transfer/keeper" + ibctransfertypes "github.com/cosmos/ibc-go/v7/modules/apps/transfer/types" + ibc "github.com/cosmos/ibc-go/v7/modules/core" + ibcclient "github.com/cosmos/ibc-go/v7/modules/core/02-client" + ibcclientclient "github.com/cosmos/ibc-go/v7/modules/core/02-client/client" + ibcclienttypes "github.com/cosmos/ibc-go/v7/modules/core/02-client/types" + ibcporttypes "github.com/cosmos/ibc-go/v7/modules/core/05-port/types" + ibcexported "github.com/cosmos/ibc-go/v7/modules/core/exported" + ibckeeper "github.com/cosmos/ibc-go/v7/modules/core/keeper" + solomachine "github.com/cosmos/ibc-go/v7/modules/light-clients/06-solomachine" + ibctm "github.com/cosmos/ibc-go/v7/modules/light-clients/07-tendermint" + // highlight-end +) + +var ( + // ... + + ModuleBasics = module.NewBasicManager( + // ... + groupmodule.AppModuleBasic{}, + ibc.AppModuleBasic{}, + // highlight-start + ibctm.AppModuleBasic{}, + solomachine.AppModuleBasic{}, + // highlight-end + upgrade.AppModuleBasic{}, + // ... + ) +) + +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + // ... + + keys := sdk.NewKVStoreKeys( + // ... + govtypes.StoreKey, + paramstypes.StoreKey, + // remove-next-line + ibchost.StoreKey, + // highlight-next-line + ibcexported.StoreKey, + // ... + ) + + // ... 
+ // grant capabilities for the ibc and ibc-transfer modules + // remove-next-line + scopedIBCKeeper := app.CapabilityKeeper.ScopeToModule(ibchost.ModuleName) + // highlight-next-line + scopedIBCKeeper := app.CapabilityKeeper.ScopeToModule(ibcexported.ModuleName) + scopedICAControllerKeeper := app.CapabilityKeeper.ScopeToModule(icacontrollertypes.SubModuleName) + + // ... + + app.IBCKeeper = ibckeeper.NewKeeper( + appCodec, + // remove-start + keys[ibchost.StoreKey], + app.GetSubspace(ibchost.ModuleName), + // remove-end + // highlight-start + keys[ibcexported.StoreKey], + app.GetSubspace(ibcexported.ModuleName), + // highlight-end + app.StakingKeeper, + app.UpgradeKeeper, + scopedIBCKeeper, + ) + + // ... + + app.mm.SetOrderBeginBlockers( + // ... + crisistypes.ModuleName, + ibctransfertypes.ModuleName, + // remove-next-line + ibchost.ModuleName, + // highlight-next-line + ibcexported.ModuleName, + // ... + ) + + app.mm.SetOrderEndBlockers( + // ... + stakingtypes.ModuleName, + ibctransfertypes.ModuleName, + // remove-next-line + ibchost.ModuleName, + // highlight-next-line + ibcexported.ModuleName, + // ... + ) + + genesisModuleOrder := []string{ + // ... + genutiltypes.ModuleName, + ibctransfertypes.ModuleName, + // remove-next-line + ibchost.ModuleName, + // highlight-next-line + ibcexported.ModuleName, + // ... + } + + // ... +) + +func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey storetypes.StoreKey) paramskeeper.Keeper { + // ... + paramsKeeper.Subspace(crisistypes.ModuleName) + paramsKeeper.Subspace(ibctransfertypes.ModuleName) + // remove-next-line + paramsKeeper.Subspace(ibchost.ModuleName) + // highlight-next-line + paramsKeeper.Subspace(ibcexported.ModuleName) + // ... +} +``` + + +You can follow other IBC migration steps in their [migration guide v6 to v7](https://github.com/cosmos/ibc-go/blob/v7.0.1/docs/migrations/v6-to-v7.md). 
+
+## Doctor command
+
+As the final step, it's recommended to run `ignite doctor` and `go mod tidy`. diff --git a/docs/docs/06-migration/v28.0.0.md b/docs/docs/06-migration/v28.0.0.md new file mode 100644 index 0000000..d830bb3 --- /dev/null +++ b/docs/docs/06-migration/v28.0.0.md @@ -0,0 +1,124 @@ +---
+sidebar_position: 990
+title: v28.0.0
+description: For chains that were scaffolded with IGNITE® CLI versions lower than v28.0.0 changes are required to use IGNITE® CLI v28.0.0
+---
+
+## **Upgrade to v28.0.0 - New Versioning Scheme in IGNITE®**
+
+With the latest update, IGNITE® has transitioned its versioning format from a leading-zero release system to a full number release system. This change marks a significant shift in how we communicate updates and stability in our software. Where the previous version was denoted as v0.27.0, it will now be upgraded to v28.0.0.
+
+This new versioning approach enhances our version control by clearly indicating major, minor, and patch releases.
+From now on, the first number indicates a major release with breaking API changes, the second number indicates a minor release that might include new features, while the last number is typically focused on bug fixes and minor improvements.
+[Learn more about semantic versioning](https://semver.org/).
+
+## **Plugins are now called Apps. Upgrade Configuration Files**
+
+IGNITE® `v28.0.0` changes the plugin system which is now called IGNITE® Apps. This version includes changes
+to the CLI command names and the plugin configuration file.
+
+The plugins configuration file is now called `igniteapps.yml` and "plugins" are now called "apps".
+
+The plugins configuration home directory is now `$HOME/.ignite/apps` instead of `$HOME/.ignite/plugins`.
+
+Updates can be automatically applied by running `ignite doctor` in your blockchain application directory.
+Running the command outside your blockchain application directory will only update the global plugins. 
+ +## **IGNITE® and Cosmos SDK Upgrade Guide: From IGNITE® v0.27.0 to v28.0.0 and Cosmos SDK v0.47 to v0.50** + +### **Introduction** + +This guide provides a step-by-step process for developers to upgrade their applications from IGNITE® version 0.27.0 to 28.0.0, along with an upgrade in the Cosmos SDK from version 0.47 to v0.50. It covers essential changes, new features, and adjustments required for a smooth transition. + +### **Prerequisites** + +- Backup your current project. +- Ensure you have IGNITE® v0.27.0 and Cosmos SDK v0.47 installed. +- Basic familiarity with command line operations and the existing project structure. + +### **Step 1: Update IGNITE® CLI to Version 28.0.0** + +- **Command**: Run **`curl https://get.ignite.com/cli@v28.0.0 | bash`** in your terminal. +- **Note**: This command updates the IGNITE® CLI to the latest version. Ensure you have the necessary permissions to execute it. + +### **Step 2: Update Scaffold Chain Command** + +- **Old Command**: **`ignite scaffold chain github.com/alice/blog`** +- **New Command**: **`ignite scaffold chain blog`** +- **Explanation**: The command format has been simplified in the new version for ease of use. + +### **Step 3: Docker Version Upgrades** + +- **Action**: Upgrade the IGNITE® version for the Docker container to match the CLI version. +- **Note**: Ensure Docker compatibility with the new IGNITE® CLI version. + +### **Step 4: Change in Module Path** + +- **Old Path**: **`x/blog/module.go`** +- **New Path**: **`x/blog/module/module.go`** +- **Explanation**: The module path structure has been updated for better organization. + +### **Step 5: Frontend Scaffolding Options** + +- **Action**: Choose between Vue, React, Go, or TypeScript for frontend scaffolding. +- **Commands**: + - **`ignite scaffold react`** + - **`ignite scaffold vue`** +- **Note**: Vue is no longer the default option for frontend scaffolding. 
+
+### **Step 6: Update Scaffold Message for CreatePost Command**
+
+- **Action**: Review and update the output for the scaffolded createPost command as per the new format.
+
+### **Step 7: AutoCLI Path Change**
+
+- **Old Path**: **`x/blog/client/cli/tx_create_post.go`**
+- **New Path**: **`x/blog/module/autocli.go`**
+- **Explanation**: AutoCLI is now integrated at a different path to streamline command-line interactions.
+
+### **Step 8: Adjustment in Stored Game**
+
+- **Old Code**:
+
+  ```go
+  store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.PostKey))
+
+  ```
+
+- **New Code**:
+
+  ```go
+  storeAdapter := runtime.KVStoreAdapter(k.storeService.OpenKVStore(ctx))
+  store := prefix.NewStore(storeAdapter, types.KeyPrefix(types.PostKey))
+
+  ```
+
+- **Explanation**: The way the KVStore is accessed has changed, requiring an update in the code for stored games.
+
+### **Step 9: Chain-ID Requirements in CLI Transaction Commands**
+
+- **Action**: Add the **`--chain-id`** flag to CLI transaction commands.
+- **Example**:
+  - **Old Command**: **`blogd tx blog create-post 'Hello, World!' 'This is a blog post' --from alice`**
+  - **New Command**: **`blogd tx blog create-post 'Hello, World!' 'This is a blog post' --from alice --chain-id blog`**
+- **Explanation**: The **`chain-id`** flag is now required for transaction commands for identification purposes.
+
+### **Troubleshooting Common Issues**
+
+- **Dependency Conflicts**: Ensure compatibility of all dependencies with IGNITE® v28.0.0 and Cosmos SDK v0.50.
+- **Docker Image Compatibility**: Align Docker image versions with the CLI for seamless operations.
+- **Frontend Scaffolding**: For older projects, ensure correct scaffolding as per the new commands.
+- **AutoCLI Integration**: Address discrepancies due to the new AutoCLI integration path.
+ +### **Additional Resources** + +- [IGNITE® Documentation](https://docs.ignite.com/) +- [Cosmos SDK Release Notes](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.50.1) + +### **Feedback** + +We value your feedback on this guide. Please share your experiences and suggestions for improvements. + +### **Updates Log** + +- **[01/15/24]**: Guide created for IGNITE® v28.0.0 and Cosmos SDK v0.50.1 \ No newline at end of file diff --git a/docs/docs/06-migration/v29.0.0.md b/docs/docs/06-migration/v29.0.0.md new file mode 100644 index 0000000..5f52591 --- /dev/null +++ b/docs/docs/06-migration/v29.0.0.md @@ -0,0 +1,98 @@ +--- +sidebar_position: 989 +title: v29.0.0 +description: For chains that were scaffolded with IGNITE® CLI versions lower than v29.0.0 changes are required to use IGNITE® CLI v29.0.0 +--- + +## Upgrade to v29.0.0 + +The changes between v28.0.0 and v29.0.0 are not as significant as the changes between v0.27.0 and v28.0.0. + +In v29.0.0, the Cosmos SDK version has been upgraded to 0.53.0 and IBC to v10. + +Please see the [Changelog](https://github.com/ignite/cli/commit/1b7f19f08d0fa91e3ae71b4b37b8bb4171a9e320#diff-b027e7b11ff55b21dd50b32abcbdd35d95be87a889f0f6562417fbf0995d402a) for more details. + +:::tip +If you wish to keep using a chain scaffolded with IGNITE® v28, simply run the doctor command: + +```bash +ignite doctor +``` + +Note that some scaffolding commands may not work as expected, and you may need to manually adjust your code, unless you follow the migration steps below. +::: + +## Upgrade Cosmos SDK to v0.53.0 + +In order to upgrade, please navigate to the `go.mod` file in your blockchain directory and replace an earlier Cosmos-SDK version with v0.53.0. + +```diff +-github.com/cosmos/cosmos-sdk v0.50.0 ++github.com/cosmos/cosmos-sdk v0.53.0 +``` + +Review the [Cosmos SDK v0.53.0 release notes](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.53.0) for changes like updated x/auth vesting or sdk.Context APIs. 
+
+If you have custom modules, test for deprecated APIs and update as needed.
+
+## Add Auth to PreBlockers
+
+v29 configures preblockers to include the `auth` module (`authtypes.ModuleName`) for transaction processing. Ensure this is set in your v28 scaffold.
+
+**Edit PreBlockers**:
+
+- Open `mychain/app/app_config.go`.
+
+- Find or add the `preBlockers` slice. Ensure it includes `authtypes.ModuleName`, matching v29’s configuration:
+
+```go
+import (
+	authtypes "github.com/cosmos/cosmos-sdk/x/auth/types"
+	upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types"
+)
+
+var preBlockers = []string{
+	upgradetypes.ModuleName,
+	authtypes.ModuleName,
+	// this line is used by starport scaffolding # stargate/app/preBlockers
+}
+```
+
+## Upgrade to IBC v10
+
+```diff
+-github.com/cosmos/ibc-go/v8 v8.5.2
++github.com/cosmos/ibc-go/v10 v10.0.0
+```
+
+The easiest path is to copy the relevant files in the `app` directory from a chain scaffolded with v29 into your old v28 project, in case you did not modify anything in there.
+
+In case you want to see the entire difference with scaffolded chains, use our "Generate Migration Difference" Tool.
+
+[Check out the guide to use the Gen-Mig-Diff tool](https://tutorials.ignite.com/guide-to-use-gen-mig-diffs-for/).
+
+Then run the command
+
+`gen-mig-diffs --output temp/migration --from v28 --to v29`
+
+Now, test if your blockchain runs using IGNITE® v29:
+
+Update the dependencies with:
+
+```bash
+go mod tidy
+```
+
+Then run the IGNITE® doctor to update configuration files.
+
+```bash
+ignite doctor
+```
+
+Now start your chain.
+
+```bash
+ignite chain serve
+```
+
+If you need our help and support, do not hesitate to visit our [Discord](https://discord.com/invite/ignitecli).
diff --git a/docs/docs/07-packages/_category_.json b/docs/docs/07-packages/_category_.json new file mode 100644 index 0000000..6dbb883 --- /dev/null +++ b/docs/docs/07-packages/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Packages", + "link": null +} \ No newline at end of file diff --git a/docs/docs/07-packages/chaincmd.md b/docs/docs/07-packages/chaincmd.md new file mode 100644 index 0000000..c4f498c --- /dev/null +++ b/docs/docs/07-packages/chaincmd.md @@ -0,0 +1,51 @@ +--- +sidebar_position: 7 +title: Chain Command Builder (chaincmd) +slug: /packages/chaincmd +--- + +# Chain Command Builder (chaincmd) + +The `chaincmd` package builds `step.Option` command definitions for Cosmos SDK daemon binaries (`simd`, `gaiad`, and others). It does not execute commands directly. + +For full API details, see the +[`chaincmd` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/chaincmd). + +## When to use + +- Build consistent daemon command lines from typed options. +- Reuse command composition across services and tests. +- Keep chain binary-specific flags centralized. 
+ +## Key APIs + +- `New(appCmd string, options ...Option) ChainCmd` +- `WithHome(home string) Option` +- `WithChainID(chainID string) Option` +- `InitCommand(moniker string, options ...string) step.Option` +- `BankSendCommand(fromAddress, toAddress, amount string, options ...BankSendOption) step.Option` + +## Example + +```go +package main + +import ( + "fmt" + + "github.com/ignite/cli/v29/ignite/pkg/chaincmd" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" +) + +func main() { + cmd := chaincmd.New( + "simd", + chaincmd.WithHome("./.simapp"), + chaincmd.WithChainID("demo-1"), + ) + + initStep := step.New(cmd.InitCommand("validator")) + fmt.Println(initStep.Exec.Command) + fmt.Println(initStep.Exec.Args) +} +``` diff --git a/docs/docs/07-packages/chaincmdrunner.md b/docs/docs/07-packages/chaincmdrunner.md new file mode 100644 index 0000000..58db8ca --- /dev/null +++ b/docs/docs/07-packages/chaincmdrunner.md @@ -0,0 +1,40 @@ +--- +sidebar_position: 4 +title: Chain Command Runner (chaincmd/runner) +slug: /packages/chaincmdrunner +--- + +# Chain Command Runner (chaincmd/runner) + +The `chaincmdrunner` package wraps chain binary commands into typed, higher-level operations (accounts, genesis setup, tx queries, node control). + +For full API details, see the +[`chaincmdrunner` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/chaincmd/runner). + +## When to use + +- Execute chain lifecycle commands without manually assembling CLI arguments. +- Manage accounts and genesis setup from automation/test flows. +- Query transaction events using typed selectors instead of raw command output parsing. 
+ +## Key APIs + +- `New(ctx context.Context, chainCmd chaincmd.ChainCmd, options ...Option) (Runner, error)` +- `(Runner) Init(ctx context.Context, moniker string, args ...string) error` +- `(Runner) Start(ctx context.Context, args ...string) error` +- `(Runner) AddAccount(ctx context.Context, name, mnemonic, coinType, accountNumber, addressIndex string) (Account, error)` +- `(Runner) AddGenesisAccount(ctx context.Context, address, coins string) error` +- `(Runner) QueryTxByEvents(ctx context.Context, selectors ...EventSelector) ([]Event, error)` +- `(Runner) WaitTx(ctx context.Context, txHash string, retryDelay time.Duration, maxRetry int) error` + +## Common Tasks + +- Build a `Runner` from a configured `chaincmd.ChainCmd` and then call `Init`/`Start` for local node workflows. +- Use `AddAccount`, `ListAccounts`, and `ShowAccount` to manage keyring state in scripted flows. +- Query and filter tx events with `NewEventSelector` plus `QueryTxByEvents`. + +## Basic import + +```go +import chaincmdrunner "github.com/ignite/cli/v29/ignite/pkg/chaincmd/runner" +``` diff --git a/docs/docs/07-packages/chainregistry.md b/docs/docs/07-packages/chainregistry.md new file mode 100644 index 0000000..b2558ad --- /dev/null +++ b/docs/docs/07-packages/chainregistry.md @@ -0,0 +1,43 @@ +--- +sidebar_position: 3 +title: Chain Registry Types (chainregistry) +slug: /packages/chainregistry +--- + +# Chain Registry Types (chainregistry) + +The `chainregistry` package defines strongly-typed Go structs for Cosmos chain-registry data (`chain.json` and `assetlist.json`). + +For full API details, see the +[`chainregistry` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/chainregistry). + +## When to use + +- Parse chain-registry JSON into typed values. +- Build tooling that reads chain metadata (APIs, fees, staking tokens, assets). +- Validate or transform registry documents before writing them back. + +## Key APIs + +- `type Chain struct{ ... 
}` +- `type APIs struct{ ... }` +- `type APIProvider struct{ ... }` +- `type AssetList struct{ ... }` +- `type Asset struct{ ... }` +- `type Fees struct{ ... }` +- `type Staking struct{ ... }` +- `type Codebase struct{ ... }` +- `type ChainStatus string` +- `type ChainType string` + +## Common Tasks + +- Decode `chain.json` data into a `Chain` value and inspect RPC/REST metadata. +- Decode `assetlist.json` into `AssetList` to access denom units and logo URIs. +- Use enum-like types (`ChainStatus`, `NetworkType`, `ChainType`) to keep metadata checks explicit. + +## Basic import + +```go +import "github.com/ignite/cli/v29/ignite/pkg/chainregistry" +``` diff --git a/docs/docs/07-packages/cosmosaccount.md b/docs/docs/07-packages/cosmosaccount.md new file mode 100644 index 0000000..7097055 --- /dev/null +++ b/docs/docs/07-packages/cosmosaccount.md @@ -0,0 +1,43 @@ +--- +sidebar_position: 2 +title: Account Registry (cosmosaccount) +slug: /packages/cosmosaccount +--- + +# Account Registry (cosmosaccount) + +The `cosmosaccount` package manages Cosmos keyring accounts (create/import/export/list/delete) with configurable backend and Bech32 settings. + +For full API details, see the +[`cosmosaccount` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/cosmosaccount). + +## When to use + +- Manage CLI account keys in Ignite services and commands. +- Switch between `test`, `os`, and `memory` keyring backends. +- Resolve addresses/public keys from named keyring entries. 
+ +## Key APIs + +- `New(options ...Option) (Registry, error)` +- `NewInMemory(options ...Option) (Registry, error)` +- `WithKeyringBackend(backend KeyringBackend) Option` +- `WithHome(path string) Option` +- `(Registry) Create(name string) (Account, mnemonic string, err error)` +- `(Registry) Import(name, secret, passphrase string) (Account, error)` +- `(Registry) Export(name, passphrase string) (key string, err error)` +- `(Registry) GetByName(name string) (Account, error)` +- `(Registry) List() ([]Account, error)` +- `(Account) Address(accPrefix string) (string, error)` + +## Common Tasks + +- Instantiate one `Registry` with backend/home options and reuse it for all key operations. +- Call `EnsureDefaultAccount` in setup paths that require a predictable signer account. +- Resolve addresses with `Account.Address(prefix)` when your app uses non-default Bech32 prefixes. + +## Basic import + +```go +import "github.com/ignite/cli/v29/ignite/pkg/cosmosaccount" +``` diff --git a/docs/docs/07-packages/cosmosanalysis.md b/docs/docs/07-packages/cosmosanalysis.md new file mode 100644 index 0000000..5f1b019 --- /dev/null +++ b/docs/docs/07-packages/cosmosanalysis.md @@ -0,0 +1,40 @@ +--- +sidebar_position: 13 +title: Cosmos Source Analysis (cosmosanalysis) +slug: /packages/cosmosanalysis +--- + +# Cosmos Source Analysis (cosmosanalysis) + +The `cosmosanalysis` package provides static analysis helpers for Cosmos SDK-based projects, especially for app structure and interface/embed discovery. + +For full API details, see the +[`cosmosanalysis` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis). + +## When to use + +- Validate that a directory is a Cosmos chain project before running codegen. +- Locate key app files and embedded types in Cosmos app sources. +- Detect interface implementations across module files. 
+ +## Key APIs + +- `IsChainPath(path string) error` +- `FindAppFilePath(chainRoot string) (path string, err error)` +- `ValidateGoMod(module *modfile.File) error` +- `FindImplementation(modulePath string, interfaceList []string) (found []string, err error)` +- `DeepFindImplementation(modulePath string, interfaceList []string) (found []string, err error)` +- `FindEmbed(modulePath string, targetEmbeddedTypes []string) (found []string, err error)` +- `FindEmbedInFile(n ast.Node, targetEmbeddedTypes []string) (found []string)` + +## Common Tasks + +- Call `IsChainPath` early to fail fast on unsupported project layouts. +- Use `FindAppFilePath` before AST transformations that require the chain app entrypoint. +- Use `FindImplementation`/`DeepFindImplementation` to verify generated modules are wired as expected. + +## Basic import + +```go +import "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis" +``` diff --git a/docs/docs/07-packages/cosmosbuf.md b/docs/docs/07-packages/cosmosbuf.md new file mode 100644 index 0000000..89eeec5 --- /dev/null +++ b/docs/docs/07-packages/cosmosbuf.md @@ -0,0 +1,58 @@ +--- +sidebar_position: 14 +title: Buf Integration (cosmosbuf) +slug: /packages/cosmosbuf +--- + +# Buf Integration (cosmosbuf) + +The `cosmosbuf` package wraps Buf workflows (`generate`, `export`, `format`, `migrate`, `dep update`) used by Ignite's protobuf pipelines. + +For full API details, see the +[`cosmosbuf` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/cosmosbuf). + +## When to use + +- Trigger Buf code generation from Go services. +- Keep Buf invocation flags and error handling consistent. +- Reuse cache-aware generation behavior. 
+ +## Key APIs + +- `New(cacheStorage cache.Storage, goModPath string) (Buf, error)` +- `(Buf) Generate(ctx, protoPath, output, template, options...)` +- `(Buf) Format(ctx, path)` +- `(Buf) Export(ctx, protoDir, output)` +- `Version(ctx context.Context) (string, error)` + +## Example + +```go +package main + +import ( + "context" + "log" + "os" + "path/filepath" + + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/cosmosbuf" +) + +func main() { + storage, err := cache.NewStorage(filepath.Join(os.TempDir(), "ignite-cache.db")) + if err != nil { + log.Fatal(err) + } + + buf, err := cosmosbuf.New(storage, "github.com/acme/my-chain") + if err != nil { + log.Fatal(err) + } + + if err := buf.Format(context.Background(), "./proto"); err != nil { + log.Fatal(err) + } +} +``` diff --git a/docs/docs/07-packages/cosmosclient.md b/docs/docs/07-packages/cosmosclient.md new file mode 100644 index 0000000..bab074c --- /dev/null +++ b/docs/docs/07-packages/cosmosclient.md @@ -0,0 +1,43 @@ +--- +sidebar_position: 1 +title: Blockchain Client (cosmosclient) +slug: /packages/cosmosclient +--- + +# Blockchain Client (cosmosclient) + +The `cosmosclient` package provides a high-level client for querying Cosmos SDK chains and building/signing/broadcasting transactions. + +For full API details, see the +[`cosmosclient` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/cosmosclient). + +## When to use + +- Connect Ignite tooling to a running node for status and block queries. +- Build and broadcast SDK messages with shared gas/fees/keyring settings. +- Wait for transaction inclusion and inspect block transactions/events. 
+ +## Key APIs + +- `New(ctx context.Context, options ...Option) (Client, error)` +- `WithNodeAddress(addr string) Option` +- `WithHome(path string) Option` +- `WithKeyringBackend(backend cosmosaccount.KeyringBackend) Option` +- `WithGas(gas string) Option` +- `WithGasPrices(gasPrices string) Option` +- `(Client) BroadcastTx(ctx, account, msgs...) (Response, error)` +- `(Client) WaitForTx(ctx context.Context, hash string) (*ctypes.ResultTx, error)` +- `(Client) Status(ctx context.Context) (*ctypes.ResultStatus, error)` +- `(Client) LatestBlockHeight(ctx context.Context) (int64, error)` + +## Common Tasks + +- Initialize one `Client` instance with node and keyring options, then reuse it across operations. +- Call `CreateTxWithOptions` or `BroadcastTx` depending on whether you need fine-grained tx overrides. +- Use `WaitForTx`, `WaitForNextBlock`, or `WaitForBlockHeight` for deterministic flows in tests/automation. + +## Basic import + +```go +import "github.com/ignite/cli/v29/ignite/pkg/cosmosclient" +``` diff --git a/docs/docs/07-packages/cosmosfaucet.md b/docs/docs/07-packages/cosmosfaucet.md new file mode 100644 index 0000000..e2804fb --- /dev/null +++ b/docs/docs/07-packages/cosmosfaucet.md @@ -0,0 +1,40 @@ +--- +sidebar_position: 5 +title: Token Faucet (cosmosfaucet) +slug: /packages/cosmosfaucet +--- + +# Token Faucet (cosmosfaucet) + +The `cosmosfaucet` package provides a local faucet service and client helpers to fund Cosmos accounts during development and tests. + +For full API details, see the +[`cosmosfaucet` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/cosmosfaucet). + +## When to use + +- Automatically fund accounts in local/devnet environments. +- Expose a faucet HTTP endpoint backed by a chain key. +- Request funds from an existing faucet endpoint from automation code. 
+ +## Key APIs + +- `New(ctx context.Context, ccr chaincmdrunner.Runner, options ...Option) (Faucet, error)` +- `TryRetrieve(ctx context.Context, chainID, rpcAddress, faucetAddress, accountAddress string) (string, error)` +- `OpenAPI(apiAddress string) Option` +- `Coin(amount, maxAmount sdkmath.Int, denom string) Option` +- `FeeAmount(amount sdkmath.Int, denom string) Option` +- `RefreshWindow(refreshWindow time.Duration) Option` +- `NewTransferRequest(accountAddress string, coins []string) TransferRequest` + +## Common Tasks + +- Construct a `Faucet` with chain runner + options, then expose transfer endpoints for local users. +- Use `TryRetrieve` in tests before broadcasting txs to ensure accounts have spendable balance. +- Tune coin amount, max amount, and refresh window to limit faucet abuse. + +## Basic import + +```go +import "github.com/ignite/cli/v29/ignite/pkg/cosmosfaucet" +``` diff --git a/docs/docs/07-packages/cosmosgen.md b/docs/docs/07-packages/cosmosgen.md new file mode 100644 index 0000000..180a3a5 --- /dev/null +++ b/docs/docs/07-packages/cosmosgen.md @@ -0,0 +1,63 @@ +--- +sidebar_position: 15 +title: Code Generation (cosmosgen) +slug: /packages/cosmosgen +--- + +# Code Generation (cosmosgen) + +The `cosmosgen` package orchestrates multi-target code generation from protobuf sources, including Go code, TS clients, composables, and OpenAPI output. + +For full API details, see the +[`cosmosgen` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/cosmosgen). + +## When to use + +- Run full generation pipelines from application services. +- Configure selective outputs (Go only, TS only, OpenAPI only, etc.). +- Check tool availability and maintain buf-related configuration. 
+ +## Key APIs + +- `Generate(ctx, cacheStorage, appPath, protoDir, goModPath, frontendPath, options...)` +- `WithGoGeneration()` +- `WithTSClientGeneration(out, tsClientRootPath, useCache)` +- `WithOpenAPIGeneration(out, excludeList)` +- `DepTools() []string` + +## Example + +```go +package main + +import ( + "context" + "log" + "os" + "path/filepath" + + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/cosmosgen" +) + +func main() { + storage, err := cache.NewStorage(filepath.Join(os.TempDir(), "ignite-cache.db")) + if err != nil { + log.Fatal(err) + } + + err = cosmosgen.Generate( + context.Background(), + storage, + ".", + "proto", + "github.com/acme/my-chain", + "./web", + cosmosgen.WithGoGeneration(), + cosmosgen.WithOpenAPIGeneration("./api/openapi.yml", nil), + ) + if err != nil { + log.Fatal(err) + } +} +``` diff --git a/docs/docs/07-packages/cosmostxcollector.md b/docs/docs/07-packages/cosmostxcollector.md new file mode 100644 index 0000000..7014cff --- /dev/null +++ b/docs/docs/07-packages/cosmostxcollector.md @@ -0,0 +1,200 @@ +--- +sidebar_position: 0 +title: Indexer (cosmostxcollector) +slug: /packages/cosmostxcollector +--- + +# Indexer (cosmostxcollector) + +The package implements support for collecting transactions and events from Cosmos blockchains +into a data backend and it also adds support for querying the collected data. + +## Transaction and event data collecting + +Transactions and events can be collected using the `cosmostxcollector.Collector` type. This +type uses a `cosmosclient.Client` instance to fetch the data from each block and a data backend +adapter to save the data. + +### Data backend adapters + +Data backend adapters are used to query and save the collected data into different types of data +backends and must implement the `cosmostxcollector.adapter.Adapter` interface. + +An adapter for PostgreSQL is already implemented in `cosmostxcollector.adapter.postgres.Adapter`. 
+This is the one used in the examples. + +### Example: Data collection + +The data collection example assumes that there is a PostgreSQL database running in the local +environment containing an empty database named "cosmos". + +The required database tables will be created automatically by the collector the first time it is run. + +When the application is run it will fetch all the transactions and events starting from one of the +recent blocks until the current block height and populate the database: + +```go +package main + +import ( + "context" + "log" + + "github.com/ignite/cli/v29/ignite/pkg/clictx" + "github.com/ignite/cli/v29/ignite/pkg/cosmosclient" + "github.com/ignite/cli/v29/ignite/pkg/cosmostxcollector" + "github.com/ignite/cli/v29/ignite/pkg/cosmostxcollector/adapter/postgres" +) + +const ( + // Name of a local PostgreSQL database + dbName = "cosmos" + + // Cosmos RPC address + rpcAddr = "https://rpc.cosmos.directory:443/cosmoshub" +) + +func collect(ctx context.Context, db postgres.Adapter) error { + // Make sure that the data backend schema is up to date + if err := db.Init(ctx); err != nil { + return err + } + + // Init the Cosmos client + client, err := cosmosclient.New(ctx, cosmosclient.WithNodeAddress(rpcAddr)) + if err != nil { + return err + } + + // Get the latest block height + latestHeight, err := client.LatestBlockHeight(ctx) + if err != nil { + return err + } + + // Collect transactions and events starting from a block height. + // The collector stops at the latest height available at the time of the call. 
+ collector := cosmostxcollector.New(db, client) + if err := collector.Collect(ctx, latestHeight-50); err != nil { + return err + } + + return nil +} + +func main() { + ctx := clictx.From(context.Background()) + + // Init an adapter for a local PostgreSQL database running with the default values + params := map[string]string{"sslmode": "disable"} + db, err := postgres.NewAdapter(dbName, postgres.WithParams(params)) + if err != nil { + log.Fatal(err) + } + + if err := collect(ctx, db); err != nil { + log.Fatal(err) + } +} +``` + +## Queries + +Collected data can be queried through the data backend adapters using event queries or +cursor-based queries. + +Queries support sorting, paging and filtering by using different options during creation. +The cursor-based ones also support the selection of specific fields or properties and also +passing arguments in cases where the query is a function. + +By default no sorting, filtering nor paging is applied to the queries. + +### Event queries + +The event queries return events and their attributes as `[]cosmostxcollector.query.Event`. + +### Example: Query events + +The example reads transfer events from Cosmos' bank module and paginates the results. 
+ +```go +import ( + "context" + + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/adapter/postgres" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/query" +) + +func queryBankTransferEvents(ctx context.Context, db postgres.Adapter) ([]query.Event, error) { + // Create an event query that returns events of type "transfer" + qry := query.NewEventQuery( + query.WithFilters( + // Filter transfer events from Cosmos' bank module + postgres.FilterByEventType(banktypes.EventTypeTransfer), + ), + query.WithPageSize(10), + query.AtPage(1), + ) + + // Execute the query + return db.QueryEvents(ctx, qry) +} +``` + +### Cursor-based queries + +This type of queries is meant to be used in contexts where the Event queries are not +useful. + +Cursor-based queries can query a single "entity" which can be a table, view or function +in relational databases or a collection or function in non relational data backends. + +The result of these types of queries is a cursor that implements the `cosmostxcollector.query.Cursor` +interface. 
+ +### Example: Query events using cursors + +```go +import ( + "context" + + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/adapter/postgres" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/query" +) + +func queryBankTransferEventIDs(ctx context.Context, db postgres.Adapter) (ids []int64, err error) { + // Create a query that returns the IDs for events of type "transfer" + qry := query.New( + "event", + query.Fields("id"), + query.WithFilters( + // Filter transfer events from Cosmos' bank module + postgres.NewFilter("type", banktypes.EventTypeTransfer), + ), + query.WithPageSize(10), + query.AtPage(1), + query.SortByFields(query.SortOrderAsc, "id"), + ) + + // Execute the query + cr, err := db.Query(ctx, qry) + if err != nil { + return nil, err + } + + // Read the results + for cr.Next() { + var eventID int64 + + if err := cr.Scan(&eventID); err != nil { + return nil, err + } + + ids = append(ids, eventID) + } + + return ids, nil +} +``` diff --git a/docs/docs/07-packages/cosmosver.md b/docs/docs/07-packages/cosmosver.md new file mode 100644 index 0000000..7de0697 --- /dev/null +++ b/docs/docs/07-packages/cosmosver.md @@ -0,0 +1,41 @@ +--- +sidebar_position: 11 +title: Cosmos SDK Versions (cosmosver) +slug: /packages/cosmosver +--- + +# Cosmos SDK Versions (cosmosver) + +The `cosmosver` package parses, compares, and detects Cosmos SDK versions used by a chain project. + +For full API details, see the +[`cosmosver` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/cosmosver). + +## When to use + +- Detect the Cosmos SDK version from a project before scaffolding or migrations. +- Compare versions to enable/disable version-specific features. +- Access Ignite's known SDK version set and latest supported baseline. + +## Key APIs + +- `Detect(appPath string) (version Version, err error)` +- `Parse(version string) (v Version, err error)` +- `var Versions = []Version{ ... 
}` +- `var Latest = Versions[len(Versions)-1]` +- `(Version) Is(version Version) bool` +- `(Version) LT(version Version) bool` +- `(Version) LTE(version Version) bool` +- `(Version) GTE(version Version) bool` + +## Common Tasks + +- Use `Detect` against a chain root to gate generation paths by SDK version. +- Parse user-provided versions with `Parse` before comparisons. +- Branch behavior with `LT`/`GTE` checks against well-known constants. + +## Basic import + +```go +import "github.com/ignite/cli/v29/ignite/pkg/cosmosver" +``` diff --git a/docs/docs/08-configuration/01-config.md b/docs/docs/08-configuration/01-config.md new file mode 100644 index 0000000..8163dcb --- /dev/null +++ b/docs/docs/08-configuration/01-config.md @@ -0,0 +1,380 @@ +--- +sidebar_position: 1 +description: Primary configuration file to describe the development environment for your blockchain. +title: Configuration File Documentation +--- + +# Configuration File Reference + +After scaffolding a blockchain with IGNITE® CLI, you will find a configuration file at the root of your newly created directory. + +The `config.yml` file generated in your blockchain folder uses key-value pairs +to describe the development environment for your blockchain. + +Only a default set of parameters is provided. If more nuanced configuration is +required, you can add these parameters to the `config.yml` file. + +## Genesis + +The genesis file is the initial block of your blockchain. It is required to launch a chain because it contains important +information such as token balances and modules' state. +By default, genesis is stored at `$DATA_DIR/config/genesis.json`. 
+ +Since the genesis file is frequently reinitialized during development, you can persistently set options using the +`genesis` property in your `config.yml`: + +```yml +genesis: + app_state: + staking: + params: + bond_denom: "denom" +``` + +To discover which properties the genesis file supports, initialize a chain and inspect the generated genesis file in the +data directory. + +### Overriding Genesis Parameters (e.g., chain_id, balances, etc.) + +You may need to customize specific parameters in the genesis file, such as `chain_id`, token balances, module +parameters, or custom state. + +To override genesis values with IGNITE® CLI, persistently set overrides in the `genesis`property of your `config.yml`. +Any YAML structure under `genesis` will be merged into the generated `genesis.json` during initialization. + +Eg: Changing `chain_id` and `staking` parameters + +```yml +genesis: + chain_id: "my-custom-chain" + app_state: + staking: + params: + bond_denom: "mytoken" + bank: + balances: + - address: "cosmos1..." + coins: + - denom: "mytoken" + amount: "1000000" +``` + +- `chain_id`: Sets the chain ID for your blockchain. +- `app_state`: Allows you to modify module states (e.g., staking, bank, etc.). + +> ⚠️ If you set `chain_id` in the `genesis`, it will persist across `ignite chain init` or `ignite chain serve` runs. + +The `genesis` property supports deep merging and can override any field present in the generated genesis file. +For more complex setups, you can use the `include` field in `config.yml` to split overrides into multiple files. + +## Validation + +IGNITE® uses the `validation` field to determine the kind of validation +of your blockchain. There are currently two supported kinds of validation: + +- `sovereign` which is the standard kind of validation where your blockchain + has its own validator set. This is the default value when this field is not + in the config file. 
+- `consumer` indicates your blockchain is a consumer chain, in the sense of + Replicated Security. That means it doesn't have a validator set, but + inherits the one of a provider chain. + +While the `sovereign` chain is the default validation when you run the `ignite scaffold +chain`, to scaffold a consumer chain, you have to run `ignite scaffold chain +--consumer`. + +This field is, at this time of writing, only used by IGNITE® at the genesis +generation step, because the genesis of a sovereign chain and a consumer chain +are different. + +## Accounts + +A list of user accounts created during genesis of the blockchain. + +```yml +accounts: + - name: alice + coins: [ '20000token', '200000000stake' ] + - name: bob + coins: [ '10000token', '100000000stake' ] +``` + +IGNITE® uses information from `accounts` when initializing the chain with `ignite +chain init` and `ignite chain start`. In the example above IGNITE® will add two +accounts to the `genesis.json` file of the chain. + +`name` is a local name of a key pair associated with an account. Once the chain +is initialized and started, you will be able to use `name` when signing +transactions. With the configuration above, you'd be able to sign transactions +both with Alice's and Bob's accounts like so `exampled tx bank send ... --from +alice`. + +`coins` is a list of token balances for the account. If a token denomination is +in this list, it will exist in the genesis balance and will be a valid token. +When initialized with the config file above, a chain will only have two accounts +at genesis (Alice and Bob) and two native tokens (with denominations `token` and +`stake`). + +By default, every time a chain is re-initialized, IGNITE® will create a new key +pair for each account. So even though the account name can remain the same +(`bob`), every chain reinitialize it will have a different mnemonic and address. 
+ +If you want an account to have a specific address, provide the `address` field +with a valid bech32 address. The prefix (by default, `cosmos`) should match the +one expected by your chain. When an account is provided with an `address` a key +pair will not be generated, because it's impossible to derive a key from an +address. An account with a given address will be added to the genesis file (with +an associated token balance), but because there is no key pair, you will not be +able to broadcast transactions from that address. This is useful when you have +generated a key pair outside of IGNITE® (for example, using your chain's CLI or +in an extension wallet) and want to have a token balance associated with the +address of this key pair. + +```yml +accounts: + - name: bob + coins: [ '20000token', '200000000stake' ] + address: cosmos1s39200s6v4c96ml2xzuh389yxpd0guk2mzn3mz +``` + +If you want an account to be initialized from a specific mnemonic, provide the +`mnemonic` field with a valid mnemonic. A private key, a public key and an +address will be derived from a mnemonic. + +```yml +accounts: + - name: bob + coins: [ '20000token', '200000000stake' ] + mnemonic: cargo ramp supreme review change various throw air figure humble soft steel slam pole betray inhale already dentist enough away office apple sample glue +``` + +You cannot have both `address` and `mnemonic` defined for a single account. + +Some accounts are used as validator accounts (see `validators` section). +Validator accounts cannot have an `address` field, because IGNITE® needs to be +able to derive a private key (either from a random mnemonic or from a specific +one provided in the `mnemonic` field). Validator accounts should have enough +tokens of the staking denomination for self-delegation. 
+ +By default, the `alice` account is used as a validator account, its key is +derived from a mnemonic generated randomly at genesis, the staking denomination +is `stake`, and this account has enough `stake` for self-delegation. + +If your chain is using its own +[cointype](https://github.com/satoshilabs/slips/blob/master/slip-0044.md), you +can use the `cointype` field to provide the integer value + +```yml +accounts: + - name: bob + coins: [ '20000token', '200000000stake' ] + cointype: 7777777 +``` + +## Validators + +Commands like `ignite chain init` and `ignite chain serve` initialize and launch +a validator node for development purposes. + +```yml +validators: + - name: alice + bonded: '100000000stake' +``` + +`name` refers to key name in the `accounts` list. + +`bonded` is the self-delegation amount of a validator. The `bonded` amount +should not be lower than `1000000` nor higher than the account's +balance in the `account` list. + +Validators store their node configuration files in the data directory. By +default, IGNITE® uses the name of the project as the name of the data directory, +for example, `$HOME/.example/`. To use a different path for the data directory +you can customize the `home` property. + +Configuration in the data directory is reset frequently by IGNITE®. To persist +some changes to configuration files you can use `app`, `config` and `client` +properties that correspond to `$HOME/.example/config/app.toml`, +`$HOME/.example/config/config.toml` and `$HOME/.example/config/client.toml`. + +```yml +validators: + - name: alice + bonded: '100000000stake' + home: "~/.mychain" + app: + pruning: "nothing" + config: + moniker: "mychain" + client: + output: "json" +``` + +To see which properties are available for `config.toml`, `app.toml` and +`client.toml`, initialize a chain with `ignite chain init` and open the file you +want to know more about. 
+ +Currently, IGNITE® starts only one validator node, so the first item in the +`validators` list is used (the rest is ignored). Support for multiple validators +is in progress. + +## Build + +The `build` property lets you customize how IGNITE® builds your chain's binary. + +By default, IGNITE® builds the `main` package from `cmd/PROJECT_NAME/main.go`. If +you more than one `main` package in your project, or you have renamed the +directory, use the `main` property to provide the path to the `main` Go package: + +```yml +build: + main: cmd/hello/cmd +``` + +IGNITE® compiles your project into a binary and uses the project's name with a +`d` suffix as name for the binary. To customize the binary name use the `binary` +property: + +```yml +build: + binary: "helloworldd" +``` + +To customize the linker flags used in the build process: + +```yml +build: + ldflags: [ "-X main.Version=development", "-X main.Date=01/05/2022T19:54" ] +``` + +By default, custom protocol buffer (proto) files are located in the `proto` +directory. If your project keeps proto files in a different directory, you +should tell IGNITE® about this: + +```yml +build: + proto: + path: "myproto" +``` + +## Faucet + +The faucet service sends tokens to addresses. + +```yml +faucet: + name: bob + coins: [ "5token", "100000stake" ] +``` + +`name` refers to a key name in the `accounts` list. This is a required property. + +`coins` is the amount of tokens that will be sent to a user by the faucet. This +is a required property. + +`coins_max` is a maximum amount of tokens that can be sent to a single address. +To reset the token limit use the `rate_limit_window` property (in seconds). + +The default the faucet works on port `4500`. To use a different port number use +the `port` property. + +```yml +faucet: + name: faucet + coins: [ "100token", "5foo" ] + coins_max: [ "2000token", "1000foo" ] + port: 4500 + rate_limit_window: 3600 +``` + +## Genesis + +Genesis file is the initial block in the blockchain. 
It is required to launch a +blockchain, because it contains important information like token balances, and +modules' state. Genesis is stored in `$DATA_DIR/config/genesis.json`. + +Since the genesis file is reinitialized frequently during development, you can +set persistent options in the `genesis` property: + +```yml +genesis: + app_state: + staking: + params: + bond_denom: "denom" +``` + +To know which properties a genesis file supports, initialize a chain and look up +the genesis file in the data directory. + +## Client code generation + +IGNITE® can generate client-side code for interacting with your chain with the +`ignite generate` set of commands. Use the following properties to customize the +paths where the client-side code is generated. + +```yml +client: + openapi: + path: "docs/static/openapi.json" + typescript: + path: "ts-client" + composables: + path: "vue/src/composables" + hooks: + path: "react/src/hooks" +``` + +## Include + +In your main `config.yml`, use the `include` field to reference other local or remote YAML files. +It allows you to split your chain configuration across multiple files, making it easier to manage and reuse configuration parts. 
+ +```yml +version: 1 +include: + - "./accounts.yml" + - "./validators.yml" +``` + +Include remote files via URL or server path are also valid: + +```yml +version: 1 +include: + - "localhost:3045/accounts.yml" + - "https://ignite.com/config/validators.yml" +``` + +#### Common Use Cases: + +Split your config into a base setup and an external `accounts.yml` for better separation of concerns: + +- `config.yml` +```yml +version: 1 +include: + - "./accounts.yml" +client: + typescript: + path: ts-client +``` + +- `accounts.yml` +```yml +accounts: + - name: alice + coins: + - 20000token + - 200000000stake + - name: bob + coins: + - 20000token + - 200000000stake +faucet: + name: alice + coins: + - 5token + - 100000stake +``` diff --git a/docs/docs/08-configuration/02-config_example.md b/docs/docs/08-configuration/02-config_example.md new file mode 100644 index 0000000..0063cc3 --- /dev/null +++ b/docs/docs/08-configuration/02-config_example.md @@ -0,0 +1,87 @@ +--- +sidebar_position: 2 +description: Configuration File Example. +title: Configuration File Example +--- + +## Configuration File Example + +```yaml title="config.yml" +include: (string list) # Include incorporate a separate config.yml file directly in your current config file. +validation: (string) # Specifies the type of validation the blockchain uses (e.g., sovereign). +version: (uint) # Defines the configuration version number. +build: # Contains build configuration options. + main: (string) # Path to the main build file. + binary: (string) # Path to the binary file. + ldflags: (string list) # List of custom linker flags for building the binary. + proto: # Contains proto build configuration options. + path: (string) # Relative path where the application's proto files are located. +accounts: (list) # Lists the options for setting up Cosmos Accounts. + name: (string) # Local name associated with the Account's key pair. + coins: (string list) # List of token balances for the account. 
+ mnemonic: (string) # Mnemonic phrase for the account. + address: (string) # Address of the account. + cointype: (string) # Coin type number for HD derivation (default is 118). + account_number: (string) # Account number for HD derivation (must be ≤ 2147483647). + address_index: (string) # Address index number for HD derivation (must be ≤ 2147483647). +faucet: # Configuration for the faucet. + name: (string) # Name of the faucet account. + coins: (string list) # Types and amounts of coins the faucet distributes. + coins_max: (string list) # Maximum amounts of coins that can be transferred to a single user. + rate_limit_window: (string) # Timeframe after which the limit will be refreshed. + host: (string) # Host address of the faucet server. + port: (uint) # Port number for the faucet server. + tx_fee: (string) # Tx fee the faucet needs to pay for each transaction. +client: # Configures client code generation. + typescript: # Relative path where the application's Typescript files are located. + path: (string) # Relative path where the application's Typescript files are located. + composables: # Configures Vue 3 composables code generation. + path: (string) # Relative path where the application's composable files are located. + openapi: # Configures OpenAPI spec generation for the API. + path: (string) # Relative path where the application's OpenAPI files are located. +genesis: (key/value) # Custom genesis block modifications. Follow the nesting of the genesis file here to access all the parameters. +default_denom: (string) # Default staking denom (default is stake). +validators: (list) # Contains information related to the list of validators and settings. + name: (string) # Name of the validator. + bonded: (string) # Amount staked by the validator. + app: (key/value) # Overwrites the appd's config/app.toml configurations. + config: (key/value) # Overwrites the appd's config/config.toml configurations. 
+ client: (key/value) # Overwrites the appd's config/client.toml configurations. + home: (string) # Overwrites the default home directory used for the application. + gentx: # Overwrites the appd's config/gentx.toml configurations. + amount: (string) # Amount for the current Gentx. + moniker: (string) # Optional moniker for the validator. + keyring-backend: (string) # Backend for the keyring. + chain-id: (string) # Network chain ID. + commission-max-change-rate: (string) # Maximum commission change rate percentage per day. + commission-max-rate: (string) # Maximum commission rate percentage (e.g., 0.01 = 1%). + commission-rate: (string) # Initial commission rate percentage (e.g., 0.01 = 1%). + details: (string) # Optional details about the validator. + security-contact: (string) # Optional security contact email for the validator. + website: (string) # Optional website for the validator. + account-number: (int) # Account number of the signing account (offline mode only). + broadcast-mode: (string) # Transaction broadcasting mode (sync|async|block) (default is 'sync'). + dry-run: (bool) # Simulates the transaction without actually performing it, ignoring the --gas flag. + fee-account: (string) # Account that pays the transaction fees instead of the signer. + fee: (string) # Fee to pay with the transaction (e.g.: 10uatom). + from: (string) # Name or address of the private key used to sign the transaction. + gas: (string) # Gas limit per transaction; set to 'auto' to calculate sufficient gas automatically (default is 200000). + gas-adjustment: (string) # Factor to multiply against the estimated gas (default is 1). + gas-prices: (string) # Gas prices in decimal format to determine the transaction fee (e.g., 0.1uatom). + generate-only: (bool) # Creates an unsigned transaction and writes it to STDOUT. + identity: (string) # Identity signature (e.g., UPort or Keybase). + ip: (string) # Node's public IP address (default is '192.168.1.64'). 
+ keyring-dir: (string) # Directory for the client keyring; defaults to the 'home' directory if omitted. + ledger: (bool) # Uses a connected Ledger device if true. + min-self-delegation: (string) # Minimum self-delegation required for the validator. + node: (string) # <host>:<port> for the Tendermint RPC interface (default 'tcp://localhost:26657') + node-id: (string) # Node's NodeID + note: (string) # Adds a description to the transaction (formerly --memo). + offline: (bool) # Operates in offline mode, disallowing any online functionality. + output: (string) # Output format (text|json) (default 'json'). + output-document: (string) # Writes the genesis transaction JSON document to the specified file instead of the default location. + pubkey: (string) # Protobuf JSON encoded public key of the validator. + sequence: (uint) # Sequence number of the signing account (offline mode only). + sign-mode: (string) # Chooses sign mode (direct|amino-json), an advanced feature. + timeout-height: (uint) # Sets a block timeout height to prevent the transaction from being committed past a certain height. +``` \ No newline at end of file diff --git a/docs/docs/08-configuration/_category_.json b/docs/docs/08-configuration/_category_.json new file mode 100644 index 0000000..e7e77b5 --- /dev/null +++ b/docs/docs/08-configuration/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Configuration", + "link": null, + "collapsed": false +} \ No newline at end of file diff --git a/docs/docs/apps/01-using-apps.md b/docs/docs/apps/01-using-apps.md new file mode 100644 index 0000000..391cba9 --- /dev/null +++ b/docs/docs/apps/01-using-apps.md @@ -0,0 +1,44 @@ +--- +description: Using and Developing IGNITE® Apps +--- + +# Using IGNITE® Apps + +Apps offer a way to extend the functionality of the IGNITE® CLI. There are two +core concepts within apps: `Commands` and `Hooks`. `Commands` extend the CLI's +functionality and `Hooks` extend existing CLI command functionality. 
+ +Apps are registered in an IGNITE® scaffolded blockchain project through the +`igniteapps.yml`, or globally through `$HOME/.ignite/apps/igniteapps.yml`. + +To use an app within your project execute the following command inside the +project directory: + +```sh +ignite app install github.com/project/cli-app +``` + +The app will be available only when running `ignite` inside the project +directory. + +To use an app globally on the other hand, execute the following command: + +```sh +ignite app install -g github.com/project/cli-app +``` + +The command will compile the app and make it immediately available to the +`ignite` command lists. + +Discover recommended Apps in the [IGNITE® Apps Marketplace](https://ignite.com/marketplace). + +## Listing installed apps + +When in an ignite scaffolded blockchain you can use the command `ignite app +list` to list all IGNITE® Apps and their statuses. + +## Updating apps + +When an app in a remote repository releases updates, running `ignite app +update ` will update an specific app declared in your +project's `config.yml`. diff --git a/docs/docs/apps/02-developing-apps.md b/docs/docs/apps/02-developing-apps.md new file mode 100644 index 0000000..0a7cbfc --- /dev/null +++ b/docs/docs/apps/02-developing-apps.md @@ -0,0 +1,258 @@ +--- +description: Using and Developing IGNITE® Apps +--- + +# Developing IGNITE® Apps + +It's easy to create an app and use it immediately in your project. First +choose a directory outside your project and run: + +```sh +$ ignite app scaffold my-app +``` + +This will create a new directory `my-app` that contains the app's code +and will output some instructions about how to use your app with the +`ignite` command. An app path can be a local directory which has several +benefits: + +- You don't need to use a Git repository during the development of your app. +- The app is recompiled each time you run the `ignite` binary in your + project if the source files are older than the app binary. 
+ +Thus, app development workflow is as simple as: + +1. Scaffold an app with `ignite app scaffold my-app` +2. Add it to your config via `ignite app install -g /path/to/my-app` +3. Update app code +4. Run `ignite my-app` binary to compile and run the app +5. Go back to 3 + +Once your app is ready you can publish it to a Git repository and the +community can use it by calling `ignite app install github.com/foo/my-app`. + +Now let's detail how to update your app's code. + +## App interface + +Under the hood IGNITE® Apps are implemented using a plugin system based on +`github.com/hashicorp/go-plugin`. + +All apps must implement a predefined interface: + +```go title=ignite/services/plugin/interface.go +type Interface interface { + // Manifest declares app's Command(s) and Hook(s). + Manifest(context.Context) (*Manifest, error) + + // Execute will be invoked by ignite when an app Command is executed. + // It is global for all commands declared in Manifest, if you have declared + // multiple commands, use cmd.Path to distinguish them. + // The ClientAPI argument can be used by plugins to get chain app analysis info. + Execute(context.Context, *ExecutedCommand, ClientAPI) error + + // ExecuteHookPre is invoked by ignite when a command specified by the Hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + // The ClientAPI argument can be used by plugins to get chain app analysis info. + ExecuteHookPre(context.Context, *ExecutedHook, ClientAPI) error + + // ExecuteHookPost is invoked by ignite when a command specified by the hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + // The ClientAPI argument can be used by plugins to get chain app analysis info. 
+ ExecuteHookPost(context.Context, *ExecutedHook, ClientAPI) error + + // ExecuteHookCleanUp is invoked by ignite when a command specified by the + // hook path is invoked. Unlike ExecuteHookPost, it is invoked regardless of + // execution status of the command and hooks. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + // The ClientAPI argument can be used by plugins to get chain app analysis info. + ExecuteHookCleanUp(context.Context, *ExecutedHook, ClientAPI) error +} +``` + +The scaffolded code already implements this interface, you just need to update +the method's body. + +## Defining app's manifest + +Here is the `Manifest` proto message definition: + +```protobuf title=proto/ignite/services/plugin/grpc/v1/types.proto +message Manifest { + // App name. + string name = 1; + + // Commands contains the commands that will be added to the list of ignite commands. + // Each commands are independent, for nested commands use the inner Commands field. + bool shared_host = 2; + + // Hooks contains the hooks that will be attached to the existing ignite commands. + repeated Command commands = 3; + + // Enables sharing a single app server across all running instances of an IGNITE® App. + // Useful if an app adds or extends long running commands. + // + // Example: if an app defines a hook on `ignite chain serve`, a server is instantiated + // when the command is run. Now if you want to interact with that instance + // from commands defined in that app, you need to enable shared host, or else the + // commands will just instantiate separate app servers. + // + // When enabled, all apps of the same path loaded from the same configuration will + // attach it's RPC client to a an existing RPC server. + // + // If an app instance has no other running app servers, it will create one and it + // will be the host. 
+ repeated Hook hooks = 4; +} +``` + +In your app's code the `Manifest` method already returns a predefined +`Manifest` struct as an example. You must adapt it according to your need. + +If your app adds one or more new commands to `ignite`, add them to the +`Commands` field. + +If your app adds features to existing commands, add them to the `Hooks` field. + +Of course an app can declare both, `Commands` *and* `Hooks`. + +An app may also share a host process by setting `SharedHost` to `true`. +`SharedHost` is desirable if an app hooks into, or declares long running commands. +Commands executed from the same app context interact with the same app server. +Allowing all executing commands to share the same server instance, giving shared execution context. + +## Adding new commands + +App commands are custom commands added to IGNITE® CLI by an installed app. +Commands can use any path not defined already by the CLI. + +For instance, let's say your app adds a new `oracle` command to `ignite +scaffold`, then the `Manifest` method will look like : + +```go +func (app) Manifest(context.Context) (*plugin.Manifest, error) { + return &plugin.Manifest{ + Name: "oracle", + Commands: []*plugin.Command{ + { + Use: "oracle [name]", + Short: "Scaffold an oracle module", + Long: "Long description goes here...", + // Optional flags is required + Flags: []*plugin.Flag{ + {Name: "source", Type: plugin.FlagTypeString, Usage: "the oracle source"}, + }, + // Attach the command to `scaffold` + PlaceCommandUnder: "ignite scaffold", + }, + }, + }, nil +} +``` + +To update the app execution, you have to change the `Execute` command. 
For +example: + +```go +func (app) Execute(_ context.Context, cmd *plugin.ExecutedCommand, _ plugin.ClientAPI) error { + if len(cmd.Args) == 0 { + return fmt.Errorf("oracle name missing") + } + + flags, err := cmd.NewFlags() + if err != nil { + return err + } + + var ( + name = cmd.Args[0] + source, _ = flags.GetString("source") + ) + + // Read chain information + c, err := getChain(cmd) + if err != nil { + return err + } + + //... +} +``` + +Then, run `ignite scaffold oracle` to execute the app. + +## Adding hooks + +App `Hooks` allow existing CLI commands to be extended with new +functionality. Hooks are useful when you want to streamline functionality +without needing to run custom scripts after or before a command has been run. +This can streamline processes that where once error prone or forgotten all +together. + +The following are hooks defined which will run on a registered `ignite` +command: + +| Name | Description | +| -------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- | +| Pre | Runs before a commands main functionality is invoked in the `PreRun` scope | +| Post | Runs after a commands main functionality is invoked in the `PostRun` scope | +| Clean Up | Runs after a commands main functionality is invoked. If the command returns an error it will run before the error is returned to guarantee execution. | + +*Note*: If a hook causes an error in the pre step the command will not run +resulting in `post` and `clean up` not executing. + +The following is an example of a `hook` definition. 
+ +```go +func (app) Manifest(context.Context) (*plugin.Manifest, error) { + return &plugin.Manifest{ + Name: "oracle", + Hooks: []*plugin.Hook{ + { + Name: "my-hook", + PlaceHookOn: "ignite chain build", + }, + }, + }, nil +} + +func (app) ExecuteHookPre(_ context.Context, h *plugin.ExecutedHook, _ plugin.ClientAPI) error { + switch h.Hook.GetName() { + case "my-hook": + fmt.Println("I'm executed before ignite chain build") + default: + return fmt.Errorf("hook not defined") + } + return nil +} + +func (app) ExecuteHookPost(_ context.Context, h *plugin.ExecutedHook, _ plugin.ClientAPI) error { + switch h.Hook.GetName() { + case "my-hook": + fmt.Println("I'm executed after ignite chain build (if no error)") + default: + return fmt.Errorf("hook not defined") + } + return nil +} + +func (app) ExecuteHookCleanUp(_ context.Context, h *plugin.ExecutedHook, _ plugin.ClientAPI) error { + switch h.Hook.GetName() { + case "my-hook": + fmt.Println("I'm executed after ignite chain build (regardless errors)") + default: + return fmt.Errorf("hook not defined") + } + return nil +} +``` + +Above we can see a similar definition to `Command` where a hook has a `Name` +and a `PlaceHookOn`. You'll notice that the `Execute*` methods map directly to +each life cycle of the hook. All hooks defined within the app will invoke these +methods. 
diff --git a/docs/docs/apps/_category_.json b/docs/docs/apps/_category_.json new file mode 100644 index 0000000..0c4b5b3 --- /dev/null +++ b/docs/docs/apps/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "IGNITE® Apps", + "position": 7, + "link": null +} diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js new file mode 100644 index 0000000..11c721e --- /dev/null +++ b/docs/docusaurus.config.js @@ -0,0 +1,306 @@ +// @ts-check +// Note: type annotations allow type checking and IDEs autocompletion + +const lightCodeTheme = require("prism-react-renderer/themes/github"); +const darkCodeTheme = require("prism-react-renderer/themes/dracula"); + +/** @type {import('@docusaurus/types').Config} */ +const config = { + title: "IGNITE® CLI Docs", + tagline: "IGNITE® CLI Docs", + url: "https://docs.ignite.com", + baseUrl: "/", + onBrokenLinks: "warn", + onBrokenMarkdownLinks: "warn", + favicon: "img/favicon-svg.svg", + trailingSlash: false, + + // GitHub pages deployment config. + // If you aren't using GitHub pages, you don't need these. + organizationName: "ignite", + projectName: "ignite docs", + + // Even if you don't use internalization, you can use this field to set useful + // metadata like html lang. For example, if your site is Chinese, you may want + // to replace "en" with "zh-Hans". 
+ i18n: { + defaultLocale: "en", + locales: ["en"], + }, + + headTags: [ + { + tagName: "script", + attributes: { + type: "text/javascript", + }, + innerHTML: ` + var _mtm = window._mtm = window._mtm || []; + _mtm.push({'mtm.startTime': (new Date().getTime()), 'event': 'mtm.Start'}); + (function() { + var d=document, g=d.createElement('script'), s=d.getElementsByTagName('script')[0]; + g.async=true; g.src='https://cdn.matomo.cloud/aibignite.matomo.cloud/container_py6JfPnv.js'; s.parentNode.insertBefore(g,s); + })(); + `, + }, + { + tagName: "script", + attributes: { + type: "text/javascript", + }, + innerHTML: ` + var _mtm = window._mtm = window._mtm || []; + _mtm.push({'mtm.startTime': (new Date().getTime()), 'event': 'mtm.Start'}); + (function() { + var d=document, g=d.createElement('script'), s=d.getElementsByTagName('script')[0]; + g.async=true; g.src='https://cdn.matomo.cloud/aibignite.matomo.cloud/container_215cMJxo.js'; s.parentNode.insertBefore(g,s); + })(); + `, + }, + ], + + presets: [ + [ + "@docusaurus/preset-classic", + /** @type {import('@docusaurus/preset-classic').Options} */ + ({ + docs: { + versions: { + current: { + label: "nightly", + path: "nightly", + badge: true, + banner: "unreleased", // put 'none' to remove + }, + }, + sidebarPath: require.resolve("./sidebars.js"), + routeBasePath: "/", + }, + theme: { + customCss: require.resolve("./src/css/custom.css"), + }, + }), + ], + ], + + themeConfig: + /** @type {import('@docusaurus/preset-classic').ThemeConfig} */ + ({ + image: "img/og-image.jpg", + announcementBar: { + content: + '← Back to IGNITE®', + isCloseable: false, + }, + docs: { + sidebar: { + autoCollapseCategories: true, + }, + }, + navbar: { + hideOnScroll: true, + logo: { + alt: "IGNITE® Logo", + src: "img/header-logo-docs.svg", + srcDark: "img/header-logo-docs-dark.svg", + }, + items: [ + { + href: "https://github.com/ignite/cli", + html: ` + + + `, + position: "right", + }, + { + href: "https://ignite.com", + className: 
"ignt-backlink", + label: `Back to IGNITE®`, + position: "right", + }, + { + type: "docsVersionDropdown", + position: "left", + dropdownActiveClassDisabled: true, + }, + ], + }, + footer: { + links: [ + { + items: [ + { + html: ` + + + + + + + + + + + + + + + + `, + }, + ], + }, + { + title: "Products", + items: [ + { + label: "IGNITE® CLI", + href: "https://ignite.com/cli", + }, + { + label: "IGNITE® Apps", + href: "https://ignite.com/marketplace", + }, + ], + }, + { + title: "Company", + items: [ + { + label: "About IGNITE®", + href: "https://ignite.com/about", + }, + { + label: "Careers", + href: "https://ignite.com/careers", + }, + { + label: "Blog", + href: "https://ignite.com/blog", + }, + ], + }, + { + title: "Contact", + items: [ + { + label: "Business Inquiries", + href: "mailto:business@ignite.com", + }, + ], + }, + { + title: "Social", + items: [ + { + label: "Discord", + href: "https://discord.com/invite/ignitecli", + }, + { + label: "Twitter", + href: "https://x.com/ignite", + }, + { + label: "Linkedin", + href: "https://www.linkedin.com/company/allinbits", + }, + { + label: "YouTube", + href: "https://www.youtube.com/ignitehq", + }, + ], + }, + ], + copyright: `
© IGNITE® ${new Date().getFullYear()}
`, + }, + prism: { + theme: lightCodeTheme, + darkTheme: darkCodeTheme, + additionalLanguages: ["protobuf", "go-module"], // https://prismjs.com/#supported-languages + magicComments: [ + // Remember to extend the default highlight class name as well! + { + className: "theme-code-block-highlighted-line", + line: "highlight-next-line", + block: { start: "highlight-start", end: "highlight-end" }, + }, + { + className: "code-block-removed-line", + line: "remove-next-line", + block: { start: "remove-start", end: "remove-end" }, + }, + ], + }, + zoom: { + selector: ".markdown :not(em) > img", + config: { + // options you can specify via https://github.com/francoischalifour/medium-zoom#usage + background: { + light: "rgb(255, 255, 255)", + dark: "rgb(50, 50, 50)", + }, + }, + }, + algolia: { + appId: "VVETP7QCVE", + apiKey: "167213b8ce51cc7ff9a804df130657e5", + indexName: "ignite-cli", + contextualSearch: true, + schedule: "every 1 day at 3:00 pm", + }, + }), + plugins: [ + [ + "@docusaurus/plugin-client-redirects", + { + createRedirects(existingPath) { + if (existingPath.includes("/welcome")) { + /* + If the link received contains the path /guide, + this will change to /welcome. + */ + return [existingPath.replace("/welcome", "/guide")]; + } + + // The following is done for backwards compatibility + // with the previous path structure of the versioned docs. 
+ if (existingPath.includes("/v28")) { + return [existingPath.replace("/v28", "/v28.0.0")]; + } + + if (existingPath.includes("/v0.27")) { + return [existingPath.replace("/v0.27", "/v0.27.2")]; + } + + if (existingPath.includes("/v0.26")) { + return [existingPath.replace("/v0.26", "/v0.26.1")]; + } + + if (existingPath.includes("/v0.25")) { + return [existingPath.replace("/v0.25", "/v0.25.2")]; + } + + return; // No redirect created if it doesn't contain /guide + }, + }, + ], + async function myPlugin(context, options) { + return { + name: "docusaurus-tailwindcss", + configurePostCss(postcssOptions) { + postcssOptions.plugins.push(require("postcss-import")); + postcssOptions.plugins.push(require("tailwindcss/nesting")); + postcssOptions.plugins.push(require("tailwindcss")); + postcssOptions.plugins.push(require("autoprefixer")); + return postcssOptions; + }, + }; + }, + require.resolve("docusaurus-plugin-image-zoom"), + ], +}; + +module.exports = config; diff --git a/docs/package.json b/docs/package.json new file mode 100644 index 0000000..ea57438 --- /dev/null +++ b/docs/package.json @@ -0,0 +1,48 @@ +{ + "name": "my-website", + "version": "0.0.0", + "private": true, + "scripts": { + "docusaurus": "docusaurus", + "start": "docusaurus start", + "build": "docusaurus build", + "swizzle": "docusaurus swizzle", + "deploy": "docusaurus deploy", + "clear": "docusaurus clear", + "serve": "docusaurus serve", + "write-translations": "docusaurus write-translations", + "write-heading-ids": "docusaurus write-heading-ids" + }, + "dependencies": { + "@docusaurus/core": "2.4.0", + "@docusaurus/plugin-client-redirects": "2.4.0", + "@docusaurus/plugin-google-gtag": "2.4.0", + "@docusaurus/preset-classic": "2.4.0", + "@mdx-js/react": "^1.6.22", + "autoprefixer": "^10.4.14", + "clsx": "^1.2.1", + "docusaurus-plugin-image-zoom": "^0.1.1", + "postcss": "^8.4.31", + "postcss-import": "^15.1.0", + "prism-react-renderer": "^1.3.5", + "react": "^17.0.2", + "react-dom": "^17.0.2", + 
"styled-components": "^5.3.6", + "tailwindcss": "^3.2.7" + }, + "devDependencies": { + "@docusaurus/module-type-aliases": "2.4.0" + }, + "browserslist": { + "production": [ + ">0.5%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + } +} diff --git a/docs/readme.md b/docs/readme.md new file mode 100644 index 0000000..16ff2af --- /dev/null +++ b/docs/readme.md @@ -0,0 +1,48 @@ +# Website + +This website is built using [Docusaurus 2](https://docusaurus.io), a modern static website generator. + +### Installation + +``` +yarn +``` + +### Local Development + +``` +yarn serve +``` + +This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server. + +### Build + +``` +yarn build +``` + +This command generates static content into the `build` directory and can be served using any static contents hosting service. + +### Deployment + +Using SSH: + +``` +USE_SSH=true yarn deploy +``` + +Not using SSH: + +``` +GIT_USER= yarn deploy +``` + +If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch. + +### Add new version + +```bash +$ yarn run docusaurus docs:version v0.25.2 +``` + diff --git a/docs/sidebars.js b/docs/sidebars.js new file mode 100644 index 0000000..9e02cc7 --- /dev/null +++ b/docs/sidebars.js @@ -0,0 +1,56 @@ +/** + * Creating a sidebar enables you to: + - create an ordered group of docs + - render a sidebar for each doc of that group + - provide next/previous navigation + + The sidebars can be generated from the filesystem, or explicitly defined here. + + Create as many sidebars as you want. 
+ */ + +// @ts-check + +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const sidebars = { + // By default, Docusaurus generates a sidebar from the docs folder structure + tutorialSidebar: [ + { type: "autogenerated", dirName: "." }, + + { + type: "category", + label: "Resources", + collapsed: true, + items: [ + { + type: "link", + label: "IGNITE® CLI on Github", + href: "https://github.com/ignite/cli", + }, + { + type: "link", + label: "IGNITE® Tutorials", + href: "https://tutorials.ignite.com/", + }, + { + type: "link", + label: "Cosmos SDK Docs", + href: "https://docs.cosmos.network/", + }, + ], + }, + ], + + // But you can create a sidebar manually + /* + tutorialSidebar: [ + { + type: 'category', + label: 'Tutorial', + items: ['hello'], + }, + ], + */ +}; + +module.exports = sidebars; diff --git a/docs/src/components/ProjectsTable.js b/docs/src/components/ProjectsTable.js new file mode 100644 index 0000000..8c964d9 --- /dev/null +++ b/docs/src/components/ProjectsTable.js @@ -0,0 +1,56 @@ +import React from "react"; +import styled from "styled-components"; + +const GridItem = styled.div` + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + box-shadow: 1px 1px var(--ifm-color-emphasis-200); + font-weight: 500; + padding: 1rem; + text-align: center; +`; + +const Grid = styled.div` + display: grid; + grid-template-columns: 1fr 1fr 1fr; + border-collapse: collapse; + position: relative; + &:after { + width: 100%; + height: 1px; + content: ""; + bottom: -1px; + background: var(--ifm-background-color); + position: absolute; + } + ${GridItem}:nth-child(3n) { + box-shadow: 0 1px var(--ifm-color-emphasis-200); + } + html[data-theme="dark"] & img { + filter: invert(1); + } + @media (max-width: 500px) { + grid-template-columns: 1fr 1fr; + ${GridItem}:nth-child(3n) { + box-shadow: 1px 1px var(--ifm-color-emphasis-200); + } + ${GridItem}:nth-child(2n) { + box-shadow: 0 1px var(--ifm-color-emphasis-200); + } 
+ } +`; + +export default function ProjectsTable({ data }) { + return ( + + {data.map((item) => ( + + +
{item.name}
+
+ ))} +
+ ); +} diff --git a/docs/src/css/base.css b/docs/src/css/base.css new file mode 100644 index 0000000..49898c7 --- /dev/null +++ b/docs/src/css/base.css @@ -0,0 +1,28 @@ +@layer base { + html { + @apply font-inter; + font-feature-settings: 'kern', 'liga', 'calt', 'zero' 0; + -webkit-font-feature-settings: 'kern', 'liga', 'calt', 'zero' 0; + text-size-adjust: 100%; + -moz-osx-font-smoothing: grayscale; + font-smoothing: antialiased; + font-variant-ligatures: contextual common-ligatures; + font-kerning: normal; + text-rendering: optimizeLegibility; + + @supports (font-variation-settings: normal) { + @apply font-intervar + } + } + + *, + *::before, + *::after { + box-sizing: border-box; + margin: 0; + } + + svg { display: inline; } + + ::selection{} +} \ No newline at end of file diff --git a/docs/src/css/custom.css b/docs/src/css/custom.css new file mode 100644 index 0000000..3318357 --- /dev/null +++ b/docs/src/css/custom.css @@ -0,0 +1,585 @@ +@import 'tailwindcss/base'; +@import './fonts.css'; +@import './base.css'; +@import 'tailwindcss/components'; +@import 'tailwindcss/utilities'; + +.code-block-removed-line { + background-color: #ff000020; + display: block; + margin: 0 calc(-1 * var(--ifm-pre-padding)); + padding: 0 var(--ifm-pre-padding); + border-left: 3px solid #ff000080; + text-decoration: line-through; +} + +/* You can override the default Infima variables here. 
*/ +:root { + --ifm-color-primary: theme(colors.gray.1000); + --ifm-color-primary-dark: theme(colors.gray.1000); + --ifm-color-primary-darker: theme(colors.gray.1000); + --ifm-color-primary-darkest: theme(colors.gray.1000); + --ifm-color-primary-light: theme(colors.gray.1000); + --ifm-color-primary-lighter: theme(colors.gray.1000); + --ifm-color-primary-lightest: theme(colors.gray.1000); + --ifm-code-font-size: 95%; + --ifm-breadcrumb-item-background-active: transparent; + --ifm-breadcrumb-padding-horizontal: 0; + --ifm-list-paragraph-margin: 0; + --ifm-spacing-horizontal: theme(spacing.7); + --ifm-blockquote-border-color: theme(colors.gray.1000); + --ifm-menu-link-padding-vertical: 0.6rem; + --ifm-background-color: theme(colors.gray.0); + --ifm-footer-link-color: var(--ifm-font-color-base); + --ifm-menu-link-sublist-icon: url('~/img/ico-chevron.svg'); + --docsearch-searchbox-background: #f7f7f7; + --docsearch-modal-background: theme(colors.card) !important; + --ifm-navbar-height: 5.563rem; + --ifm-navbar-sidebar-width: 100vw; + --docsearch-highlight-color: theme(colors.fg) !important; + --docsearch-searchbox-shadow: inset 0 0 0 1px var(--docsearch-primary-color); + + /* temp: local search bar */ + --aa-primary-color-rgb: 0, 0, 0; + + @media screen and (prefers-reduced-motion) { + transition: ; + } + --ifm-menu-color-background-active: none; + --ifm-menu-color-background-hover: none; + --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.1); +} + +/* For readability concerns, you should choose a lighter palette in dark mode. 
*/ +html[data-theme='dark'] { + --ifm-color-primary: theme(colors.gray.0); + --ifm-color-primary-dark: #e6e6e6; + --ifm-color-primary-darker: #d9d9d9; + --ifm-color-primary-darkest: #b3b3b3; + --ifm-color-primary-light: theme(colors.gray.0); + --ifm-color-primary-lighter: theme(colors.gray.0); + --ifm-color-primary-lightest: theme(colors.gray.0); + --ifm-background-color: theme(colors.gray.1000); + --docusaurus-highlighted-code-line-bg: theme(colors.inactive); + --docsearch-modal-background: theme(colors.gray.1000) !important; + --docsearch-highlight-color: theme(colors.inactiveLight) !important; + --docsearch-hit-background: theme(colors.lightfg) !important; + --docsearch-searchbox-shadow: inset 0 0 0 1px var(--docsearch-primary-color); + --docsearch-key-gradient: linear-gradient( + -26.5deg, + #5d5d5d, + #3c3c3c + ) !important; + --docsearch-key-shadow: inset 0 -2px 0 0 #353535, inset 0 0 1px 1px #7a7a7b, + 0 2px 2px 0 rgba(45, 45, 45, 0.3) !important; + --docsearch-modal-shadow: inset 1px 1px 0 0 #2c2e40, inset -1px 1px 0 0 #2c2e40, 0 3px 8px 0 theme(colors.gray.1000) !important +} + +html { + @apply bg-docusaurusBgColor; + body > div { + @apply max-w-[90rem] m-auto; + @media (min-width: 997px) { + @apply px-6; + } + } + + /* IGNT */ + .ignt-backlink { + @apply text-2 text-muted pl-6 order-last; + & > svg { + @apply hidden; + } + } + + /* Banner */ + + #__docusaurus > div[role='banner'] { + @apply bg-gray-30 text-gray-1000 font-normal; + & > div { + @apply flex py-3; + } + a { + @apply no-underline block w-full text-2; + } + @media (min-width: 997px) { + @apply hidden; + } + } + &[data-theme='dark'] #__docusaurus > div[role='banner'] { + @apply bg-fg text-mutedLight font-normal; + } + + /* MAINNAV */ + .navbar { + @apply py-6 mx-6 px-0 h-auto border-b border-b-docusaurusColorBorder shadow-none bg-docusaurusBgColor; + @media (min-width: 997px) { + @apply mx-0; + } + &__toggle { + border-radius: .5rem; + @apply bg-card h-8 w-8 flex justify-center items-center; + + 
@media (min-width: 997px) { + @apply hidden; + } + } + &__brand { + & + * { + @apply ml-auto; + } + } + &__link--active { + @apply text-muted; + } + &__items:not(:last-child) { + @apply justify-between; + @media (min-width: 997px) { + @apply justify-start px-3; + } + button { + @apply order-2 mr-0; + } + } + &__items--right > :last-child { + @apply right-8.5; + } + } + &[data-theme='dark'] .navbar__item { + @apply text-mutedLight; + } + &[data-theme='dark'] .navbar__toggle { + @apply bg-fg; + } + + .github-icon { + @apply hover:opacity-50; + } + + /* SEARCHBAR */ + /* algolia */ + .DocSearch { + &-Hits mark { + @apply text-docusaurusColorBase; + } + &-Button { + @apply text-inactive rounded-sm h-8 w-8 bg-card justify-center mr-3; + @media (min-width: 997px) { + @apply w-auto justify-between; + } + .DocSearch-Search-Icon { + @media (min-width: 997px) { + @apply hidden; + } + } + .DocSearch-Button-Placeholder { + @apply pr-8; + } + .DocSearch-Button-Key { + background: none; + @apply shadow-none bg-transparent border border-inactive text-inactive p-3 text-2 rounded-s h-6 w-6; + } + .DocSearch-Button-Keys, + .DocSearch-Button-Placeholder { + @apply hidden; + @media (min-width: 997px) { + @apply flex; + } + } + } + + &-Search-Icon path { + @apply fill-transparent; + } + path { + @apply fill-transparent; + } + &-Logo path { + @apply fill-docusaurusColorBase; + } + } + + .navbar-sidebar { + @apply w-full; + &__brand { + @apply pb-7 pt-9 px-6 h-auto; + } + &__item { + @apply px-6 w-full; + } + &__back { + @apply px-0 hidden; + } + &__close { + border-radius: .5rem; + @apply bg-gray-1000 h-8 w-8 flex justify-center items-center ml-0; + + & > svg > g { + @apply stroke-gray-0; + } + } + } + .navbar-sidebar { + &__brand { + @apply shadow-none relative; + &::after { + content: ''; + @apply absolute block h-px bg-border bottom-0 right-3 left-0 mx-6; + } + } +} + &[data-theme='dark'] .navbar-sidebar { + @apply bg-gray-1000; + &__brand { + &::after { + content: ''; + @apply 
bg-linkHover; + } + } + &__close { + @apply bg-gray-0; + & > svg > g { + @apply stroke-gray-1000; + } + } + } + &[data-theme='dark'] .DocSearch-Modal { + @apply bg-gray-1000; + } + &[data-theme='dark'] .DocSearch-Footer { + @apply bg-gray-1000; + } + &[data-theme='dark'] .DocSearch-Button { + @apply bg-fg text-inactiveLight; + } + &[data-theme='dark'] .DocSearch-Button-Key { + @apply text-inactiveLight border-inactiveLight; + } + + /* BREADCRUMBS */ + .breadcrumbs__item { + &:first-child { + & > a { + &::after { + content: 'Docs'; + } + & > svg { + @apply hidden; + } + } + } + &:not(:last-child)::after { + content: '>'; + @apply bg-none; + } + } + .theme-doc-breadcrumbs { + @media (min-width: 997px) { + @apply pt-[calc(theme(spacing.7)-1rem)]; + } + } + .theme-doc-toc-mobile { + @apply bg-card px-6 py-5.5 pb-0 rounded; + & > button { + @apply p-0 pb-5.5 flex justify-between; + &::after { + @apply order-last ml-5; + background-image: var(--ifm-menu-link-sublist-icon); + background-size: 70%; + } + } + & ul li { + @apply my-5 mx-0; + } + } + &[data-theme='dark'] .theme-doc-toc-mobile { + @apply bg-fg; + } + + /* SIDEBAR */ + .theme-doc-sidebar-container { + @media (min-width: 997px) { + @apply border-r border-r-docusaurusColorBorder; + } + & > div:first-child > a { + @apply m-0; + } + nav { + @media (min-width: 997px) { + @apply pt-7; + } + } + } + + &[data-theme='dark'] .theme-doc-sidebar-menu .menu__list::before { + @apply bg-inactiveLight; + } + .theme-doc-sidebar-menu { + @apply font-normal; + + .menu__list { + @apply relative pl-0; + &::before { + content: ''; + @apply absolute block left-3 top-0 h-full w-[2px] bg-border; + } + ul::before { + @apply hidden; + } + } + + .menu__link { + @apply pl-0 pr-5; + &--active:not(.menu__link--sublist) { + @apply text-docusaurusColorBase font-medium; + } + } + + li li { + @apply pl-7; + .menu__link--active:not(.menu__link--sublist) { + @apply relative text-docusaurusColorBase font-medium; + &::before { + content: ''; + 
@apply absolute block left-0 top-0 h-full w-[2px] bg-docusaurusColorBase; + @apply -left-[calc(theme(space.7)-theme(space.3))]; + } + } + } + li li li { + @apply pl-5; + } + li li li .menu__link--active:not(.menu__link--sublist)::before { + @apply -left-[calc(theme(space.5)*1+theme(space.7)-theme(space.3))]; + } + li li li li .menu__link--active:not(.menu__link--sublist)::before { + @apply -left-[calc(theme(space.5)*2+theme(space.7)-theme(space.3))]; + } + li li li li li .menu__link--active:not(.menu__link--sublist)::before { + @apply -left-[calc(theme(space.5)*3+theme(space.7)-theme(space.3))]; + } + li li li li li li .menu__link--active:not(.menu__link--sublist)::before { + @apply -left-[calc(theme(space.5)*4+theme(space.7)-theme(space.3))]; + } + li li li li li li li .menu__link--active:not(.menu__link--sublist)::before { + @apply -left-[calc(theme(space.5)*5+theme(space.7)-theme(space.3))]; + } + } + + &[data-theme='dark'] .menu__link { + @apply text-mutedLight; + } + .theme-doc-sidebar-item-link .menu__link[target='_blank'] { + &::after { + content: '\2197'; + @apply ml-1; + } + } + .menu__link { + @apply text-muted; + &:hover { + text-shadow: 0.1px 0.1px 0 var(--ifm-font-color-base), + -0.1px -0.1px 0 var(--ifm-font-color-base), + 0.1px -0.1px 0 var(--ifm-font-color-base), + -0.1px 0.1px 0 var(--ifm-font-color-base), + -0.1px 0 0 var(--ifm-font-color-base), + 0.1px 0 0 var(--ifm-font-color-base), + 0 0.1px 0 var(--ifm-font-color-base), + 0 -0.1px 0 var(--ifm-font-color-base); + @apply text-docusaurusColorBase; + } + + & > svg { + @apply hidden; + } + } + + .menu__link--sublist-caret { + @apply flex; + &::after { + background-size: 16px; + background-repeat: no-repeat; + @apply order-first ml-0 mr-4; + } + } + .menu__list-item--collapsed .menu__link--sublist:after, + .menu__list-item--collapsed .menu__caret:before { + transform: rotateZ(0); + } + .menu__caret, + li li .menu__link--sublist-caret::after { + @apply hidden; + } + + /* TOC */ + 
.table-of-contents__link:hover, + .table-of-contents__link--active { + text-shadow: 0.1px 0.1px 0 var(--ifm-font-color-base), + -0.1px -0.1px 0 var(--ifm-font-color-base), + 0.1px -0.1px 0 var(--ifm-font-color-base), + -0.1px 0.1px 0 var(--ifm-font-color-base), + -0.1px 0 0 var(--ifm-font-color-base), + 0.1px 0 0 var(--ifm-font-color-base), 0 0.1px 0 var(--ifm-font-color-base), + 0 -0.1px 0 var(--ifm-font-color-base); + } + + /* RELATED ARTICLES */ + &[data-theme='dark'] .pagination-nav > a { + @apply bg-fg; + } + .pagination-nav { + @apply pb-7 mt-9; + & > a { + box-shadow: 0px 0px 80px rgba(0, 0, 0, 0.07); + @apply border-transparent rounded pb-8.5 col-span-2 pt-6 px-6 hover:shadow-none; + + @media (min-width: 997px) { + @apply col-span-1; + } + } + + .pagination-nav { + &__link--next { + @apply text-left; + @media (min-width: 997px) { + @apply text-right; + } + } + &__sublabel { + @apply mb-3.5 text-gray-1000 dark:text-docusaurusColorBase text-3; + } + &__label { + @apply text-4 font-semibold; + } + } + } + + /* FOOTER */ + .footer { + background-color: var(--ifm-background-color); + @apply border-t border-t-docusaurusColorBorder pt-10 mb-10; + &__link-item { + @apply hover:no-underline hover:text-linkHover; + } + &__bottom { + margin: 0 calc(var(--ifm-spacing-horizontal) * -1); + } + &__copyright { + @apply text-left mt-9 text-2; + @media (min-width: 997px) { + @apply grid grid-cols-5; + } + & > div { + padding: 0 var(--ifm-spacing-horizontal); + &:nth-child(2) { + @apply col-span-4; + } + } + & a { + @apply pr-5 hover:text-linkHover hover:no-underline; + } + } + } + .footer__col:not(:first-child) { + @apply basis-1/2; + @media (min-width: 997px) { + @apply basis-0; + } + } + .footer__col:first-child .footer__title { + @apply hidden; + } + .footer__link-item { + & > svg { + @apply hidden; + } + } + + .theme-back-to-top-button { + @apply rotate-180; + &::after { + @apply w-1/2; + } + } + + /* MARKDOWN */ + .markdown { + --ifm-heading-vertical-rhythm-bottom: 1; + 
--ifm-h1-vertical-rhythm-bottom: 1; + } + .theme-doc-markdown { + @apply mt-7 pb-8 border-b border-b-border; + + h1 { + @apply text-7 font-bold leading-10 tracking-tight; + } + h2 { + @apply text-6 font-bold leading-9 tracking-tight; + } + h3 { + @apply text-4 font-semibold leading-7 tracking-tight; + } + p { + @apply leading-relaxed; + } + p, + ul, + ol, + code, + blockquote { + @apply text-[1.125rem]; + } + code { + @apply border-0 px-3; + } + blockquote { + @apply my-7; + } + a { + @apply underline underline-offset-2 hover:text-linkHover hover:decoration-linkHover; + } + ol, + ul { + @apply my-6; + } + ul li { + @apply relative pl-6 mb-4 before:absolute before:block before:w-[4px] before:h-[4px] before:bg-current before:left-0 before:top-[calc(1em/2)]; + } + ul li li { + @apply last:mb-6 before:border before:border-current before:bg-transparent; + } + li:last-child li { + @apply last:mb-0; + } + ol { + list-style-type: none; + counter-reset: item; + & > li { + @apply relative pl-8 mb-5.5; + &::before { + counter-increment: item; + content: counters(item, '.', decimal-leading-zero) '.'; + @apply absolute flex left-0 top-[.2rem] text-3 font-semibold tracking-tight; + } + } + } + ol ol { + counter-reset: subitem; + & > li { + &::before { + counter-increment: subitem; + content: counters(subitem, '.', decimal-leading-zero) '.'; + } + } + } + li { + & > ul, + & > ol { + @apply my-5; + } + & > p + .theme-code-block { + @apply mt-3; + } + } + } +} diff --git a/docs/src/css/fonts.css b/docs/src/css/fonts.css new file mode 100644 index 0000000..9c09a60 --- /dev/null +++ b/docs/src/css/fonts.css @@ -0,0 +1,72 @@ +/* + + FONT FAMILY GROUPS + +*/ + + + +/* Inter */ +@font-face { + font-family: 'Inter'; + font-style: normal; + font-weight: 400; + font-display: swap; + src: url("~/static/fonts/inter/Inter-Regular.woff2?v=3.19") format("woff2"), + url("~/static/fonts/inter/Inter-Regular.woff?v=3.19") format("woff"); +} + +@font-face { + font-family: 'Inter'; + font-style: 
normal; + font-weight: 500; + font-display: swap; + src: url("~/static/fonts/inter/Inter-Medium.woff2?v=3.19") format("woff2"), + url("~/static/fonts/inter/Inter-Medium.woff?v=3.19") format("woff"); +} + +@font-face { + font-family: 'Inter'; + font-style: normal; + font-weight: 700; + font-display: swap; + src: url("~/static/fonts/inter/Inter-Bold.woff2?v=3.19") format("woff2"), + url("~/static/fonts/inter/Inter-Bold.woff?v=3.19") format("woff"); +} + +@font-face { + font-family: 'Inter'; + font-style: normal; + font-weight: 900; + font-display: swap; + src: url("~/static/fonts/inter/Inter-Black.woff2?v=3.19") format("woff2"), + url("~/static/fonts/inter/Inter-Black.woff?v=3.19") format("woff"); +} + + +/* Inter var */ +@font-face { + font-family: 'Inter var'; + font-weight: 100 900; + font-display: swap; + font-style: oblique 0deg 10deg; + src: url("~/static/fonts/intervar/Inter.var.woff2?v=3.19") format("woff2"); +} + +/* Termina */ +@font-face { + font-family: 'Termina'; + src: url("~/static/fonts/termina/termina-demi-web.woff2") format("woff2"), /* chrome、firefox */ + url("~/static/fonts/termina/termina-demi-web.woff") format("woff"), /* chrome、firefox */ + url("~/static/fonts/termina/termina-demi-web.ttf") format("truetype"); /* chrome、firefox、opera、Safari, Android, iOS 4.2+*/ font-weight: 600; + font-style: normal; + font-stretch: normal; +} + +/* JetBrains Mono */ +@font-face{ + font-family: 'JetBrains Mono'; + font-weight: normal; + font-style: normal; + src: url("~/static/fonts/jetbrainsmono/JetBrainsMono-Regular.woff2") format("woff2"); +} diff --git a/docs/src/pages/index.js b/docs/src/pages/index.js new file mode 100644 index 0000000..c19e8da --- /dev/null +++ b/docs/src/pages/index.js @@ -0,0 +1,59 @@ +import React from "react"; +import Layout from "@theme/Layout"; +import Link from "@docusaurus/Link"; +import useDocusaurusContext from "@docusaurus/useDocusaurusContext"; +import styles from "./index.module.css"; + +const HomeCard = ({ title, 
description, to }) => ( +
+

{title}

+

{description}

+ + Explore → + +
+); + +export default function Home() { + const { siteConfig } = useDocusaurusContext(); + + return ( + +
+

Welcome to IGNITE® Knowledge Hub

+

+ Your one-stop portal for IGNITE® documentation, tutorials, and + resources +

+
+ +
+
+ + + + +
+
+
+ ); +} diff --git a/docs/src/pages/index.module.css b/docs/src/pages/index.module.css new file mode 100644 index 0000000..54467fd --- /dev/null +++ b/docs/src/pages/index.module.css @@ -0,0 +1,125 @@ +.hero { + text-align: center; + padding: 1.75rem 1.5rem; /* Matches --ifm-spacing-horizontal: theme(spacing.7) */ + background-color: var(--ifm-background-color); + border-bottom: 1px solid var(--ifm-blockquote-border-color); +} + +.heroTitle { + font-size: 2.5rem; /* Matches h1 sizing */ + font-weight: 700; + line-height: 2.5rem; + letter-spacing: -0.025em; /* Approximates tracking-tight */ + margin-bottom: 1rem; + color: var(--ifm-font-color-base); +} + +.heroSubtitle { + font-size: 1.125rem; + color: #666; /* Approximates gray.600 */ + max-width: 600px; + margin: 0 auto; + line-height: 1.75rem; /* Matches relaxed line height */ +} + +.main { + padding: 1.75rem 1.5rem; + max-width: 90rem; + margin: 0 auto; +} + +.cardsContainer { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(250px, 1fr)); + gap: 1.5rem; + max-width: 1200px; + margin: 0 auto; +} + +.card { + padding: 1.5rem; + border: 1px solid var(--ifm-blockquote-border-color); + border-radius: 0.5rem; + background-color: #f7f7f7; /* Matches --docsearch-searchbox-background as a card bg */ + transition: all 0.2s ease-in-out; +} + +.card:hover { + box-shadow: 0px 0px 80px rgba(0, 0, 0, 0.07); + transform: translateY(-2px); +} + +.card h2 { + margin-top: 0; + font-size: 1.25rem; /* Matches h3 sizing */ + font-weight: 600; + line-height: 1.75rem; + letter-spacing: -0.025em; + color: var(--ifm-font-color-base); +} + +.card p { + color: #666; + font-size: 1rem; + line-height: 1.75rem; + margin-bottom: 1rem; +} + +.cardLink { + display: inline-block; + font-size: 1rem; + font-weight: 500; + color: var(--ifm-color-primary); + text-decoration: underline; + text-underline-offset: 2px; + transition: all 0.2s ease-in-out; +} + +.cardLink:hover { + color: #3d39fc; + text-decoration-color: #3d39fc; +} + +/* 
Dark mode adjustments */ +html[data-theme="dark"] .hero { + background-color: var(--ifm-background-color); +} + +html[data-theme="dark"] .card { + background-color: #1a1a1a; /* Matches dark theme --ifm-background-color */ +} + +html[data-theme="dark"] .card p { + color: #b3b3b3; /* Approximates mutedLight */ +} + +html[data-theme="dark"] .cardLink { + color: var(--ifm-color-primary); +} + +html[data-theme="dark"] .cardLink:hover { + color: #3d39fc; + text-decoration-color: #3d39fc; +} + +/* Responsive adjustments */ +@media (max-width: 996px) { + .cardsContainer { + grid-template-columns: 1fr; + } + + .hero { + padding: 1.5rem 1rem; + } + + .main { + padding: 1.5rem 1rem; + } +} + +@media (min-width: 997px) { + .hero { + padding-top: 3.5rem; + padding-bottom: 3.5rem; + } +} diff --git a/docs/static/.nojekyll b/docs/static/.nojekyll new file mode 100644 index 0000000..473a0f4 diff --git a/docs/static/fonts/inter/Inter-Black.woff b/docs/static/fonts/inter/Inter-Black.woff new file mode 100644 index 0000000..82819fd Binary files /dev/null and b/docs/static/fonts/inter/Inter-Black.woff differ diff --git a/docs/static/fonts/inter/Inter-Black.woff2 b/docs/static/fonts/inter/Inter-Black.woff2 new file mode 100644 index 0000000..d8f2cde Binary files /dev/null and b/docs/static/fonts/inter/Inter-Black.woff2 differ diff --git a/docs/static/fonts/inter/Inter-BlackItalic.woff b/docs/static/fonts/inter/Inter-BlackItalic.woff new file mode 100644 index 0000000..8d14d3e Binary files /dev/null and b/docs/static/fonts/inter/Inter-BlackItalic.woff differ diff --git a/docs/static/fonts/inter/Inter-BlackItalic.woff2 b/docs/static/fonts/inter/Inter-BlackItalic.woff2 new file mode 100644 index 0000000..1123f2b Binary files /dev/null and b/docs/static/fonts/inter/Inter-BlackItalic.woff2 differ diff --git a/docs/static/fonts/inter/Inter-Bold.woff b/docs/static/fonts/inter/Inter-Bold.woff new file mode 100644 index 0000000..1141494 Binary files /dev/null and 
b/docs/static/fonts/inter/Inter-Bold.woff differ diff --git a/docs/static/fonts/inter/Inter-Bold.woff2 b/docs/static/fonts/inter/Inter-Bold.woff2 new file mode 100644 index 0000000..5989a29 Binary files /dev/null and b/docs/static/fonts/inter/Inter-Bold.woff2 differ diff --git a/docs/static/fonts/inter/Inter-BoldItalic.woff b/docs/static/fonts/inter/Inter-BoldItalic.woff new file mode 100644 index 0000000..738d1e5 Binary files /dev/null and b/docs/static/fonts/inter/Inter-BoldItalic.woff differ diff --git a/docs/static/fonts/inter/Inter-BoldItalic.woff2 b/docs/static/fonts/inter/Inter-BoldItalic.woff2 new file mode 100644 index 0000000..1ae84b2 Binary files /dev/null and b/docs/static/fonts/inter/Inter-BoldItalic.woff2 differ diff --git a/docs/static/fonts/inter/Inter-ExtraBold.woff b/docs/static/fonts/inter/Inter-ExtraBold.woff new file mode 100644 index 0000000..98bbcef Binary files /dev/null and b/docs/static/fonts/inter/Inter-ExtraBold.woff differ diff --git a/docs/static/fonts/inter/Inter-ExtraBold.woff2 b/docs/static/fonts/inter/Inter-ExtraBold.woff2 new file mode 100644 index 0000000..d8afe93 Binary files /dev/null and b/docs/static/fonts/inter/Inter-ExtraBold.woff2 differ diff --git a/docs/static/fonts/inter/Inter-ExtraBoldItalic.woff b/docs/static/fonts/inter/Inter-ExtraBoldItalic.woff new file mode 100644 index 0000000..0daf683 Binary files /dev/null and b/docs/static/fonts/inter/Inter-ExtraBoldItalic.woff differ diff --git a/docs/static/fonts/inter/Inter-ExtraBoldItalic.woff2 b/docs/static/fonts/inter/Inter-ExtraBoldItalic.woff2 new file mode 100644 index 0000000..5381d07 Binary files /dev/null and b/docs/static/fonts/inter/Inter-ExtraBoldItalic.woff2 differ diff --git a/docs/static/fonts/inter/Inter-ExtraLight.woff b/docs/static/fonts/inter/Inter-ExtraLight.woff new file mode 100644 index 0000000..cccc842 Binary files /dev/null and b/docs/static/fonts/inter/Inter-ExtraLight.woff differ diff --git a/docs/static/fonts/inter/Inter-ExtraLight.woff2 
b/docs/static/fonts/inter/Inter-ExtraLight.woff2 new file mode 100644 index 0000000..1d0debd Binary files /dev/null and b/docs/static/fonts/inter/Inter-ExtraLight.woff2 differ diff --git a/docs/static/fonts/inter/Inter-ExtraLightItalic.woff b/docs/static/fonts/inter/Inter-ExtraLightItalic.woff new file mode 100644 index 0000000..911d90e Binary files /dev/null and b/docs/static/fonts/inter/Inter-ExtraLightItalic.woff differ diff --git a/docs/static/fonts/inter/Inter-ExtraLightItalic.woff2 b/docs/static/fonts/inter/Inter-ExtraLightItalic.woff2 new file mode 100644 index 0000000..f9f0269 Binary files /dev/null and b/docs/static/fonts/inter/Inter-ExtraLightItalic.woff2 differ diff --git a/docs/static/fonts/inter/Inter-Italic.woff b/docs/static/fonts/inter/Inter-Italic.woff new file mode 100644 index 0000000..6e983bd Binary files /dev/null and b/docs/static/fonts/inter/Inter-Italic.woff differ diff --git a/docs/static/fonts/inter/Inter-Italic.woff2 b/docs/static/fonts/inter/Inter-Italic.woff2 new file mode 100644 index 0000000..43ff839 Binary files /dev/null and b/docs/static/fonts/inter/Inter-Italic.woff2 differ diff --git a/docs/static/fonts/inter/Inter-Light.woff b/docs/static/fonts/inter/Inter-Light.woff new file mode 100644 index 0000000..5e63725 Binary files /dev/null and b/docs/static/fonts/inter/Inter-Light.woff differ diff --git a/docs/static/fonts/inter/Inter-Light.woff2 b/docs/static/fonts/inter/Inter-Light.woff2 new file mode 100644 index 0000000..742e621 Binary files /dev/null and b/docs/static/fonts/inter/Inter-Light.woff2 differ diff --git a/docs/static/fonts/inter/Inter-LightItalic.woff b/docs/static/fonts/inter/Inter-LightItalic.woff new file mode 100644 index 0000000..28993e2 Binary files /dev/null and b/docs/static/fonts/inter/Inter-LightItalic.woff differ diff --git a/docs/static/fonts/inter/Inter-LightItalic.woff2 b/docs/static/fonts/inter/Inter-LightItalic.woff2 new file mode 100644 index 0000000..31e4ca7 Binary files /dev/null and 
b/docs/static/fonts/inter/Inter-LightItalic.woff2 differ diff --git a/docs/static/fonts/inter/Inter-Medium.woff b/docs/static/fonts/inter/Inter-Medium.woff new file mode 100644 index 0000000..2281510 Binary files /dev/null and b/docs/static/fonts/inter/Inter-Medium.woff differ diff --git a/docs/static/fonts/inter/Inter-Medium.woff2 b/docs/static/fonts/inter/Inter-Medium.woff2 new file mode 100644 index 0000000..17caef8 Binary files /dev/null and b/docs/static/fonts/inter/Inter-Medium.woff2 differ diff --git a/docs/static/fonts/inter/Inter-MediumItalic.woff b/docs/static/fonts/inter/Inter-MediumItalic.woff new file mode 100644 index 0000000..31d5ee1 Binary files /dev/null and b/docs/static/fonts/inter/Inter-MediumItalic.woff differ diff --git a/docs/static/fonts/inter/Inter-MediumItalic.woff2 b/docs/static/fonts/inter/Inter-MediumItalic.woff2 new file mode 100644 index 0000000..6eff543 Binary files /dev/null and b/docs/static/fonts/inter/Inter-MediumItalic.woff2 differ diff --git a/docs/static/fonts/inter/Inter-Regular.woff b/docs/static/fonts/inter/Inter-Regular.woff new file mode 100644 index 0000000..7e3669b Binary files /dev/null and b/docs/static/fonts/inter/Inter-Regular.woff differ diff --git a/docs/static/fonts/inter/Inter-Regular.woff2 b/docs/static/fonts/inter/Inter-Regular.woff2 new file mode 100644 index 0000000..bc8a184 Binary files /dev/null and b/docs/static/fonts/inter/Inter-Regular.woff2 differ diff --git a/docs/static/fonts/inter/Inter-SemiBold.woff b/docs/static/fonts/inter/Inter-SemiBold.woff new file mode 100644 index 0000000..c0766d8 Binary files /dev/null and b/docs/static/fonts/inter/Inter-SemiBold.woff differ diff --git a/docs/static/fonts/inter/Inter-SemiBold.woff2 b/docs/static/fonts/inter/Inter-SemiBold.woff2 new file mode 100644 index 0000000..921f994 Binary files /dev/null and b/docs/static/fonts/inter/Inter-SemiBold.woff2 differ diff --git a/docs/static/fonts/inter/Inter-SemiBoldItalic.woff 
b/docs/static/fonts/inter/Inter-SemiBoldItalic.woff new file mode 100644 index 0000000..efc3779 Binary files /dev/null and b/docs/static/fonts/inter/Inter-SemiBoldItalic.woff differ diff --git a/docs/static/fonts/inter/Inter-SemiBoldItalic.woff2 b/docs/static/fonts/inter/Inter-SemiBoldItalic.woff2 new file mode 100644 index 0000000..ca5c286 Binary files /dev/null and b/docs/static/fonts/inter/Inter-SemiBoldItalic.woff2 differ diff --git a/docs/static/fonts/inter/Inter-Thin.woff b/docs/static/fonts/inter/Inter-Thin.woff new file mode 100644 index 0000000..1fda5af Binary files /dev/null and b/docs/static/fonts/inter/Inter-Thin.woff differ diff --git a/docs/static/fonts/inter/Inter-Thin.woff2 b/docs/static/fonts/inter/Inter-Thin.woff2 new file mode 100644 index 0000000..59101f1 Binary files /dev/null and b/docs/static/fonts/inter/Inter-Thin.woff2 differ diff --git a/docs/static/fonts/inter/Inter-ThinItalic.woff b/docs/static/fonts/inter/Inter-ThinItalic.woff new file mode 100644 index 0000000..38a2324 Binary files /dev/null and b/docs/static/fonts/inter/Inter-ThinItalic.woff differ diff --git a/docs/static/fonts/inter/Inter-ThinItalic.woff2 b/docs/static/fonts/inter/Inter-ThinItalic.woff2 new file mode 100644 index 0000000..ac52629 Binary files /dev/null and b/docs/static/fonts/inter/Inter-ThinItalic.woff2 differ diff --git a/docs/static/fonts/inter/Inter-italic.var.woff2 b/docs/static/fonts/inter/Inter-italic.var.woff2 new file mode 100644 index 0000000..b985a1e Binary files /dev/null and b/docs/static/fonts/inter/Inter-italic.var.woff2 differ diff --git a/docs/static/fonts/inter/Inter-roman.var.woff2 b/docs/static/fonts/inter/Inter-roman.var.woff2 new file mode 100644 index 0000000..51088e4 Binary files /dev/null and b/docs/static/fonts/inter/Inter-roman.var.woff2 differ diff --git a/docs/static/fonts/intervar/Inter.var.woff2 b/docs/static/fonts/intervar/Inter.var.woff2 new file mode 100644 index 0000000..1723192 Binary files /dev/null and 
b/docs/static/fonts/intervar/Inter.var.woff2 differ diff --git a/docs/static/fonts/jetbrainsmono/JetBrainsMono-Bold.woff2 b/docs/static/fonts/jetbrainsmono/JetBrainsMono-Bold.woff2 new file mode 100644 index 0000000..f8ce701 Binary files /dev/null and b/docs/static/fonts/jetbrainsmono/JetBrainsMono-Bold.woff2 differ diff --git a/docs/static/fonts/jetbrainsmono/JetBrainsMono-BoldItalic.woff2 b/docs/static/fonts/jetbrainsmono/JetBrainsMono-BoldItalic.woff2 new file mode 100644 index 0000000..aa029da Binary files /dev/null and b/docs/static/fonts/jetbrainsmono/JetBrainsMono-BoldItalic.woff2 differ diff --git a/docs/static/fonts/jetbrainsmono/JetBrainsMono-ExtraBold.woff2 b/docs/static/fonts/jetbrainsmono/JetBrainsMono-ExtraBold.woff2 new file mode 100644 index 0000000..b2cf085 Binary files /dev/null and b/docs/static/fonts/jetbrainsmono/JetBrainsMono-ExtraBold.woff2 differ diff --git a/docs/static/fonts/jetbrainsmono/JetBrainsMono-ExtraBoldItalic.woff2 b/docs/static/fonts/jetbrainsmono/JetBrainsMono-ExtraBoldItalic.woff2 new file mode 100644 index 0000000..c9a5b00 Binary files /dev/null and b/docs/static/fonts/jetbrainsmono/JetBrainsMono-ExtraBoldItalic.woff2 differ diff --git a/docs/static/fonts/jetbrainsmono/JetBrainsMono-ExtraLight.woff2 b/docs/static/fonts/jetbrainsmono/JetBrainsMono-ExtraLight.woff2 new file mode 100644 index 0000000..45594a2 Binary files /dev/null and b/docs/static/fonts/jetbrainsmono/JetBrainsMono-ExtraLight.woff2 differ diff --git a/docs/static/fonts/jetbrainsmono/JetBrainsMono-ExtraLightItalic.woff2 b/docs/static/fonts/jetbrainsmono/JetBrainsMono-ExtraLightItalic.woff2 new file mode 100644 index 0000000..7912eb1 Binary files /dev/null and b/docs/static/fonts/jetbrainsmono/JetBrainsMono-ExtraLightItalic.woff2 differ diff --git a/docs/static/fonts/jetbrainsmono/JetBrainsMono-Italic.woff2 b/docs/static/fonts/jetbrainsmono/JetBrainsMono-Italic.woff2 new file mode 100644 index 0000000..e3dec81 Binary files /dev/null and 
b/docs/static/fonts/jetbrainsmono/JetBrainsMono-Italic.woff2 differ diff --git a/docs/static/fonts/jetbrainsmono/JetBrainsMono-Light.woff2 b/docs/static/fonts/jetbrainsmono/JetBrainsMono-Light.woff2 new file mode 100644 index 0000000..e63fcff Binary files /dev/null and b/docs/static/fonts/jetbrainsmono/JetBrainsMono-Light.woff2 differ diff --git a/docs/static/fonts/jetbrainsmono/JetBrainsMono-LightItalic.woff2 b/docs/static/fonts/jetbrainsmono/JetBrainsMono-LightItalic.woff2 new file mode 100644 index 0000000..07f9106 Binary files /dev/null and b/docs/static/fonts/jetbrainsmono/JetBrainsMono-LightItalic.woff2 differ diff --git a/docs/static/fonts/jetbrainsmono/JetBrainsMono-Medium.woff2 b/docs/static/fonts/jetbrainsmono/JetBrainsMono-Medium.woff2 new file mode 100644 index 0000000..9f0d29b Binary files /dev/null and b/docs/static/fonts/jetbrainsmono/JetBrainsMono-Medium.woff2 differ diff --git a/docs/static/fonts/jetbrainsmono/JetBrainsMono-MediumItalic.woff2 b/docs/static/fonts/jetbrainsmono/JetBrainsMono-MediumItalic.woff2 new file mode 100644 index 0000000..61c9e91 Binary files /dev/null and b/docs/static/fonts/jetbrainsmono/JetBrainsMono-MediumItalic.woff2 differ diff --git a/docs/static/fonts/jetbrainsmono/JetBrainsMono-Regular.woff2 b/docs/static/fonts/jetbrainsmono/JetBrainsMono-Regular.woff2 new file mode 100644 index 0000000..c3d6c74 Binary files /dev/null and b/docs/static/fonts/jetbrainsmono/JetBrainsMono-Regular.woff2 differ diff --git a/docs/static/fonts/jetbrainsmono/JetBrainsMono-SemiBold.woff2 b/docs/static/fonts/jetbrainsmono/JetBrainsMono-SemiBold.woff2 new file mode 100644 index 0000000..caff24f Binary files /dev/null and b/docs/static/fonts/jetbrainsmono/JetBrainsMono-SemiBold.woff2 differ diff --git a/docs/static/fonts/jetbrainsmono/JetBrainsMono-SemiBoldItalic.woff2 b/docs/static/fonts/jetbrainsmono/JetBrainsMono-SemiBoldItalic.woff2 new file mode 100644 index 0000000..86a177d Binary files /dev/null and 
b/docs/static/fonts/jetbrainsmono/JetBrainsMono-SemiBoldItalic.woff2 differ diff --git a/docs/static/fonts/jetbrainsmono/JetBrainsMono-Thin.woff2 b/docs/static/fonts/jetbrainsmono/JetBrainsMono-Thin.woff2 new file mode 100644 index 0000000..d9dd191 Binary files /dev/null and b/docs/static/fonts/jetbrainsmono/JetBrainsMono-Thin.woff2 differ diff --git a/docs/static/fonts/jetbrainsmono/JetBrainsMono-ThinItalic.woff2 b/docs/static/fonts/jetbrainsmono/JetBrainsMono-ThinItalic.woff2 new file mode 100644 index 0000000..17650dc Binary files /dev/null and b/docs/static/fonts/jetbrainsmono/JetBrainsMono-ThinItalic.woff2 differ diff --git a/docs/static/fonts/termina/termina-demi-web.eot b/docs/static/fonts/termina/termina-demi-web.eot new file mode 100644 index 0000000..83f807e Binary files /dev/null and b/docs/static/fonts/termina/termina-demi-web.eot differ diff --git a/docs/static/fonts/termina/termina-demi-web.svg b/docs/static/fonts/termina/termina-demi-web.svg new file mode 100644 index 0000000..b2730dd --- /dev/null +++ b/docs/static/fonts/termina/termina-demi-web.svg @@ -0,0 +1,8653 @@ + + + + +Created by FontForge 20200427 at Tue Oct 20 06:16:06 2015 + By www +Copyright (c) 2015 by Mattox Shuler. All rights reserved. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/static/fonts/termina/termina-demi-web.ttf b/docs/static/fonts/termina/termina-demi-web.ttf new file mode 100644 index 0000000..9aff458 Binary files /dev/null and b/docs/static/fonts/termina/termina-demi-web.ttf differ diff --git a/docs/static/fonts/termina/termina-demi-web.woff b/docs/static/fonts/termina/termina-demi-web.woff new file mode 100644 index 0000000..b2f505c Binary files /dev/null and b/docs/static/fonts/termina/termina-demi-web.woff differ diff --git a/docs/static/fonts/termina/termina-demi-web.woff2 b/docs/static/fonts/termina/termina-demi-web.woff2 new file mode 100644 index 0000000..92528fc Binary files /dev/null and b/docs/static/fonts/termina/termina-demi-web.woff2 differ diff --git a/docs/static/img/favicon-svg.svg b/docs/static/img/favicon-svg.svg new file mode 100644 index 0000000..a47b0c8 --- /dev/null +++ b/docs/static/img/favicon-svg.svg @@ -0,0 +1,15 @@ + + + + + diff --git a/docs/static/img/favicon.ico b/docs/static/img/favicon.ico new file mode 100644 index 0000000..5a6db46 Binary files /dev/null and b/docs/static/img/favicon.ico differ diff --git a/docs/static/img/header-logo-docs-dark.svg b/docs/static/img/header-logo-docs-dark.svg new file mode 100644 index 0000000..583bc7f --- /dev/null +++ b/docs/static/img/header-logo-docs-dark.svg @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/docs/static/img/header-logo-docs.svg b/docs/static/img/header-logo-docs.svg new file mode 100644 index 0000000..eca13e9 --- /dev/null +++ b/docs/static/img/header-logo-docs.svg @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/docs/static/img/ico-chevron.svg b/docs/static/img/ico-chevron.svg new file mode 100644 index 0000000..999a390 --- 
/dev/null +++ b/docs/static/img/ico-chevron.svg @@ -0,0 +1,3 @@ + + + diff --git a/docs/static/img/ico-github.svg b/docs/static/img/ico-github.svg new file mode 100644 index 0000000..41175f4 --- /dev/null +++ b/docs/static/img/ico-github.svg @@ -0,0 +1,3 @@ + + + diff --git a/docs/static/img/logo.png b/docs/static/img/logo.png new file mode 100644 index 0000000..995d550 Binary files /dev/null and b/docs/static/img/logo.png differ diff --git a/docs/static/img/logo.svg b/docs/static/img/logo.svg new file mode 100644 index 0000000..1047df3 --- /dev/null +++ b/docs/static/img/logo.svg @@ -0,0 +1,15 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/static/img/logo/aura.svg b/docs/static/img/logo/aura.svg new file mode 100644 index 0000000..de1c2bd --- /dev/null +++ b/docs/static/img/logo/aura.svg @@ -0,0 +1,14 @@ + + + diff --git a/docs/static/img/logo/bitcanna.svg b/docs/static/img/logo/bitcanna.svg new file mode 100644 index 0000000..e9a0187 --- /dev/null +++ b/docs/static/img/logo/bitcanna.svg @@ -0,0 +1,38 @@ + + + + + + + + + + + + diff --git a/docs/static/img/logo/cudos.svg b/docs/static/img/logo/cudos.svg new file mode 100644 index 0000000..28fa1d0 --- /dev/null +++ b/docs/static/img/logo/cudos.svg @@ -0,0 +1,12 @@ + + + diff --git a/docs/static/img/logo/dymension.svg b/docs/static/img/logo/dymension.svg new file mode 100644 index 0000000..630a85e --- /dev/null +++ b/docs/static/img/logo/dymension.svg @@ -0,0 +1,14 @@ + + + diff --git a/docs/static/img/logo/electra.svg b/docs/static/img/logo/electra.svg new file mode 100644 index 0000000..aea8925 --- /dev/null +++ b/docs/static/img/logo/electra.svg @@ -0,0 +1,26 @@ + + + + + diff --git a/docs/static/img/logo/firmachain.svg b/docs/static/img/logo/firmachain.svg new file mode 100644 index 0000000..a988ed2 --- /dev/null +++ b/docs/static/img/logo/firmachain.svg @@ -0,0 +1,14 @@ + + + diff --git a/docs/static/img/logo/kyve.svg b/docs/static/img/logo/kyve.svg new file mode 100644 index 
0000000..8f8945c --- /dev/null +++ b/docs/static/img/logo/kyve.svg @@ -0,0 +1,24 @@ + + + + + + + + + + diff --git a/docs/static/img/logo/medibloc.svg b/docs/static/img/logo/medibloc.svg new file mode 100644 index 0000000..ebd15ac --- /dev/null +++ b/docs/static/img/logo/medibloc.svg @@ -0,0 +1,34 @@ + + + + + + + + + + diff --git a/docs/static/img/logo/mun.svg b/docs/static/img/logo/mun.svg new file mode 100644 index 0000000..d66167d --- /dev/null +++ b/docs/static/img/logo/mun.svg @@ -0,0 +1,26 @@ + + + + + + + + + diff --git a/docs/static/img/logo/neutron.svg b/docs/static/img/logo/neutron.svg new file mode 100644 index 0000000..a4ac838 --- /dev/null +++ b/docs/static/img/logo/neutron.svg @@ -0,0 +1,14 @@ + + + diff --git a/docs/static/img/logo/okp4.svg b/docs/static/img/logo/okp4.svg new file mode 100644 index 0000000..7c18c1d --- /dev/null +++ b/docs/static/img/logo/okp4.svg @@ -0,0 +1,136 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/static/img/logo/ollostation.svg b/docs/static/img/logo/ollostation.svg new file mode 100644 index 0000000..ec9f6d1 --- /dev/null +++ b/docs/static/img/logo/ollostation.svg @@ -0,0 +1,14 @@ + + + diff --git a/docs/static/img/logo/sonr.svg b/docs/static/img/logo/sonr.svg new file mode 100644 index 0000000..01703e1 --- /dev/null +++ b/docs/static/img/logo/sonr.svg @@ -0,0 +1,60 @@ + + + + + + + + + + + + + + + + + + + diff --git a/docs/static/img/logo/source.svg b/docs/static/img/logo/source.svg new file mode 100644 index 0000000..c9af797 --- /dev/null +++ b/docs/static/img/logo/source.svg @@ -0,0 +1,16 @@ + + + + diff --git a/docs/static/img/logo/stride.svg b/docs/static/img/logo/stride.svg new file mode 100644 index 0000000..c7c0bd7 --- /dev/null +++ b/docs/static/img/logo/stride.svg @@ -0,0 +1,14 @@ + + + diff --git a/docs/static/img/logo/umee.svg b/docs/static/img/logo/umee.svg new file mode 100644 index 0000000..5a64bc9 --- /dev/null +++ b/docs/static/img/logo/umee.svg @@ -0,0 +1,24 @@ 
+ + + + + + diff --git a/docs/static/img/og-image.jpg b/docs/static/img/og-image.jpg new file mode 100644 index 0000000..03742de Binary files /dev/null and b/docs/static/img/og-image.jpg differ diff --git a/docs/static/img/web-1.png b/docs/static/img/web-1.png new file mode 100644 index 0000000..ce44883 Binary files /dev/null and b/docs/static/img/web-1.png differ diff --git a/docs/static/img/web-4.png b/docs/static/img/web-4.png new file mode 100644 index 0000000..f6ea93f Binary files /dev/null and b/docs/static/img/web-4.png differ diff --git a/docs/static/img/web-5.png b/docs/static/img/web-5.png new file mode 100644 index 0000000..65c6eb3 Binary files /dev/null and b/docs/static/img/web-5.png differ diff --git a/docs/tailwind.config.js b/docs/tailwind.config.js new file mode 100644 index 0000000..3226d35 --- /dev/null +++ b/docs/tailwind.config.js @@ -0,0 +1,105 @@ +const defaultTheme = require("tailwindcss/defaultTheme"); + +// Px to REM function (static base of 16) +const pxToRem = (dest) => 1 / (16 / dest); + +// Config +module.exports = { + content: ["./src/**/*.{js,jsx,ts,tsx}"], + corePlugins: { + // preflight: false, // avoid reset all docusaurus css + }, + theme: { + borderRadius: { + none: "0", + xs: `${pxToRem(4)}rem`, + s: `${pxToRem(8)}rem`, + sm: `${pxToRem(10)}rem`, + DEFAULT: `${pxToRem(16)}rem`, + md: `${pxToRem(20)}rem`, + lg: `${pxToRem(100)}rem`, + circle: "100%", + }, + fontFamily: { + termina: ["termina", defaultTheme.fontFamily.sans], + intervar: ['"Inter var"', defaultTheme.fontFamily.sans], + inter: ["Inter", defaultTheme.fontFamily.sans], + jetbrain: ["JetBrains Mono", defaultTheme.fontFamily.mono], + }, + fontSize: { + 0: "0", + 1: [`${pxToRem(10)}rem`], + 2: [`${pxToRem(13)}rem`], + 3: [`${pxToRem(16)}rem`], + 4: [`${pxToRem(21)}rem`], + 5: [`${pxToRem(28)}rem`], + 6: [`${pxToRem(32)}rem`], + 7: [`${pxToRem(38)}rem`], + 8: [`${pxToRem(51)}rem`], + 9: [`${pxToRem(56)}rem`], + 10: [`${pxToRem(76)}rem`], + }, + spacing: { + inherit: 
"inherit", + auto: "auto", + full: "100%", + px: "1px", + "1/2": "50%", + "1/3": "33.333%", + "2/3": "66.666%", + "1/4": "25%", + "3/4": "75%", + "1/5": "20%", + "2/5": "40%", + "3/5": "60%", + "4/5": "80%", + 0: "0", + 1: ".25rem", + 2: `${pxToRem(6)}rem`, + 3: `${pxToRem(8)}rem`, + 3.5: `${pxToRem(10)}rem`, + 4: `${pxToRem(12)}rem`, + 5: `${pxToRem(16)}rem`, + 5.5: `${pxToRem(20)}rem`, + 6: `${pxToRem(24)}rem`, + 7: `${pxToRem(32)}rem`, + 7.5: `${pxToRem(40)}rem`, + 8: `${pxToRem(48)}rem`, + 8.5: `${pxToRem(52)}rem`, + 9: `${pxToRem(64)}rem`, + 9.5: "5rem", + 9.75: `${pxToRem(84)}rem`, + 10: `${pxToRem(96)}rem`, + 11: `${pxToRem(128)}rem`, + 12: `${pxToRem(144)}rem`, + 13: `${pxToRem(160)}rem`, + 14: `${pxToRem(192)}rem`, + 15: `${pxToRem(208)}rem`, + }, + colors: { + transparent: "transparent", + current: "currentColor", + inherit: "inherit", + gray: { + 0: "#FFFFFF", + 30: "rgba(0, 0, 0, 0.03)", + 1000: "#000000", + }, + card: "#F7F7F7", + border: "rgba(0, 0, 0, 0.07)", + inactive: "rgba(0, 0, 0, 0.33)", + inactiveLight: "rgba(255, 255, 255, 0.44)", + muted: "#555555", + mutedLight: "rgba(255, 255, 255, 0.67)", + fg: "rgba(24, 24, 24, 0.67)", + lightfg: "rgba(24, 24, 24, 0.67)", + link: "#000000", + linkHover: "#555555", + docusaurusColorBase: "var(--ifm-font-color-base)", + docusaurusBgColor: "var(--ifm-background-color)", + docusaurusColorBorder: "var(--ifm-color-emphasis-200)", + }, + extend: {}, + }, + plugins: [], +}; diff --git a/docs/versioned_docs/version-v0.25/06-bounty.md b/docs/versioned_docs/version-v0.25/06-bounty.md new file mode 100644 index 0000000..10ad3ce --- /dev/null +++ b/docs/versioned_docs/version-v0.25/06-bounty.md @@ -0,0 +1,14 @@ +--- +sidebar_position: 8 +description: Ignite CLI bounty program incentives and rewards. +--- + +# Bounty program + +Our Ignite CLI bounty program provides incentives for your participation and pays rewards. If you know Golang, follow the bounty issues, write code, close issues, and get rewarded. 
+ +Do your bounty hunting in our repo. Track new, in-progress, and completed bounties in the [GitHub Issues](https://github.com/ignite/cli/issues?q=is%3Aissue+is%3Aopen+label%3Abounty). + +For details on the Ignite CLI bounty program, join the #bounty channel in [Ignite Discord](https://discord.com/invite/ignitecli). + +New bounties are posted and claimed in Discord on the #bounty announcement channel. diff --git a/docs/versioned_docs/version-v0.25/07-cli.md b/docs/versioned_docs/version-v0.25/07-cli.md new file mode 100644 index 0000000..183c5ba --- /dev/null +++ b/docs/versioned_docs/version-v0.25/07-cli.md @@ -0,0 +1,3378 @@ +--- +sidebar_position: 7 +description: Ignite CLI docs. +--- + +# CLI Reference + +Documentation for Ignite CLI. +## ignite + +Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + +**Synopsis** + +Ignite CLI is a tool for creating sovereign blockchains built with Cosmos SDK, the world’s +most popular modular blockchain framework. Ignite CLI offers everything you need to scaffold, +test, build, and launch your blockchain. 
+ +To get started, create a blockchain: + +ignite scaffold chain github.com/username/mars + +**Options** + +``` + -h, --help help for ignite +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Commands for managing Ignite accounts +* [ignite chain](#ignite-chain) - Build, initialize and start a blockchain node or perform other actions on the blockchain +* [ignite completion](#ignite-completion) - Generate the autocompletion script for the specified shell +* [ignite docs](#ignite-docs) - Show Ignite CLI docs +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite node](#ignite-node) - Make calls to a live blockchain node +* [ignite relayer](#ignite-relayer) - Connect blockchains by using IBC protocol +* [ignite scaffold](#ignite-scaffold) - Scaffold a new blockchain, module, message, query, and more +* [ignite tools](#ignite-tools) - Tools for advanced users +* [ignite version](#ignite-version) - Print the current build information + + +## ignite account + +Commands for managing Ignite accounts + +**Synopsis** + +Commands for managing Ignite accounts. An Ignite account is a private/public +keypair stored in a keyring. Currently Ignite accounts are used when interacting +with Ignite relayer commands. + +Note: Ignite account commands are not for managing your chain's keys and accounts. Use +you chain's binary to manage accounts from "config.yml". For example, if your +blockchain is called "mychain", use "mychaind keys" to manage keys for the +chain. 
+ + +**Options** + +``` + -h, --help help for account + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite account create](#ignite-account-create) - Create a new account +* [ignite account delete](#ignite-account-delete) - Delete an account by name +* [ignite account export](#ignite-account-export) - Export an account as a private key +* [ignite account import](#ignite-account-import) - Import an account by using a mnemonic or a private key +* [ignite account list](#ignite-account-list) - Show a list of all accounts +* [ignite account show](#ignite-account-show) - Show detailed information about a particular account + + +## ignite account create + +Create a new account + +``` +ignite account create [name] [flags] +``` + +**Options** + +``` + -h, --help help for create +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Commands for managing Ignite accounts + + +## ignite account delete + +Delete an account by name + +``` +ignite account delete [name] [flags] +``` + +**Options** + +``` + -h, --help help for delete +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Commands for managing Ignite accounts + + +## ignite account export + +Export an account as a private key + +``` +ignite account export 
[name] [flags] +``` + +**Options** + +``` + -h, --help help for export + --non-interactive Do not enter into interactive mode + --passphrase string Passphrase to encrypt the exported key + --path string path to export private key. default: ./key_[name] +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Commands for managing Ignite accounts + + +## ignite account import + +Import an account by using a mnemonic or a private key + +``` +ignite account import [name] [flags] +``` + +**Options** + +``` + -h, --help help for import + --non-interactive Do not enter into interactive mode + --passphrase string Passphrase to decrypt the imported key (ignored when secret is a mnemonic) + --secret string Your mnemonic or path to your private key (use interactive mode instead to securely pass your mnemonic) +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Commands for managing Ignite accounts + + +## ignite account list + +Show a list of all accounts + +``` +ignite account list [flags] +``` + +**Options** + +``` + --address-prefix string Account address prefix (default "cosmos") + -h, --help help for list +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Commands for managing Ignite accounts + + +## ignite account show + +Show 
detailed information about a particular account
+
+```
+ignite account show [name] [flags]
+```
+
+**Options**
+
+```
+ --address-prefix string Account address prefix (default "cosmos")
+ -h, --help help for show
+```
+
+**Options inherited from parent commands**
+
+```
+ --keyring-backend string Keyring backend to store your account keys (default "test")
+ --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts")
+```
+
+**SEE ALSO**
+
+* [ignite account](#ignite-account) - Commands for managing Ignite accounts
+
+
+## ignite chain
+
+Build, initialize and start a blockchain node or perform other actions on the blockchain
+
+**Synopsis**
+
+Commands in this namespace let you build, initialize, and start your
+blockchain node locally for development purposes.
+
+To run these commands you should be inside the project's directory so that
+Ignite can find the source code. To ensure that you are, run "ls", you should
+see the following files in the output: "go.mod", "x", "proto", "app", etc.
+
+By default the "build" command will identify the "main" package of the project,
+install dependencies if necessary, set build flags, compile the project into a
+binary and install the binary. The "build" command is useful if you just want
+the compiled binary, for example, to initialize and start the chain manually. It
+can also be used to release your chain's binaries automatically as part of
+continuous integration workflow.
+
+The "init" command will build the chain's binary and use it to initialize a
+local validator node. By default the validator node will be initialized in your
+$HOME directory in a hidden directory that matches the name of your project.
+This directory is called a data directory and contains a chain's genesis file
+and a validator key. This command is useful if you want to quickly build and
+initialize the data directory and use the chain's binary to manually start the
+blockchain. 
The "init" command is meant only for development purposes, not
+production.
+
+The "serve" command builds, initializes, and starts your blockchain locally with
+a single validator node for development purposes. "serve" also watches the
+source code directory for file changes and intelligently
+re-builds/initializes/starts the chain, essentially providing "code-reloading".
+The "serve" command is meant only for development purposes, not production.
+
+To distinguish between production and development consider the following.
+
+In production, blockchains often run the same software on many validator nodes
+that are run by different people and entities. To launch a blockchain in
+production, the validator entities coordinate the launch process to start their
+nodes simultaneously.
+
+During development, a blockchain can be started locally on a single validator
+node. This convenient process lets you restart a chain quickly and iterate
+faster. Starting a chain on a single node in development is similar to starting
+a traditional web application on a local server.
+
+The "faucet" command lets you send tokens to an address from the "faucet"
+account defined in "config.yml". Alternatively, you can use the chain's binary
+to send tokens from any other account that exists on chain.
+
+The "simulate" command helps you start a simulation testing process for your
+chain. 
+
+
+**Options**
+
+```
+ -c, --config string ignite config file (default: ./config.yml)
+ -h, --help help for chain
+ -y, --yes answers interactive yes/no questions with yes
+```
+
+**SEE ALSO**
+
+* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain
+* [ignite chain build](#ignite-chain-build) - Build a node binary
+* [ignite chain faucet](#ignite-chain-faucet) - Send coins to an account
+* [ignite chain init](#ignite-chain-init) - Initialize your chain
+* [ignite chain serve](#ignite-chain-serve) - Start a blockchain node in development
+* [ignite chain simulate](#ignite-chain-simulate) - Run simulation testing for the blockchain
+
+
+## ignite chain build
+
+Build a node binary
+
+**Synopsis**
+
+
+The build command compiles the source code of the project into a binary and
+installs the binary in the $(go env GOPATH)/bin directory.
+
+You can customize the output directory for the binary using a flag:
+
+ ignite chain build --output dist
+
+To compile the binary Ignite first compiles protocol buffer (proto) files into
+Go source code. Proto files contain required type and services definitions. If
+you're using another program to compile proto files, you can use a flag to tell
+Ignite to skip the proto compilation step:
+
+ ignite chain build --skip-proto
+
+Afterwards, Ignite installs dependencies specified in the go.mod file. By default
+Ignite doesn't check that dependencies of the main module stored in the module
+cache have not been modified since they were downloaded. To enforce dependency
+checking (essentially, running "go mod verify") use a flag:
+
+ ignite chain build --check-dependencies
+
+Next, Ignite identifies the "main" package of the project. By default the "main"
+package is located in "cmd/{app}d" directory, where "{app}" is the name of the
+scaffolded project and "d" stands for daemon. 
If your project contains more
+than one "main" package, specify the path to the one that Ignite should compile
+in config.yml:
+
+build:
+ main: custom/path/to/main
+
+By default the binary name will match the top-level module name (specified in
+go.mod) with a suffix "d". This can be customized in config.yml:
+
+build:
+ binary: mychaind
+
+You can also specify custom linker flags:
+
+build:
+ ldflags:
+ - "-X main.Version=development"
+ - "-X main.Date=01/05/2022T19:54"
+
+To build binaries for a release, use the --release flag. The binaries for one or
+more specified release targets are built in a "release/" directory in the
+project's source directory. Specify the release targets with GOOS:GOARCH build
+tags. If the optional --release.targets is not specified, a binary is created
+for your current environment.
+
+ ignite chain build --release -t linux:amd64 -t darwin:amd64 -t darwin:arm64
+
+
+```
+ignite chain build [flags]
+```
+
+**Options**
+
+```
+ --check-dependencies verify that cached dependencies have not been modified since they were downloaded
+ --clear-cache clear the build cache (advanced)
+ -h, --help help for build
+ -o, --output string binary output path
+ -p, --path string path of the app (default ".")
+ --proto-all-modules enables proto code generation for 3rd party modules used in your chain. Available only without the --release flag
+ --release build for a release
+ --release.prefix string tarball prefix for each release target. Available only with --release flag
+ -t, --release.targets strings release targets. 
Available only with --release flag + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, initialize and start a blockchain node or perform other actions on the blockchain + + +## ignite chain faucet + +Send coins to an account + +``` +ignite chain faucet [address] [coin<,...>] [flags] +``` + +**Options** + +``` + -h, --help help for faucet + --home string home directory used for blockchains + -p, --path string path of the app (default ".") + -v, --verbose Verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, initialize and start a blockchain node or perform other actions on the blockchain + + +## ignite chain init + +Initialize your chain + +**Synopsis** + +The init command compiles and installs the binary (like "ignite chain build") +and uses that binary to initialize the blockchain's data directory for one +validator. To learn how the build process works, refer to "ignite chain build +--help". + +By default, the data directory will be initialized in $HOME/.mychain, where +"mychain" is the name of the project. To set a custom data directory use the +--home flag or set the value in config.yml: + +init: + home: "~/.customdir" + +The data directory contains three files in the "config" directory: app.toml, +config.toml, client.toml. These files let you customize the behavior of your +blockchain node and the client executable. When a chain is re-initialized the +data directory can be reset. 
To make some values in these files persistent, set +them in config.yml: + +init: + app: + minimum-gas-prices: "0.025stake" + config: + consensus: + timeout_commit: "5s" + timeout_propose: "5s" + client: + output: "json" + +The configuration above changes the minimum gas price of the validator (by +default the gas price is set to 0 to allow "free" transactions), sets the block +time to 5s, and changes the output format to JSON. To see what kind of values +this configuration accepts see the generated TOML files in the data directory. + +As part of the initialization process Ignite creates on-chain accounts with +token balances. By default, config.yml has two accounts in the top-level +"accounts" property. You can add more accounts and change their token balances. +Refer to config.yml guide to see which values you can set. + +One of these accounts is a validator account and the amount of self-delegated +tokens can be set in the top-level "validator" property. + +One of the most important components of an initialized chain is the genesis +file, the 0th block of the chain. The genesis file is stored in the data +directory "config" subdirectory and contains the initial state of the chain, +including consensus and module parameters. You can customize the values of the +genesis in config.yml: + +genesis: + app_state: + staking: + params: + bond_denom: "foo" + +The example above changes the staking token to "foo". If you change the staking +denom, make sure the validator account has the right tokens. + +The init command is meant to be used ONLY FOR DEVELOPMENT PURPOSES. Under the +hood it runs commands like "appd init", "appd add-genesis-account", "appd +gentx", and "appd collect-gentx". For production, you may want to run these +commands manually to ensure a production-level node initialization. 
+ + +``` +ignite chain init [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + -h, --help help for init + --home string home directory used for blockchains + -p, --path string path of the app (default ".") + --skip-proto skip file generation from proto +``` + +**Options inherited from parent commands** + +``` + -c, --config string ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, initialize and start a blockchain node or perform other actions on the blockchain + + +## ignite chain serve + +Start a blockchain node in development + +**Synopsis** + +The serve command compiles and installs the binary (like "ignite chain build"), +uses that binary to initialize the blockchain's data directory for one validator +(like "ignite chain init"), and starts the node locally for development purposes +with automatic code reloading. + +Automatic code reloading means Ignite starts watching the project directory. +Whenever a file change is detected, Ignite automatically rebuilds, reinitializes +and restarts the node. + +Whenever possible Ignite will try to keep the current state of the chain by +exporting and importing the genesis file. + +To force Ignite to start from a clean slate even if a genesis file exists, use +the following flag: + + ignite chain serve --reset-once + +To force Ignite to reset the state every time the source code is modified, use +the following flag: + + ignite chain serve --force-reset + +With Ignite it's possible to start more than one blockchain from the same source +code using different config files. This is handy if you're building +inter-blockchain functionality and, for example, want to try sending packets +from one blockchain to another. 
To start a node using a specific config file: + + ignite chain serve --config mars.yml + +The serve command is meant to be used ONLY FOR DEVELOPMENT PURPOSES. Under the +hood, it runs "appd start", where "appd" is the name of your chain's binary. For +production, you may want to run "appd start" manually. + + +``` +ignite chain serve [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + -f, --force-reset Force reset of the app state on start and every source change + -h, --help help for serve + --home string home directory used for blockchains + -p, --path string path of the app (default ".") + --proto-all-modules enables proto code generation for 3rd party modules used in your chain + --quit-on-fail Quit program if the app fails to start + -r, --reset-once Reset of the app state on first start + --skip-proto skip file generation from proto + -v, --verbose Verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, initialize and start a blockchain node or perform other actions on the blockchain + + +## ignite chain simulate + +Run simulation testing for the blockchain + +**Synopsis** + +Run simulation testing for the blockchain. 
It sends many randomized-input messages of each module to a simulated node and checks if invariants break + +``` +ignite chain simulate [flags] +``` + +**Options** + +``` + --blockSize int operations per block (default 30) + --exportParamsHeight int height to which export the randomly generated params + --exportParamsPath string custom file path to save the exported params JSON + --exportStatePath string custom file path to save the exported app state JSON + --exportStatsPath string custom file path to save the exported simulation statistics JSON + --genesis string custom simulation genesis file; cannot be used with params file + --genesisTime int override genesis UNIX time instead of using a random UNIX time + -h, --help help for simulate + --initialBlockHeight int initial block to start the simulation (default 1) + --lean lean simulation log output + --numBlocks int number of new blocks to simulate from the initial block height (default 200) + --params string custom simulation params file which overrides any random params; cannot be used with genesis + --period uint run slow invariants only once every period assertions + --printAllInvariants print all invariants if a broken invariant is found + --seed int simulation random seed (default 42) + --simulateEveryOperation run slow invariants every operation + -v, --verbose verbose log output +``` + +**Options inherited from parent commands** + +``` + -c, --config string ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, initialize and start a blockchain node or perform other actions on the blockchain + + +## ignite completion + +Generate the autocompletion script for the specified shell + +**Synopsis** + +Generate the autocompletion script for ignite for the specified shell. +See each sub-command's help for details on how to use the generated script. 
+ + +**Options** + +``` + -h, --help help for completion +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite completion bash](#ignite-completion-bash) - Generate the autocompletion script for bash +* [ignite completion fish](#ignite-completion-fish) - Generate the autocompletion script for fish +* [ignite completion powershell](#ignite-completion-powershell) - Generate the autocompletion script for powershell +* [ignite completion zsh](#ignite-completion-zsh) - Generate the autocompletion script for zsh + + +## ignite completion bash + +Generate the autocompletion script for bash + +**Synopsis** + +Generate the autocompletion script for the bash shell. + +This script depends on the 'bash-completion' package. +If it is not installed already, you can install it via your OS's package manager. + +To load completions in your current shell session: + + source <(ignite completion bash) + +To load completions for every new session, execute once: + +**#### Linux:** + + ignite completion bash > /etc/bash_completion.d/ignite + +**#### macOS:** + + ignite completion bash > $(brew --prefix)/etc/bash_completion.d/ignite + +You will need to start a new shell for this setup to take effect. + + +``` +ignite completion bash +``` + +**Options** + +``` + -h, --help help for bash + --no-descriptions disable completion descriptions +``` + +**SEE ALSO** + +* [ignite completion](#ignite-completion) - Generate the autocompletion script for the specified shell + + +## ignite completion fish + +Generate the autocompletion script for fish + +**Synopsis** + +Generate the autocompletion script for the fish shell. + +To load completions in your current shell session: + + ignite completion fish | source + +To load completions for every new session, execute once: + + ignite completion fish > ~/.config/fish/completions/ignite.fish + +You will need to start a new shell for this setup to take effect. 
+ + +``` +ignite completion fish [flags] +``` + +**Options** + +``` + -h, --help help for fish + --no-descriptions disable completion descriptions +``` + +**SEE ALSO** + +* [ignite completion](#ignite-completion) - Generate the autocompletion script for the specified shell + + +## ignite completion powershell + +Generate the autocompletion script for powershell + +**Synopsis** + +Generate the autocompletion script for powershell. + +To load completions in your current shell session: + + ignite completion powershell | Out-String | Invoke-Expression + +To load completions for every new session, add the output of the above command +to your powershell profile. + + +``` +ignite completion powershell [flags] +``` + +**Options** + +``` + -h, --help help for powershell + --no-descriptions disable completion descriptions +``` + +**SEE ALSO** + +* [ignite completion](#ignite-completion) - Generate the autocompletion script for the specified shell + + +## ignite completion zsh + +Generate the autocompletion script for zsh + +**Synopsis** + +Generate the autocompletion script for the zsh shell. + +If shell completion is not already enabled in your environment you will need +to enable it. You can execute the following once: + + echo "autoload -U compinit; compinit" >> ~/.zshrc + +To load completions in your current shell session: + + source <(ignite completion zsh); compdef _ignite ignite + +To load completions for every new session, execute once: + +**#### Linux:** + + ignite completion zsh > "${fpath[1]}/_ignite" + +**#### macOS:** + + ignite completion zsh > $(brew --prefix)/share/zsh/site-functions/_ignite + +You will need to start a new shell for this setup to take effect. 
+ + +``` +ignite completion zsh [flags] +``` + +**Options** + +``` + -h, --help help for zsh + --no-descriptions disable completion descriptions +``` + +**SEE ALSO** + +* [ignite completion](#ignite-completion) - Generate the autocompletion script for the specified shell + + +## ignite docs + +Show Ignite CLI docs + +``` +ignite docs [flags] +``` + +**Options** + +``` + -h, --help help for docs +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + + +## ignite generate + +Generate clients, API docs from source code + +**Synopsis** + +Generate clients, API docs from source code. + +Such as compiling protocol buffer files into Go or implement particular functionality, for example, generating an OpenAPI spec. + +Produced source code can be regenerated by running a command again and is not meant to be edited by hand. + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for generate + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite generate dart](#ignite-generate-dart) - Generate a Dart client +* [ignite generate openapi](#ignite-generate-openapi) - Generate generates an OpenAPI spec for your chain from your config.yml +* [ignite generate proto-go](#ignite-generate-proto-go) - Generate proto based Go code needed for the app's source code +* [ignite generate ts-client](#ignite-generate-ts-client) - Generate Typescript client for your chain's frontend +* [ignite generate vuex](#ignite-generate-vuex) - Generate Typescript client and Vuex stores for your chain's frontend from your `config.yml` file + + +## ignite generate dart + +Generate a Dart client + +``` +ignite generate dart [flags] +``` + +**Options** + +``` + -h, --help help for dart + -y, --yes answers interactive yes/no questions with yes +``` + 
+**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate openapi + +Generate generates an OpenAPI spec for your chain from your config.yml + +``` +ignite generate openapi [flags] +``` + +**Options** + +``` + -h, --help help for openapi + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate proto-go + +Generate proto based Go code needed for the app's source code + +``` +ignite generate proto-go [flags] +``` + +**Options** + +``` + -h, --help help for proto-go + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate ts-client + +Generate Typescript client for your chain's frontend + +``` +ignite generate ts-client [flags] +``` + +**Options** + +``` + -h, --help help for ts-client + -o, --output string typescript client output path + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate vuex + +Generate Typescript client and Vuex stores for your chain's frontend from your `config.yml` file 
+ +``` +ignite generate vuex [flags] +``` + +**Options** + +``` + -h, --help help for vuex + --proto-all-modules enables proto code generation for 3rd party modules used in your chain + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite network + +Launch a blockchain in production + +**Synopsis** + + +Ignite Network commands allow to coordinate the launch of sovereign Cosmos blockchains. + +To launch a Cosmos blockchain you need someone to be a coordinator and others to +be validators. These are just roles, anyone can be a coordinator or a validator. +A coordinator publishes information about a chain to be launched on the Ignite +blockchain, approves validator requests and coordinates the launch. Validators +send requests to join a chain and start their nodes when a blockchain is ready +for launch. + +To publish the information about your chain as a coordinator run the following +command (the URL should point to a repository with a Cosmos SDK chain): + + ignite network chain publish github.com/ignite/example + +This command will return a launch identifier you will be using in the following +commands. Let's say this identifier is 42. + +Next, ask validators to initialize their nodes and request to join the network +as validators. For a testnet you can use the default values suggested by the +CLI. 
+ + ignite network chain init 42 + + ignite network chain join 42 --amount 95000000stake + +As a coordinator list all validator requests: + + ignite network request list 42 + +Approve validator requests: + + ignite network request approve 42 1,2 + +Once you've approved all validators you need in the validator set, announce that +the chain is ready for launch: + + ignite network chain launch 42 + +Validators can now prepare their nodes for launch: + + ignite network chain prepare 42 + +The output of this command will show a command that a validator would use to +launch their node, for example “exampled --home ~/.example”. After enough +validators launch their nodes, a blockchain will be live. + + +**Options** + +``` + -h, --help help for network + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite network campaign](#ignite-network-campaign) - Handle campaigns +* [ignite network chain](#ignite-network-chain) - Build networks +* [ignite network coordinator](#ignite-network-coordinator) - Interact with coordinator profiles +* [ignite network profile](#ignite-network-profile) - Show the address profile info +* [ignite network request](#ignite-network-request) - Handle requests +* [ignite network reward](#ignite-network-reward) - Manage network rewards +* [ignite network validator](#ignite-network-validator) - Interact with validator profiles + + +## ignite network campaign + +Handle campaigns + +**Options** + +``` + -h, --help help for campaign +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default 
"http://178.128.251.28:4500")
+ --spn-node-address string SPN node address (default "http://178.128.251.28:26657")
+```
+
+**SEE ALSO**
+
+* [ignite network](#ignite-network) - Launch a blockchain in production
+* [ignite network campaign account](#ignite-network-campaign-account) - Handle campaign accounts
+* [ignite network campaign create](#ignite-network-campaign-create) - Create a campaign
+* [ignite network campaign list](#ignite-network-campaign-list) - List published campaigns
+* [ignite network campaign show](#ignite-network-campaign-show) - Show published campaign
+* [ignite network campaign update](#ignite-network-campaign-update) - Update details of the campaign
+
+
+## ignite network campaign account
+
+Handle campaign accounts
+
+**Options**
+
+```
+ -h, --help help for account
+```
+
+**Options inherited from parent commands**
+
+```
+ --local Use local SPN network
+ --nightly Use nightly SPN network
+ --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500")
+ --spn-node-address string SPN node address (default "http://178.128.251.28:26657")
+```
+
+**SEE ALSO**
+
+* [ignite network campaign](#ignite-network-campaign) - Handle campaigns
+* [ignite network campaign account list](#ignite-network-campaign-account-list) - Show all mainnet and mainnet vesting accounts of the campaign
+
+
+## ignite network campaign account list
+
+Show all mainnet and mainnet vesting accounts of the campaign
+
+```
+ignite network campaign account list [campaign-id] [flags]
+```
+
+**Options**
+
+```
+ -h, --help help for list
+```
+
+**Options inherited from parent commands**
+
+```
+ --local Use local SPN network
+ --nightly Use nightly SPN network
+ --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500")
+ --spn-node-address string SPN node address (default "http://178.128.251.28:26657")
+```
+
+**SEE ALSO**
+
+* [ignite network campaign account](#ignite-network-campaign-account) - Handle campaign accounts
+
+ 
+## ignite network campaign create
+
+Create a campaign
+
+```
+ignite network campaign create [name] [total-supply] [flags]
+```
+
+**Options**
+
+```
+ --from string account name to use for sending transactions to SPN (default "default")
+ -h, --help help for create
+ --home string home directory used for blockchains
+ --keyring-backend string Keyring backend to store your account keys (default "test")
+ --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts")
+ --metadata string Add metadata to the chain
+```
+
+**Options inherited from parent commands**
+
+```
+ --local Use local SPN network
+ --nightly Use nightly SPN network
+ --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500")
+ --spn-node-address string SPN node address (default "http://178.128.251.28:26657")
+```
+
+**SEE ALSO**
+
+* [ignite network campaign](#ignite-network-campaign) - Handle campaigns
+
+
+## ignite network campaign list
+
+List published campaigns
+
+```
+ignite network campaign list [flags]
+```
+
+**Options**
+
+```
+ -h, --help help for list
+```
+
+**Options inherited from parent commands**
+
+```
+ --local Use local SPN network
+ --nightly Use nightly SPN network
+ --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500")
+ --spn-node-address string SPN node address (default "http://178.128.251.28:26657")
+```
+
+**SEE ALSO**
+
+* [ignite network campaign](#ignite-network-campaign) - Handle campaigns
+
+
+## ignite network campaign show
+
+Show published campaign
+
+```
+ignite network campaign show [campaign-id] [flags]
+```
+
+**Options**
+
+```
+ -h, --help help for show
+```
+
+**Options inherited from parent commands**
+
+```
+ --local Use local SPN network
+ --nightly Use nightly SPN network
+ --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500")
+ --spn-node-address string SPN node address (default "http://178.128.251.28:26657")
+```
+
+**SEE 
ALSO**
+
+* [ignite network campaign](#ignite-network-campaign) - Handle campaigns
+
+
+## ignite network campaign update
+
+Update details of the campaign
+
+```
+ignite network campaign update [campaign-id] [flags]
+```
+
+**Options**
+
+```
+ --from string account name to use for sending transactions to SPN (default "default")
+ -h, --help help for update
+ --keyring-backend string Keyring backend to store your account keys (default "test")
+ --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts")
+ --metadata string Update the campaign metadata
+ --name string Update the campaign name
+ --total-supply string Update the total supply of the mainnet of a campaign
+```
+
+**Options inherited from parent commands**
+
+```
+ --local Use local SPN network
+ --nightly Use nightly SPN network
+ --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500")
+ --spn-node-address string SPN node address (default "http://178.128.251.28:26657")
+```
+
+**SEE ALSO**
+
+* [ignite network campaign](#ignite-network-campaign) - Handle campaigns
+
+
+## ignite network chain
+
+Build networks
+
+**Options**
+
+```
+ -h, --help help for chain
+```
+
+**Options inherited from parent commands**
+
+```
+ --local Use local SPN network
+ --nightly Use nightly SPN network
+ --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500")
+ --spn-node-address string SPN node address (default "http://178.128.251.28:26657")
+```
+
+**SEE ALSO**
+
+* [ignite network](#ignite-network) - Launch a blockchain in production
+* [ignite network chain init](#ignite-network-chain-init) - Initialize a chain from a published chain ID
+* [ignite network chain install](#ignite-network-chain-install) - Install chain binary for a launch
+* [ignite network chain join](#ignite-network-chain-join) - Request to join a network as a validator
+* [ignite network chain launch](#ignite-network-chain-launch) - Launch a network 
as a coordinator +* [ignite network chain list](#ignite-network-chain-list) - List published chains +* [ignite network chain prepare](#ignite-network-chain-prepare) - Prepare the chain for launch +* [ignite network chain publish](#ignite-network-chain-publish) - Publish a new chain to start a new network +* [ignite network chain revert-launch](#ignite-network-chain-revert-launch) - Revert launch a network as a coordinator +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain init + +Initialize a chain from a published chain ID + +``` +ignite network chain init [launch-id] [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for init + --home string home directory used for blockchains + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") + --validator-account string Account for the chain validator (default "default") + --validator-details string Details about the validator + --validator-gas-price string Validator gas price + --validator-identity string Validator identity signature (ex. 
UPort or Keybase) + --validator-moniker string Custom validator moniker + --validator-security-contact string Validator security contact email + --validator-self-delegation string Validator minimum self delegation + --validator-website string Associate a website with the validator + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Build networks + + +## ignite network chain install + +Install chain binary for a launch + +``` +ignite network chain install [launch-id] [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for install +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Build networks + + +## ignite network chain join + +Request to join a network as a validator + +``` +ignite network chain join [launch-id] [flags] +``` + +**Options** + +``` + --amount string Amount of coins for account request (ignored if coordinator has fixed the account balances or if --no-acount flag is set) + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --from string account name to use for sending 
transactions to SPN (default "default") + --gentx string Path to a gentx json file + -h, --help help for join + --home string home directory used for blockchains + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") + --no-account Prevent sending a request for a genesis account + --peer-address string Peer's address + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Build networks + + +## ignite network chain launch + +Launch a network as a coordinator + +``` +ignite network chain launch [launch-id] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for launch + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") + --launch-time string Timestamp the chain is effectively launched (example "2022-01-01T00:00:00Z") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Build networks + + +## ignite network chain list + +List published chains + +``` +ignite network chain list [flags] +``` + +**Options** + +``` + --advanced Show advanced 
information about the chains + -h, --help help for list + --limit uint Limit of results per page (default 100) + --page uint Page for chain list result (default 1) +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Build networks + + +## ignite network chain prepare + +Prepare the chain for launch + +``` +ignite network chain prepare [launch-id] [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + -f, --force Force the prepare command to run even if the chain is not launched + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for prepare + --home string home directory used for blockchains + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Build networks + + +## ignite network chain publish + +Publish a new chain to start a new network + +``` +ignite network chain publish [source-url] [flags] +``` + +**Options** + +``` + --account-balance string Balance for each approved genesis account for the chain + --amount string Amount of coins for account request + --branch 
string Git branch to use for the repo + --campaign uint Campaign ID to use for this network + --chain-id string Chain ID to use for this network + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + --genesis string URL to a custom Genesis + --hash string Git hash to use for the repo + -h, --help help for publish + --home string home directory used for blockchains + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") + --mainnet Initialize a mainnet campaign + --metadata string Add a campaign metadata + --no-check Skip verifying chain's integrity + --reward.coins string Reward coins + --reward.height int Last reward height + --shares string Add shares for the campaign + --tag string Git tag to use for the repo + --total-supply string Add a total of the mainnet of a campaign + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Build networks + + +## ignite network chain revert-launch + +Revert launch a network as a coordinator + +``` +ignite network chain revert-launch [launch-id] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for revert-launch + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory 
(default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Build networks + + +## ignite network chain show + +Show details of a chain + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Build networks +* [ignite network chain show accounts](#ignite-network-chain-show-accounts) - Show all vesting and genesis accounts of the chain +* [ignite network chain show genesis](#ignite-network-chain-show-genesis) - Show the chain genesis file +* [ignite network chain show info](#ignite-network-chain-show-info) - Show info details of the chain +* [ignite network chain show peers](#ignite-network-chain-show-peers) - Show peers list of the chain +* [ignite network chain show validators](#ignite-network-chain-show-validators) - Show all validators of the chain + + +## ignite network chain show accounts + +Show all vesting and genesis accounts of the chain + +``` +ignite network chain show accounts [launch-id] [flags] +``` + +**Options** + +``` + --address-prefix string Account address prefix (default "spn") + -h, --help help for accounts +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + 
--spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show genesis + +Show the chain genesis file + +``` +ignite network chain show genesis [launch-id] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for genesis + --out string Path to output Genesis file (default "./genesis.json") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show info + +Show info details of the chain + +``` +ignite network chain show info [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for info +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show peers + +Show peers list of the chain + +``` +ignite network chain show peers [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for peers + --out string Path to output peers list (default "./peers.txt") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN 
node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show validators + +Show all validators of the chain + +``` +ignite network chain show validators [launch-id] [flags] +``` + +**Options** + +``` + --address-prefix string Account address prefix (default "spn") + -h, --help help for validators +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network coordinator + +Interact with coordinator profiles + +**Options** + +``` + -h, --help help for coordinator +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network coordinator set](#ignite-network-coordinator-set) - Set an information in a coordinator profile +* [ignite network coordinator show](#ignite-network-coordinator-show) - Show a coordinator profile + + +## ignite network coordinator set + +Set an information in a coordinator profile + +**Synopsis** + +Coordinators on Ignite can set a profile containing a description for the coordinator. +The coordinator set command allows to set information for the coordinator. +The following information can be set: +- details: general information about the coordinator. 
+- identity: a piece of information to verify the identity of the coordinator with a system like Keybase or Veramo. +- website: website of the coordinator. + + +``` +ignite network coordinator set details|identity|website [value] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for set + --home string home directory used for blockchains + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network coordinator](#ignite-network-coordinator) - Interact with coordinator profiles + + +## ignite network coordinator show + +Show a coordinator profile + +``` +ignite network coordinator show [address] [flags] +``` + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network coordinator](#ignite-network-coordinator) - Interact with coordinator profiles + + +## ignite network profile + +Show the address profile info + +``` +ignite network profile [campaign-id] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for profile + --home string home directory used for blockchains + --keyring-backend string Keyring backend to 
store your account keys (default "test") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production + + +## ignite network request + +Handle requests + +**Options** + +``` + -h, --help help for request +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network request approve](#ignite-network-request-approve) - Approve requests +* [ignite network request list](#ignite-network-request-list) - List all pending requests +* [ignite network request reject](#ignite-network-request-reject) - Reject requests +* [ignite network request show](#ignite-network-request-show) - Show pending requests details +* [ignite network request verify](#ignite-network-request-verify) - Verify the request and simulate the chain genesis from them + + +## ignite network request approve + +Approve requests + +``` +ignite network request approve [launch-id] [number<,...>] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for approve + --home string home directory used for blockchains + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") + 
--no-verification approve the requests without verifying them +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Handle requests + + +## ignite network request list + +List all pending requests + +``` +ignite network request list [launch-id] [flags] +``` + +**Options** + +``` + --address-prefix string Account address prefix (default "spn") + -h, --help help for list +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Handle requests + + +## ignite network request reject + +Reject requests + +``` +ignite network request reject [launch-id] [number<,...>] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for reject + --home string home directory used for blockchains + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Handle requests + + +## 
ignite network request show + +Show pending requests details + +``` +ignite network request show [launch-id] [request-id] [flags] +``` + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Handle requests + + +## ignite network request verify + +Verify the request and simulate the chain genesis from them + +``` +ignite network request verify [launch-id] [number<,...>] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for verify + --home string home directory used for blockchains + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Handle requests + + +## ignite network reward + +Manage network rewards + +**Options** + +``` + -h, --help help for reward +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** 
+ +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network reward release](#ignite-network-reward-release) - Connect the monitoring modules of launched chains with SPN +* [ignite network reward set](#ignite-network-reward-set) - set a network chain reward + + +## ignite network reward release + +Connect the monitoring modules of launched chains with SPN + +``` +ignite network reward release [launch-id] [chain-rpc] [flags] +``` + +**Options** + +``` + --create-client-only Only create the network client id + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for release + --keyring-backend string Keyring backend to store your account keys (default "test") + --spn-gaslimit int Gas limit used for transactions on SPN (default 400000) + --spn-gasprice string Gas price used for transactions on SPN (default "0.0000025uspn") + --testnet-account string testnet chain Account (default "default") + --testnet-faucet string Faucet address of the testnet chain + --testnet-gaslimit int Gas limit used for transactions on testnet chain (default 400000) + --testnet-gasprice string Gas price used for transactions on testnet chain (default "0.0000025stake") + --testnet-prefix string Address prefix of the testnet chain (default "cosmos") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network reward](#ignite-network-reward) - Manage network rewards + + +## ignite network reward set + +set a network chain reward + +``` +ignite network reward set [launch-id] [last-reward-height] [coins] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help 
help for set + --home string home directory used for blockchains + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network reward](#ignite-network-reward) - Manage network rewards + + +## ignite network validator + +Interact with validator profiles + +**Options** + +``` + -h, --help help for validator +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network validator set](#ignite-network-validator-set) - Set an information in a validator profile +* [ignite network validator show](#ignite-network-validator-show) - Show a validator profile + + +## ignite network validator set + +Set an information in a validator profile + +**Synopsis** + +Validators on Ignite can set a profile containing a description for the validator. +The validator set command allows to set information for the validator. +The following information can be set: +- details: general information about the validator. +- identity: piece of information to verify identity of the validator with a system like Keybase of Veramo. +- website: website of the validator. +- security: security contact for the validator. 
+ + +``` +ignite network validator set details|identity|website|security [value] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for set + --home string home directory used for blockchains + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network validator](#ignite-network-validator) - Interact with validator profiles + + +## ignite network validator show + +Show a validator profile + +``` +ignite network validator show [address] [flags] +``` + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "http://178.128.251.28:4500") + --spn-node-address string SPN node address (default "http://178.128.251.28:26657") +``` + +**SEE ALSO** + +* [ignite network validator](#ignite-network-validator) - Interact with validator profiles + + +## ignite node + +Make calls to a live blockchain node + +**Options** + +``` + -h, --help help for node + --node string : to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite node query](#ignite-node-query) - Querying subcommands +* [ignite node tx](#ignite-node-tx) - Transactions subcommands + + +## ignite node query + +Querying subcommands 
+ +**Options** + +``` + -h, --help help for query +``` + +**Options inherited from parent commands** + +``` + --node string : to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node](#ignite-node) - Make calls to a live blockchain node +* [ignite node query bank](#ignite-node-query-bank) - Querying commands for the bank module +* [ignite node query tx](#ignite-node-query-tx) - Query for transaction by hash + + +## ignite node query bank + +Querying commands for the bank module + +**Options** + +``` + -h, --help help for bank +``` + +**Options inherited from parent commands** + +``` + --node string : to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node query](#ignite-node-query) - Querying subcommands +* [ignite node query bank balances](#ignite-node-query-bank-balances) - Query for account balances by account name or address + + +## ignite node query bank balances + +Query for account balances by account name or address + +``` +ignite node query bank balances [from_account_or_address] [flags] +``` + +**Options** + +``` + --address-prefix string Account address prefix (default "cosmos") + --count-total count total number of records in all balances to query for + -h, --help help for balances + --home string home directory used for blockchains + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") + --limit uint pagination limit of all balances to query for (default 100) + --offset uint pagination offset of all balances to query for + --page uint pagination page of all balances to query for. 
This sets offset to a multiple of limit (default 1) + --page-key string pagination page-key of all balances to query for + --reverse results are sorted in descending order +``` + +**Options inherited from parent commands** + +``` + --node string : to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node query bank](#ignite-node-query-bank) - Querying commands for the bank module + + +## ignite node query tx + +Query for transaction by hash + +``` +ignite node query tx [hash] [flags] +``` + +**Options** + +``` + -h, --help help for tx +``` + +**Options inherited from parent commands** + +``` + --node string : to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node query](#ignite-node-query) - Querying subcommands + + +## ignite node tx + +Transactions subcommands + +**Options** + +``` + --address-prefix string Account address prefix (default "cosmos") + --fees string Fees to pay along with transaction; eg: 10uatom + --gas string gas limit to set per-transaction; set to "auto" to calculate sufficient gas automatically (default "auto") + --gas-prices string Gas prices in decimal format to determine the transaction fee (e.g. 
0.1uatom)
+ --generate-only Build an unsigned transaction and write it to STDOUT
+ -h, --help help for tx
+ --home string home directory used for blockchains
+ --keyring-backend string Keyring backend to store your account keys (default "test")
+ --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts")
+```
+
+**Options inherited from parent commands**
+
+```
+ --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443")
+```
+
+**SEE ALSO**
+
+* [ignite node](#ignite-node) - Make calls to a live blockchain node
+* [ignite node tx bank](#ignite-node-tx-bank) - Bank transaction subcommands
+
+
+## ignite node tx bank
+
+Bank transaction subcommands
+
+**Options**
+
+```
+ -h, --help help for bank
+```
+
+**Options inherited from parent commands**
+
+```
+ --address-prefix string Account address prefix (default "cosmos")
+ --fees string Fees to pay along with transaction; eg: 10uatom
+ --gas string gas limit to set per-transaction; set to "auto" to calculate sufficient gas automatically (default "auto")
+ --gas-prices string Gas prices in decimal format to determine the transaction fee (e.g. 0.1uatom)
+ --generate-only Build an unsigned transaction and write it to STDOUT
+ --home string home directory used for blockchains
+ --keyring-backend string Keyring backend to store your account keys (default "test")
+ --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts")
+ --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443")
+```
+
+**SEE ALSO**
+
+* [ignite node tx](#ignite-node-tx) - Transactions subcommands
+* [ignite node tx bank send](#ignite-node-tx-bank-send) - Send funds from one account to another.
+
+
+## ignite node tx bank send
+
+Send funds from one account to another. 
+
+```
+ignite node tx bank send [from_account_or_address] [to_account_or_address] [amount] [flags]
+```
+
+**Options**
+
+```
+ -h, --help help for send
+```
+
+**Options inherited from parent commands**
+
+```
+ --address-prefix string Account address prefix (default "cosmos")
+ --fees string Fees to pay along with transaction; eg: 10uatom
+ --gas string gas limit to set per-transaction; set to "auto" to calculate sufficient gas automatically (default "auto")
+ --gas-prices string Gas prices in decimal format to determine the transaction fee (e.g. 0.1uatom)
+ --generate-only Build an unsigned transaction and write it to STDOUT
+ --home string home directory used for blockchains
+ --keyring-backend string Keyring backend to store your account keys (default "test")
+ --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts")
+ --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443")
+```
+
+**SEE ALSO**
+
+* [ignite node tx bank](#ignite-node-tx-bank) - Bank transaction subcommands
+
+
+## ignite relayer
+
+Connect blockchains by using IBC protocol
+
+**Options**
+
+```
+ -h, --help help for relayer
+```
+
+**SEE ALSO**
+
+* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain
+* [ignite relayer configure](#ignite-relayer-configure) - Configure source and target chains for relaying
+* [ignite relayer connect](#ignite-relayer-connect) - Link chains associated with paths and start relaying tx packets in between
+
+
+## ignite relayer configure
+
+Configure source and target chains for relaying
+
+```
+ignite relayer configure [flags]
+```
+
+**Options**
+
+```
+ -a, --advanced Advanced configuration options for custom IBC modules
+ -h, --help help for configure
+ --keyring-backend string Keyring backend to store your account keys (default "test")
+ --keyring-dir string The accounts keyring directory (default 
"/home/runner/.ignite/accounts") + --ordered Set the channel as ordered + -r, --reset Reset the relayer config + --source-account string Source Account + --source-client-id string use a custom client id for source + --source-faucet string Faucet address of the source chain + --source-gaslimit int Gas limit used for transactions on source chain + --source-gasprice string Gas price used for transactions on source chain + --source-port string IBC port ID on the source chain + --source-prefix string Address prefix of the source chain + --source-rpc string RPC address of the source chain + --source-version string Module version on the source chain + --target-account string Target Account + --target-client-id string use a custom client id for target + --target-faucet string Faucet address of the target chain + --target-gaslimit int Gas limit used for transactions on target chain + --target-gasprice string Gas price used for transactions on target chain + --target-port string IBC port ID on the target chain + --target-prefix string Address prefix of the target chain + --target-rpc string RPC address of the target chain + --target-version string Module version on the target chain +``` + +**SEE ALSO** + +* [ignite relayer](#ignite-relayer) - Connect blockchains by using IBC protocol + + +## ignite relayer connect + +Link chains associated with paths and start relaying tx packets in between + +``` +ignite relayer connect [,...] [flags] +``` + +**Options** + +``` + -h, --help help for connect + --keyring-backend string Keyring backend to store your account keys (default "test") + --keyring-dir string The accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite relayer](#ignite-relayer) - Connect blockchains by using IBC protocol + + +## ignite scaffold + +Scaffold a new blockchain, module, message, query, and more + +**Synopsis** + +Scaffolding is a quick way to generate code for major pieces of your +application. 
+ +For details on each scaffolding target (chain, module, message, etc.) run the +corresponding command with a "--help" flag, for example, "ignite scaffold chain +--help". + +The Ignite team strongly recommends committing the code to a version control +system before running scaffolding commands. This will make it easier to see the +changes to the source code as well as undo the command if you've decided to roll +back the changes. + +This blockchain you create with the chain scaffolding command uses the modular +Cosmos SDK framework and imports many standard modules for functionality like +proof of stake, token transfer, inter-blockchain connectivity, governance, and +more. Custom functionality is implemented in modules located by convention in +the "x/" directory. By default, your blockchain comes with an empty custom +module. Use the module scaffolding command to create an additional module. + +An empty custom module doesn't do much, it's basically a container for logic +that is responsible for processing transactions and changing the application +state. Cosmos SDK blockchains work by processing user-submitted signed +transactions, which contain one or more messages. A message contains data that +describes a state transition. A module can be responsible for handling any +number of messages. + +A message scaffolding command will generate the code for handling a new type of +Cosmos SDK message. Message fields describe the state transition that the +message is intended to produce if processed without errors. + +Scaffolding messages is useful to create individual "actions" that your module +can perform. Sometimes, however, you want your blockchain to have the +functionality to create, read, update and delete (CRUD) instances of a +particular type. Depending on how you want to store the data there are three +commands that scaffold CRUD functionality for a type: list, map, and single. 
+These commands create four messages (one for each CRUD action), and the logic to +add, delete, and fetch the data from the store. If you want to scaffold only the +logic, for example, you've decided to scaffold messages separately, you can do +that as well with the "--no-message" flag. + +Reading data from a blockchain happens with a help of queries. Similar to how +you can scaffold messages to write data, you can scaffold queries to read the +data back from your blockchain application. + +You can also scaffold a type, which just produces a new protocol buffer file +with a proto message description. Note that proto messages produce (and +correspond with) Go types whereas Cosmos SDK messages correspond to proto "rpc" +in the "Msg" service. + +If you're building an application with custom IBC logic, you might need to +scaffold IBC packets. An IBC packet represents the data sent from one blockchain +to another. You can only scaffold IBC packets in IBC-enabled modules scaffolded +with an "--ibc" flag. Note that the default module is not IBC-enabled. 
+ + +**Options** + +``` + -h, --help help for scaffold +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite scaffold band](#ignite-scaffold-band) - Scaffold an IBC BandChain query oracle to request real-time data +* [ignite scaffold chain](#ignite-scaffold-chain) - Fully-featured Cosmos SDK blockchain +* [ignite scaffold flutter](#ignite-scaffold-flutter) - A Flutter app for your chain +* [ignite scaffold list](#ignite-scaffold-list) - CRUD for data stored as an array +* [ignite scaffold map](#ignite-scaffold-map) - CRUD for data stored as key-value pairs +* [ignite scaffold message](#ignite-scaffold-message) - Message to perform state transition on the blockchain +* [ignite scaffold module](#ignite-scaffold-module) - Scaffold a Cosmos SDK module +* [ignite scaffold packet](#ignite-scaffold-packet) - Message for sending an IBC packet +* [ignite scaffold query](#ignite-scaffold-query) - Query to get data from the blockchain +* [ignite scaffold single](#ignite-scaffold-single) - CRUD for data stored in a single location +* [ignite scaffold type](#ignite-scaffold-type) - Scaffold only a type definition +* [ignite scaffold vue](#ignite-scaffold-vue) - Vue 3 web app template + + +## ignite scaffold band + +Scaffold an IBC BandChain query oracle to request real-time data + +**Synopsis** + +Scaffold an IBC BandChain query oracle to request real-time data from BandChain scripts in a specific IBC-enabled Cosmos SDK module + +``` +ignite scaffold band [queryName] --module [moduleName] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for band + --module string IBC Module to add the packet into + -p, --path string path of the app (default ".") + --signer string Label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - 
Scaffold a new blockchain, module, message, query, and more + + +## ignite scaffold chain + +Fully-featured Cosmos SDK blockchain + +**Synopsis** + +Create a new application-specific Cosmos SDK blockchain. + +For example, the following command will create a blockchain called "hello" in +the "hello/" directory: + + ignite scaffold chain hello + +A project name can be a simple name or a URL. The name will be used as the Go +module path for the project. Examples of project names: + + ignite scaffold chain foo + ignite scaffold chain foo/bar + ignite scaffold chain example.org/foo + ignite scaffold chain github.com/username/foo + +A new directory with source code files will be created in the current directory. +To use a different path use the "--path" flag. + +Most of the logic of your blockchain is written in custom modules. Each module +effectively encapsulates an independent piece of functionality. Following the +Cosmos SDK convention, custom modules are stored inside the "x/" directory. By +default, Ignite creates a module with a name that matches the name of the +project. To create a blockchain without a default module use the "--no-module" +flag. Additional modules can be added after a project is created with "ignite +scaffold module" command. + +Account addresses on Cosmos SDK-based blockchains have string prefixes. For +example, the Cosmos Hub blockchain uses the default "cosmos" prefix, so that +addresses look like this: "cosmos12fjzdtqfrrve7zyg9sv8j25azw2ua6tvu07ypf". To +use a custom address prefix use the "--address-prefix" flag. For example: + + ignite scaffold chain foo --address-prefix bar + +By default when compiling a blockchain's source code Ignite creates a cache to +speed up the build process. To clear the cache when building a blockchain use +the "--clear-cache" flag. It is very unlikely you will ever need to use this +flag. + +The blockchain is using the Cosmos SDK modular blockchain framework. 
Learn more +about Cosmos SDK on https://docs.cosmos.network + + +``` +ignite scaffold chain [name] [flags] +``` + +**Options** + +``` + --address-prefix string Account address prefix (default "cosmos") + --clear-cache clear the build cache (advanced) + -h, --help help for chain + --no-module Create a project without a default module + -p, --path string Create a project in a specific path (default ".") +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Scaffold a new blockchain, module, message, query, and more + + +## ignite scaffold flutter + +A Flutter app for your chain + +``` +ignite scaffold flutter [flags] +``` + +**Options** + +``` + -h, --help help for flutter + -p, --path string path to scaffold content of the Flutter app (default "./flutter") + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Scaffold a new blockchain, module, message, query, and more + + +## ignite scaffold list + +CRUD for data stored as an array + +**Synopsis** + +The "list" scaffolding command is used to generate files that implement the +logic for storing and interacting with data stored as a list in the blockchain +state. + +The command accepts a NAME argument that will be used as the name of a new type +of data. It also accepts a list of FIELDs that describe the type. + +The interaction with the data follows the create, read, updated, and delete +(CRUD) pattern. For each type three Cosmos SDK messages are defined for writing +data to the blockchain: MsgCreate{Name}, MsgUpdate{Name}, MsgDelete{Name}. For +reading data two queries are defined: {Name} and {Name}All. The type, messages, +and queries are defined in the "proto/" directory as protocol buffer messages. +Messages and queries are mounted in the "Msg" and "Query" services respectively. + +When messages are handled, the appropriate keeper methods are called. 
By +convention, the methods are defined in +"x/{moduleName}/keeper/msg_server_{name}.go". Helpful methods for getting, +setting, removing, and appending are defined in the same "keeper" package in +"{name}.go". + +The "list" command essentially allows you to define a new type of data and +provides the logic to create, read, update, and delete instances of the type. +For example, let's review a command that generates the code to handle a list of +posts and each post has "title" and "body" fields: + + ignite scaffold list post title body + +This provides you with a "Post" type, MsgCreatePost, MsgUpdatePost, +MsgDeletePost and two queries: Post and PostAll. The compiled CLI, let's say the +binary is "blogd" and the module is "blog", has commands to query the chain (see +"blogd q blog") and broadcast transactions with the messages above (see "blogd +tx blog"). + +The code generated with the list command is meant to be edited and tailored to +your application needs. Consider the code to be a "skeleton" for the actual +business logic you will implement next. + +By default, all fields are assumed to be strings. If you want a field of a +different type, you can specify it after a colon ":". The following types are +supported: string, bool, int, uint, coin, array.string, array.int, array.uint, +array.coin. An example of using custom types: + + ignite scaffold list pool amount:coin tags:array.string height:int + +Ignite also supports custom types: + + ignite scaffold list product-details name description + + ignite scaffold list product price:coin details:ProductDetails + +In the example above the "ProductDetails" type was defined first, and then used +as a custom type for the "details" field. Ignite doesn't support arrays of +custom types yet. + +By default the code will be scaffolded in the module that matches your project's +name. 
If you have several modules in your project, you might want to specify a
+different module:
+
+ ignite scaffold list post title body --module blog
+
+By default, each message comes with a "creator" field that represents the
+address of the transaction signer. You can customize the name of this field with
+a flag:
+
+ ignite scaffold list post title body --signer author
+
+It's possible to scaffold just the getter/setter logic without the CRUD
+messages. This is useful when you want the methods to handle a type, but would
+like to scaffold messages manually. Use a flag to skip message scaffolding:
+
+ ignite scaffold list post title body --no-message
+
+The "creator" field is not generated if a list is scaffolded with the
+"--no-message" flag.
+
+
+```
+ignite scaffold list NAME [field]... [flags]
+```
+
+**Options**
+
+```
+ --clear-cache clear the build cache (advanced)
+ -h, --help help for list
+ --module string Module to add into. Default is app's main module
+ --no-message Disable CRUD interaction messages scaffolding
+ --no-simulation Disable CRUD simulation scaffolding
+ -p, --path string path of the app (default ".")
+ --signer string Label for the message signer (default: creator)
+ -y, --yes answers interactive yes/no questions with yes
+```
+
+**SEE ALSO**
+
+* [ignite scaffold](#ignite-scaffold) - Scaffold a new blockchain, module, message, query, and more
+
+
+## ignite scaffold map
+
+CRUD for data stored as key-value pairs
+
+**Synopsis**
+
+The "map" scaffolding command is used to generate files that implement the logic
+for storing and interacting with data stored as key-value pairs (or a
+dictionary) in the blockchain state.
+
+The "map" command is very similar to "ignite scaffold list" with the main
+difference in how values are indexed. With "list" values are indexed by an
+incrementing integer, whereas "map" values are indexed by a user-provided value
+(or multiple values). 
+
+Let's use the same blog post example:
+
+ ignite scaffold map post title body
+
+This command scaffolds a "Post" type and CRUD functionality to create, read,
+update, and delete posts. However, when creating a new post with your chain's
+binary (or by submitting a transaction through the chain's API) you will be
+required to provide an "index":
+
+ blogd tx blog create-post [index] [title] [body]
+ blogd tx blog create-post hello "My first post" "This is the body"
+
+This command will create a post and store it in the blockchain's state under the
+"hello" index. You will be able to fetch back the value of the post by querying
+for the "hello" key.
+
+ blogd q blog show-post hello
+
+To customize the index, use the "--index" flag. Multiple indices can be
+provided, which simplifies querying values. For example:
+
+ ignite scaffold map product price desc --index category,guid
+
+With this command, you would get a "Product" value indexed by both a category
+and a GUID (globally unique ID). This will let you programmatically fetch
+product values that have the same category but are using different GUIDs.
+
+Since the behavior of "list" and "map" scaffolding is very similar, you can use
+the "--no-message", "--module", "--signer" flags as well as the colon syntax for
+custom types.
+
+
+```
+ignite scaffold map NAME [field]... [flags]
+```
+
+**Options**
+
+```
+ --clear-cache clear the build cache (advanced)
+ -h, --help help for map
+ --index strings fields that index the value (default [index])
+ --module string Module to add into. 
Default is app's main module + --no-message Disable CRUD interaction messages scaffolding + --no-simulation Disable CRUD simulation scaffolding + -p, --path string path of the app (default ".") + --signer string Label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Scaffold a new blockchain, module, message, query, and more + + +## ignite scaffold message + +Message to perform state transition on the blockchain + +**Synopsis** + +Message scaffolding is useful for quickly adding functionality to your +blockchain to handle specific Cosmos SDK messages. + +Messages are objects whose end goal is to trigger state transitions on the +blockchain. A message is a container for fields of data that affect how the +blockchain's state will change. You can think of messages as "actions" that a +user can perform. + +For example, the bank module has a "Send" message for token transfers between +accounts. The send message has three fields: from address (sender), to address +(recipient), and a token amount. When this message is successfully processed, +the token amount will be deducted from the sender's account and added to the +recipient's account. + +Ignite's message scaffolding lets you create new types of messages and add them +to your chain. For example: + + ignite scaffold message add-pool amount:coins denom active:bool --module dex + +The command above will create a new message MsgAddPool with three fields: amount +(in tokens), denom (a string), and active (a boolean). The message will be added +to the "dex" module. + +By default, the message is defined as a proto message in the +"proto/{app}/{module}/tx.proto" and registered in the "Msg" service. A CLI command to +create and broadcast a transaction with MsgAddPool is created in the module's +"cli" package. 
Additionally, Ignite scaffolds a message constructor and the code
+to satisfy the sdk.Msg interface and register the message in the module.
+
+Most importantly in the "keeper" package Ignite scaffolds an "AddPool" function.
+Inside this function, you can implement message handling logic.
+
+When successfully processed, a message can return data. Use the "--response" flag
+to specify response fields and their types. For example:
+
+ ignite scaffold message create-post title body --response id:int,title
+
+The command above will scaffold MsgCreatePost which returns both an ID (an
+integer) and a title (a string).
+
+Message scaffolding follows the rules as "ignite scaffold list/map/single" and
+supports fields with standard and custom types. See "ignite scaffold list --help"
+for details.
+
+
+```
+ignite scaffold message [name] [field1] [field2] ... [flags]
+```
+
+**Options**
+
+```
+ --clear-cache clear the build cache (advanced)
+ -d, --desc string Description of the command
+ -h, --help help for message
+ --module string Module to add the message into. Default: app's main module
+ --no-simulation Disable CRUD simulation scaffolding
+ -p, --path string path of the app (default ".")
+ -r, --response strings Response fields
+ --signer string Label for the message signer (default: creator)
+ -y, --yes answers interactive yes/no questions with yes
+```
+
+**SEE ALSO**
+
+* [ignite scaffold](#ignite-scaffold) - Scaffold a new blockchain, module, message, query, and more
+
+
+## ignite scaffold module
+
+Scaffold a Cosmos SDK module
+
+**Synopsis**
+
+Scaffold a new Cosmos SDK module.
+
+Cosmos SDK is a modular framework and each independent piece of functionality is
+implemented in a separate module. By default your blockchain imports a set of
+standard Cosmos SDK modules. To implement custom functionality of your
+blockchain, scaffold a module and implement the logic of your application. 
+
+This command does the following:
+
+* Creates a directory with module's protocol buffer files in "proto/"
+* Creates a directory with module's boilerplate Go code in "x/"
+* Imports the newly created module by modifying "app/app.go"
+* Creates a file in "testutil/keeper/" that contains logic to create a keeper
+ for testing purposes
+
+This command will proceed with module scaffolding even if "app/app.go" doesn't
+have the required default placeholders. If the placeholders are missing, you
+will need to modify "app/app.go" manually to import the module. If you want the
+command to fail if it can't import the module, use the "--require-registration"
+flag.
+
+To scaffold an IBC-enabled module use the "--ibc" flag. An IBC-enabled module is
+like a regular module with the addition of IBC-specific logic and placeholders
+to scaffold IBC packets with "ignite scaffold packet".
+
+A module can depend on one or more other modules and import their keeper
+methods. To scaffold a module with a dependency use the "--dep" flag.
+
+For example, your new custom module "foo" might have functionality that requires
+sending tokens between accounts. The method for sending tokens is defined in
+the "bank" module's keeper. You can scaffold a "foo" module with the dependency
+on "bank" with the following command:
+
+ ignite scaffold module foo --dep bank
+
+You can then define which methods you want to import from the "bank" keeper in
+"expected_keepers.go".
+
+You can also scaffold a module with a list of dependencies that can include both
+standard and custom modules (provided they exist):
+
+ ignite scaffold module bar --dep foo,mint,account
+
+Note: the "--dep" flag doesn't install third-party modules into your
+application, it just generates extra code that specifies which existing modules
+your new custom module depends on.
+
+A Cosmos SDK module can have parameters (or "params"). 
Params are values that +can be set at the genesis of the blockchain and can be modified while the +blockchain is running. An example of a param is "Inflation rate change" of the +"mint" module. A module can be scaffolded with params using the "--params" flag +that accepts a list of param names. By default params are of type "string", but +you can specify a type for each param. For example: + + ignite scaffold module foo --params baz:uint,bar:bool + +Refer to Cosmos SDK documentation to learn more about modules, dependencies and +params. + + +``` +ignite scaffold module [name] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --dep strings module dependencies (e.g. --dep account,bank) + -h, --help help for module + --ibc scaffold an IBC module + --ordering string channel ordering of the IBC module [none|ordered|unordered] (default "none") + --params strings scaffold module params + -p, --path string path of the app (default ".") + --require-registration if true command will fail if module can't be registered + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Scaffold a new blockchain, module, message, query, and more + + +## ignite scaffold packet + +Message for sending an IBC packet + +**Synopsis** + +Scaffold an IBC packet in a specific IBC-enabled Cosmos SDK module + +``` +ignite scaffold packet [packetName] [field1] [field2] ... --module [moduleName] [flags] +``` + +**Options** + +``` + --ack strings Custom acknowledgment type (field1,field2,...) 
+ --clear-cache clear the build cache (advanced) + -h, --help help for packet + --module string IBC Module to add the packet into + --no-message Disable send message scaffolding + -p, --path string path of the app (default ".") + --signer string Label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Scaffold a new blockchain, module, message, query, and more + + +## ignite scaffold query + +Query to get data from the blockchain + +``` +ignite scaffold query [name] [request_field1] [request_field2] ... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -d, --desc string Description of the command + -h, --help help for query + --module string Module to add the query into. Default: app's main module + --paginated Define if the request can be paginated + -p, --path string path of the app (default ".") + -r, --response strings Response fields + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Scaffold a new blockchain, module, message, query, and more + + +## ignite scaffold single + +CRUD for data stored in a single location + +``` +ignite scaffold single NAME [field]... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for single + --module string Module to add into. 
Default is app's main module + --no-message Disable CRUD interaction messages scaffolding + --no-simulation Disable CRUD simulation scaffolding + -p, --path string path of the app (default ".") + --signer string Label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Scaffold a new blockchain, module, message, query, and more + + +## ignite scaffold type + +Scaffold only a type definition + +``` +ignite scaffold type NAME [field]... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for type + --module string Module to add into. Default is app's main module + --no-message Disable CRUD interaction messages scaffolding + --no-simulation Disable CRUD simulation scaffolding + -p, --path string path of the app (default ".") + --signer string Label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Scaffold a new blockchain, module, message, query, and more + + +## ignite scaffold vue + +Vue 3 web app template + +``` +ignite scaffold vue [flags] +``` + +**Options** + +``` + -h, --help help for vue + -p, --path string path to scaffold content of the Vue.js app (default "./vue") + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Scaffold a new blockchain, module, message, query, and more + + +## ignite tools + +Tools for advanced users + +**Options** + +``` + -h, --help help for tools +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite tools ibc-relayer](#ignite-tools-ibc-relayer) - Typescript implementation of an IBC relayer +* [ignite tools ibc-setup](#ignite-tools-ibc-setup) - Collection of commands to quickly setup a relayer +* 
[ignite tools protoc](#ignite-tools-protoc) - Execute the protoc command + + +## ignite tools ibc-relayer + +Typescript implementation of an IBC relayer + +``` +ignite tools ibc-relayer [--] [...] [flags] +``` + +**Examples** + +``` +ignite tools ibc-relayer -- -h +``` + +**Options** + +``` + -h, --help help for ibc-relayer +``` + +**SEE ALSO** + +* [ignite tools](#ignite-tools) - Tools for advanced users + + +## ignite tools ibc-setup + +Collection of commands to quickly setup a relayer + +``` +ignite tools ibc-setup [--] [...] [flags] +``` + +**Examples** + +``` +ignite tools ibc-setup -- -h +ignite tools ibc-setup -- init --src relayer_test_1 --dest relayer_test_2 +``` + +**Options** + +``` + -h, --help help for ibc-setup +``` + +**SEE ALSO** + +* [ignite tools](#ignite-tools) - Tools for advanced users + + +## ignite tools protoc + +Execute the protoc command + +**Synopsis** + +The protoc command. You don't need to setup the global protoc include folder with -I, it's automatically handled + +``` +ignite tools protoc [--] [...] [flags] +``` + +**Examples** + +``` +ignite tools protoc -- --version +``` + +**Options** + +``` + -h, --help help for protoc +``` + +**SEE ALSO** + +* [ignite tools](#ignite-tools) - Tools for advanced users + + +## ignite version + +Print the current build information + +``` +ignite version [flags] +``` + +**Options** + +``` + -h, --help help for version +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + diff --git a/docs/versioned_docs/version-v0.25/clients/01-typescript.md b/docs/versioned_docs/version-v0.25/clients/01-typescript.md new file mode 100644 index 0000000..c311e0b --- /dev/null +++ b/docs/versioned_docs/version-v0.25/clients/01-typescript.md @@ -0,0 +1,310 @@ +--- +sidebar_position: 1 +description: Information about the generated Typescript client code. 
+--- + +# Typescript code generation + +The `ignite generate ts-client` command generates a Typescript client for your blockchain project. + +## Client code generation + +A TypeScript (TS) client is automatically generated for your blockchain for custom and standard Cosmos SDK modules. + +To enable client code generation, add the `client` entries to `config.yml`: + +```yaml +client: + typescript: + path: "ts-client" +``` + +A TS client is generated in the `ts-client` directory. + +## Client code regeneration + +By default, the filesystem is watched and the clients are regenerated automatically. Clients for standard Cosmos SDK modules are generated after you scaffold a blockchain. + +To regenerate all clients for custom and standard Cosmos SDK modules, run this command: + +```bash +ignite generate ts-client +``` + +## Preventing client code regeneration + +To prevent regenerating the client, remove the `client:typescript` property from `config.yml`. + +## Setup + +The best way to get started building with the TypeScript client is by using a [Vite](https://vitejs.dev) boilerplate. Vite provides boilerplates for vanilla TS projects as well as react, vue, lit, svelte and preact frameworks. +You can find additional information at the [Vite Getting Started guide](https://vitejs.dev/guide). + +You will also need to polyfill the client's dependencies. The following is an example of setting up a vanilla TS project with the necessary polyfills. + +```bash +npm create vite@latest my-frontend-app -- --template vanilla-ts +npm install --save-dev @esbuild-plugins/node-globals-polyfill @rollup/plugin-node-resolve +``` + +You must then create the necessary `vite.config.ts` file. 
+ +```typescript +import { nodeResolve } from '@rollup/plugin-node-resolve' +import { NodeGlobalsPolyfillPlugin } from '@esbuild-plugins/node-globals-polyfill' +import { defineConfig } from 'vite' + +export default defineConfig({ + + plugins: [nodeResolve()], + + optimizeDeps: { + esbuildOptions: { + define: { + global: 'globalThis', + }, + plugins: [ + NodeGlobalsPolyfillPlugin({ + buffer:true + }), + ], + }, + } +}) +``` + +You are then ready to use the generated client code inside this project directly or by publishing the client and installing it as any other npm package. + +## Usage + +The code generated in `ts-client` comes with a `package.json` file ready to publish which you can modify to suit your needs. + +The client is based on a modular architecture where you can configure a client class to support the modules you need and instantiate it. + +By default, the generated client exports a client class that includes all the Cosmos SDK, custom and 3rd party modules in use in your project. + +To instantiate the client you need to provide environment information (endpoints and chain prefix) and an optional wallet (implementing the CosmJS OfflineSigner interface). + +For example, to connect to a local chain instance running under the Ignite CLI defaults, using a CosmJS wallet: + +```typescript +import { Client } from ''; +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; + +const mnemonic = "surround miss nominee dream gap cross assault thank captain prosper drop duty group candy wealth weather scale put"; +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic); + +const client = new Client({ + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos" + }, + wallet +); +``` + +The resulting client instance contains namespaces for each module, each with a `query` and `tx` namespace containing the module's relevant querying and transacting methods with full type and auto-completion support. + +e.g. 
+ +```typescript +const balances = await client.CosmosBankV1Beta1.query.queryAllBalances('cosmos1qqqsyqcyq5rqwzqfys8f67'); +``` + +And for transactions: + +```typescript +const tx_result = await client.CosmosBankV1Beta1.tx.sendMsgSend( + { + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos1qqqsyqcyq5rqwzqfys8f67', + toAddress: 'cosmos1qqqsyqcyq5rqwzqfys8f67' + }, + fee, + memo + } +); +``` + +If you prefer, you can construct a lighter client using only the modules you are interested in by importing the generic client class and expanding it with the modules you need: + +```typescript +import { IgniteClient } from '/client'; +import { Module as CosmosBankV1Beta1 } from '/cosmos.bank.v1beta1' +import { Module as CosmosStakingV1Beta1 } from '/cosmos.staking.v1beta1' +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; + +const mnemonic = "surround miss nominee dream gap cross assault thank captain prosper drop duty group candy wealth weather scale put"; +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic); +const CustomClient = IgniteClient.plugin([CosmosBankV1Beta1, CosmosStakingV1Beta1]); + +const client = new CustomClient({ + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos" + }, + wallet +); +``` + +You can also construct TX messages separately and send them in a single TX using a global signing client like so: + +```typescript +const msg1 = await client.CosmosBankV1Beta1.tx.msgSend( + { + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos1qqqsyqcyq5rqwzqfys8f67', + toAddress: 'cosmos1qqqsyqcyq5rqwzqfys8f67' + } + } +); +const msg2 = await client.CosmosBankV1Beta1.tx.msgSend( + { + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos1qqqsyqcyq5rqwzqfys8f67', + toAddress: 'cosmos1qqqsyqcyq5rqwzqfys8f67' + }, + } +); +const tx_result = await client.signAndBroadcast([msg1,msg2], 
fee, memo); +``` + +Finally, for additional ease-of-use, apart from the modular client mentioned above, each generated module is usable on its own in a stripped-down way by exposing a separate txClient and queryClient. + +e.g. + +```typescript +import { queryClient } from '/cosmos.bank.v1beta1'; + +const client = queryClient({ addr: 'http://localhost:1317' }); +const balances = await client.queryAllBalances('cosmos1qqqsyqcyq5rqwzqfys8f67'); +``` + +and + +```typescript +import { txClient } from '/cosmos.bank.v1beta1'; +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; + +const mnemonic = "surround miss nominee dream gap cross assault thank captain prosper drop duty group candy wealth weather scale put"; +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic); + +const client = txClient({ + signer: wallet, + prefix: 'cosmos', + addr: 'http://localhost:26657' +}); + +const tx_result = await client.sendMsgSend( + { + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos1qqqsyqcyq5rqwzqfys8f67', + toAddress: 'cosmos1qqqsyqcyq5rqwzqfys8f67' + }, + fee, + memo + } +); +``` + +## Usage with Keplr + +Normally, Keplr provides a wallet object implementing the OfflineSigner interface so you can simply replace the wallet argument in client instantiation with it like so: + + +```typescript +import { Client } from ''; + +const chainId = 'mychain-1' +const client = new Client({ + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos" + }, + window.keplr.getOfflineSigner(chainId) +); +``` + +The problem is that for a new Ignite CLI scaffolded chain, Keplr has no knowledge of it thus requiring an initial call to [`experimentalSuggestChain()`](https://docs.keplr.app/api/guide/suggest-chain) method to add the chain information to the user's Keplr instance. 
+ +The generated client makes this easier by offering a `useKeplr()` method that autodiscovers the chain information and sets it up for you. Thus you can instantiate the client without a wallet and then call `useKeplr()` to enable transacting via Keplr like so: + +```typescript +import { Client } from ''; + +const client = new Client({ + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos" + } +); +await client.useKeplr(); +``` + +`useKeplr()` optionally accepts an object argument that contains one or more of the same keys as the `ChainInfo` type argument of `experimentalSuggestChain()` allowing you to override the auto-discovered values. + +For example, the default chain name and token precision (which are not recorded on-chain) are set to ` Network` and `0` while the ticker for the denom is set to the denom name in uppercase. If you wanted to override these, you could do something like: + + +```typescript +import { Client } from ''; + +const client = new Client({ + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos" + } +); +await client.useKeplr({ chainName: 'My Great Chain', stakeCurrency : { coinDenom: 'TOKEN', coinMinimalDenom: 'utoken', coinDecimals: '6' } }); +``` + +## Wallet switching + +The client also allows you to switch out the wallet for a different one on an already instantiated client like so: + +```typescript +import { Client } from ''; +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; + +const mnemonic = "surround miss nominee dream gap cross assault thank captain prosper drop duty group candy wealth weather scale put"; +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic); + + +const client = new Client({ + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos" + } +); +await client.useKeplr(); + +// transact using Keplr Wallet + +client.useSigner(wallet); + +//transact using CosmJS wallet +``` diff --git 
a/docs/versioned_docs/version-v0.25/clients/_category_.json b/docs/versioned_docs/version-v0.25/clients/_category_.json new file mode 100644 index 0000000..04cb9ce --- /dev/null +++ b/docs/versioned_docs/version-v0.25/clients/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Clients", + "position": 6, + "link": null + } diff --git a/docs/versioned_docs/version-v0.25/contributing/01-contributing.md b/docs/versioned_docs/version-v0.25/contributing/01-contributing.md new file mode 100644 index 0000000..7abfd18 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/contributing/01-contributing.md @@ -0,0 +1,73 @@ +--- +sidebar_position: 1 +slug: /contributing +--- + +# Contributing to Ignite CLI docs + +Thank you for visiting our repository and considering making contributions. We appreciate your interest in helping us to create and maintain awesome tutorials and documentation. + +To set up your environment for success, follow the [technical setup](02-technical-setup.md) guidelines. + +## Using this repo + +Review existing [Ignite CLI issues](https://github.com/ignite/cli/issues) to see if your question has already been asked and answered. + +- To provide feedback, file an issue and provide generous details to help us understand how we can make it better. +- To provide a fix, make a direct contribution. If you're not a member or maintainer, fork the repo and then submit a pull request (PR) from your forked repo to the `main` branch. +- Start by creating a draft pull request. Create your draft PR early, even if your work is just beginning or incomplete. Your draft PR indicates to the community that you're working on something and provides a space for conversations early in the development process. Merging is blocked for `Draft` PRs, so they provide a safe place to experiment and invite comments. + +## Reviewing technical content PRs + +Some of the best content contributions come during the PR review cycles. 
Follow best practices for technical content PR reviews just like you do for code reviews. + +- For in-line suggestions, use the [GitHub suggesting feature](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/commenting-on-a-pull-request). +- The PR owner can merge in your suggested commits one at a time or in batch (preferred). +- When you are providing a more granular extensive review that results in more than 20 in-line suggestions, go ahead and check out the branch and make the changes yourself. + +## Writing and contributing + +We welcome contributions to the docs and tutorials. + +Our technical content follows the [Google developer documentation style guide](https://developers.google.com/style). Highlights to help you get started: + +- [Highlights](https://developers.google.com/style/highlights) +- [Word list](https://developers.google.com/style/word-list) +- [Style and tone](https://developers.google.com/style/tone) +- [Writing for a global audience](https://developers.google.com/style/translation) +- [Cross-references](https://developers.google.com/style/cross-references) +- [Present tense](https://developers.google.com/style/tense) + +The Google guidelines include more material than is listed here and are used as a guide that enables easy decision making about proposed content changes. + +Other useful resources: + +- [Google Technical Writing Courses](https://developers.google.com/tech-writing) +- [GitHub Guides Mastering Markdown](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax) + +## Where can I find the tutorials and docs? + +Technical content includes knowledge base articles and interactive tutorials. + +- The Ignite CLI Developer Tutorials content is in the `docs/guide` folder. +- The Knowledge Base content is in the `docs/kb` folder. +- Upgrade information is in the `docs/migration` folder. 
+ +Note: The CLI docs are auto-generated and do not support doc updates. + +Locations and folders for other content can vary. Explore the self-describing folders for the content that you are interested in. Some articles and tutorials reside in a single Markdown file while sub-folders might be present for other tutorials. + +As always, work-in-progress content might be happening in other locations and repos. + +## Who works on the tutorials? + +The Ignite product team developers are focused on building Ignite CLI and improving the developer experience. The Ignite Ecosystem Development team owns the technical content and tutorials and manages developer onboarding. + +Meet the [people behind Ignite CLI and our contributors](https://github.com/ignite/cli/graphs/contributors). + +## Viewing docs builds + +Use a preview to see what your changes will look like in production before the updated pages are published. + +- While a PR is in draft mode, you can rely on using the preview feature in Markdown. +- After the PR moves from **Draft** to **Ready for review**, the CI status checks generate a deploy preview. This preview stays up to date as you continue to work and commit new changes to the same branch. A `Docs Deploy Preview / build_and_deploy (pull_request)` preview on a GitHub actions URL is unique for that PR. diff --git a/docs/versioned_docs/version-v0.25/contributing/02-technical-setup.md b/docs/versioned_docs/version-v0.25/contributing/02-technical-setup.md new file mode 100644 index 0000000..7fea761 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/contributing/02-technical-setup.md @@ -0,0 +1,64 @@ +--- +sidebar_position: 1 +--- +# Technical setup + +To ensure you have a successful experience working with our Developer Tutorials content, Ignite recommends this technical setup. + +## Setting up Visual Studio Code + +1. Install [Visual Studio Code](https://vscode-docs.readthedocs.io/en/latest/editor/setup/). +1. Click **Extensions** in the sidebar. +1. 
Install this extension:
   - Go for VS Code: The official Go extension for Visual Studio Code
Log out and log back in to the terminal to use your new default zsh shell. + +## Install Go + +Follow the steps in [Install Ignite CLI](../guide/01-install.md) docs to install Ignite CLI and Go. diff --git a/docs/versioned_docs/version-v0.25/contributing/_category_.json b/docs/versioned_docs/version-v0.25/contributing/_category_.json new file mode 100644 index 0000000..e451845 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/contributing/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Contributing to Ignite CLI docs", + "position": 8, + "link": null +} diff --git a/docs/versioned_docs/version-v0.25/contributing/templates/01-concept_template.md b/docs/versioned_docs/version-v0.25/contributing/templates/01-concept_template.md new file mode 100644 index 0000000..4e15738 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/contributing/templates/01-concept_template.md @@ -0,0 +1,125 @@ +# Understanding [Some Concept] + + + + + + + + +### Introduction + + + +Introductory paragraph about the topic that explains what this topic is about and why the reader should care; what problem does it solve? + + + +In this guide, you will [explore] [some thing]... + +- [Install Ignite CLI](../../guide/01-install.md) + +## Describe + +Introduction to the concept. What are we going to do and why are we doing it? + +First.... + +Next... + +Finally... + +Now transition to the next section by telling the reader what's next. + +## Another section + +Another introduction + +Your content + +Transition to the next section + +## Conclusion + +In this article you [explored/learned] [something]. Now you can.... 
+ + + + + + + diff --git a/docs/versioned_docs/version-v0.25/contributing/templates/02-tutorial-template.md b/docs/versioned_docs/version-v0.25/contributing/templates/02-tutorial-template.md new file mode 100644 index 0000000..e5e6ce6 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/contributing/templates/02-tutorial-template.md @@ -0,0 +1,185 @@ +# How To [Build/Create/Do Something] in Ignite CLI + + + + + + + + + + + +### Introduction and purpose + +Introductory paragraph about the topic that explains what this topic is about and why the user should care; what problem does the tutorial solve? + +In this guide, you will [accomplish/build/] [some important thing]... + +When you're finished, you'll be able to... + +**Note:** The code in this tutorial is written specifically for this learning experience and is intended only for educational purposes. This tutorial code is not intended to be used in production. + +## Prerequisites + + + +To complete this tutorial, you will need: + +* A local development environment for [your chain] +* Familiarity with the Cosmos ecosystem and [your chain]. See [cosmos.network](EIP-1559 for $ATOM) to learn more. +* (Optional) If software such as Git, Go, Docker, or other tooling needs to be installed, link to the proper article describing how to install it. +* (Optional) List any other accounts needed. + + + +## Step 1 — Doing something + +Introduction to the step. What are you going to do and why are you doing it? + +First.... + +Next... + +Finally... + + + +To verify the version of Ignite CLI that is installed, run the following command: + +```bash +ignite --version +``` + +You'll see release details like the following output: + +``` +Ignite version: v0.19.6 +Ignite CLI build date: 2021-12-18T05:56:36Z +Ignite CLI source hash: - +Your OS: darwin +Your arch: amd64 +Your go version: go version go1.16.4 darwin/amd64 +``` + + + +Modify the title by changing the contents of the `` tag: + +```protobuf +// ... 
+ +message Post { + string creator = 1; + string id = 2; + string title = 3; + string body = 4; +} + +message MsgCreatePost { + string creator = 1; + string title = 2; + string body = 3; +} + +// ... +``` + +Now transition to the next step by telling the user what's next. + +## Step 2 — Sentence case heading + +Another introduction + +Your content that guides the user to accomplish a specific step + +Transition to the next step. + +## Step 3 — Sentence case + +Another introduction + +Your content + +Transition to the next step. + +## Conclusion + +In this article you [accomplished or built] [some important thing]. Now you can.... + +<!-- Speak to the benefits of this technique or procedure and optionally provide places for further exploration. --> + +<!------------ Formatting -------------------------> + +<!-- Some examples of how to mark up various things + +This is _italics_ and this is **bold**. + +Use italics and bold for specific things. + +This is `inline code`. Use single tick marks for filenames and commands. + +Here's a command you can type on a command line: + +```bash +which go +``` + +Here's output from a command: + +``` +/usr/local/go/bin/go +``` + +Write key presses in ALLCAPS. + +Use a plus symbol (+) if keys need to be pressed simultaneously: `CTRL+C`. + +**Note:** This is a note. + +**Tip:** This is a tip. + +Add diagrams and screenshots in PNG format with a self-describing filename. 
Embed them in the article using the following format: + +![Alt text for screen readers](/path/to/img.png) + +--> diff --git a/docs/versioned_docs/version-v0.25/contributing/templates/_category_.json b/docs/versioned_docs/version-v0.25/contributing/templates/_category_.json new file mode 100644 index 0000000..9dce674 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/contributing/templates/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Ignite article templates", + "position": 3, + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.25/contributing/templates/readme.md b/docs/versioned_docs/version-v0.25/contributing/templates/readme.md new file mode 100644 index 0000000..2fb83a1 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/contributing/templates/readme.md @@ -0,0 +1,14 @@ +# Ignite article templates + +Use these templates along with our style guide to help you create articles for the Ignite CLI community. + +Choose the template that best fits the kind of content you're creating: + +* `tutorial_template.md` - Template for long-form interactive "how to" and getting started tutorials that take the form of a lesson. +* `concept_template.md` - Template for conceptual articles that are oriented to understanding. Concept articles explore a subject at a high level. + +These templates are starting points and are not substitutes for the Ignite style and formatting guide. + +Happy building and happy writing! + +— The Ignite Ecosystem Development Team diff --git a/docs/versioned_docs/version-v0.25/guide/00-introduction.md b/docs/versioned_docs/version-v0.25/guide/00-introduction.md new file mode 100644 index 0000000..07749c5 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/00-introduction.md @@ -0,0 +1,38 @@ +--- +sidebar_position: 0 +title: Introduction +slug: /guide +--- + +# Introduction + +Developer tutorials provide step-by-step instructions to help you build blockchain developer skills. 
+ +By following these developer tutorials you will learn how to: + +* Install Ignite CLI on your local machine +* Create a new blockchain and start a node locally for development +* Make your blockchain say "Hello, World!" + * Scaffold a Cosmos SDK query + * Modify a keeper method to return a static string + * Use the blockchain CLI to make a query +* Write and read blog posts to your chain in the Blog tutorial + * Scaffold a Cosmos SDK message + * Define new types in protocol buffer files + * Write keeper methods to write data to the store + * Read data from the store and return it as a result a query + * Use the blockchain CLI to broadcast transactions +* Build a blockchain for buying and selling names in the Nameservice tutorial + * Scaffold CRUD logic with `map` + * Use other module methods in your custom module + * Send tokens between addresses +* Build a guessing game with rewards + * Use an escrow account to store tokens +* Use the Inter-Blockchain Communication (IBC) protocol + * Scaffold an IBC-enabled module + * Send and receive IBC packets + * Configure and run a built-in IBC relayer +* Build a decentralized order-book token exchange + * Build an advanced IBC-enabled module + + diff --git a/docs/versioned_docs/version-v0.25/guide/01-install.md b/docs/versioned_docs/version-v0.25/guide/01-install.md new file mode 100644 index 0000000..3f54ae5 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/01-install.md @@ -0,0 +1,107 @@ +--- +sidebar_position: 1 +description: Steps to install Ignite CLI on your local computer. +--- + +# Install Ignite CLI + +You can run [Ignite CLI](https://github.com/ignite/cli) in a web-based Gitpod IDE or you can install Ignite CLI on your local computer. + +## Prerequisites + +Be sure you have met the prerequisites before you install and use Ignite CLI. 
+ +### Operating systems + +Ignite CLI is supported for the following operating systems: + +- GNU/Linux +- macOS +- Windows Subsystem for Linux (WSL) + +### Go + +Ignite CLI is written in the Go programming language. To use Ignite CLI on a local system: + +- Install [Go](https://golang.org/doc/install) (**version 1.16** or higher) +- Ensure the Go environment variables are [set properly](https://golang.org/doc/gopath_code#GOPATH) on your system + +## Verify your Ignite CLI version + +To verify the version of Ignite CLI you have installed, run the following command: + +```bash +ignite version +``` + +## Installing Ignite CLI + +To install the latest version of the `ignite` binary use the following command. + +```bash +curl https://get.ignite.com/cli! | bash +``` + +This command invokes `curl` to download the install script and pipes the output to `bash` to perform the installation. The `ignite` binary is installed in `/usr/local/bin`. + +To learn more or customize the installation process, see the [installer docs](https://github.com/ignite/installer) on GitHub. + +### Write permission + +Ignite CLI installation requires write permission to the `/usr/local/bin/` directory. If the installation fails because you do not have write permission to `/usr/local/bin/`, run the following command: + +```bash +curl https://get.ignite.com/cli | bash +``` + +Then run this command to move the `ignite` executable to `/usr/local/bin/`: + +```bash +sudo mv ignite /usr/local/bin/ +``` + +On some machines, a permissions error occurs: + +```bash +mv: rename ./ignite to /usr/local/bin/ignite: Permission denied +============ +Error: mv failed +``` + +In this case, use sudo before `curl` and before `bash`: + +```bash +sudo curl https://get.ignite.com/cli! | sudo bash +``` + +## Upgrading your Ignite CLI installation {#upgrade} + +Before you install a new version of Ignite CLI, remove all existing Ignite CLI installations. + +To remove the current Ignite CLI installation: + +1. 
On your terminal window, press `Ctrl+C` to stop the chain that you started with `ignite chain serve`. +1. Remove the Ignite CLI binary with `rm $(which ignite)`. + Depending on your user permissions, run the command with or without `sudo`. +1. Repeat this step until all `ignite` installations are removed from your system. + +After all existing Ignite CLI installations are removed, follow the [Installing Ignite CLI](#installing-ignite-cli) instructions. + +For details on version features and changes, see the [changelog.md](https://github.com/ignite/cli/blob/main/changelog.md) in the repo. + +## Build from source + +To experiment with the source code, you can build from source: + +```bash +git clone https://github.com/ignite/cli --depth=1 +cd cli && make install +``` + +## Summary + +- Verify the prerequisites. +- To setup a local development environment, install Ignite CLI locally on your computer. +- Install Ignite CLI by fetching the binary using cURL or by building from source. +- The latest version is installed by default. You can install previous versions of the precompiled `ignite` binary. +- Stop the chain and remove existing versions before installing a new version. diff --git a/docs/versioned_docs/version-v0.25/guide/02-hello.md b/docs/versioned_docs/version-v0.25/guide/02-hello.md new file mode 100644 index 0000000..341fdab --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/02-hello.md @@ -0,0 +1,276 @@ +--- +sidebar_position: 2 +description: Step-by-step guidance to build your first blockchain and your first Cosmos SDK module. +--- + +# Hello, Ignite CLI + +This tutorial is a great place to start your journey into the Cosmos ecosystem. Instead of wondering how to build a blockchain, follow these steps to build your first blockchain and your first Cosmos SDK module. 
+ +## Get started + +In the previous chapter you've learned how to install [Ignite CLI](https://github.com/ignite/cli), the tool that offers everything you need to build, test, and launch your blockchain with a decentralized worldwide community. + +This series of tutorials is based on a specific version of Ignite CLI, so be sure to install the correct version. For example, to install Ignite CLI v0.22.2 use the following command: + +```bash +curl https://get.ignite.com/cli@v0.22.2! | bash +``` + +Ignite CLI comes with a number of scaffolding commands that are designed to make development easier by creating everything that's required to start working on a particular task. + +First, use Ignite CLI to build the foundation of a fresh Cosmos SDK blockchain. With Ignite CLI, you don't have to write the blockchain code yourself. + +Are you ready? Open a terminal window and navigate to a directory where you have permissions to create files. + +To create your blockchain with the default directory structure, run this command: + +```bash +ignite scaffold chain hello +``` + +This command creates a Cosmos SDK blockchain called hello in a `hello` directory. The source code inside the `hello` directory contains a fully functional ready-to-use blockchain. + +This new blockchain imports standard Cosmos SDK modules, including: + +- [`staking`](https://docs.cosmos.network/main/modules/staking) for delegated Proof-of-Stake (PoS) consensus mechanism +- [`bank`](https://docs.cosmos.network/main/modules/bank) for fungible token transfers between accounts +- [`gov`](https://docs.cosmos.network/main/modules/gov) for on-chain governance +- And other Cosmos SDK [modules](https://docs.cosmos.network/main/modules) that provide the benefits of the extensive Cosmos SDK framework + +You can get help on any command. Now that you have run your first command, take a minute to see all of the command line options for the `scaffold` command. 
+ +To learn about the command you just used, run: + +```bash +ignite scaffold --help +``` + +## Blockchain directory structure + +After you create the blockchain, switch to its directory: + +```bash +cd hello +``` + +The `hello` directory contains a number of generated files and directories that make up the structure of a Cosmos SDK blockchain. Most of the work in this tutorial happens in the `x` directory. Here is a quick overview of files and directories that are created by default: + +| File/directory | Purpose | +| -------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| app/ | Files that wire together the blockchain. The most important file is `app.go` that contains type definition of the blockchain and functions to create and initialize it. | +| cmd/ | The main package responsible for the CLI of compiled binary. | +| docs/ | Directory for project documentation. By default, an OpenAPI spec is generated. | +| proto/ | Protocol buffer files describing the data structure. | +| testutil/ | Helper functions for testing. | +| vue/ | A Vue 3 web app template. | +| x/ | Cosmos SDK modules and custom modules. | +| config.yml | A configuration file for customizing a chain in development. | +| readme.md | A readme file for your sovereign application-specific blockchain project. | + +Now you can get your blockchain up and running locally on a single node. + +## Start a blockchain + +You already have a fully-functional blockchain. To start your chain on your development machine, run the following command in the `hello` directory + +```bash +ignite chain serve +``` + +This command downloads dependencies and compiles the source code into a binary called `hellod`. By default, the binary name is the name of the repo + `d`. From now on, use this `hellod` binary to run all of your chain commands. 
For example, you use this binary to initialize a single validator node and to start the node. + +Leave this terminal window open while your chain is running. + +## HTTP API Console + +By default, a validator node exposes two API endpoints: + +- [http://localhost:26657](http://localhost:26657) for the low-level Tendermint API +- [http://localhost:1317](http://localhost:1317) for the high-level blockchain API + +Now that you started your `hello` chain, use a web browser to see the high-level `hello` blockchain API: + +![./images/api.png](./images/api.png) + +## Stop a blockchain + +When you want to stop your blockchain, press Ctrl+C in the terminal window where it's running. + +In the development environment, you can experiment and instantly see updates. You don't have to restart the blockchain after you make changes. Hot reloading automatically detects all of the changes you make in the `hello` directory files. + +## Say "Hello, Ignite CLI" + +To get your blockchain to say `Hello! Ignite CLI`, you need to make these changes: + +- Modify a protocol buffer file +- Create a keeper query function that returns data +- Register a query function + +Protocol buffer files contain proto rpc calls that define Cosmos SDK queries and message handlers, and proto messages that define Cosmos SDK types. The rpc calls are also responsible for exposing an HTTP API. + +For each Cosmos SDK module, the [Keeper](https://docs.cosmos.network/main/building-modules/keeper) is an abstraction for modifying the state of the blockchain. Keeper functions let you query or write to the state. After you add the first query to your chain, the next step is to register the query. You only need to register a query once.
+ +A typical blockchain developer workflow looks something like this: + +- Start with proto files to define Cosmos SDK [messages](https://docs.cosmos.network/main/building-modules/msg-services) +- Define and register [queries](https://docs.cosmos.network/main/building-modules/query-services) +- Define message handler logic +- Finally, implement the logic of these queries and message handlers in keeper functions + +## Create a query + +For all subsequent commands, use a terminal window that is different from the window you started the chain in. + +In a different terminal window, run the commands in your `hello` directory. + +Create a `hello` query: + +```bash +ignite scaffold query hello --response text +``` + +`query` accepts a name of the query (in this case, `hello`), an optional list of request parameters (in this case, empty), and an optional comma-separated list of response fields with a `--response` flag (in this case, `text`). + +The `query` command has created and modified several files: + +``` +modify proto/hello/query.proto +modify x/hello/client/cli/query.go +create x/hello/client/cli/query_hello.go +create x/hello/keeper/grpc_query_hello.go +``` + +Let's examine some of these changes. For clarity, the following code blocks do not show the placeholder comments that Ignite CLI uses to scaffold code. Don't delete these placeholders since they are required to continue using Ignite CLI's scaffolding functionality. + +Note: it's recommended to commit changes to a version control system (for example, Git) after scaffolding. This allows others to easily distinguish between code generated by Ignite and the code written by hand. + +``` +git add . +git commit -am "Scaffolded a hello query with Ignite CLI" +``` + +### Updates to the query service + +In the `proto/hello/query.proto` file, the `Hello` rpc has been added to the `Query` service. 
+ +```protobuf +service Query { + rpc Hello(QueryHelloRequest) returns (QueryHelloResponse) { + option (google.api.http).get = "/hello/hello/hello"; + } +} +``` + +Here's how the `Hello` rpc for the `Query` service works: + +- Is responsible for returning a `text` string +- Accepts request parameters (`QueryHelloRequest`) +- Returns response of type `QueryHelloResponse` +- The `option` defines the endpoint that is used by gRPC to generate an HTTP API + +### Request and response types + +Now, take a look at the following request and response types: + +```protobuf +message QueryHelloRequest { +} + +message QueryHelloResponse { + string text = 1; +} +``` + +- The `QueryHelloRequest` message is empty because this request does not require parameters. +- The `QueryHelloResponse` message contains `text` that is returned from the chain. + +## Hello keeper function + +The `x/hello/keeper/grpc_query_hello.go` file contains the `Hello` keeper function that handles the query and returns data. + +```go +func (k Keeper) Hello(goCtx context.Context, req *types.QueryHelloRequest) (*types.QueryHelloResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + ctx := sdk.UnwrapSDKContext(goCtx) + _ = ctx + return &types.QueryHelloResponse{}, nil +} +``` + +The `Hello` function performs these actions: + +- Makes a basic check on the request and throws an error if it's `nil` +- Stores context in a `ctx` variable that contains information about the environment of the request +- Returns a response of type `QueryHelloResponse` + +Right now the response is empty. + +### Update keeper function + +In the `query.proto` file, the response accepts `text`. + +- Use a text editor to modify the `x/hello/keeper/grpc_query_hello.go` file that contains the keeper function. 
+ +- On the last line of the keeper function, change the line to return "Hello, Ignite CLI!": + +```go +func (k Keeper) Hello(goCtx context.Context, req *types.QueryHelloRequest) (*types.QueryHelloResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + ctx := sdk.UnwrapSDKContext(goCtx) + _ = ctx + return &types.QueryHelloResponse{Text: "Hello, Ignite CLI!"}, nil // <-- +} +``` + +- Save the file to restart your chain. +- In a web browser, visit the `hello` endpoint [http://localhost:1317/hello/hello/hello](http://localhost:1317/hello/hello/hello). + + Because the query handlers are not yet registered with gRPC, you see a not implemented or localhost cannot connect error. This error is expected behavior, because you still need to register the query handlers. + +## Register query handlers + +Make the required changes to the `x/hello/module.go` file. + +1. Add `"context"` to the list of packages in the import statement. + + ```go + import ( + // ... + + "context" + + // ... + ) + ``` + + Do not save the file yet; you need to continue with these modifications. + +2. Search for `RegisterGRPCGatewayRoutes`. + +3. Register the query handlers: + + ```go + func (AppModuleBasic) RegisterGRPCGatewayRoutes(clientCtx client.Context, mux *runtime.ServeMux) { + types.RegisterQueryHandlerClient(context.Background(), mux, types.NewQueryClient(clientCtx)) + } + ``` + +4. After the chain has been started, visit [http://localhost:1317/hello/hello/hello](http://localhost:1317/hello/hello/hello) and see your text displayed: + + ```json + { + "text": "Hello, Ignite CLI!" + } + ``` + +The `query` command has also scaffolded `x/hello/client/cli/query_hello.go` that implements a CLI equivalent of the hello query and mounted this command in `x/hello/client/cli/query.go`.
Run the following command and get the same JSON response: + +```bash +hellod q hello hello +``` + +Congratulations, you have built your first blockchain and your first Cosmos SDK module. Continue the journey to learn more about scaffolding Cosmos SDK messages, types in protocol buffer files, the keeper, and more. diff --git a/docs/versioned_docs/version-v0.25/guide/03-blog/00-build-blog.md b/docs/versioned_docs/version-v0.25/guide/03-blog/00-build-blog.md new file mode 100644 index 0000000..6ec9805 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/03-blog/00-build-blog.md @@ -0,0 +1,494 @@ +--- +sidebar_position: 0 +description: Learn module basics by writing and reading blog posts to your chain. +slug: /guide/blog +--- + +# Build a blog + +In this tutorial, you create a blockchain with a module that lets you write to and read data from the blockchain. This module implements create and read functionalities for a blog-like application. The end user will be able to submit new blog posts and show a list of blog posts on the blockchain. + +> The purpose of this tutorial is to guide you through the implementation of a complete feedback loop: submitting data and reading this data back from the blockchain. + +By completing this tutorial, you will learn about: + +* Scaffolding a Cosmos SDK message +* Defining new types in protocol buffer files +* Implementing keeper methods to write data to the store +* Reading data from the store and return it as a result of a query +* Using the blockchain's CLI to broadcast transactions and query the blockchain + +**Note:** All the functions in this chapter can be scaffolded with a single command but instead you will learn how to add each functionality individually. + +## Prerequisites + +This series of blog tutorials is based on a specific version of Ignite CLI, so to install Ignite CLI v0.22.2 use the following command: + +```bash +curl https://get.ignite.com/cli@v0.22.2! 
| bash +``` + +## Create your blog chain + +First, create a new blockchain. + +Open a terminal and navigate to a directory where you have permissions to create files. To create your Cosmos SDK blockchain, run this command: + +```bash +ignite scaffold chain blog --address-prefix blog +``` + +The `blog` directory is created with the default directory structure. + +The new blockchain is scaffolded with the `--address-prefix blog` flag to use "blog" instead of the default "cosmos" address prefix. + +## High-level transaction review + +So far, you have learned how to modify proto files to define a new API endpoint and modify a keeper query function to return static data back to the user. Of course, a keeper can do more than return a string of data. Its purpose is to manage access to the state of the blockchain. + +You can think of the state as being a collection of key-value stores. Each module is responsible for its own store. Changes to the store are triggered by transactions that are signed and broadcasted by users. Each transaction contains Cosmos SDK messages (not to be confused with proto `message`). When a transaction is processed, each message gets routed to its module. A module has message handlers that process messages. Processing a message can trigger changes in the state. + +## Create message types + +A Cosmos SDK message contains information that can trigger changes in the state of a blockchain. + +First, change into the `blog` directory: + +```bash +cd blog +``` + +To create a message type and its handler, use the `message` command: + +```bash +ignite scaffold message createPost title body +``` + +The `message` command accepts message name (`createPost`) and a list of fields (`title` and `body`) as arguments. 
+ +The `message` command has created and modified several files: + +``` +modify proto/blog/tx.proto +modify x/blog/client/cli/tx.go +create x/blog/client/cli/tx_create_post.go +create x/blog/keeper/msg_server_create_post.go +modify x/blog/module_simulation.go +create x/blog/simulation/create_post.go +modify x/blog/types/codec.go +create x/blog/types/message_create_post.go +create x/blog/types/message_create_post_test.go + +🎉 Created a message `createPost`. +``` + +As always, start with a proto file. Inside the `proto/blog/tx.proto` file, the `MsgCreatePost` message has been created. Edit the file to add the line that defines the `id` for `message MsgCreatePostResponse`: + +```protobuf +message MsgCreatePost { + string creator = 1; + string title = 2; + string body = 3; +} + +message MsgCreatePostResponse { + uint64 id = 1; +} +``` + +## Review the message code + +Review the Cosmos SDK message type with proto `message`. The `MsgCreatePost` has three fields: creator, title, and body. Since the purpose of the `MsgCreatePost` message is to create new posts in the store, the only thing the message needs to return is an ID of a created post. The `CreatePost` rpc was already added to the `Msg` service: + +```protobuf +service Msg { + rpc CreatePost(MsgCreatePost) returns (MsgCreatePostResponse); +} +``` + +## Define messages logic + +In the newly scaffolded `x/blog/keeper/msg_server_create_post.go` file, you can see a placeholder implementation of the `CreatePost` function. Right now it does nothing and returns an empty response. For your blog chain, you want the contents of the message (title and body) to be written to the state as a new post. 
+ +You need to do two things: + +- Create a variable of type `Post` with title and body from the message +- Append this `Post` to the store + +```go +func (k msgServer) CreatePost(goCtx context.Context, msg *types.MsgCreatePost) (*types.MsgCreatePostResponse, error) { + // Get the context + ctx := sdk.UnwrapSDKContext(goCtx) + + // Create variable of type Post + var post = types.Post{ + Creator: msg.Creator, + Title: msg.Title, + Body: msg.Body, + } + + // Add a post to the store and get back the ID + id := k.AppendPost(ctx, post) + + // Return the ID of the post + return &types.MsgCreatePostResponse{Id: id}, nil +} +``` + +## Define Post type and AppendPost keeper method + +Define the `Post` type and the `AppendPost` keeper method. + +When you define the `Post` type in a proto file, Ignite CLI (with the help of `protoc`) takes care of generating the required Go files. + +Create the `proto/blog/post.proto` file and define the `Post` message: + +```protobuf +syntax = "proto3"; + +package blog.blog; + +option go_package = "blog/x/blog/types"; + +message Post { + string creator = 1; + uint64 id = 2; + string title = 3; + string body = 4; +} +``` + +The contents of the `post.proto` file are standard. The file defines: + +- A package name `blog.blog` that is used to identify messages +- The Go package `go_package = "blog/x/blog/types"` where new files are generated +- The message `message Post` + +Continue developing your blog chain. + +### Define keeper methods + +The next step is to define the `AppendPost` keeper method. + +Create the `x/blog/keeper/post.go` file and start thinking about the logic of the function and what you want to call the prefixes. The file will be empty for now. + +- To implement `AppendPost` you must first understand how the key store works. You can think of a store as a key-value database where keys are lexicographically ordered. You can loop through keys and use `Get` and `Set` to retrieve and set values based on keys. 
To distinguish between different types of data that a module can keep in its store, you can use prefixes like `product/` or `post/`. + +- To keep a list of posts in what is essentially a key-value store, you need to keep track of the index of the posts you insert. Since both post values and post count (index) values are kept in the store, you can use different prefixes: `Post/value/` and `Post/count/`. + +Then, add these prefixes to the `x/blog/types/keys.go` file in the `const` and add a comment that describes the keys: + +```go +const ( + // ... + + // Keep track of the index of posts + PostKey = "Post/value/" + PostCountKey = "Post/count/" +) +``` + +Your blog is now updated to take these actions when a `Post` message is sent to the `AppendPost` function: + +- Get the number of posts in the store (count) +- Add a post by using the count as an ID +- Increment the count +- Return the count + +## Write data to the store + +In the `x/blog/keeper/post.go` file, draft the `AppendPost` function. 
You can add these comments to help you visualize what you do next: + +```go +package keeper + +import ( + "encoding/binary" + + "github.com/cosmos/cosmos-sdk/store/prefix" + sdk "github.com/cosmos/cosmos-sdk/types" + + "blog/x/blog/types" +) + +// func (k Keeper) AppendPost() uint64 { +// count := k.GetPostCount() +// store.Set() +// k.SetPostCount() +// return count +// } +``` + +First, implement `GetPostCount`: + +```go +func (k Keeper) GetPostCount(ctx sdk.Context) uint64 { + // Get the store using storeKey (which is "blog") and PostCountKey (which is "Post/count/") + store := prefix.NewStore(ctx.KVStore(k.storeKey), []byte(types.PostCountKey)) + + // Convert the PostCountKey to bytes + byteKey := []byte(types.PostCountKey) + + // Get the value of the count + bz := store.Get(byteKey) + + // Return zero if the count value is not found (for example, it's the first post) + if bz == nil { + return 0 + } + + // Convert the count into a uint64 + return binary.BigEndian.Uint64(bz) +} +``` + +Now that `GetPostCount` returns the correct number of posts in the store, implement `SetPostCount`: + +```go +func (k Keeper) SetPostCount(ctx sdk.Context, count uint64) { + // Get the store using storeKey (which is "blog") and PostCountKey (which is "Post/count/") + store := prefix.NewStore(ctx.KVStore(k.storeKey), []byte(types.PostCountKey)) + + // Convert the PostCountKey to bytes + byteKey := []byte(types.PostCountKey) + + // Convert count from uint64 to string and get bytes + bz := make([]byte, 8) + binary.BigEndian.PutUint64(bz, count) + + // Set the value of Post/count/ to count + store.Set(byteKey, bz) +} +``` + +Now that you have implemented functions for getting the number of posts and setting the post count, at the top of the same `x/blog/keeper/post.go` file, implement the logic behind the `AppendPost` function: + +```go +func (k Keeper) AppendPost(ctx sdk.Context, post types.Post) uint64 { + // Get the current number of posts in the store + count := k.GetPostCount(ctx) 
+ + // Assign an ID to the post based on the number of posts in the store + post.Id = count + + // Get the store + store := prefix.NewStore(ctx.KVStore(k.storeKey), []byte(types.PostKey)) + + // Convert the post ID into bytes + byteKey := make([]byte, 8) + binary.BigEndian.PutUint64(byteKey, post.Id) + + // Marshal the post into bytes + appendedValue := k.cdc.MustMarshal(&post) + + // Insert the post bytes using post ID as a key + store.Set(byteKey, appendedValue) + + // Update the post count + k.SetPostCount(ctx, count+1) + return count +} +``` + +By following these steps, you have implemented all of the code required to create new posts and store them on-chain. Now, when a transaction that contains a message of type `MsgCreatePost` is broadcast, the message is routed to your blog module. + +- `k.CreatePost` calls `AppendPost` +- `AppendPost` gets the number of posts from the store, adds a post using the count as an ID, increments the count, and returns the ID + +Now that you have added the functionality to create posts and broadcast them to our chain, you can add querying. + +## Display posts + +To display posts, scaffold a query: + +```bash +ignite scaffold query posts --response title,body +``` + +Two components are responsible for querying data: + +- An rpc inside `service Query` in a proto file that defines data types and specifies the HTTP API endpoint +- A keeper method that performs the querying from the key-value store + +First, review the services and messages in `proto/blog/query.proto`. The `Posts` rpc accepts an empty request and returns an object with two fields: title and body. Now you can make changes so it can return a list of posts. The list of posts can be long, so add pagination. When pagination is added, the request and response include a page number so you can request a particular page when you know what page has been returned. + +To define the types in proto files, make the following updates in `proto/blog/query.proto`: + +1. 
Add the `import`: + +```protobuf +import "blog/post.proto"; +``` + +2. Add pagination to the post request: + +```protobuf +message QueryPostsRequest { + // Adding pagination to request + cosmos.base.query.v1beta1.PageRequest pagination = 1; +} +``` + +3. Add pagination to the post response: + +```protobuf +message QueryPostsResponse { + // Returning a list of posts + repeated Post Post = 1; + + // Adding pagination to response + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} +``` + +To implement post querying logic in the `x/blog/keeper/grpc_query_posts.go` file, delete the contents of that file and replace it with: + +```go +package keeper + +import ( + "context" + + "github.com/cosmos/cosmos-sdk/store/prefix" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/query" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "blog/x/blog/types" +) + +func (k Keeper) Posts(c context.Context, req *types.QueryPostsRequest) (*types.QueryPostsResponse, error) { + // Throw an error if request is nil + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + // Define a variable that will store a list of posts + var posts []*types.Post + + // Get context with the information about the environment + ctx := sdk.UnwrapSDKContext(c) + + // Get the key-value module store using the store key (in our case store key is "chain") + store := ctx.KVStore(k.storeKey) + + // Get the part of the store that keeps posts (using post key, which is "Post-value-") + postStore := prefix.NewStore(store, []byte(types.PostKey)) + + // Paginate the posts store based on PageRequest + pageRes, err := query.Paginate(postStore, req.Pagination, func(key []byte, value []byte) error { + var post types.Post + if err := k.cdc.Unmarshal(value, &post); err != nil { + return err + } + + posts = append(posts, &post) + + return nil + }) + + // Throw an error if pagination failed + if err != nil { + return nil, 
status.Error(codes.Internal, err.Error()) + } + + // Return a struct containing a list of posts and pagination info + return &types.QueryPostsResponse{Post: posts, Pagination: pageRes}, nil +} +``` + +## Add gRPC to the module handler + +In the `x/blog/module.go` file: + +1. Add `"context"` to the imports; don't save the file yet. + +```go +import ( + "context" + + // ... +) +``` + +2. Update the `RegisterGRPCGatewayRoutes` function to register the query handler client: + +```go +// RegisterGRPCGatewayRoutes registers the gRPC Gateway routes for the module. +func (AppModuleBasic) RegisterGRPCGatewayRoutes(clientCtx client.Context, mux *runtime.ServeMux) { + types.RegisterQueryHandlerClient(context.Background(), mux, types.NewQueryClient(clientCtx)) +} +``` + +3. Now that you've modified the file with the two updates, it's safe to save the file. + +## Use the CLI to create a post + +Now that you have implemented logic for creating and querying posts, you can interact with your blog chain using the command line. The blog chain binary is `blogd`. + +First, start the chain on your development machine by running the following command in the `blog` directory: + +```bash +ignite chain serve +``` + +The binary is built by the `ignite chain serve` command, but it can also be built by running: + +```bash +ignite chain build +``` + +To create a post at the command line: + +```bash +blogd tx blog create-post foo bar --from alice +``` + +The transaction is output to the terminal.
You are prompted to confirm the transaction: + +``` +{"body":{"messages":[{"@type":"/blog.blog.MsgCreatePost","creator":"blog1ctxp3pfdtr3sw9udz2ptuh59ce9z0eaa2zvv6w","title":"foo","body":"bar"}],"memo":"","timeout_height":"0","extension_options":[],"non_critical_extension_options":[]},"auth_info":{"signer_infos":[],"fee":{"amount":[],"gas_limit":"200000","payer":"","granter":""}},"signatures":[]} + +confirm transaction before signing and broadcasting [y/N]: y +``` + +Type `y` to sign and broadcast the transaction. + +Congratulations, you built a chain binary and used the `blogd` binary CLI to create a blog post. + +## Use the CLI to query posts + +To query the list of all on-chain posts: + +```bash +blogd q blog posts +``` + +The result: + +```yaml +Post: +- body: bar + creator: blog1ctxp3pfdtr3sw9udz2ptuh59ce9z0eaa2zvv6w + id: "0" + title: foo +pagination: + next_key: null + total: "1" +``` + +## Conclusion + +Congratulations. You have built a blog blockchain! + +You have successfully completed these steps: + +* Write blog posts to your chain +* Read from blog posts +* Scaffold a Cosmos SDK message +* Define new types in protocol buffer files +* Write keeper methods to write data to the store +* Register query handlers +* Read data from the store and return it as a result of a query +* Use the CLI to broadcast transactions diff --git a/docs/versioned_docs/version-v0.25/guide/03-blog/01-comment-blog.md b/docs/versioned_docs/version-v0.25/guide/03-blog/01-comment-blog.md new file mode 100644 index 0000000..1b80f1c --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/03-blog/01-comment-blog.md @@ -0,0 +1,669 @@ +--- +sidebar_position: 1 +description: Write a query that returns a blog post by ID with associated comments. +--- + +# Add associated comments to a blog post + +In this tutorial, you create a new message to add comments to a blog post.
+ +By completing this tutorial, you will learn about: + +* Scaffolding a new `list` with proto functions and keeper functions +* Adding comments to existing blog posts +* Querying for blog posts that have associated comments +* Deleting comments from a blog post +* Implementing logic for writing comments to the blockchain + +**Note:** For this tutorial, adding comments is available only to blog posts that are no older than 100 blocks. The 100 block value has been hard coded for rapid testing. You can increase the block count to a larger number to achieve a longer time period before commenting is disabled. + +## Prerequisites + +This tutorial is an extension of and requires completion of the [Module Basics: Build a Blog](index.md) tutorial. + +## Core concepts + +This tutorial relies on the `blog` blockchain that you built in the `Build a Blog Tutorial.` + +## Fetch functions using list command + +To get the useful functions for this tutorial, you use the `ignite scaffold list NAME [field]... [flags]` command. Make sure to familiarize yourself with the command. + +1. Navigate to the `blog` directory that you created in the [Build a blog](index.md) tutorial. + +2. To create the source code files to add CRUD (create, read, update, and delete) functionality for data stored as an array, run: + +```bash +ignite scaffold list comment --no-message creator:string title:string body:string postID:uint createdAt:int +``` + +The `--no-message` flag disables CRUD interaction messages scaffolding because you will write your own messages. 
+ +The command output shows the files that were created and modified: + +``` +create proto/blog/comment.proto +modify proto/blog/genesis.proto +modify proto/blog/query.proto +modify vue/src/views/Types.vue +modify x/blog/client/cli/query.go +create x/blog/client/cli/query_comment.go +create x/blog/client/cli/query_comment_test.go +modify x/blog/genesis.go +modify x/blog/genesis_test.go +create x/blog/keeper/comment.go +create x/blog/keeper/comment_test.go +create x/blog/keeper/grpc_query_comment.go +create x/blog/keeper/grpc_query_comment_test.go +modify x/blog/module.go +modify x/blog/types/genesis.go +modify x/blog/types/genesis_test.go +modify x/blog/types/keys.go + +🎉 comment added. +``` + +Make a small modification in `proto/blog/comment.proto` to change `createdAt` to int64: + +```protobuf +message Comment { + uint64 id = 1; + string creator = 2; + string title = 3; + string body = 4; + uint64 postID = 5; + int64 createdAt = 6; +} +``` + +## Add a comment to a post + +To create a new message that adds a comment to the existing post, run: + +```bash +ignite scaffold message create-comment postID:uint title body +``` + +The `ignite scaffold message` command accepts `postID` and a list of fields as arguments. The fields are `title` and `body`. + +Here, `postID` is the reference to previously created blog post. + +The `message` command has created and modified several files: + +``` +modify proto/blog/tx.proto +modify x/blog/client/cli/tx.go +create x/blog/client/cli/tx_create_comment.go +create x/blog/keeper/msg_server_create_comment.go +modify x/blog/module_simulation.go +create x/blog/simulation/create_comment.go +modify x/blog/types/codec.go +create x/blog/types/message_create_comment.go +create x/blog/types/message_create_comment_test.go + +🎉 Created a message `create-comment`. +``` + +As always, start your development with a proto file. 
+ +In the `proto/blog/tx.proto` file, edit `MsgCreateComment` to: + +* Add `id` +* Define the `id` for `message MsgCreateCommentResponse`: + +```protobuf +message MsgCreateComment { + string creator = 1; + uint64 postID = 2; + string title = 3; + string body = 4; + uint64 id = 5; +} + +message MsgCreateCommentResponse { + uint64 id = 1; +} +``` + + You see in the `proto/blog/tx.proto` file that the `MsgCreateComment` has five fields: creator, title, body, postID, and id. Since the purpose of the `MsgCreateComment` message is to create new comments in the store, the only thing the message needs to return is an ID of a created comments. The `CreateComment` rpc was already added to the `Msg` service: + +```protobuf +rpc CreateComment(MsgCreateComment) returns (MsgCreateCommentResponse); +``` + +Now, add the `id` field to `MsgCreatePost`: + +```protobuf +message MsgCreatePost { + string creator = 1; + string title = 2; + string body = 3; + uint64 id = 4; +} +``` + +## Process messages + +In the newly scaffolded `x/blog/keeper/msg_server_create_comment.go` file, you can see a placeholder implementation of the `CreateComment` function (marked with `//TODO`). Right now it does nothing and returns an empty response. For your blog chain, you want the contents of the message (title and body) to be written to the state as a new comment. + +You need to do the following things: + +* Create a variable of type `Comment` with title and body from the message +* Check if the comment posted for the respective blog id exists and comment is not older than 100 blocks +* Append this `Comment` to the store + +```go +import ( + // ... + + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + // ... 
+) + +func (k msgServer) CreateComment(goCtx context.Context, msg *types.MsgCreateComment) (*types.MsgCreateCommentResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Check if the post exists for which a comment is being created + post, found := k.GetPost(ctx, msg.PostID) + if !found { + return nil, sdkerrors.Wrap(sdkerrors.ErrKeyNotFound, fmt.Sprintf("key %d doesn't exist", msg.Id)) + } + + // Create variable of type comment + var comment = types.Comment{ + Creator: msg.Creator, + Id: msg.Id, + Body: msg.Body, + Title: msg.Title, + PostID: msg.PostID, + CreatedAt: ctx.BlockHeight(), + } + + // If the comment was created more than 100 blocks after the post, return an error. + if comment.CreatedAt > post.CreatedAt+100 { + return nil, sdkerrors.Wrapf(types.ErrCommentOld, "Comment created at %d is older than post created at %d", comment.CreatedAt, post.CreatedAt) + } + + id := k.AppendComment(ctx, comment) + return &types.MsgCreateCommentResponse{Id: id}, nil +} +``` + +When the comment validity is checked, it can throw two error messages - `ErrID` and `ErrCommentOld`. You can define the error messages by navigating to `x/blog/types/errors.go` and replacing the current values in `var` with: + +```go +// ...
+ +var ( + ErrCommentOld = sdkerrors.Register(ModuleName, 1300, "") + ErrID = sdkerrors.Register(ModuleName, 1400, "") +) +``` + + +In the existing `x/blog/keeper/msg_server_create_post.go` file, you need to make a modification to add `createdAt` + +```go +func (k msgServer) CreatePost(goCtx context.Context, msg *types.MsgCreatePost) (*types.MsgCreatePostResponse, error) { + // Get the context + ctx := sdk.UnwrapSDKContext(goCtx) + + // Create variable of type Post + var post = types.Post{ + Creator: msg.Creator, + Id: msg.Id, + Title: msg.Title, + Body: msg.Body, + CreatedAt: ctx.BlockHeight(), + } + + // Add a post to the store and get back the ID + id := k.AppendPost(ctx, post) + + // Return the ID of the post + return &types.MsgCreatePostResponse{Id: id}, nil +} +``` + +## Write data to the store + +When you define the `Comment` type in a proto file, Ignite CLI (with the help of `protoc`) takes care of generating the required Go files. + +Inside the `proto/blog/comment.proto` file, you can observe, Ignite CLI has already added the required fields inside the `Comment` message. + +The contents of the `comment.proto` file are fairly standard and similar to `post.proto`. The file defines a package name that is used to identify messages, among other things, specifies the Go package where new files are generated, and finally defines `message Comment`. + +Each file save triggers an automatic rebuild. Now, after you build and start your chain with Ignite CLI, the `Comment` type is available. + +Also, make a small modification in `proto/blog/post.proto` to add `createdAt`: + +```protobuf +// ... + +message Post { + // ... + int64 createdAt = 5; +} +``` + +### Define keeper methods + +The function `ignite scaffold list comment --no-message` has fetched all of the required functions for keeper. + +Inside `x/blog/types/keys.go` file, you can see that the `Comment/value/` and `Comment/count/` keys are added. 
+ +## Write data to the store + +In `x/blog/keeper/post.go`, add a new function to get the post: + +```go +import ( + "encoding/binary" + + "github.com/cosmos/cosmos-sdk/store/prefix" + sdk "github.com/cosmos/cosmos-sdk/types" + + "blog/x/blog/types" +) + +// ... + +func (k Keeper) GetPost(ctx sdk.Context, id uint64) (val types.Post, found bool) { + store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.PostKey)) + + bz := make([]byte, 8) + binary.BigEndian.PutUint64(bz, id) + + b := store.Get(bz) + if b == nil { + return val, false + } + + k.cdc.MustUnmarshal(b, &val) + return val, true +} +``` + +You have manually added the functions to `x/blog/keeper/post.go`. + +When you ran the `ignite scaffold list comment --no-message` command, these functions are automatically implemented in `x/blog/keeper/comment.go`: + +- `GetCommentCount` +- `SetCommentCount` +- `AppendCommentCount` + +By following these steps, you have implemented all of the code required to create comments and store them on-chain. Now, when a transaction that contains a message of type `MsgCreateComment` is broadcast, the message is routed to your blog module. + +- `k.CreateComment` calls `AppendComment`. +- `AppendComment` gets the number of comments from the store, adds a comment using the count as an ID, increments the count, and returns the ID. + +## Create the delete-comment message + +To create a message, use the `message` command: + +```bash +ignite scaffold message delete-comment commentID:uint postID:uint +``` + +The `message` commands accepts `commentID` and `postID` as arguments. + +Here, `commentID` and `postID` are the references to previously created comment and blog post. 
+ +The `message` command has created and modified several files: + +``` +modify proto/blog/tx.proto +modify x/blog/client/cli/tx.go +create x/blog/client/cli/tx_delete_comment.go +create x/blog/keeper/msg_server_delete_comment.go +modify x/blog/module_simulation.go +create x/blog/simulation/delete_comment.go +modify x/blog/types/codec.go +create x/blog/types/message_delete_comment.go +create x/blog/types/message_delete_comment_test.go +``` + +As always, start your development with a proto file. + +In the `proto/blog/tx.proto` file, edit `MsgDeleteComment` to: + +* Add `id` +* Define the `id` for `message MsgDeleteCommentResponse`: + +```protobuf +message MsgDeleteComment { + string creator = 1; + uint64 commentID = 2; + uint64 postID = 3; + uint64 id = 4; +} + +message MsgDeleteCommentResponse { + uint64 id = 1; +} +``` + +## Process messages + +In the newly scaffolded `x/blog/keeper/msg_server_delete_comment.go` file, you can see a placeholder implementation of the `DeleteComment` function. Right now it does nothing and returns an empty response. + +For your blog chain, you want to delete the contents of the comment. Add the code to: + +- Check if the post Id exists to see which comment was deleted. +- Delete the comment from the store. 
+ +```go +package keeper + +import ( + "context" + "encoding/binary" + + "github.com/cosmos/cosmos-sdk/store/prefix" + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "blog/x/blog/types" +) + +func (k msgServer) DeleteComment(goCtx context.Context, msg *types.MsgDeleteComment) (*types.MsgDeleteCommentResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + comment, exist := k.GetComment(ctx, msg.CommentID) + if !exist { + return nil, sdkerrors.Wrapf(types.ErrID, "Comment doesnt exist") + } + + if msg.PostID != comment.PostID { + return nil, sdkerrors.Wrapf(types.ErrID, "Post Blog Id does not exist for which comment with Blog Id %d was made", msg.PostID) + } + + store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.CommentKey)) + bz := make([]byte, 8) + binary.BigEndian.PutUint64(bz, comment.Id) + store.Delete(bz) + + return &types.MsgDeleteCommentResponse{}, nil +} +``` + +## Display posts + +Implement logic to query existing posts: + +```bash +ignite scaffold query comments id:uint --response title,body +``` + +Also in `proto/blog/query.proto`, make these updates: + +```protobuf +import "blog/post.proto"; + +message QueryCommentsRequest { + uint64 id = 1; + + // Adding pagination to request + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// ... 
+ +message QueryCommentsResponse { + Post Post = 1; + + // Returning a list of comments + repeated Comment Comment = 2; + + // Adding pagination to response + cosmos.base.query.v1beta1.PageResponse pagination = 3; +} +``` + +After the types are defined in proto files, you can implement post querying logic in `x/blog/keeper/grpc_query_comments.go` by registering the `Comments` function: + +```go +package keeper + +import ( + "context" + + "github.com/cosmos/cosmos-sdk/store/prefix" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/query" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "blog/x/blog/types" +) + +func (k Keeper) Comments(c context.Context, req *types.QueryCommentsRequest) (*types.QueryCommentsResponse, error) { + // Throw an error if request is nil + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + // Define a variable that will store a list of posts + var comments []*types.Comment + + // Get context with the information about the environment + ctx := sdk.UnwrapSDKContext(c) + + // Get the key-value module store using the store key (in this case store key is "chain") + store := ctx.KVStore(k.storeKey) + + // Get the part of the store that keeps posts (using post key, which is "Post-value-") + commentStore := prefix.NewStore(store, []byte(types.CommentKey)) + + // Get the post by ID + post, _ := k.GetPost(ctx, req.Id) + + // Get the post ID + postID := post.Id + + // Paginate the posts store based on PageRequest + pageRes, err := query.Paginate(commentStore, req.Pagination, func(key []byte, value []byte) error { + var comment types.Comment + if err := k.cdc.Unmarshal(value, &comment); err != nil { + return err + } + + if comment.PostID == postID { + comments = append(comments, &comment) + } + + return nil + }) + + // Throw an error if pagination failed + if err != nil { + return nil, status.Error(codes.Internal, err.Error()) + } + + // Return a struct 
containing a list of posts and pagination info + return &types.QueryCommentsResponse{Post: &post, Comment: comments, Pagination: pageRes}, nil +} +``` + +**Note:** Since gRPC has been already added to module handler in the previous tutorial, you don't need to add it again. + +## Create post and comment + +Try it out! + +If the chain is yet not started, run `ignite chain serve -r`. + +Create a post: + +```bash +blogd tx blog create-post Uno "This is the first post" --from alice +``` + +As before, you are prompted to confirm the transaction: + +```json +{"body":{"messages":[{"@type":"/blog.blog.MsgCreatePost","creator":"blog1uamq9d6zj5p7lvzyhjugg8drkrcqckxtvj99ac","title":"Uno","body":"This is the first post","id":"0"}],"memo":"","timeout_height":"0","extension_options":[],"non_critical_extension_options":[]},"auth_info":{"signer_infos":[],"fee":{"amount":[],"gas_limit":"200000","payer":"","granter":""}},"signatures":[]} +``` + +Create a comment: + +```bash +blogd tx blog create-comment 0 Uno "This is the first comment" --from alice +``` + +```json +{"body":{"messages":[{"@type":"/blog.blog.MsgCreateComment","creator":"blog1uamq9d6zj5p7lvzyhjugg8drkrcqckxtvj99ac","postID":"0","title":"Uno","body":"This is the first comment","id":"0"}],"memo":"","timeout_height":"0","extension_options":[],"non_critical_extension_options":[]},"auth_info":{"signer_infos":[],"fee":{"amount":[],"gas_limit":"200000","payer":"","granter":""}},"signatures":[]} +``` + +When prompted, press Enter to confirm the transaction: + +``` +confirm transaction before signing and broadcasting [y/N]: y +``` + +## Display post and comment + +```bash +blogd q blog comments 0 +``` + +The results are output: + +```yaml +Comment: +- body: This is the first comment + createdAt: "58" + creator: blog1uamq9d6zj5p7lvzyhjugg8drkrcqckxtvj99ac + id: "0" + postID: "0" + title: Uno +Post: + body: This is the first post + createdAt: "51" + creator: blog1uamq9d6zj5p7lvzyhjugg8drkrcqckxtvj99ac + id: "0" + title: Uno 
+pagination: + next_key: null + total: "1" +``` + +## Delete comment + +```bash +blogd tx blog delete-comment 0 0 --from alice -y +``` + +## Display the post and all associated comments + +```bash +blogd q blog comments 0 +``` + +The results are output: + +```yaml +Comment: [] +Post: + body: This is the first post + createdAt: "12" + creator: blog12s696u0wutt42kc297td5naxgxtvtxdlsg07n2 + id: "0" + title: Uno +pagination: + next_key: null + total: "0" +``` + +## Edge cases + +1. Add comment to a nonexistent blog id: + +```bash +blogd tx blog create-comment 53 "Edge1" "This is the 53 comment" --from alice -y +``` + +The transaction is not able to be completed because the blog id does not exist: + +```yaml +code: 22 +codespace: sdk +data: "" +events: +- attributes: + - index: false + key: ZmVl + value: "" + type: tx +- attributes: + - index: false + key: YWNjX3NlcQ== + value: Y29zbW9zMXVhbXE5ZDZ6ajVwN2x2enloanVnZzhkcmtyY3Fja3h0dmo5OWFjLzQ= + type: tx +- attributes: + - index: false + key: c2lnbmF0dXJl + value: NEdGejY1WGFjc0cvR1BEOVgxSDh4NmU5NTZEM1hxZ0txdnlWcmVVZ2JSRThTbkRHNjdmN29rNm9uWDhhVjgzb3NFcDh2eWg3RnNIRE1CaU9VL3QwMlE9PQ== + type: tx +gas_used: "41385" +gas_wanted: "200000" +height: "90" +info: "" +logs: [] +raw_log: 'failed to execute message; message index: 0: key 0 doesn''t exist: key not + found' +timestamp: "" +tx: null +``` + +1. 
Add comment to a blog post that is older than 100 blocks: + +```bash +blogd tx blog create-comment 0 "Comment" "This is a comment" --from alice -y +``` + +The transaction is not executed: + +```yaml +code: 1300 +codespace: blog +data: "" +events: +- attributes: + - index: false + key: ZmVl + value: "" + type: tx +- attributes: + - index: false + key: YWNjX3NlcQ== + value: Y29zbW9zMXVhbXE5ZDZ6ajVwN2x2enloanVnZzhkcmtyY3Fja3h0dmo5OWFjLzEy + type: tx +- attributes: + - index: false + key: c2lnbmF0dXJl + value: TFR3OXFQbm9KYUVmZ2EyZWlrWWZ5SmFiM0VvZDUwVlU0L3hJUExpbCtUWXN5NFNvQzhKaWJTeW5Eb2RkOExqU3NPaXhsVjlUZmtvNmJMbHArcVZZTWc9PQ== + type: tx +gas_used: "41569" +gas_wanted: "200000" +height: "154" +info: "" +logs: [] +raw_log: 'failed to execute message; message index: 0: Comment created at 154 is older + than post created at 51: ' +timestamp: "" +tx: null +txhash: 5BFBEE017952376851D7989E7AF5B60A29B98AD2F7812EC271C154575F386AD6 +``` + +## Conclusion + +Congratulations. You have added comments to your blog blockchain! + +You have successfully completed these steps: + +* Scaffolding a new `list` with proto functions and keeper functions +* Add comments to existing blog post +* Display the blog post by ID with associated comments +* Delete comments from a given blog post diff --git a/docs/versioned_docs/version-v0.25/guide/03-blog/02-connect-blockchain.md b/docs/versioned_docs/version-v0.25/guide/03-blog/02-connect-blockchain.md new file mode 100644 index 0000000..80db06d --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/03-blog/02-connect-blockchain.md @@ -0,0 +1,210 @@ +--- +sidebar_position: 2 +description: Blockchain client in Go +--- + +# Create a blockchain client in Go + +Learn how to connect your blockchain to an independent application with RPC. + +After creating the blog blockchain in this tutorial you will learn how to connect to your blockchain from a separate client. 
+ +## Use the blog blockchain + +Navigate to a separate directory right next to the `blog` blockchain you built in the [Build a Blog](index.md) tutorial. + +## Creating a blockchain client + +Create a new directory called `blogclient` on the same level as `blog` directory. As the name suggests, `blogclient` will contain a standalone Go program that acts as a client to your `blog` blockchain. + +The command: + +```bash +ls +``` + +Shows just `blog` now. More results are listed when you have more directories here. + +Create your `blogclient` directory first, change your current working directory, and initialize the new Go module. + +```bash +mkdir blogclient +cd blogclient +go mod init blogclient +touch main.go +``` + +The `go.mod` file is created inside your `blogclient` directory. + +Your blockchain client has only two dependencies: + +- The `blog` blockchain `types` for message types and a query client +- `ignite` for the `cosmosclient` blockchain client + +```go-module +module blogclient + +go 1.18 + +require ( + blog v0.0.0-00010101000000-000000000000 + github.com/ignite/cli v0.23.0 +) + +replace blog => ../blog +replace github.com/gogo/protobuf => github.com/regen-network/protobuf v1.3.3-alpha.regen.1 +``` + +The `replace` directive uses the package from the local `blog` directory and is specified as a relative path to the `blogclient` directory. + +Cosmos SDK uses a custom version of the `protobuf` package, so use the `replace` directive to specify the correct dependency. + +The `blogclient` will eventually have only two files: + +- `main.go` for the main logic of the client +- `go.mod` for specifying dependencies. + +### Main logic of the client in `main.go` + +Add the following code to your `main.go` file to make a connection to your blockchain from a separate app. 
+ +```go +package main + +import ( + "context" + "fmt" + "log" + + // Importing the general purpose Cosmos blockchain client + "github.com/ignite/cli/ignite/pkg/cosmosclient" + + // Importing the types package of your blog blockchain + "blog/x/blog/types" +) + +func main() { + // Prefix to use for account addresses. + // The address prefix was assigned to the blog blockchain + // using the `--address-prefix` flag during scaffolding. + addressPrefix := "blog" + + // Create a Cosmos client instance + cosmos, err := cosmosclient.New( + context.Background(), + cosmosclient.WithAddressPrefix(addressPrefix), + ) + if err != nil { + log.Fatal(err) + } + + // Account `alice` was initialized during `ignite chain serve` + accountName := "alice" + + // Get account from the keyring + account, err := cosmos.Account(accountName) + if err != nil { + log.Fatal(err) + } + + addr, err := account.Address(addressPrefix) + if err != nil { + log.Fatal(err) + } + + // Define a message to create a post + msg := &types.MsgCreatePost{ + Creator: addr, + Title: "Hello!", + Body: "This is the first post", + } + + // Broadcast a transaction from account `alice` with the message + // to create a post store response in txResp + txResp, err := cosmos.BroadcastTx(account, msg) + if err != nil { + log.Fatal(err) + } + + // Print response from broadcasting a transaction + fmt.Print("MsgCreatePost:\n\n") + fmt.Println(txResp) + + // Instantiate a query client for your `blog` blockchain + queryClient := types.NewQueryClient(cosmos.Context()) + + // Query the blockchain using the client's `Posts` method + // to get all posts store all posts in queryResp + queryResp, err := queryClient.Posts(context.Background(), &types.QueryPostsRequest{}) + if err != nil { + log.Fatal(err) + } + + // Print response from querying all the posts + fmt.Print("\n\nAll posts:\n\n") + fmt.Println(queryResp) +} +``` + +Read the comments in the code carefully to learn details about each line of code. 
+ +To learn more about the `cosmosclient` package, see the Go +[cosmosclient](https://pkg.go.dev/github.com/ignite/cli/ignite/pkg/cosmosclient) package documentation. Details are provided to learn how to use the `Client` type with `Options` and `KeyringBackend`. + +## Run the blockchain and the client + +Make sure your blog blockchain is still running with `ignite chain serve`. + +Install dependencies for your `blogclient`: + +```bash +go mod tidy +``` + +Run the blockchain client: + +```bash +go run main.go +``` + +If successful, the results of running the command are printed to the terminal: + +``` +# github.com/keybase/go-keychain +### Some warnings might be displayed which can be ignored +MsgCreatePost: + +Response: + Height: 3222 + TxHash: AFCA76B0FEE5113382C068967B610180C105FCE045FF8C7943EA45EF4B7A1E69 + Data: 0A280A222F636F736D6F6E6175742E626C6F672E626C6F672E4D7367437265617465506F737412020801 + Raw Log: [{"events":[{"type":"message","attributes":[{"key":"action","value":"CreatePost"}]}]}] + Logs: [{"events":[{"type":"message","attributes":[{"key":"action","value":"CreatePost"}]}]}] + GasWanted: 300000 + GasUsed: 45805 + + +All posts: + +Post:<creator:"blog1j8d8pyjr5vynjvcq7xgzme0ny6ha30rpakxk3n" title:"foo" body:"bar" > Post:<creator:"blog1j8d8pyjr5vynjvcq7xgzme0ny6ha30rpakxk3n" id:1 title:"Hello!" body:"This is the first post" > pagination:<total:2 > +``` + +You can confirm the new post with using the `blogd query blog posts` command that you learned about in the previous chapter. +The result looks similar to: + +```yaml +Post: +- body: bar + creator: blog1j8d8pyjr5vynjvcq7xgzme0ny6ha30rpakxk3n + id: "0" + title: foo +- body: This is the first post + creator: blog1j8d8pyjr5vynjvcq7xgzme0ny6ha30rpakxk3n + id: "1" + title: Hello! +pagination: + next_key: null + total: "2" +``` + +Congratulations, you have just created a post using a separate app. 
diff --git a/docs/versioned_docs/version-v0.25/guide/03-blog/_category_.json b/docs/versioned_docs/version-v0.25/guide/03-blog/_category_.json new file mode 100644 index 0000000..77d44fe --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/03-blog/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Module basics: Blog", + "position": 3, + "link": null + } \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.25/guide/04-nameservice/00-tutorial.md b/docs/versioned_docs/version-v0.25/guide/04-nameservice/00-tutorial.md new file mode 100644 index 0000000..30bbc66 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/04-nameservice/00-tutorial.md @@ -0,0 +1,86 @@ +--- +sidebar_position: 0 +slug: /guide/nameservice + +--- + +# Nameservice Tutorial + +The nameservice tutorial provides step-by-step instructions to build a blockchain app for a nameservice. The goal of the nameservice app is to send tokens between participants so that end users can buy names and set a value to the names. + +This tutorial builds on knowledge and skills developed in the earlier tutorials in the Ignite CLI Developer Tutorials. Before you start this building your nameservice app, we recommend that you complete these foundational tutorials: + +- [Install Ignite CLI](../01-install.md) +- [Hello, World](../02-hello.md) +- [Module Basics](../03-blog/00-build-blog.md) + +The goal of this tutorial is to build a functional nameservice app and a mapping of strings to other strings (`map[string]string`). 
+ +This tutorial guides you through these steps to build a blockchain for a nameservice app: + +* Create a blockchain without a default module +* Create a Cosmos SDK nameservice module with a dependency on another module +* Create CRUD (create, read, update, and delete) actions for a type stored as a map +* Declare functions of the bank module to be available to the nameservice module +* Implement keeper functions that implement the logic + +## Prerequisites + +- A supported version of [Ignite CLI](/). To install Ignite CLI, see [Install Ignite CLI](../01-install.md). +* A text editor like [Visual Studio Code](https://code.visualstudio.com/download). +* A web browser like [Chrome](https://www.google.com/chrome) or [Firefox](https://www.mozilla.org/en-US/firefox/new). +- Familiarity with [Cosmos SDK modules](https://docs.cosmos.network/main/building-modules/intro) + +## Nameservice App Goals + +The goal of the app you are building is to let users buy a name and to set a value that a name resolve to. The owner of a given name is the current highest bidder. + +First, see how these simple requirements translate to app design. + +### Core Concepts + +A blockchain app is a [replicated deterministic state machine](https://en.wikipedia.org/wiki/State_machine_replication). As a blockchain app developer, you have to define the state machine with a starting state and messages that trigger state transitions. These software components make it all possible! + +- [Ignite CLI](/) is built on top of Cosmos SDK and accelerates chain development by scaffolding everything you need. +- The [Cosmos SDK](https://github.com/cosmos/cosmos-sdk) modular framework allows developers like you to create custom blockchains that can natively interact with other blockchains. +- [Tendermint](https://docs.tendermint.com/main/introduction/what-is-tendermint.html) software securely and consistently replicates an app on many machines. 
The Tendermint app-agnostic engine handles the networking and consensus layers of your blockchain. + +## Cosmos SDK Modules + +In a Cosmos SDK blockchain, application-specific logic is implemented in separate modules. Modules keep code easy to understand and reuse. Each module contains its own message and transaction processor, while the Cosmos SDK is responsible for routing each message to its respective module. + +Your nameservice app requires the following Cosmos SDK modules: + +- [auth](https://docs.cosmos.network/main/modules/auth): Specifies the base transaction and account types for an application. For your nameservice app, it defines accounts and fees and gives access to these functionalities to the rest of your app. +- [bank](https://docs.cosmos.network/main/modules/bank): Enables the app to create and manage tokens and token balances. +- [distribution](https://docs.cosmos.network/main/modules/distribution): Passively distributes rewards between validators and delegators. +- [slashing](https://docs.cosmos.network/main/modules/slashing): Enables punishing misbehavior of validators when evidence of validator fraud is reported. +- [staking](https://docs.cosmos.network/main/modules/staking): Enables the app to have validators that users can delegate to. +- nameservice: This module does not exist yet! You will build this module to handle the core logic for your new `nameservice` app. The `nameservice` module is the main piece of software you develop to build your app. + +Now, take a look at the two main parts of your app: the state and the message types. + +## Application State + +The state represents your app at a given moment. The state of your nameservice app defines how many tokens are in each account, who the account owners are, the price of each name, and to what value each name resolves to. 
+ +The state of tokens and accounts is defined by the `auth` and `bank` modules so you can direct your focus instead on defining the part of the state that relates specifically to your `nameservice` module. + +The Cosmos SDK comes with a large set of stores to persist the state of applications. By default, the main store of Cosmos SDK apps is a multistore (a store of stores). You can add any number of key-value stores [KVStores in Go](https://pkg.go.dev/github.com/cosmos/cosmos-sdk/types#KVStore) to the multistore. + +For your nameservice app, use one store to map a `name` key to its respective `whois` value that holds a name's value, owner, and price. + +## Messages + +In the Cosmos SDK, [messages](https://docs.cosmos.network/main/building-modules/messages-and-queries#messages) are objects that are contained in transactions to trigger state transitions. Each Cosmos SDK module defines a list of messages and how to handle them. + +You must create [messages for the nameservice module](./02-messages.md) that support this functionality: + +- When a transaction that is included in a block reaches a Tendermint node, the transaction is passed to the application using the Application Blockchain Interface [(ABCI)](https://docs.cosmos.network/main/intro/sdk-app-architecture#abci) between Tendermint and your app. +- The transaction is decoded to get the message. +- The message is then routed to the appropriate module and handled according to the logic defined in the corresponding `Handler`. +- If the state needs to be updated, the `Handler` calls the `Keeper` to perform the update. + +You learn more about these core concepts in the next steps of this tutorial. + +Now that you have an idea of how your app functions from a high-level perspective, it is time to start implementing it. 
diff --git a/docs/versioned_docs/version-v0.25/guide/04-nameservice/01-init.md b/docs/versioned_docs/version-v0.25/guide/04-nameservice/01-init.md new file mode 100644 index 0000000..26a1fbc --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/04-nameservice/01-init.md @@ -0,0 +1,47 @@ +--- +sidebar_position: 1 +description: Scaffold a blockchain and create a nameservice module. +--- + +# Scaffold the Nameservice Module + +Scaffold a blockchain and create a `nameservice` module for the nameservice app. Remember, the goal of the nameservice app is to send tokens between participants so that end users can buy names and set a value to the names. + +## Create a Blockchain + +Scaffold a new Cosmos SDK blockchain using the `ignite scaffold chain` command. The `ignite scaffold chain` command accepts one argument: the Go module path that is used for the project. + +By default, a chain is scaffolded with a new empty Cosmos SDK module. You want to create the nameservice module without scaffolding a module, so use the `--no-module` flag: + +```bash +ignite scaffold chain nameservice --no-module +``` + +This command created a new directory `nameservice` with a brand new Cosmos SDK blockchain. This blockchain doesn't have any application-specific logic yet, but it imports standard Cosmos SDK modules, such as `auth`, `bank`, `mint`, and others. + +Change the current directory to `nameservice`: + +```bash +cd nameservice +``` + +Inside the `nameservice` project directory you can execute other Ignite CLI commands to start a blockchain node, scaffold modules, messages, types, generate code, and much more. + +## Create the Module + +Scaffold a new module called `nameservice`. By design, the `nameservice` module must send tokens between participants. The send tokens functionality is implemented in the standard `bank` module. 
+ +To specify `bank` as a dependency, use the optional `--dep` flag: + +```bash +ignite scaffold module nameservice --dep bank +``` + +## Results + +The Ignite CLI scaffold command has done all of the work for you! + +- The `nameservice` module was created in the `x/nameservice` directory. +- The `nameservice` module was imported into the blockchain in the `app/app.go` file. + +Now, define the actions your app can make with messages. diff --git a/docs/versioned_docs/version-v0.25/guide/04-nameservice/02-messages.md b/docs/versioned_docs/version-v0.25/guide/04-nameservice/02-messages.md new file mode 100644 index 0000000..fd11f91 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/04-nameservice/02-messages.md @@ -0,0 +1,189 @@ +--- +sidebar_position: 2 +description: Add messages to define actions for the nameservice module. +--- + +# Messages for the Nameservice Module + +Messages are a great place to start when building a Cosmos SDK module because they define the actions that your app can make. Remember that the nameservice app lets users buy a name, set a value for a name to resolve to, and delete a name that belongs to them. + +With this design in mind for the `nameservice` module, it's time to create these messages to define the actions. End users can send these messages to interact with the application state: + +- `BuyName` +- `SetName` +- `DeleteName` + +## Message Type + +Messages trigger state transitions. Messages (`Msg`) are wrapped in transactions (`Tx`) that clients submit to the network. Because the Cosmos SDK wraps and unwraps messages from transactions, as an app developer, you only have to define messages. + +Messages must satisfy the following interface: + +```go +// Transactions messages must fulfill the Msg +type Msg interface { + proto.Message + + // ValidateBasic does a simple validation check that + // doesn't require access to any other information. + ValidateBasic() error + + // Signers returns the addrs of signers that must sign. 
+ // CONTRACT: All signatures must be present to be valid + GetSigners() []AccAddress + + // Legacy methods + Type() string + Route() string + GetSignBytes() []byte +} +``` + +The `Msg` type extends `proto.Message` and contains these methods along with the legacy methods (`Type`, `Route`, and `GetSignBytes`): + +- `ValidateBasic` + + - Called early in the processing of the message to discard obviously invalid messages. + - Includes only checks that do not require access to the state. For example, check that the `amount` of tokens is a positive value. + +- `GetSigners` + + - Returns the list of signers. + - The Cosmos SDK ensures that each message contained in a transaction is signed by all the signers in the list that is returned by this method. + +## Handlers + +Handlers define the action that needs to be taken. Each message has an associated handler. + +For example, handlers define which stores to update, how to update the stores, and under what conditions to act when a given message is received. + +## Scaffolding Messages + +Now, you are ready to implement these Cosmos SDK messages to achieve the desired functionality for your nameservice app: + +- `MsgBuyName` + Allow accounts to buy a name and become its owner. When an end user buys a name, they are required to pay the previous owner of the name a price higher than the price the previous owner paid for it. If a name does not have a previous owner yet, the end user must burn a `MinPrice` amount. +- `MsgSetName` + Allow name owners to set a value for a given name. +- `MsgDeleteName` + Allow name owners to delete names that belong to them. + +Use the `ignite scaffold message` command to scaffold new messages for your module. + +- The `ignite scaffold message` command accepts the message name as the first argument and a list of fields for the message. +- By default, a message is scaffolded in a module with a name that matches the name of the project, in this case `nameservice`. 
+ +### Add the MsgBuyName Message + +To create the `MsgBuyName` message for the nameservice module: + +```bash +ignite scaffold message buy-name name bid +``` + +where: + +- buy-name is the message name +- name defines the name that the user can buy, sell, and delete +- bid is the price the user bids to buy a name + +The `ignite scaffold message buy-name name bid` command creates and modifies several files: + +``` +modify proto/nameservice/tx.proto +modify x/nameservice/client/cli/tx.go +create x/nameservice/client/cli/tx_buy_name.go +create x/nameservice/keeper/msg_server_buy_name.go +modify x/nameservice/types/codec.go +create x/nameservice/types/message_buy_name.go +``` + +These are the changes for each one of these files: + +- `proto/nameservice/tx.proto` + - Adds `MsgBuyName` and `MsgBuyNameResponse` proto messages. + - Registers `BuyName` rpc in the `Msg` service. + + Open the `tx.proto` file to view the changes: + + ```protobuf + syntax = "proto3"; + + package nameservice.nameservice; + + // this line is used by starport scaffolding # proto/tx/import + + option go_package = "nameservice/x/nameservice/types"; + + // Msg defines the Msg service. + service Msg { + // this line is used by starport scaffolding # proto/tx/rpc + rpc BuyName(MsgBuyName) returns (MsgBuyNameResponse); + } + + // this line is used by starport scaffolding # proto/tx/message + message MsgBuyName { + string creator = 1; + string name = 2; + string bid = 3; + } + + message MsgBuyNameResponse { + } + ``` + +- `x/nameservice/client/cli/tx.go` + + Registers the CLI command. + +- `x/nameservice/types/message_buy_name.go` + + Defines methods to satisfy the `Msg` interface. + +- `x/nameservice/keeper/msg_server_buy_name.go` + + Defines the `BuyName` keeper method. You can notice that the message follows the `Msg` interface. The message `struct` contains all the information required when buying a name: `Name`, `Bid`, and `Creator`. This struct was added automatically. 
+ +- `x/nameservice/client/cli/tx_buy_name.go` + + Adds the CLI command to broadcast a transaction with a message. + +- `x/nameservice/types/codec.go` + + Registers the codecs. + + +### Add the MsgSetName Message + +To create the `MsgSetName` for the nameservice module: + +```bash +ignite scaffold message set-name name value +``` + +where: + +- set-name is the message name +- name is the name the user sets +- value is the literal value that the name resolves to + +This `ignite scaffold message` command modifies and creates the same set of files as the `MsgBuyName` message. + +### Add the MsgDeleteName Message + +You need a message so that an end user can delete a name that belongs to them. + +To create the `MsgDeleteName` for the nameservice module: + +```bash +ignite scaffold message delete-name name +``` + +where: + +- delete-name is the message name +- name is the name to delete + +## Results + +Congratulations, you've defined messages that trigger state transitions. Now it's time to implement types and methods that operate on the state. diff --git a/docs/versioned_docs/version-v0.25/guide/04-nameservice/03-types.md b/docs/versioned_docs/version-v0.25/guide/04-nameservice/03-types.md new file mode 100644 index 0000000..7b01bd0 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/04-nameservice/03-types.md @@ -0,0 +1,111 @@ +--- +sidebar_position: 3 +description: Implement types and methods that operate on the state. +--- + +# Implement Types + +Now that you've defined messages that trigger state transitions, it's time to implement types and methods that operate on the state. + +> The Cosmos SDK relies on keepers. A keeper is an abstraction that lets your blockchain app interact with the state. Functions like create, read, update, and delete (CRUD) are defined as keeper methods. + +For the nameservice blockchain, define a `whois` type and the create and delete methods. 
+ +Because Ignite CLI does the heavy lifting for you, choose from several `ignite scaffold` commands to create CRUD functionality code for data stored in different ways: + +- Array, a list-like data structure +- Map (key-value pairs) +- In a single location + +## Add the whois Type + +Use the `ignite scaffold map` command to scaffold the `whois` type and create the code that implements CRUD functionality to create, read, update, and delete information about names. + +In this example, the `whois` type is stored in a map-like data structure: + +```bash +ignite scaffold map whois name value price owner --no-message +``` + +where: + +- whois is the type +- name is the name the user sets +- value is the value that the name resolves to +- price is the bid +- `--no-message` flag skips message creation + + By default, generic CRUD messages are scaffolded. However, you've already created messages specifically for this blockchain, so you can skip message creation with the `--no-message` flag. + +The `ignite scaffold map whois name value price owner --no-message` command created and modified several files: + +* `proto/nameservice/whois.proto` + + Defines the `Whois` type as a proto message. + +* `proto/nameservice/query.proto` + + * Queries to get data from the blockchain. + * Define queries as proto messages. + * Register the messages in the `Query` service. + +* `proto/nameservice/genesis.proto` + + A type for exporting the state of the blockchain, for example, during software upgrades. + +* `x/nameservice/keeper/grpc_query_whois.go` + + Keeper methods to query the blockchain. + +* `x/nameservice/keeper/grpc_query_whois_test.go` + + Tests for query keeper methods. + +* `x/nameservice/keeper/whois.go` + + Keeper methods to get, set, and remove whois information from the store. + +* `x/nameservice/keeper/whois_test.go` + + Tests for keeper methods. + +* `x/nameservice/client/cli/query_whois.go` + + CLI commands for querying the blockchain. 
+ +* `x/nameservice/client/cli/query.go` + + Registers the CLI commands. + +* `x/nameservice/client/cli/query_whois_test.go` + + Tests for CLI commands. + +* `x/nameservice/types/keys.go` + + String prefix in the key to store whois information in the state. + +* `x/nameservice/genesis.go` + + Logic for exporting the state. + +* `x/nameservice/types/genesis.go` + + Logic for validating the genesis file. + +* `x/nameservice/module.go` + + Registers gRPC gateway routes. + +## Keeper Package + +In the `x/nameservice/keeper/whois.go` file, take at a look at the keeper package. + +- `SetWhois` uses a key-value store with a prefix for the `Whois` type and uses a `store.Set` method to write a `Whois` into the store. + +<!-- where is this? teach me please +`Whois-value-` encodes the `Whois` type that is generated from a protocol buffer definition--> + +- `GetWhois` selects a store using the `Whois` prefix and uses a `store.Get` method to fetch a `Whois` with a particular index. + +The keeper package also includes `RemoveWhois` and `GetAllWhois`. diff --git a/docs/versioned_docs/version-v0.25/guide/04-nameservice/04-keeper.md b/docs/versioned_docs/version-v0.25/guide/04-nameservice/04-keeper.md new file mode 100644 index 0000000..a025d1c --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/04-nameservice/04-keeper.md @@ -0,0 +1,202 @@ +--- +sidebar_position: 4 +description: Define keepers for the nameservice module. +--- + +# Keeper + +> The main core of a Cosmos SDK module is a piece called the keeper. The keeper handles interactions with the store, has references to other [keepers](https://docs.cosmos.network/main/building-modules/keeper) for cross-module interactions, and contains most of the core functionality of a module. + +## Define Keepers for the Nameservice Module + +Keepers are module-specific. Keeper is part of the Cosmos SDK that is responsible for writing data to the store. Each module uses its own keeper. 
+ +In this section, define the keepers that are required by the nameservice module: + +- Buy name +- Set name +- Delete name + +## Buy Name + +To define the keeper for the buy name transaction, add this code to the `x/nameservice/keeper/msg_server_buy_name.go` file: + +```go +// x/nameservice/keeper/msg_server_buy_name.go + +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "nameservice/x/nameservice/types" +) + +func (k msgServer) BuyName(goCtx context.Context, msg *types.MsgBuyName) (*types.MsgBuyNameResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Try getting a name from the store + whois, isFound := k.GetWhois(ctx, msg.Name) + + // Set the price at which the name has to be bought if it didn't have an owner before + minPrice := sdk.Coins{sdk.NewInt64Coin("token", 10)} + + // Convert price and bid strings to sdk.Coins + price, _ := sdk.ParseCoinsNormalized(whois.Price) + bid, _ := sdk.ParseCoinsNormalized(msg.Bid) + + // Convert owner and buyer address strings to sdk.AccAddress + owner, _ := sdk.AccAddressFromBech32(whois.Owner) + buyer, _ := sdk.AccAddressFromBech32(msg.Creator) + + // If a name is found in store + if isFound { + // If the current price is higher than the bid + if price.IsAllGT(bid) { + // Throw an error + return nil, sdkerrors.Wrap(sdkerrors.ErrInsufficientFunds, "Bid is not high enough") + } + + // Otherwise (when the bid is higher), send tokens from the buyer to the owner + err := k.bankKeeper.SendCoins(ctx, buyer, owner, bid) + if err != nil { + return nil, err + } + } else { // If the name is not found in the store + // If the minimum price is higher than the bid + if minPrice.IsAllGT(bid) { + // Throw an error + return nil, sdkerrors.Wrap(sdkerrors.ErrInsufficientFunds, "Bid is less than min amount") + } + + // Otherwise (when the bid is higher), send tokens from the buyer's account to the module's account (as a payment for the name) 
+ err := k.bankKeeper.SendCoinsFromAccountToModule(ctx, buyer, types.ModuleName, bid) + if err != nil { + return nil, err + } + } + + // Create an updated whois record + newWhois := types.Whois{ + Index: msg.Name, + Name: msg.Name, + Value: whois.Value, + Price: bid.String(), + Owner: buyer.String(), + } + + // Write whois information to the store + k.SetWhois(ctx, newWhois) + return &types.MsgBuyNameResponse{}, nil +} +``` + +When you scaffolded the `nameservice` module you used `--dep bank` to specify a dependency between the `nameservice` and `bank` modules. + +This dependency automatically created an `expected_keepers.go` file with a `BankKeeper` interface. + +The `BuyName` transaction uses `SendCoins` and `SendCoinsFromAccountToModule` methods from the `bank` module. + +Edit the `x/nameservice/types/expected_keepers.go` file to add `SendCoins` and `SendCoinsFromAccountToModule` to be able to use it in the keeper methods of the `nameservice` module. + +```go +// x/nameservice/types/expected_keepers.go + +package types + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" +) + +type BankKeeper interface { + SendCoinsFromAccountToModule(ctx sdk.Context, senderAddr sdk.AccAddress, recipientModule string, amt sdk.Coins) error + SendCoins(ctx sdk.Context, fromAddr sdk.AccAddress, toAddr sdk.AccAddress, amt sdk.Coins) error +} +``` + +## Set Name + +To define the keeper for the set name transaction, add this code to the `x/nameservice/keeper/msg_server_set_name.go` file: + +```go +// x/nameservice/keeper/msg_server_set_name.go + +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "nameservice/x/nameservice/types" +) + +func (k msgServer) SetName(goCtx context.Context, msg *types.MsgSetName) (*types.MsgSetNameResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Try getting name information from the store + whois, _ := k.GetWhois(ctx, msg.Name) + + // If the message 
sender address doesn't match the name owner, throw an error + if !(msg.Creator == whois.Owner) { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "Incorrect Owner") + } + + // Otherwise, create an updated whois record + newWhois := types.Whois{ + Index: msg.Name, + Name: msg.Name, + Value: msg.Value, + Owner: whois.Owner, + Price: whois.Price, + } + + // Write whois information to the store + k.SetWhois(ctx, newWhois) + return &types.MsgSetNameResponse{}, nil +} +``` + +## Delete Name + +To define the keeper for the delete name transaction, add this code to the `x/nameservice/keeper/msg_server_delete_name.go` file: + +```go +// x/nameservice/keeper/msg_server_delete_name.go + +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "nameservice/x/nameservice/types" +) + +func (k msgServer) DeleteName(goCtx context.Context, msg *types.MsgDeleteName) (*types.MsgDeleteNameResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Try getting name information from the store + whois, isFound := k.GetWhois(ctx, msg.Name) + + // If a name is not found, throw an error + if !isFound { + return nil, sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "Name doesn't exist") + } + + // If the message sender address doesn't match the name owner, throw an error + if !(whois.Owner == msg.Creator) { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "Incorrect Owner") + } + + // Otherwise, remove the name information from the store + k.RemoveWhois(ctx, msg.Name) + return &types.MsgDeleteNameResponse{}, nil +} +``` diff --git a/docs/versioned_docs/version-v0.25/guide/04-nameservice/05-play.md b/docs/versioned_docs/version-v0.25/guide/04-nameservice/05-play.md new file mode 100644 index 0000000..186d577 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/04-nameservice/05-play.md @@ -0,0 +1,243 @@ +--- +sidebar_position: 5 +description: Start and experiment with your nameservice 
blockchain and nameservice module. +--- + +# Play With Your Blockchain + +If you haven't already, start a blockchain node in development: + +```bash +ignite chain serve -r +``` + +The optional `-r` flag is useful in development mode since it resets the blockchain's state if it has been started before. + +After the `serve` command has finished building the blockchain, a `nameserviced` binary is installed by default in the `~/go/bin` directory. + +The terminal window where the chain is started must remain open, so open a second terminal window to use `nameserviced` to run commands at the command line. + +## Buy a New Name + +Purchase a new name using the `buy-name` command. The name is `foo` and the bid is `20token`. + +In the keeper for the buy name transaction, you added code to the `msg_server_buy_name.go` file that hard-coded the minimum bid to `10token`. Any bid lower than that amount results in a rejected purchase. + +```bash +nameserviced tx nameservice buy-name foo 20token --from alice +``` + +where: + +- buy-name is the command that accepts two arguments +- foo is the name +- 20token is the bid +- the `--from alice` flag specifies the user account that signs and broadcasts the transaction + +This `buy-name` command creates a transaction and prompts the user `alice` to sign and broadcast the transaction. + +Here is what an unsigned transaction looks like: + +```json +{ + "body": { + "messages": [ + { + "@type": "/username.nameservice.nameservice.MsgBuyName", + "creator": "cosmos1p0fprxtpk497jvczexp96sy2w43hupeph9km5d", + "name": "foo", + "bid": "20token" + } + ], + "memo": "", + "timeout_height": "0", + "extension_options": [], + "non_critical_extension_options": [] + }, + "auth_info": { + "signer_infos": [], + "fee": { "amount": [], "gas_limit": "200000", "payer": "", "granter": "" } + }, + "signatures": [] +} +``` + +### Buy Name Transaction Details + +Look at the transaction details: + +- The transaction contains only one message: `MsgBuyName`. 
+- The message `@type` matches the package name of the corresponding proto file, `proto/nameservice/tx.proto`. +- The `creator` field is populated automatically with the address of the account broadcasting the transaction. +- The local account `alice` address is `cosmos1p0f...km5d`. +- Values of `name` and `bid` are passed as CLI arguments. + +After the transaction is broadcast and included in a block, the blockchain returns a response where `"code": 0` means the transaction was successfully processed. + +```json +{ + "height": "658", + "txhash": "EDC1842BE4B596DDD9E2D34F2E372354F9BA5F6D2E4B3F1C2664F4FF05D433B7", + "codespace": "", + "code": 0, + "data": "0A090A074275794E616D65", + "raw_log": "[{\"events\":[{\"type\":\"message\",\"attributes\":[{\"key\":\"action\",\"value\":\"BuyName\"}]}]}]", + "logs": [ + { + "msg_index": 0, + "log": "", + "events": [ + { + "type": "message", + "attributes": [{ "key": "action", "value": "BuyName" }] + } + ] + } + ], + "info": "", + "gas_wanted": "200000", + "gas_used": "47954", + "tx": null, + "timestamp": "" +} +``` + +## Query the Chain for a List of Names + +Query the chain for a list of name and corresponding values. Query commands don't need the `--from` flag, because they don't broadcast transactions and make only free requests. + +```bash +nameserviced q nameservice list-whois +``` + +The response confirms that the name `foo` was successfully purchased by `alice` and the current `price` is set to `20token`. + +```yaml +Whois: +- creator: cosmos1p0fprxtpk497jvczexp96sy2w43hupeph9km5d + index: foo + name: foo + price: 20token + value: "" +pagination: + next_key: null + total: "0" +``` + +## Set a Value to the Name + +Now that `alice` is an owner of the name, she can set the value to anything she wants. 
Use the `set-name` command to set the value to `bar`: + +```bash +nameserviced tx nameservice set-name foo bar --from alice +``` + +Query for a list of names again: + +```bash +nameserviced q nameservice list-whois +``` + +The response shows that `value` is now `bar`. + +```yaml +Whois: +- creator: cosmos1p0fprxtpk497jvczexp96sy2w43hupeph9km5d + index: foo + name: foo + price: 20token + value: bar +pagination: + next_key: null + total: "0" +``` + +## Buy an Existing Name + +Use the `bob` account to purchase an existing name from `alice`. A successful bid requires that the buy price is higher than the current price of `20token`. + +```bash +nameserviced tx nameservice buy-name foo 40token --from bob +``` + +In this `buy-name` command, the bid is updated to the latest bid of `40token` and the `--from bob` flag specifies that the transaction is signed by the `bob` address. + +Query for a list of names again: + +```bash +nameserviced q nameservice list-whois +``` + +The response shows a different creator address than `alice` (it's now the address for `bob`) and the `price` is now `40token`. + +```yaml +Whois: +- creator: cosmos1ku6sqpk9rgwgx98u2gs9c05aa9wrps969g0wy5 + index: foo + name: foo + price: 40token + value: bar +pagination: + next_key: null + total: "0" +``` + +## Query the Bank Balance + +Use the following command to see how the `alice` bank balance has changed after this transaction: + +```bash +nameserviced q bank balances $(nameserviced keys show alice -a) +``` + +## Test an Unauthorized Transaction + +Try updating the value by broadcasting a transaction from the `alice` account: + +```bash +nameserviced tx nameservice set-name foo qoo --from alice +``` + +An error occurs because `alice` sold the name in a previous transaction. The results show that `alice` is not the owner of the name and is therefore not authorized to change the value. 
+ +```json +{ +  "height": "981", +  "txhash": "8E9951EDC5C9D76C2164BE9572B336B13CCF46653F45F54B2C1FEA702389FAE8", +  "codespace": "sdk", +  "code": 4, +  "data": "", +  "raw_log": "failed to execute message; message index: 0: Incorrect Owner: unauthorized", +  "logs": [], +  "info": "", +  "gas_wanted": "200000", +  "gas_used": "39214", +  "tx": null, +  "timestamp": "" +} +``` + +```yaml +Whois: +- creator: cosmos1ku6sqpk9rgwgx98u2gs9c05aa9wrps969g0wy5 + index: foo + name: foo + price: 40token + value: bar +pagination: + next_key: null + total: "0" +``` + +## Conclusion + +Congratulations 🎉. You have created the nameservice module and the nameservice application. + +You successfully completed these steps: + +- Learned how to work with module dependencies +- Used several scaffolding methods +- Learned about Cosmos SDK types and functions +- Used the CLI to broadcast transactions, and so much more + +You are now prepared to continue your journey to learn about escrow accounts and IBC. diff --git a/docs/versioned_docs/version-v0.25/guide/04-nameservice/_category_.json b/docs/versioned_docs/version-v0.25/guide/04-nameservice/_category_.json new file mode 100644 index 0000000..8f6c2e7 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/04-nameservice/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Sending Tokens: Nameservice", + "position": 4, + "link": null + } \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.25/guide/05-scavenge/01-hunt.md b/docs/versioned_docs/version-v0.25/guide/05-scavenge/01-hunt.md new file mode 100644 index 0000000..51f0245 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/05-scavenge/01-hunt.md @@ -0,0 +1,36 @@ +--- +sidebar_position: 1 +slug: /guide/scavenge + +--- + +# Scavenger hunt game + +In this tutorial, you will build a blockchain for a scavenger hunt game and learn how to: + +* Implement custom logic in the CLI commands +* Use an escrow account to store tokens + +This tutorial was first presented as a 
workshop at GODays 2020 Berlin by [Billy Rennekamp](https://x.com/billyrennekamp). + +This session aims to get you thinking about what is possible when developing applications that have access to **digital scarcity as a primitive**. The easiest way to think of scarcity is as money; If money grew on trees it would stop being _scarce_ and stop having value. + +Although a long history of software deals with money, the representation of money has not been a first-class citizen in the programming environment. Instead, money has historically been represented as a number or a float. It has been left up to a third party merchant service or process of exchange to swap the _representation_ of money for actual cash. If money were a primitive in a software environment, it would allow for **real economies to exist within games and applications**. Money as a primitive takes one more step in erasing the line between games, life, and play. + +This tutorial uses the [Cosmos SDK](https://github.com/cosmos/cosmos-sdk) framework that makes it possible to build **deterministic state machines**. A state machine is simply an application that has a state and explicit functions for updating that state. You can think of a light bulb and a light switch as a kind of state machine: the state of the "application" is either `light on` or `light off`. There is one function in this state machine: `flip switch`. Every time you trigger `flip switch`, the state of the application goes from `light on` to `light off` or vice versa. + +## Deterministic state machine + +A **deterministic** state machine is a state machine in which an accumulation of actions, taken together and replayed, have the same outcome. So if you were to take all the `switch on` and `switch off` actions of the entire month of January for some room and replay them in August, you have the same final state of `light on` or `light off`. Nothing about the metaphorical months of January or August changes the outcome. 
Of course, a _real_ room is not deterministic if things like power shortages or maintenance occurred during those months. + +A strong feature of deterministic state machines lets you track changes with **cryptographic hashes** of the state, similar to version control systems like `git`. If there is agreement about the hash of a certain state, it is unnecessary to replay every action from genesis to ensure that two repos are in sync with each other. These properties are useful when dealing with software that is run by many different people in many different situations, just like git. + +Another nice property of cryptographically hashing state is the system of **reliable dependencies**. For example, a developer can build software that uses your library and references a specific state in your software. That way if your code changes in a way that breaks code in a specific state, developers are not required to use your new version but can continue to use the referenced version. This same property of knowing exactly what the state of a system (as well as all the ways that state can update) makes it possible to have the necessary assurances that allow for digital scarcity within an application. _If I say there is only one of some thing within a state machine and you know that there is no way for that state machine to create more than one, you can rely on there always being only one._ + +You might have guessed by now that we're talking about **blockchains**. Blockchains are deterministic state machines that have very specific rules about how state is updated. Blockchains checkpoint state with cryptographic hashes and use asymmetric cryptography to handle **access control**. There are different ways that different blockchains decide who can make a checkpoint of state. These entities are called **validators**. 
On blockchains like Bitcoin or Ethereum, validators are chosen by an electricity-intensive process called Proof-of-Work (PoW) in tandem with something called the longest chain rule or the Nakamoto consensus. Nakamoto solved the permissionless consensus problem with a remarkably simple but powerful scheme that uses only basic cryptographic primitives (hash functions and digital signatures). + +## Proof-of-Stake (PoS) + +The state machine you build with this tutorial uses the energy-efficient Proof-of-Stake (PoS) consensus that can consist of one or many validators, either trusted or byzantine. When a system handles _real_ scarcity, the integrity of that system becomes very important. One way to ensure integrity is by sharing the responsibility of maintaining the integrity with a large group of independently motivated participants as validators. + +So, now that you know a little more about **why** you might build an app like this, start to dive into the game itself. diff --git a/docs/versioned_docs/version-v0.25/guide/05-scavenge/02-game.md b/docs/versioned_docs/version-v0.25/guide/05-scavenge/02-game.md new file mode 100644 index 0000000..f1152b1 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/05-scavenge/02-game.md @@ -0,0 +1,35 @@ +--- +sidebar_position: 2 +--- + +# Scavenger hunt game + +This tutorial focuses on building the app as a **scavenger hunt** game. Scavenger hunts are all about someone setting up tasks or questions that challenge a participant to find solutions that come with a prize. The basic mechanics of the game are as follows: + +* Anyone can post a question with an encrypted answer. +* This question comes paired with a bounty of coins. +* Anyone can post an answer to this question. If the answer is correct, that person receives the bounty of coins. 
+ +## Safe interactions + +On a public network with latency, it is possible that something like a [man-in-the-middle attack](https://en.wikipedia.org/wiki/Man-in-the-middle_attack) could take place. Instead of pretending to be one of the parties, an attacker takes the sensitive information from one party and uses it for their own benefit. This scenario is called [Front Running](https://en.wikipedia.org/wiki/Front_running) and happens as follows: + +1. You post the answer to some question with a bounty attached to it. +2. Someone else sees you posting the answer and posts it themselves right before you. +3. Since they posted the answer first, they receive the reward instead of you. + +### Prevent front running + +To prevent front running, implement a commit-reveal scheme that converts a single exploitable interaction into two safe interactions. + +The first interaction is the commit where you "commit" to posting an answer in a follow-up interaction. This commit consists of a cryptographic hash of your name combined with the answer that you think is correct. The app saves that value as a claim that you know the answer, but hasn't yet confirmed whether the answer is correct. + +The second interaction is the reveal where you post the answer in plain text along with your name. The application takes your answer and your name and cryptographically hashes them. If the result matches what you previously submitted during the commit stage, that is the proof that it is in fact you who knows the answer and not someone who is just front-running you. + +### Security + +A system like this commit-reveal scheme could be used in tandem with any kind of gaming platform in a **trustless** way. Imagine playing "The Legend of Zelda" and the game was compiled with all the answers to different scavenger hunts already included. When you obtain a certain level the game could reveal the secret answer. 
Then explicitly or behind the scenes, this answer could be combined with your name, hashed, submitted, and subsequently revealed. Your name could be rewarded so that you gain more points in the game. + +Another way of achieving this level of security is with access-control list (ACL) managed by an admin account under control of the gaming company. This admin account could confirm that you beat the level and then give you points. The problem with this ACL approach is a ***single point of failure*** and a single target for trying to attack the system. If there is one key that rules the castle then the whole system is broken if that key is compromised. ACL security also creates a problem with coordination if that admin account is required to be online in order for players to get their points. With a commit-reveal system you have a more trustless architecture where permission is not required to play. The commit-reveal design decision has benefits and drawbacks, but when paired with a careful implementation your game can scale without a single bottleneck or point of failure. + +Now that you know what you are building, you can get started building your game. diff --git a/docs/versioned_docs/version-v0.25/guide/05-scavenge/03-scaffolding.md b/docs/versioned_docs/version-v0.25/guide/05-scavenge/03-scaffolding.md new file mode 100644 index 0000000..dda4694 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/05-scavenge/03-scaffolding.md @@ -0,0 +1,48 @@ +--- +sidebar_position: 3 +--- + +# Scaffold the scavenge chain + +Scaffold a new Cosmos SDK blockchain using the `ignite scaffold chain` command. + +By default a chain is scaffolded with a new empty Cosmos SDK module. Use the `--no-module` flag to skip module scaffolding. + +```bash +ignite scaffold chain scavenge --no-module +``` + +This command creates a new `scavenge` directory with a brand new Cosmos SDK blockchain. 
This blockchain doesn't have any application-specific logic yet, but it imports standard Cosmos SDK modules, such as `auth`, `bank`, `mint`, and others. + +Change the current directory to `scavenge`: + +```bash +cd scavenge +``` + +Inside the project directory, you can execute other Ignite CLI commands to start a blockchain node, scaffold modules, messages, types, generate code, and much more. + +In a Cosmos SDK blockchain, implement application-specific logic in separate modules. Using modules keeps code easy to understand and reuse. + +## Scaffold the scavenge module + +Scaffold a new module called `scavenge`. Based on the game design, the `scavenge` module sends tokens between participants. + +- Implement sending tokens in the standard `bank` module. +- Use the optional `--dep` flag to specify the `bank` module. + +```bash +ignite scaffold module scavenge --dep bank +``` + +This command creates the `x/scavenge` directory and imports the scavenge module into the blockchain in the `app/app.go` file. + +## Save changes + +Before you go to the next step, you can store your project in a git commit: + +```bash +git add . +git commit -m "scaffold scavenge chain and module" +``` + diff --git a/docs/versioned_docs/version-v0.25/guide/05-scavenge/04-messages.md b/docs/versioned_docs/version-v0.25/guide/05-scavenge/04-messages.md new file mode 100644 index 0000000..a5846c9 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/05-scavenge/04-messages.md @@ -0,0 +1,157 @@ +--- +sidebar_position: 4 +--- + +# Messages + +Messages are a great place to start when building a module because messages define your application actions. Think of all the scenarios where a user would be able to update the state of the application in any way. These scenarios are the basic interactions, similar to CRUD (create, read, update, and delete) operations. Messages are objects whose end-goal is to trigger state-transitions. 
+ +For the scavenger hunt game, the scavenge module requires 3 messages: + +* Submit scavenge +* Commit solution +* Reveal solution + +## Submit scavenge message + +The submit scavenge message must contain all the information that is required to create a scavenge: + +* Description - the question to be solved or description of the challenge. +* Solution hash - the scrambled solution. +* Reward - the bounty that is awarded to whoever submits the answer first. + +Use the `ignite scaffold message` command to scaffold a new Cosmos SDK message for your module. The command accepts the message name as the first argument and a list of fields. By default, a message is scaffolded in a module with a name that matches the name of the project, in our case `scavenge`. You can use a flag to overwrite this default naming behavior. + +```bash +ignite scaffold message submit-scavenge solutionHash description reward +``` + +The command creates and modifies several files: + +``` +modify app/app.go +create proto/scavenge/genesis.proto +create proto/scavenge/params.proto +create proto/scavenge/query.proto +create proto/scavenge/tx.proto +create testutil/keeper/scavenge.go +create x/scavenge/client/cli/query.go +create x/scavenge/client/cli/query_params.go +create x/scavenge/client/cli/tx.go +create x/scavenge/genesis.go +create x/scavenge/genesis_test.go +create x/scavenge/keeper/grpc_query.go +create x/scavenge/keeper/grpc_query_params.go +create x/scavenge/keeper/grpc_query_params_test.go +create x/scavenge/keeper/keeper.go +create x/scavenge/keeper/msg_server.go +create x/scavenge/keeper/msg_server_test.go +create x/scavenge/keeper/params.go +create x/scavenge/keeper/params_test.go +create x/scavenge/module.go +create x/scavenge/module_simulation.go +create x/scavenge/simulation/simap.go +create x/scavenge/types/codec.go +create x/scavenge/types/errors.go +create x/scavenge/types/expected_keepers.go +create x/scavenge/types/genesis.go +create x/scavenge/types/genesis_test.go +create 
x/scavenge/types/keys.go +create x/scavenge/types/params.go +create x/scavenge/types/types.go + +🎉 Module created scavenge. +``` + +The `scaffold message` command does all of these code updates for you: + +* `proto/scavenge/tx.proto` + + * Adds `MsgSubmitScavenge` and `MsgSubmitScavengeResponse` proto messages + * Registers a `SubmitScavenge` RPC in the `Msg` service + +* `x/scavenge/types/message_submit_scavenge.go` + + * Defines methods to satisfy `Msg` interface + +* `x/scavenge/keeper/msg_server_submit_scavenge.go` + + * Defines the `SubmitScavenge` keeper method + +* `x/scavenge/client/cli/tx_submit_scavenge.go` + + * Adds CLI command to broadcast a transaction with a message + +* `x/scavenge/client/cli/tx.go` + + * Registers the CLI command + +* `x/scavenge/types/codec.go` + + * Registers the codecs + +In `x/scavenge/types/message_submit_scavenge.go`, you can notice that the message follows the `sdk.Msg` interface. The message `struct` automatically contains the information required to create a new scavenge: + +```go +func NewMsgSubmitScavenge(creator string, solutionHash string, description string, reward string) *MsgSubmitScavenge { + return &MsgSubmitScavenge{ + Creator: creator, + SolutionHash: solutionHash, + Description: description, + Reward: reward, + } +} +``` + +The `Msg` interface requires some other methods be set, like validating the content of the `struct` and confirming the message was signed and submitted by the creator. + +Now that a user can submit a scavenge, the only other essential action is to be able to solve the scavenge. As described earlier to prevent front running, use two separate actions, `MsgCommitSolution` and `MsgRevealSolution`. 
+ +## Commit solution message + +The commit solution message requires the following fields: + +* Solution hash - the scrambled solution +* Solution scavenger hash - the hash of the combination of the solution and the person who solved it + +```bash +ignite scaffold message commit-solution solutionHash solutionScavengerHash +``` + +Because you're using the same `ignite scaffold message` command, the set of modified and created files is the same: +``` +modify proto/scavenge/tx.proto +modify x/scavenge/client/cli/tx.go +create x/scavenge/client/cli/tx_commit_solution.go +create x/scavenge/keeper/msg_server_commit_solution.go +modify x/scavenge/module_simulation.go +create x/scavenge/simulation/commit_solution.go +modify x/scavenge/types/codec.go +create x/scavenge/types/message_commit_solution.go +create x/scavenge/types/message_commit_solution_test.go + +🎉 Created a message `commit-solution`. +``` + +## Reveal solution message + +The reveal solution message requires only one field: + +* Solution - the plain text version of the solution + +```bash +ignite scaffold message reveal-solution solution +``` + +Again, because you're using the same `ignite scaffold message` command, the set of modified and created files is the same for the `reveal-solution` message. + +Information about the scavenger (the creator of the message is automatically included) and the solution hash can be deterministically derived from the solution string. + +## Save changes + +Now is a good time to store your project in a git commit: + +```bash +git add . 
+git commit -m "add scavenge messages" +``` diff --git a/docs/versioned_docs/version-v0.25/guide/05-scavenge/05-types.md b/docs/versioned_docs/version-v0.25/guide/05-scavenge/05-types.md new file mode 100644 index 0000000..e585f2d --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/05-scavenge/05-types.md @@ -0,0 +1,134 @@ +--- +sidebar_position: 5 +--- + +# Types + +Now that you've defined messages that trigger state transitions, it's time to implement types and methods that operate on the state. + +A keeper is an abstraction that lets your blockchain app interact with the state. Functions like create, update, and delete are defined as keeper methods. In the scavenge blockchain, you need to define the `scavenge` and `commit` types along with create and update methods. + +Several Ignite CLI commands are available to scaffold the code for CRUD functionality for a list-like data structure, a map (key-value pairs), and a single element in the state. In this example, both `scavenge` and `commit` are stored in a map-like data structure. + +## Scavenge + +Use the `ignite scaffold map` command to scaffold the `scavenge` type and the code for creating, reading, updating, and deleting (CRUD) scavenges. + +The first argument is the name of the type to create (`scavenge`), the rest is a list of fields. By default, generic CRUD messages are scaffolded. However, since you already created messages specifically for this scavenge blockchain, use the `--no-message` flag to skip message creation. 
+ +```bash +ignite scaffold map scavenge solutionHash solution description reward scavenger --no-message +``` + +The `ignite scaffold map` command creates and modifies several files: + +``` +modify proto/scavenge/genesis.proto +modify proto/scavenge/query.proto +create proto/scavenge/scavenge.proto +modify x/scavenge/client/cli/query.go +create x/scavenge/client/cli/query_scavenge.go +create x/scavenge/client/cli/query_scavenge_test.go +modify x/scavenge/genesis.go +modify x/scavenge/genesis_test.go +create x/scavenge/keeper/grpc_query_scavenge.go +create x/scavenge/keeper/grpc_query_scavenge_test.go +create x/scavenge/keeper/scavenge.go +create x/scavenge/keeper/scavenge_test.go +modify x/scavenge/module.go +modify x/scavenge/types/genesis.go +modify x/scavenge/types/genesis_test.go +create x/scavenge/types/key_scavenge.go + +🎉 scavenge added. +``` + +The `scaffold map` command does all of these code updates for you: + +* `proto/scavenge/scavenge.proto` + + * Defines the `Scavenge` type as a proto message + +* `proto/scavenge/query.proto` + + * Defines queries to get data from the blockchain as proto messages and registers the queries in the `Query` service + +* `proto/scavenge/genesis.proto` + + * Creates type for exporting the state of the blockchain (for example, during software upgrades) + +* `x/scavenge/keeper/grpc_query_scavenge.go` + + * Defines keeper methods to query the blockchain + +* `x/scavenge/keeper/grpc_query_scavenge_test.go` + + * Creates tests for query keeper methods + +* `x/scavenge/keeper/scavenge.go` + + * Defines keeper methods to get, set, and remove scavenges from the store + +* `x/scavenge/keeper/scavenge_test.go` + + * Creates tests for the keeper methods + +* `x/scavenge/client/cli/query_scavenge.go` + + * Creates CLI commands for querying the blockchain + +* `x/scavenge/client/cli/query.go` + + * Registers the CLI commands + +* `x/scavenge/client/cli/query_scavenge_test.go` + + * Creates tests for the CLI commands + +*
`x/scavenge/types/keys.go` + + * Creates a string as a prefix in the key used to store scavenges in the state + +* `x/scavenge/genesis.go` + + * Creates logic for importing and exporting the state + +* `x/scavenge/types/genesis.go` + + * Creates logic for validating the genesis file + +* `x/scavenge/module.go` + + * Registers the gRPC gateway routes + +Review the `x/scavenge/keeper/scavenge.go` file to see the `SetScavenge` updates that were made in the `keeper` package, like the `store.Set` method that writes a Scavenge into the store: + +```go +// SetScavenge set a specific scavenge in the store from its index +func (k Keeper) SetScavenge(ctx sdk.Context, scavenge types.Scavenge) { + store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.ScavengeKeyPrefix)) + b := k.cdc.MustMarshal(&scavenge) + store.Set(types.ScavengeKey( + scavenge.Index, + ), b) +} +``` + +Review the update for `GetScavenge` that selects a store using the scavenge prefix and uses `store.Get` to fetch a scavenge with a particular index. + +## Commit + +Use `ignite scaffold map` to create the same logic for a `commit` type. + +```bash +ignite scaffold map commit solutionHash solutionScavengerHash --no-message +``` + +## Save changes + +Now is a good time to store your project in a git commit: + +```bash +git add . +git commit -m "add scavenge types" +``` diff --git a/docs/versioned_docs/version-v0.25/guide/05-scavenge/06-handlers.md b/docs/versioned_docs/version-v0.25/guide/05-scavenge/06-handlers.md new file mode 100644 index 0000000..798b941 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/05-scavenge/06-handlers.md @@ -0,0 +1,18 @@ +--- +sidebar_position: 6 +--- + +# Handlers + +For a message to reach a keeper, it has to go through a message server handler. A handler is where you can apply logic to allow or deny a message to succeed.
+ +* If you're familiar with the [Model-view-controller](https://en.wikipedia.org/wiki/Model%E2%80%93view%E2%80%93controller) (MVC) software architecture, the keeper is a bit like the model, and the handler is a bit like the controller. +* If you're familiar with [React](<https://en.wikipedia.org/wiki/React_(web_framework)>) or [Vue](https://en.wikipedia.org/wiki/Vue.js) architecture, the keeper is a bit like the reducer store and the handler is a bit like actions. + +Three message types were automatically added to the message server: + +* `MsgSubmitScavenge` +* `MsgCommitSolution` +* `MsgRevealSolution` + +Each message, when handled, calls the appropriate keeper method that is responsible for committing changes to the store. diff --git a/docs/versioned_docs/version-v0.25/guide/05-scavenge/07-keeper.md b/docs/versioned_docs/version-v0.25/guide/05-scavenge/07-keeper.md new file mode 100644 index 0000000..a6cee0d --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/05-scavenge/07-keeper.md @@ -0,0 +1,245 @@ +--- +sidebar_position: 7 +--- + +# Keeper + +Keepers are a Cosmos SDK abstraction whose role is to manage access to the subset of the state defined by various modules. 
+ +## Create scavenge + +Make the required changes in the `x/scavenge/keeper/msg_server_submit_scavenge.go` file so the create scavenge method can manage the following: + +* Check that a scavenge with a given solution hash doesn't exist +* Send tokens from the scavenge creator account to a module account +* Write the scavenge to the store + +```go +// x/scavenge/keeper/msg_server_submit_scavenge.go + +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/tendermint/tendermint/crypto" + + "scavenge/x/scavenge/types" +) + +func (k msgServer) SubmitScavenge(goCtx context.Context, msg *types.MsgSubmitScavenge) (*types.MsgSubmitScavengeResponse, error) { + // get context that contains information about the environment, such as block height + ctx := sdk.UnwrapSDKContext(goCtx) + + // create a new scavenge from the data in the MsgSubmitScavenge message + var scavenge = types.Scavenge{ + Index: msg.SolutionHash, + Description: msg.Description, + SolutionHash: msg.SolutionHash, + Reward: msg.Reward, + } + + // try getting a scavenge from the store using the solution hash as the key + _, isFound := k.GetScavenge(ctx, scavenge.SolutionHash) + + // return an error if a scavenge already exists in the store + if isFound { + return nil, sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "Scavenge with that solution hash already exists") + } + + // get address of the Scavenge module account + moduleAcct := sdk.AccAddress(crypto.AddressHash([]byte(types.ModuleName))) + + // convert the message creator address from a string into sdk.AccAddress + scavenger, err := sdk.AccAddressFromBech32(msg.Creator) + if err != nil { + panic(err) + } + + // convert tokens from string into sdk.Coins + reward, err := sdk.ParseCoinsNormalized(scavenge.Reward) + if err != nil { + panic(err) + } + + // send tokens from the scavenge creator to the module account + sdkError := k.bankKeeper.SendCoins(ctx, 
scavenger, moduleAcct, reward) + if sdkError != nil { + return nil, sdkError + } + + // write the scavenge to the store + k.SetScavenge(ctx, scavenge) + return &types.MsgSubmitScavengeResponse{}, nil +} +``` + +Notice the use of `moduleAcct`. This account is not controlled by a public key pair, but is a reference to an account that is owned by this actual module. `moduleAcct` is used to hold the bounty reward that is attached to a scavenge until that scavenge has been solved, at which point the bounty is paid to the account who solved the scavenge. + +`SubmitScavenge` uses the `SendCoins` method from the `bank` module. When you scaffolded the scavenge module, you used `--dep bank` to specify a dependency between the `scavenge` and `bank` modules. This dependency automatically created an `expected_keepers.go` file with a `BankKeeper` interface. + +To use the `BankKeeper` interface in the keeper methods of the `scavenge` module, add `SendCoins` to the `x/scavenge/types/expected_keepers.go` file: + +```go +// x/scavenge/types/expected_keepers.go + +package types + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" +) + +type BankKeeper interface { + SendCoins(ctx sdk.Context, fromAddr sdk.AccAddress, toAddr sdk.AccAddress, amt sdk.Coins) error +} +``` + +## Commit Solution + +Make the required changes in the `x/scavenge/keeper/msg_server_commit_solution.go` file so the commit solution method can manage the following: + +* Check that commit with a given hash doesn't exist in the store +* Write a new commit to the store + +```go +// x/scavenge/keeper/msg_server_commit_solution.go + +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "scavenge/x/scavenge/types" +) + +func (k msgServer) CommitSolution(goCtx context.Context, msg *types.MsgCommitSolution) (*types.MsgCommitSolutionResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // create a new commit from the 
information in the MsgCommitSolution message + var commit = types.Commit{ + Index: msg.SolutionScavengerHash, + SolutionHash: msg.SolutionHash, + SolutionScavengerHash: msg.SolutionScavengerHash, + } + + // try getting a commit from the store using the solution+scavenger hash as the key + _, isFound := k.GetCommit(ctx, commit.SolutionScavengerHash) + + // return an error if a commit already exists in the store + if isFound { + return nil, sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "Commit with that hash already exists") + } + + // write commit to the store + k.SetCommit(ctx, commit) + return &types.MsgCommitSolutionResponse{}, nil +} +``` + +## Reveal Solution + +Make the required changes in the `x/scavenge/keeper/msg_server_reveal_solution.go` file so the reveal solution method can manage the following: + +* Check that a commit with a given hash exists in the store +* Check that a scavenge with a given solution hash exists in the store +* Check that the scavenge hasn't already been solved +* Send tokens from the module account to the account that revealed the correct answer +* Write the updated scavenge to the store + +```go +// x/scavenge/keeper/msg_server_reveal_solution.go + +package keeper + +import ( + "context" + "crypto/sha256" + "encoding/hex" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/tendermint/tendermint/crypto" + + "scavenge/x/scavenge/types" +) + +func (k msgServer) RevealSolution(goCtx context.Context, msg *types.MsgRevealSolution) (*types.MsgRevealSolutionResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // concatenate a solution and a scavenger address and convert it to bytes + var solutionScavengerBytes = []byte(msg.Solution + msg.Creator) + + // find the hash of solution and address + var solutionScavengerHash = sha256.Sum256(solutionScavengerBytes) + + // convert the hash to a string + var solutionScavengerHashString = hex.EncodeToString(solutionScavengerHash[:]) +
+ // try getting a commit using the hash of solution and address + _, isFound := k.GetCommit(ctx, solutionScavengerHashString) + + // return an error if a commit doesn't exist + if !isFound { + return nil, sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "Commit with that hash doesn't exists") + } + + // find a hash of the solution + var solutionHash = sha256.Sum256([]byte(msg.Solution)) + + // encode the solution hash to string + var solutionHashString = hex.EncodeToString(solutionHash[:]) + var scavenge types.Scavenge + + // get a scavenge from the store using the solution hash + scavenge, isFound = k.GetScavenge(ctx, solutionHashString) + + // return an error if the solution doesn't exist + if !isFound { + return nil, sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "Scavenge with that solution hash doesn't exists") + } + + // check that the scavenger property contains a valid address + _, err := sdk.AccAddressFromBech32(scavenge.Scavenger) + + // return an error if a scavenge has already been solved + if err == nil { + return nil, sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "Scavenge has already been solved") + } + + // save the scavenger address to the scavenge + scavenge.Scavenger = msg.Creator + + // save the correct solution to the scavenge + scavenge.Solution = msg.Solution + + // get address of the module account + moduleAcct := sdk.AccAddress(crypto.AddressHash([]byte(types.ModuleName))) + + // convert scavenger address from string to sdk.AccAddress + scavenger, err := sdk.AccAddressFromBech32(scavenge.Scavenger) + if err != nil { + panic(err) + } + + // parse tokens from a string to sdk.Coins + reward, err := sdk.ParseCoinsNormalized(scavenge.Reward) + if err != nil { + panic(err) + } + + // send tokens from a module account to the scavenger + sdkError := k.bankKeeper.SendCoins(ctx, moduleAcct, scavenger, reward) + if sdkError != nil { + return nil, sdkError + } + + // save the updated scavenge to the store + k.SetScavenge(ctx, scavenge) + return
&types.MsgRevealSolutionResponse{}, nil +} +``` diff --git a/docs/versioned_docs/version-v0.25/guide/05-scavenge/08-cli.md b/docs/versioned_docs/version-v0.25/guide/05-scavenge/08-cli.md new file mode 100644 index 0000000..8f557b1 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/05-scavenge/08-cli.md @@ -0,0 +1,145 @@ +--- +sidebar_position: 8 +--- + +# CLI + +A command line interface (CLI) lets you interact with your app after it is running on a machine somewhere. Each module has its own namespace within the CLI that gives it the ability to create and sign messages that are destined to be handled by that module. + +The CLI also comes with the ability to query the state of that module. When combined with the rest of the app, the CLI lets you do things like generate keys for a new account or check the status of an interaction you already had with the application. + +The CLI for the scavenge module is present in the `tx.go` and `query.go` files in the `x/scavenge/client/cli/` directory. + +- The `tx.go` file is for making transactions that contain messages that will ultimately update the state. +- The `query.go` file is for making queries that let you read information from the state. + +Both files use the [Cobra](https://github.com/spf13/cobra) library. + +## The tx.go file + +The `tx.go` file contains the `GetTxCmd` standard method that is used in the Cosmos SDK. This method is referenced later in the `module.go` file that describes exactly which attributes a module has. + +This method makes it easier to incorporate different modules for different reasons at the level of the actual application. You are focused on a module now, but later you create an application that uses this module and other modules that are already available within the Cosmos SDK.
+ +## Commit solution + +```go +// x/scavenge/client/cli/tx_commit_solution.go + +package cli + +import ( + "crypto/sha256" + "encoding/hex" + + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/client/tx" + "github.com/spf13/cobra" + + "scavenge/x/scavenge/types" +) + +func CmdCommitSolution() *cobra.Command { + cmd := &cobra.Command{ + // pass a solution as the only argument + Use: "commit-solution [solution]", + Short: "Broadcast message commit-solution", + // set the number of arguments to 1 + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + clientCtx, err := client.GetClientTxContext(cmd) + if err != nil { + return err + } + + solution := args[0] + + // find a hash of the solution + solutionHash := sha256.Sum256([]byte(solution)) + + // convert the solution hash to string + solutionHashString := hex.EncodeToString(solutionHash[:]) + + // convert a scavenger address to string + var scavenger = clientCtx.GetFromAddress().String() + + // find the hash of solution and scavenger address + var solutionScavengerHash = sha256.Sum256([]byte(solution + scavenger)) + + // convert the hash to string + var solutionScavengerHashString = hex.EncodeToString(solutionScavengerHash[:]) + + // create a new message + msg := types.NewMsgCommitSolution(clientCtx.GetFromAddress().String(), string(solutionHashString), string(solutionScavengerHashString)) + if err := msg.ValidateBasic(); err != nil { + return err + } + + // broadcast the transaction with the message to the blockchain + return tx.GenerateOrBroadcastTxCLI(clientCtx, cmd.Flags(), msg) + }, + } + + flags.AddTxFlagsToCmd(cmd) + + return cmd +} +``` + +Note that this file makes use of the `sha256` library for hashing the plain text solutions into the scrambled hashes. 
This activity takes place on the client side so the solutions are never leaked to any public entity that might want to sneak a peek and steal the bounty reward associated with the scavenges. You can also notice that the hashes are converted into hexadecimal representation to make them easy to read as strings. Hashes are ultimately stored as hexadecimal representations in the keeper. + +## Submit scavenge + +```go +// x/scavenge/client/cli/tx_submit_scavenge.go + +package cli + +import ( + "crypto/sha256" + "encoding/hex" + + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/client/tx" + "github.com/spf13/cobra" + + "scavenge/x/scavenge/types" +) + +func CmdSubmitScavenge() *cobra.Command { + cmd := &cobra.Command{ + Use: "submit-scavenge [solution] [description] [reward]", + Short: "Broadcast message submit-scavenge", + Args: cobra.ExactArgs(3), + RunE: func(cmd *cobra.Command, args []string) error { + clientCtx, err := client.GetClientTxContext(cmd) + if err != nil { + return err + } + + // find a hash of the solution + solutionHash := sha256.Sum256([]byte(args[0])) + + // convert the hash to string + solutionHashString := hex.EncodeToString(solutionHash[:]) + argsDescription := string(args[1]) + argsReward := string(args[2]) + + // create a new message + msg := types.NewMsgSubmitScavenge(clientCtx.GetFromAddress().String(), string(solutionHashString), string(argsDescription), string(argsReward)) + if err := msg.ValidateBasic(); err != nil { + return err + } + + // broadcast the transaction with the message to the blockchain + return tx.GenerateOrBroadcastTxCLI(clientCtx, cmd.Flags(), msg) + }, + } + + flags.AddTxFlagsToCmd(cmd) + + return cmd +} +``` diff --git a/docs/versioned_docs/version-v0.25/guide/05-scavenge/09-play.md b/docs/versioned_docs/version-v0.25/guide/05-scavenge/09-play.md new file mode 100644 index 0000000..c558263 --- /dev/null +++
b/docs/versioned_docs/version-v0.25/guide/05-scavenge/09-play.md @@ -0,0 +1,367 @@ +--- +sidebar_position: 9 +--- + +# Play with your blockchain + +Now that you have made the required updates to you new chain, it's time to test it. + +## Start the blockchain + +To start your blockchain in development, run the following command: + +```bash +ignite chain serve +``` + +The `serve` command builds the chain binary file, initializes a data directory, and starts a node in development. + +For all subsequent commands, you use a terminal window that is different from the window you started the chain in. + +## Create a scavenge + +Follow the instructions and submit a new scavenge. + +The first parameter is the `solution`, but you should also know what the actual question is that your solution solves (the `description`). + +You can make the challenge question something family-friendly like `What's brown and sticky?`. Of course, the only solution to this question is `A stick`. + +Next, you must specify the `reward`. Give away `100token` as a reward for solving the scavenge. + +Now you have all the pieces needed to create your message. Piece them all together in the command and add the flag `--from` so the CLI knows who is sending it: + +```bash +scavenged tx scavenge submit-scavenge "A stick" "What's brown and sticky?" 
100token --from alice +``` + +The results: + +```json +{ + "body": { + "messages": [ + { + "@type": "/username.scavenge.scavenge.MsgSubmitScavenge", + "creator": "cosmos1wzgkalxjhaqtznrzzp0xy5jgkxx82xaa660jxh", + "solutionHash": "2f9457a6e8fb202f9e10389a143a383106268c460743dd59d723c0f82d9ba906", + "description": "What's brown and sticky?", + "reward": "100token" + } + ], + "memo": "", + "timeout_height": "0", + "extension_options": [], + "non_critical_extension_options": [] + }, + "auth_info": { + "signer_infos": [], + "fee": { "amount": [], "gas_limit": "200000", "payer": "", "granter": "" } + }, + "signatures": [] +} +``` + +```json +{ + "height": "229", + "txhash": "CE401E1F95FC583355BF6ABB823A4655185E2983CACE7C430E22CC7B573152DD", + "codespace": "", + "code": 0, + "data": "0A100A0E43726561746553636176656E6765", + "raw_log": "[{\"events\":[{\"type\":\"message\",\"attributes\":[{\"key\":\"action\",\"value\":\"SubmitScavenge\"},{\"key\":\"sender\",\"value\":\"cosmos1wzgkalxjhaqtznrzzp0xy5jgkxx82xaa660jxh\"}]},{\"type\":\"transfer\",\"attributes\":[{\"key\":\"recipient\",\"value\":\"cosmos13aupkh5020l9u6qquf7lvtcxhtr5jjama2kwyg\"},{\"key\":\"sender\",\"value\":\"cosmos1wzgkalxjhaqtznrzzp0xy5jgkxx82xaa660jxh\"},{\"key\":\"amount\",\"value\":\"100token\"}]}]}]", + "logs": [ + { + "msg_index": 0, + "log": "", + "events": [ + { + "type": "message", + "attributes": [ + { "key": "action", "value": "SubmitScavenge" }, + { + "key": "sender", + "value": "cosmos1wzgkalxjhaqtznrzzp0xy5jgkxx82xaa660jxh" + } + ] + }, + { + "type": "transfer", + "attributes": [ + { + "key": "recipient", + "value": "cosmos13aupkh5020l9u6qquf7lvtcxhtr5jjama2kwyg" + }, + { + "key": "sender", + "value": "cosmos1wzgkalxjhaqtznrzzp0xy5jgkxx82xaa660jxh" + }, + { "key": "amount", "value": "100token" } + ] + } + ] + } + ], + "info": "", + "gas_wanted": "200000", + "gas_used": "65320", + "tx": null, + "timestamp": "" +} +``` + +Replace the `txhash` with your `txhash` from the previous output: + +```bash 
+scavenged q tx CE401E1F95FC583355BF6ABB823A4655185E2983CACE7C430E22CC7B573152DD --output json +``` + +The results: + +```json +{ + "height": "229", + "txhash": "CE401E1F95FC583355BF6ABB823A4655185E2983CACE7C430E22CC7B573152DD", + "codespace": "", + "code": 0, + "data": "0A100A0E43726561746553636176656E6765", + "raw_log": "[{\"events\":[{\"type\":\"message\",\"attributes\":[{\"key\":\"action\",\"value\":\"SubmitScavenge\"},{\"key\":\"sender\",\"value\":\"cosmos1wzgkalxjhaqtznrzzp0xy5jgkxx82xaa660jxh\"}]},{\"type\":\"transfer\",\"attributes\":[{\"key\":\"recipient\",\"value\":\"cosmos13aupkh5020l9u6qquf7lvtcxhtr5jjama2kwyg\"},{\"key\":\"sender\",\"value\":\"cosmos1wzgkalxjhaqtznrzzp0xy5jgkxx82xaa660jxh\"},{\"key\":\"amount\",\"value\":\"100token\"}]}]}]", + "logs": [ + { + "msg_index": 0, + "log": "", + "events": [ + { + "type": "message", + "attributes": [ + { "key": "action", "value": "SubmitScavenge" }, + { + "key": "sender", + "value": "cosmos1wzgkalxjhaqtznrzzp0xy5jgkxx82xaa660jxh" + } + ] + }, + { + "type": "transfer", + "attributes": [ + { + "key": "recipient", + "value": "cosmos13aupkh5020l9u6qquf7lvtcxhtr5jjama2kwyg" + }, + { + "key": "sender", + "value": "cosmos1wzgkalxjhaqtznrzzp0xy5jgkxx82xaa660jxh" + }, + { "key": "amount", "value": "100token" } + ] + } + ] + } + ], + "info": "", + "gas_wanted": "200000", + "gas_used": "65320", + "tx": { + "@type": "/cosmos.tx.v1beta1.Tx", + "body": { + "messages": [ + { + "@type": "/username.scavenge.scavenge.MsgSubmitScavenge", + "creator": "cosmos1wzgkalxjhaqtznrzzp0xy5jgkxx82xaa660jxh", + "solutionHash": "2f9457a6e8fb202f9e10389a143a383106268c460743dd59d723c0f82d9ba906", + "description": "What's brown and sticky?", + "reward": "100token" + } + ], + "memo": "", + "timeout_height": "0", + "extension_options": [], + "non_critical_extension_options": [] + }, + "auth_info": { + "signer_infos": [ + { + "public_key": { + "@type": "/cosmos.crypto.secp256k1.PubKey", + "key": "ApRuim5kLByq9AqJJ9dEF5rFCkAbhIehEcPzSouM92p6" + 
}, + "mode_info": { "single": { "mode": "SIGN_MODE_DIRECT" } }, + "sequence": "1" + } + ], + "fee": { "amount": [], "gas_limit": "200000", "payer": "", "granter": "" } + }, + "signatures": [ + "8W5MkgV8oWpB6UWRGVKuimfPyb1OutG8KPXTIneM6WIvy4YHToG3GUXFpUrh+CxPXmlDh5gIfeR4+nFfUuQXng==" + ] + }, + "timestamp": "2021-07-09T10:24:52Z" +} +``` + +## Query for a list of scavenges + +```bash +scavenged q scavenge list-scavenge --output json +``` + +The results: + +```json +{ + "Scavenge": [ + { + "creator": "cosmos1wzgkalxjhaqtznrzzp0xy5jgkxx82xaa660jxh", + "index": "2f9457a6e8fb202f9e10389a143a383106268c460743dd59d723c0f82d9ba906", + "solutionHash": "2f9457a6e8fb202f9e10389a143a383106268c460743dd59d723c0f82d9ba906", + "solution": "", + "description": "What's brown and sticky?", + "reward": "100token", + "scavenger": "" + } + ], + "pagination": { "next_key": null, "total": "0" } +} +``` + +## Commit a solution + +Follow the instructions and submit the answer as a commit on behalf of `bob`: + +```bash +scavenged tx scavenge commit-solution "A stick" --from bob +``` + +The results: + +```json +{ + "body": { + "messages": [ + { + "@type": "/username.scavenge.scavenge.MsgCommitSolution", + "creator": "cosmos1gkheudhhjsvq0s8fxt7p6pwe0k3k30kepcnz9p", + "solutionHash": "2f9457a6e8fb202f9e10389a143a383106268c460743dd59d723c0f82d9ba906", + "solutionScavengerHash": "461d54ec0bbb1d696a79af80d7f63e4c6df262d76309423da37189453eaec127" + } + ], + "memo": "", + "timeout_height": "0", + "extension_options": [], + "non_critical_extension_options": [] + }, + "auth_info": { + "signer_infos": [], + "fee": { "amount": [], "gas_limit": "200000", "payer": "", "granter": "" } + }, + "signatures": [] +} +``` + +## Query for a list of commits + +```bash +scavenged q scavenge list-commit --output json +``` + +The results: + +```json +{ + "Commit": [ + { + "creator": "cosmos1gkheudhhjsvq0s8fxt7p6pwe0k3k30kepcnz9p", + "index": "461d54ec0bbb1d696a79af80d7f63e4c6df262d76309423da37189453eaec127", + 
"solutionHash": "2f9457a6e8fb202f9e10389a143a383106268c460743dd59d723c0f82d9ba906", + "solutionScavengerHash": "461d54ec0bbb1d696a79af80d7f63e4c6df262d76309423da37189453eaec127" + } + ], + "pagination": { "next_key": null, "total": "0" } +} +``` + +You don't need to put the `solutionHash` because it can be generated by hashing the actual solution. + +Since all you need is the solution again, you can send and confirm the final message: + +## Reveal a solution + +```bash +scavenged tx scavenge reveal-solution "A stick" --from bob +``` + +The results: + +```json +{ + "body": { + "messages": [ + { + "@type": "/username.scavenge.scavenge.MsgRevealSolution", + "creator": "cosmos1gkheudhhjsvq0s8fxt7p6pwe0k3k30kepcnz9p", + "solution": "A stick" + } + ], + "memo": "", + "timeout_height": "0", + "extension_options": [], + "non_critical_extension_options": [] + }, + "auth_info": { + "signer_infos": [], + "fee": { "amount": [], "gas_limit": "200000", "payer": "", "granter": "" } + }, + "signatures": [] +} +``` + +## Query for a list of solved scavenges + +```bash +scavenged q scavenge list-scavenge --output json +``` + +The results: + +```json +{ + "Scavenge": [ + { + "creator": "cosmos1wzgkalxjhaqtznrzzp0xy5jgkxx82xaa660jxh", + "index": "2f9457a6e8fb202f9e10389a143a383106268c460743dd59d723c0f82d9ba906", + "solutionHash": "2f9457a6e8fb202f9e10389a143a383106268c460743dd59d723c0f82d9ba906", + "solution": "A stick", + "description": "What's brown and sticky?", + "reward": "100token", + "scavenger": "cosmos1gkheudhhjsvq0s8fxt7p6pwe0k3k30kepcnz9p" + } + ], + "pagination": { "next_key": null, "total": "0" } +} +``` + +## Commit a solution again, expect to get an error + +```bash +scavenged tx scavenge commit-solution "A stick" --from bob +``` + +The results: + +```json +{ + "height": "665", + "txhash": "EFA43A3C08BD1D77E597D57E60CD7B4D2E8E442F49BA88C85CC9EEC86E992B75", + "codespace": "sdk", + "code": 18, + "data": "", + "raw_log": "failed to execute message; message index: 0: Commit 
with that hash already exists: invalid request", + "logs": [], + "info": "", + "gas_wanted": "200000", + "gas_used": "41086", + "tx": null, + "timestamp": "" +} +``` + +Congratulations, you have built and tested a scavenge hunt blockchain! diff --git a/docs/versioned_docs/version-v0.25/guide/05-scavenge/_category_.json b/docs/versioned_docs/version-v0.25/guide/05-scavenge/_category_.json new file mode 100644 index 0000000..9631490 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/05-scavenge/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Escrow Account: Scavenge", + "position": 5, + "link": null + } \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.25/guide/06-loan.md b/docs/versioned_docs/version-v0.25/guide/06-loan.md new file mode 100644 index 0000000..cf9b099 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/06-loan.md @@ -0,0 +1,922 @@ +--- +sidebar_position: 6 +description: Loan blockchain using Ignite CLI +title: "Advanced Module: DeFi Loan" +--- + +# DeFi loan module + +As a rapidly growing industry in the blockchain ecosystem, (decentralized finance) DeFi is spurring innovation and revolution in spending, sending, locking, and loaning cryptocurrency tokens. + +One of the many goals of blockchain is to make financial instruments available to everyone. A loan in blockchain DeFi can be used in combination with lending, borrowing, spot trading, margin trading, and flash loans. + +With DeFi, end users can quickly and easily access loans without having to submit their passports or background checks like in the traditional banking system. + +In this tutorial, you learn about a basic loan system as you use Ignite CLI to build a loan module. 
+
+**You will learn how to**
+
+* Scaffold a blockchain
+* Scaffold a Cosmos SDK loan module
+* Scaffold a list for loan objects
+* Create messages in the loan module to interact with the loan object
+* Interact with other Cosmos SDK modules
+* Use an escrow module account
+* Add application messages for a loan system
+  * Request loan
+  * Approve loan
+  * Repay loan
+  * Liquidate loan
+  * Cancel loan
+
+**Note:** The code in this tutorial is written specifically for this learning experience and is intended only for educational purposes. This tutorial code is not intended to be used in production.
+
+## Module design
+
+A loan consists of:
+
+* An `id`
+* The `amount` that is being lent
+* A `fee` as cost for the loan
+* The borrowing party provides a `collateral` to request a loan
+* A loan has a `deadline` for repayment, after which the loan can be liquidated
+* A loan has a `state` that describes the status as:
+
+  * requested
+  * approved
+  * paid
+  * cancelled
+  * liquidated
+
+The two accounts involved in the loan are:
+
+* `borrower`
+* `lender`
+
+### The borrower
+
+A borrower posts a loan request with loan information such as:
+
+* `amount`
+* `fee`
+* `collateral`
+* `deadline`
+
+The borrower must repay the loan amount and the loan fee to the lender by the deadline or risk losing the collateral.
+
+### The lender
+
+A lender can approve a loan request from a borrower.
+
+- After the lender approves the loan, the loan amount is transferred to the borrower.
+- If the borrower is unable to pay the loan, the lender can liquidate the loan.
+- Loan liquidation transfers the collateral and the fees to the lender.
+
+## Scaffold the blockchain
+
+Use Ignite CLI to scaffold a fully functional Cosmos SDK blockchain app named `loan`:
+
+```bash
+ignite scaffold chain loan --no-module
+```
+
+The `--no-module` flag prevents scaffolding a default module. Don't worry, you will add the loan module later.
+ +Change into the newly created `loan` directory: + +```bash +cd loan +``` + +## Scaffold the module + +Scaffold the module to create a new `loan` module. Following the Cosmos SDK convention, all modules are scaffolded inside the `x` directory: + +```bash +ignite scaffold module loan --dep bank +``` + +Use the `--dep` flag to specify that this module depends on and is going to interact with the Cosmos SDK `bank` module. + +## Scaffold a list + +Use the `ignite scaffold list` command to scaffold code necessary to store loans in an array-like data structure: + +```bash +ignite scaffold list loan amount fee collateral deadline state borrower lender --no-message +``` + +Use the `--no-message` flag to disable CRUD messages in the scaffold. + +The data you store in an array-like data structure are the loans, with these parameters that are defined in the `Loan` message in `proto/loan/loan.proto`: + +```protobuf +message Loan { + uint64 id = 1; + string amount = 2; + string fee = 3; + string collateral = 4; + string deadline = 5; + string state = 6; + string borrower = 7; + string lender = 8; +} +``` + +Later, you define the messages to interact with the loan list. + +Now it is time to use messages to interact with the loan module. But first, make sure to store your current state in a git commit: + +```bash +git add . +git commit -m "Scaffold loan module and loan list" +``` + +## Scaffold the messages + +In order to create a loan app, you need the following messages: + +* Request loan +* Approve loan +* Repay loan +* Liquidate loan +* Cancel loan + +You can use the `ignite scaffold message` command to create each of the messages. + +You define the details of each message when you scaffold them. + +Create the messages one at a time with the according application logic. + +### Request loan message + +For a loan, the initial message handles the transaction when a username requests a loan. 
+ +The username wants a certain `amount` and is willing to pay `fees` as well as give `collateral`. The `deadline` marks the time when the loan has to be repaid. + +The first message is the `request-loan` message that requires these input parameters: + +* `amount` +* `fee` +* `collateral` +* `deadline` + +```bash +ignite scaffold message request-loan amount fee collateral deadline +``` + +For the sake of simplicity, define every parameter as a string. + +The `request-loan` message creates a new loan object and locks the tokens to be spent as fee and collateral into an escrow account. Describe these conditions in the module keeper `x/loan/keeper/msg_server_request_loan.go`: + +```go +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "loan/x/loan/types" +) + +func (k msgServer) RequestLoan(goCtx context.Context, msg *types.MsgRequestLoan) (*types.MsgRequestLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Create a new Loan with the following user input + var loan = types.Loan{ + Amount: msg.Amount, + Fee: msg.Fee, + Collateral: msg.Collateral, + Deadline: msg.Deadline, + State: "requested", + Borrower: msg.Creator, + } + + // TODO: collateral has to be more than the amount (+fee?) 
+ + // moduleAcc := sdk.AccAddress(crypto.AddressHash([]byte(types.ModuleName))) + // Get the borrower address + borrower, _ := sdk.AccAddressFromBech32(msg.Creator) + + // Get the collateral as sdk.Coins + collateral, err := sdk.ParseCoinsNormalized(loan.Collateral) + if err != nil { + panic(err) + } + + // Use the module account as escrow account + sdkError := k.bankKeeper.SendCoinsFromAccountToModule(ctx, borrower, types.ModuleName, collateral) + if sdkError != nil { + return nil, sdkError + } + + // Add the loan to the keeper + k.AppendLoan( + ctx, + loan, + ) + + return &types.MsgRequestLoanResponse{}, nil +} +``` + +Since this function is using the `bankKeeper` with the function `SendCoinsFromAccountToModule`, you must add the `SendCoinsFromAccountToModule` function to `x/loan/types/expected_keepers.go` like this: + +```go +package types + +import sdk "github.com/cosmos/cosmos-sdk/types" + +type BankKeeper interface { + SendCoinsFromAccountToModule(ctx sdk.Context, senderAddr sdk.AccAddress, recipientModule string, amt sdk.Coins) error +} +``` + +### Validate the input + +When a loan is created, a certain message input validation is required. You want to throw error messages in case the end user tries impossible inputs. + +You can describe message validation errors in the modules `types` directory. 
+ +Add the following code to the `ValidateBasic()` function in the `x/loan/types/message_request_loan.go` file: + +```go +func (msg *MsgRequestLoan) ValidateBasic() error { + if _, err := sdk.AccAddressFromBech32(msg.Creator); err != nil { + return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + } + + amount, _ := sdk.ParseCoinsNormalized(msg.Amount) + if !amount.IsValid() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "amount is not a valid Coins object") + } + if amount.Empty() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "amount is empty") + } + + fee, _ := sdk.ParseCoinsNormalized(msg.Fee) + if !fee.IsValid() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "fee is not a valid Coins object") + } + + collateral, _ := sdk.ParseCoinsNormalized(msg.Collateral) + if !collateral.IsValid() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "collateral is not a valid Coins object") + } + if collateral.Empty() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "collateral is empty") + } + + return nil +} +``` + +Congratulations, you have created the `request-loan` message. + +## Run and test your first message + +You can run the chain and test your first message. + +Start the blockchain: + +```bash +ignite chain serve +``` + +Add your first loan: + +```bash +loand tx loan request-loan 100token 2token 200token 500 --from alice +``` + +Query your loan: + +```bash +loand query loan list-loan +``` + +The loan shows in the list: + +```bash +Loan: +- amount: 100token + borrower: cosmos17mnrhwchwc8trg4w09s0gvvfsvt58ejtsykkm6 + collateral: 200token + deadline: "500" + fee: 2token + id: "0" + lender: "" + state: requested +``` + +You can stop the blockchain again with CTRL+C. + +### Save iterative changes + +This is a good time to add your advancements to git: + +```bash +git add . 
+git commit -m "Add request-loan message" +``` + +### Approve loan message + +After a loan request has been published, another account can approve the loan and agree to the terms of the borrower. + +The message `approve-loan` has one parameter, the `id`. +Specify the type of `id` as `uint`. By default, ids are stored as `uint`. + +```bash +ignite scaffold message approve-loan id:uint +``` + +This message must be available for all loan types that are in `"requested"` status. + +The loan approval sends the requested coins for the loan to the borrower and sets the loan state to `"approved"`. + +Modify the `x/loan/keeper/msg_server_approve_loan.go` to implement this logic: + +```go +package keeper + +import ( + "context" + "fmt" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "loan/x/loan/types" +) + +func (k msgServer) ApproveLoan(goCtx context.Context, msg *types.MsgApproveLoan) (*types.MsgApproveLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + loan, found := k.GetLoan(ctx, msg.Id) + if !found { + return nil, sdkerrors.Wrapf(sdkerrors.ErrKeyNotFound, "key %d doesn't exist", msg.Id) + } + + // TODO: for some reason the error doesn't get printed to the terminal + if loan.State != "requested" { + return nil, sdkerrors.Wrapf(types.ErrWrongLoanState, "%v", loan.State) + } + + lender, _ := sdk.AccAddressFromBech32(msg.Creator) + borrower, _ := sdk.AccAddressFromBech32(loan.Borrower) + amount, err := sdk.ParseCoinsNormalized(loan.Amount) + if err != nil { + return nil, sdkerrors.Wrap(types.ErrWrongLoanState, "Cannot parse coins in loan amount") + } + + k.bankKeeper.SendCoins(ctx, lender, borrower, amount) + + loan.Lender = msg.Creator + loan.State = "approved" + + k.SetLoan(ctx, loan) + + return &types.MsgApproveLoanResponse{}, nil +} +``` + +This module uses the `SendCoins` function of `bankKeeper`. 
Add this `SendCoins` function to the `x/loan/types/expected_keepers.go` file: + +```go +package types + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" +) + +type BankKeeper interface { + // Methods imported from bank should be defined here + SendCoins(ctx sdk.Context, fromAddr sdk.AccAddress, toAddr sdk.AccAddress, amt sdk.Coins) error + SendCoinsFromAccountToModule(ctx sdk.Context, senderAddr sdk.AccAddress, recipientModule string, amt sdk.Coins) error +} +``` + +Now, define the `ErrWrongLoanState` new error type by adding it to the errors definitions in `x/loan/types/errors.go`: + +```go +package types + +// DONTCOVER + +import ( + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + +// x/loan module sentinel errors +var ( + ErrWrongLoanState = sdkerrors.Register(ModuleName, 2, "wrong loan state") +) +``` + +Start the blockchain and use the two commands you already have available: + +```bash +ignite chain serve -r +``` + +Use the `-r` flag to reset the blockchain state and start with a new database. + +Now, request a loan from `bob`: + +```bash +loand tx loan request-loan 100token 2token 200token 500 --from bob -y +``` + +Query your loan request: + +```bash +loand query loan list-loan +``` + +Approve the loan: + +```bash +loand tx loan approve-loan 0 --from alice -y +``` + +This approve loan transaction sends the balances according to the loan request. + +Check for the loan list again to verify that the loan state is now `approved`. + +```bash +Loan: +- amount: 100token + borrower: cosmos1sx8k358xw5pulv7acjhm6klvn3tukk2r2a74gg + collateral: 200token + deadline: "500" + fee: 2token + id: "0" + lender: cosmos1qxm2dtupmr8pp20m0t7tmjq6gq2z8j3d6ltr9d + state: approved +pagination: + next_key: null + total: "0" +``` + +You can query for alice's balance to see the loan in effect. 
Take the lender address from above, this is alice address: + +```bash +loand query bank balances <alice_address> +``` + +In case everything works as expected, this is a good time to save the state with a git commit: + +```bash +git add . +git commit -m "Add approve loan message" +``` + +### Repay Loan Message + +After the loan has been approved, the username must be able to repay an approved loan. + +Scaffold the message `repay-loan` that a borrower uses to return tokens that were borrowed from the lender: + +```bash +ignite scaffold message repay-loan id:uint +``` + +Repaying a loan requires that the loan is in `"approved"` status. + +The coins as described in the loan are collected and sent from the borrower to the lender, along with the agreed fees. + +The `collateral` is released from the escrow module account. + +Only the `borrower` can repay the loan. + +This loan repayment logic is defined in `x/loan/keeper/msg_server_repay_loan.go`: + +```go +package keeper + +import ( + "context" + "fmt" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "loan/x/loan/types" +) + +func (k msgServer) RepayLoan(goCtx context.Context, msg *types.MsgRepayLoan) (*types.MsgRepayLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + loan, found := k.GetLoan(ctx, msg.Id) + if !found { + return nil, sdkerrors.Wrap(sdkerrors.ErrKeyNotFound, fmt.Sprintf("key %d doesn't exist", msg.Id)) + } + + if loan.State != "approved" { + return nil, sdkerrors.Wrapf(types.ErrWrongLoanState, "%v", loan.State) + } + + lender, _ := sdk.AccAddressFromBech32(loan.Lender) + borrower, _ := sdk.AccAddressFromBech32(loan.Borrower) + + if msg.Creator != loan.Borrower { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "Cannot repay: not the borrower") + } + + amount, _ := sdk.ParseCoinsNormalized(loan.Amount) + fee, _ := sdk.ParseCoinsNormalized(loan.Fee) + collateral, _ := sdk.ParseCoinsNormalized(loan.Collateral) + + err := 
k.bankKeeper.SendCoins(ctx, borrower, lender, amount) + if err != nil { + return nil, sdkerrors.Wrap(types.ErrWrongLoanState, "Cannot send coins") + } + err = k.bankKeeper.SendCoins(ctx, borrower, lender, fee) + if err != nil { + return nil, sdkerrors.Wrap(types.ErrWrongLoanState, "Cannot send coins") + } + err = k.bankKeeper.SendCoinsFromModuleToAccount(ctx, types.ModuleName, borrower, collateral) + if err != nil { + return nil, sdkerrors.Wrap(types.ErrWrongLoanState, "Cannot send coins") + } + + loan.State = "repayed" + + k.SetLoan(ctx, loan) + + return &types.MsgRepayLoanResponse{}, nil +} +``` + +After the coins have been successfully exchanged, the state of the loan is set to `repayed`. + +To release tokens with the `SendCoinsFromModuleToAccount` function of `bankKeepers`, you need to add the `SendCoinsFromModuleToAccount` function to the `x/loan/types/expected_keepers.go`: + +```go +package types + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" +) + +type BankKeeper interface { + // Methods imported from bank should be defined here + SendCoins(ctx sdk.Context, fromAddr sdk.AccAddress, toAddr sdk.AccAddress, amt sdk.Coins) error + SendCoinsFromAccountToModule(ctx sdk.Context, senderAddr sdk.AccAddress, recipientModule string, amt sdk.Coins) error + SendCoinsFromModuleToAccount(ctx sdk.Context, senderModule string, recipientAddr sdk.AccAddress, amt sdk.Coins) error +} +``` + +Start the blockchain and use the two commands you already have available: + +```bash +ignite chain serve -r +``` + +Use the `-r` flag to reset the blockchain state and start with a new database: + +```bash +loand tx loan request-loan 100token 2token 200token 500 --from bob -y +``` + +Query your loan request: + +```bash +loand query loan list-loan +``` + +Approve the loan: + +```bash +loand tx loan approve-loan 0 --from alice -y +``` + +You can query for alice's balance to see the loan in effect. 
+ +Take the lender address from above, this is alice address: + +```bash +loand query bank balances <alice_address> +``` + +Now repay the loan: + +```bash +loand tx loan repay-loan 0 --from bob -y +``` + +The loan status is now `repayed`: + +```bash +Loan: +- amount: 100token + borrower: cosmos1200nsqsxcyxtllfgal5x8qhqwj8km64ft0eu2d + collateral: 200token + deadline: "500" + fee: 2token + id: "0" + lender: cosmos194pn6vly2nlald3zjqcxfnvasa0xt7ect6h6qk + state: repayed +``` + +The alice balance reflects the repayed amount plus fees: + +```bash +loand query bank balances <alice_address> +``` + +Good job! + +Update your git with the changes you made: + +```bash +git add . +git commit -m "Add repay-loan message" +``` + +### Liquidate Loan Message + +After the deadline is passed, a lender can liquidate a loan when the borrower does not repay the tokens. The message to `liquidate-loan` refers to the loan `id`: + +```bash +ignite scaffold message liquidate-loan id:uint +``` + +* The `liquidate-loan` message must be able to be executed by the `lender`. +* The status of the loan must be `approved`. +* The `deadline` block height must have passed. + +When these properties are valid, the collateral shall be liquidated from the `borrower`. 
+ +Add this liquidate loan logic to the `keeper` in `x/loan/keeper/msg_server_liquidate_loan.go`: + +```go +package keeper + +import ( + "context" + "fmt" + "strconv" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "loan/x/loan/types" +) + +func (k msgServer) LiquidateLoan(goCtx context.Context, msg *types.MsgLiquidateLoan) (*types.MsgLiquidateLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + loan, found := k.GetLoan(ctx, msg.Id) + if !found { + return nil, sdkerrors.Wrap(sdkerrors.ErrKeyNotFound, fmt.Sprintf("key %d doesn't exist", msg.Id)) + } + + if loan.Lender != msg.Creator { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "Cannot liquidate: not the lender") + } + + if loan.State != "approved" { + return nil, sdkerrors.Wrapf(types.ErrWrongLoanState, "%v", loan.State) + } + + lender, _ := sdk.AccAddressFromBech32(loan.Lender) + collateral, _ := sdk.ParseCoinsNormalized(loan.Collateral) + + deadline, err := strconv.ParseInt(loan.Deadline, 10, 64) + if err != nil { + panic(err) + } + + if ctx.BlockHeight() < deadline { + return nil, sdkerrors.Wrap(types.ErrDeadline, "Cannot liquidate before deadline") + } + + k.bankKeeper.SendCoinsFromModuleToAccount(ctx, types.ModuleName, lender, collateral) + + loan.State = "liquidated" + + k.SetLoan(ctx, loan) + + return &types.MsgLiquidateLoanResponse{}, nil +} +``` + +Add the new error `ErrDeadline` to the error messages in `x/loan/types/errors.go`: + +```go +package types + +// DONTCOVER + +import ( + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + +// x/loan module sentinel errors +var ( + ErrWrongLoanState = sdkerrors.Register(ModuleName, 2, "wrong loan state") + ErrDeadline = sdkerrors.Register(ModuleName, 3, "deadline") +) +``` + +These changes are required for the `liquidate-loan` message. + +### Test liquidation message + +You can test the liquidation message now. 
Start your chain and reset the state of the app: + +```bash +ignite chain serve -r +``` + +Set the deadline for the loan request to 1 block: + +```bash +loand tx loan request-loan 100token 2token 200token 1 --from bob -y +``` + +Query your loan request: + +```bash +loand query loan list-loan +``` + +Approve the loan: + +```bash +loand tx loan approve-loan 0 --from alice -y +``` + +You can query for alice's balances to see the loan in effect. + +Take the lender address from above, this is alice address. + +```bash +loand query bank balances <alice_address> +``` + +Now, liquidate the loan: + +```bash +loand tx loan liquidate-loan 0 --from alice -y +``` + +Query the loan: + +```bash +loand query loan list-loan +``` + +The loan status is now `liquidated`: + +```bash +Loan: +- amount: 100token + borrower: cosmos1lp4ghp4mmsdgpf2fm22f0qtqmnjeh3gr9h3cau + collateral: 200token + deadline: "1" + fee: 2token + id: "0" + lender: cosmos1w6pfj52jp809pyp2a2h573cta23rc0zsulpafm + state: liquidated +``` + +And alice balance reflects the repayed amount plus fees: + +```bash +loand query bank balances <alice_address> +``` + +Add the changes to your local repository: + +```bash +git add . +git commit -m "Add liquidate-loan message" +``` + +### Cancel loan message + +After a loan request has been made and not been approved, the `borrower` must be able to cancel a loan request. + +Scaffold the message for `cancel-loan`: + +```bash +ignite s message cancel-loan id:uint +``` + +* Only the `borrower` can cancel a loan request. +* The state of the request must be `requested`. +* Then the collateral coins can be released from escrow and the status set to `cancelled`. 
+ +Add this functionality to the `keeper` in `x/loan/keeper/msg_server_cancel_loan.go`: + +```go +package keeper + +import ( + "context" + "fmt" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "loan/x/loan/types" +) + +func (k msgServer) CancelLoan(goCtx context.Context, msg *types.MsgCancelLoan) (*types.MsgCancelLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + loan, found := k.GetLoan(ctx, msg.Id) + if !found { + return nil, sdkerrors.Wrap(sdkerrors.ErrKeyNotFound, fmt.Sprintf("key %d doesn't exist", msg.Id)) + } + + if loan.Borrower != msg.Creator { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "Cannot cancel: not the borrower") + } + + if loan.State != "requested" { + return nil, sdkerrors.Wrapf(types.ErrWrongLoanState, "%v", loan.State) + } + + borrower, _ := sdk.AccAddressFromBech32(loan.Borrower) + collateral, _ := sdk.ParseCoinsNormalized(loan.Collateral) + k.bankKeeper.SendCoinsFromModuleToAccount(ctx, types.ModuleName, borrower, collateral) + + loan.State = "cancelled" + + k.SetLoan(ctx, loan) + + return &types.MsgCancelLoanResponse{}, nil +} +``` + +### Test cancelling a loan + +Test the changes for cancelling a loan request: + +```bash +ignite chain serve -r +``` + +```bash +loand tx loan request-loan 100token 2token 200token 100 --from bob -y +``` + +Query your loan request: + +```bash +loand query loan list-loan +``` + +```bash +loand tx loan cancel-loan 0 --from bob -y +``` + +Query your loan request: + +```bash +loand query loan list-loan +``` + +Now the collateral coins can be released from escrow and the status set to `cancelled`. + +```bash +- amount: 100token + borrower: cosmos1lp4ghp4mmsdgpf2fm22f0qtqmnjeh3gr9h3cau + collateral: 200token + deadline: "100" + fee: 2token + id: "2" + lender: "" + state: cancelled +``` + +Consider again updating your local repository with a git commit. 
After you test and use your loan module, consider publishing your code to a public repository for others to see your accomplishments.
+
+```bash
+git add .
+git commit -m "Add cancel-loan message"
+```
+
+## Complete
+
+Congratulations. You have completed the loan module tutorial.
+
+You executed commands and updated files to:
+
+* Scaffold a blockchain
+* Scaffold a module
+* Scaffold a list for loan objects
+* Create messages in your module to interact with the loan object
+* Interact with other modules in your module
+* Use an escrow module account
+* Add application messages for a loan system
+  * Request Loan
+  * Approve Loan
+  * Repay Loan
+  * Liquidate Loan
+  * Cancel Loan
diff --git a/docs/versioned_docs/version-v0.25/guide/07-ibc.md b/docs/versioned_docs/version-v0.25/guide/07-ibc.md
new file mode 100644
index 0000000..65e6bf5
--- /dev/null
+++ b/docs/versioned_docs/version-v0.25/guide/07-ibc.md
@@ -0,0 +1,569 @@
+---
+sidebar_position: 7
+description: Build an understanding of how to create and send packets across blockchains and navigate between blockchains.
+title: "Inter-Blockchain Communication: Basics"
+---
+
+# Inter-Blockchain Communication: Basics
+
+The Inter-Blockchain Communication protocol (IBC) is an important part of the Cosmos SDK ecosystem. The Hello World tutorial is a time-honored tradition in computer programming. This tutorial builds an understanding of how to create and send packets across blockchains. This foundational knowledge helps you navigate between blockchains with the Cosmos SDK.
+
+**You will learn how to**
+
+- Use IBC to create and send packets between blockchains.
+- Navigate between blockchains using the Cosmos SDK and the Ignite CLI Relayer.
+- Create a basic blog post and save the post on another blockchain.
+
+## What is IBC?
+
+The Inter-Blockchain Communication protocol (IBC) allows blockchains to talk to each other. IBC handles transport across different sovereign blockchains.
This end-to-end, connection-oriented, stateful protocol provides reliable, ordered, and authenticated communication between heterogeneous blockchains.
+
+The [IBC protocol in the Cosmos SDK](https://ibc.cosmos.network/main/ibc/overview) is the standard for the interaction between two blockchains. The IBC module interface defines how packets and messages are constructed to be interpreted by the sending and the receiving blockchain.
+
+The IBC relayer lets you connect between sets of IBC-enabled chains. This tutorial teaches you how to create two blockchains and then start and use the relayer with Ignite CLI to connect two blockchains.
+
+This tutorial covers essentials like modules, IBC packets, relayer, and the lifecycle of packets routed through IBC.
+
+## Create a blockchain
+
+Create a blockchain app with a blog module to write posts on other blockchains that contain the Hello World message. For this tutorial, you can write posts for the Cosmos SDK universe that contain Hello Mars, Hello Cosmos, and Hello Earth messages.
+
+For this simple example, create an app that contains a blog module that has a post transaction with title and text.
+
+After you define the logic, run two blockchains that have this module installed.
+
+- The chains can send posts between each other using IBC.
+
+- On the sending chain, save the `acknowledged` and `timed out` posts.
+
+After the transaction is acknowledged by the receiving chain, you know that the post is saved on both blockchains.
+
+- The sending chain has the additional data `postID`.
+
+- Sent posts that are acknowledged and timed out contain the title and the target chain of the post. These identifiers are visible on the parameter `chain`. The following chart shows the lifecycle of a packet that travels through IBC.
+
+![The Lifecycle of an IBC packet in the Blog Module](./images/packet_sendpost.png)
+
+## Build your blockchain app
+
+Use Ignite CLI to scaffold the blockchain app and the blog module.
+ +### Build a new blockchain + +To scaffold a new blockchain named `planet`: + +```bash +ignite scaffold chain planet --no-module +cd planet +``` + +A new directory named `planet` is created in your home directory. The `planet` directory contains a working blockchain app. + +### Scaffold the blog module inside your blockchain + +Next, use Ignite CLI to scaffold a blog module with IBC capabilities. The blog module contains the logic for creating blog posts and routing them through IBC to the second blockchain. + +To scaffold a module named `blog`: + +```bash +ignite scaffold module blog --ibc +``` + +A new directory with the code for an IBC module is created in `planet/x/blog`. Modules scaffolded with the `--ibc` flag include all the logic for the scaffolded IBC module. + +### Generate CRUD actions for types + +Next, create the CRUD actions for the blog module types. + +Use the `ignite scaffold list` command to scaffold the boilerplate code for the create, read, update, and delete (CRUD) actions. + +These `ignite scaffold list` commands create CRUD code for the following transactions: + +- Creating blog posts + + ```bash + ignite scaffold list post title content creator --no-message --module blog + ``` + +- Processing acknowledgments for sent posts + + ```bash + ignite scaffold list sentPost postID title chain creator --no-message --module blog + ``` + +- Managing post timeouts + + ```bash + ignite scaffold list timedoutPost title chain creator --no-message --module blog + ``` + +The scaffolded code includes proto files for defining data structures, messages, messages handlers, keepers for modifying the state, and CLI commands. + +### Ignite CLI Scaffold List Command Overview + +``` +ignite scaffold list [typeName] [field1] [field2] ... [flags] +``` + +The first argument of the `ignite scaffold list [typeName]` command specifies the name of the type being created. For the blog app, you created `post`, `sentPost`, and `timedoutPost` types. 
+ +The next arguments define the fields that are associated with the type. For the blog app, you created `title`, `content`, `postID`, and `chain` fields. + +The `--module` flag defines which module the new transaction type is added to. This optional flag lets you manage multiple modules within your Ignite CLI app. When the flag is not present, the type is scaffolded in the module that matches the name of the repo. + +When a new type is scaffolded, the default behavior is to scaffold messages that can be sent by users for CRUD operations. The `--no-message` flag disables this feature. Disable the messages option for the app since you want the posts to be created upon reception of IBC packets and not directly created from a user's messages. + +### Scaffold a sendable and interpretable IBC packet + +You must generate code for a packet that contains the title and the content of the blog post. + +The `ignite packet` command creates the logic for an IBC packet that can be sent to another blockchain. + +- The `title` and `content` are stored on the target chain. + +- The `postID` is acknowledged on the sending chain. + +To scaffold a sendable and interpretable IBC packet: + +```bash +ignite scaffold packet ibcPost title content --ack postID --module blog +``` + +Notice the fields in the `ibcPost` packet match the fields in the `post` type that you created earlier. + +- The `--ack` flag defines which identifier is returned to the sending blockchain. + +- The `--module` flag specifies to create the packet in a particular IBC module. + +The `ignite packet` command also scaffolds the CLI command that is capable of sending an IBC packet: + +```bash +planetd tx blog send-ibcPost [portID] [channelID] [title] [content] +``` + +## Modify the source code + +After you create the types and transactions, you must manually insert the logic to manage updates in the database. Modify the source code to save the data as specified earlier in this tutorial. 
+ +### Add creator to the blog post packet + +Start with the proto file that defines the structure of the IBC packet. + +To identify the creator of the post in the receiving blockchain, add the `creator` field inside the packet. This field was not specified directly in the command because it would automatically become a parameter in the `SendIbcPost` CLI command. + +```protobuf +// proto/blog/packet.proto +message IbcPostPacketData { + string title = 1; + string content = 2; + string creator = 3; // < --- +} +``` + +To make sure the receiving chain has content on the creator of a blog post, add the `msg.Creator` value to the IBC `packet`. + +- The content of the `sender` of the message is automatically included in `SendIbcPost` message. +- The sender is verified as the signer of the message, so you can add the `msg.Sender` as the creator to the new packet before it is sent over IBC. + +```go + // x/blog/keeper/msg_server_ibc_post.go + + // Construct the packet + var packet types.IbcPostPacketData + packet.Title = msg.Title + packet.Content = msg.Content + packet.Creator = msg.Creator // < --- + + // Transmit the packet + err := k.TransmitIbcPostPacket( + ctx, + packet, + msg.Port, + msg.ChannelID, + clienttypes.ZeroHeight(), + msg.TimeoutTimestamp, + ) +``` + +### Receive the post + +The methods for primary transaction logic are in the `x/blog/keeper/ibc_post.go` file. Use these methods to manage IBC packets: + +- `TransmitIbcPostPacket` is called manually to send the packet over IBC. This method also defines the logic before the packet is sent over IBC to another blockchain app. +- `OnRecvIbcPostPacket` hook is automatically called when a packet is received on the chain. This method defines the packet reception logic. +- `OnAcknowledgementIbcPostPacket` hook is called when a sent packet is acknowledged on the source chain. This method defines the logic when the packet has been received. +- `OnTimeoutIbcPostPacket` hook is called when a sent packet times out. 
This method defines the logic when the packet is not received on the target chain.
+) +``` + +Then modify the `OnRecvIbcPostPacket` keeper function with the following code: + +```go +func (k Keeper) OnRecvIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData) (packetAck types.IbcPostPacketAck, err error) { + // validate packet data upon receiving + if err := data.ValidateBasic(); err != nil { + return packetAck, err + } + + id := k.AppendPost( + ctx, + types.Post{ + Creator: packet.SourcePort + "-" + packet.SourceChannel + "-" + data.Creator, + Title: data.Title, + Content: data.Content, + }, + ) + + packetAck.PostID = strconv.FormatUint(id, 10) + + return packetAck, nil +} +``` + +### Receive the post acknowledgement + +On the sending blockchain, store a `sentPost` so you know that the post has been received on the target chain. + +Store the title and the target to identify the post. + +When a packet is scaffolded, the default type for the received acknowledgment data is a type that identifies if the packet treatment has failed. The `Acknowledgement_Error` type is set if `OnRecvIbcPostPacket` returns an error from the packet. 
+ +```go +// x/blog/keeper/ibc_post.go +func (k Keeper) OnAcknowledgementIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + // We will not treat acknowledgment error in this tutorial + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.IbcPostPacketAck + + if err := types.ModuleCdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + k.AppendSentPost( + ctx, + types.SentPost{ + Creator: data.Creator, + PostID: packetAck.PostID, + Title: data.Title, + Chain: packet.DestinationPort + "-" + packet.DestinationChannel, + }, + ) + + return nil + default: + return errors.New("the counter-party module does not implement the correct acknowledgment format") + } +} +``` + +### Store information about the timed-out packet + +Store posts that have not been received by target chains in `timedoutPost` posts. This logic follows the same format as `sentPost`. + +```go +// x/blog/keeper/ibc_post.go +func (k Keeper) OnTimeoutIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData) error { + k.AppendTimedoutPost( + ctx, + types.TimedoutPost{ + Creator: data.Creator, + Title: data.Title, + Chain: packet.DestinationPort + "-" + packet.DestinationChannel, + }, + ) + + return nil +} + +``` + +This last step completes the basic `blog` module setup. The blockchain is now ready! + +## Use the IBC modules + +You can now spin up the blockchain and send a blog post from one blockchain app to the other. Multiple terminal windows are required to complete these next steps. + +### Test the IBC modules + +To test the IBC module, start two blockchain networks on the same machine. 
Both blockchains use the same source code. Each blockchain has a unique chain ID. + +One blockchain is named `earth` and the other blockchain is named `mars`. + +The `earth.yml` and `mars.yml` files are required in the project directory: + +```yaml +# earth.yml +accounts: + - name: alice + coins: ["1000token", "100000000stake"] + - name: bob + coins: ["500token", "100000000stake"] +validator: + name: alice + staked: "100000000stake" +faucet: + name: bob + coins: ["5token", "100000stake"] +genesis: + chain_id: "earth" +init: + home: "$HOME/.earth" +``` + +```yaml +# mars.yml +accounts: + - name: alice + coins: ["1000token", "1000000000stake"] + - name: bob + coins: ["500token", "100000000stake"] +validator: + name: alice + staked: "100000000stake" +faucet: + host: ":4501" + name: bob + coins: ["5token", "100000stake"] +host: + rpc: ":26659" + p2p: ":26658" + prof: ":6061" + grpc: ":9092" + grpc-web: ":9093" + api: ":1318" +genesis: + chain_id: "mars" +init: + home: "$HOME/.mars" +``` + +Open a terminal window and run the following command to start the `earth` blockchain: + +```bash +ignite chain serve -c earth.yml +``` + +Open a different terminal window and run the following command to start the `mars` blockchain: + +```bash +ignite chain serve -c mars.yml +``` + +### Remove Existing Relayer and Ignite CLI Configurations + +If you previously used the relayer, follow these steps to remove exiting relayer and Ignite CLI configurations: + +- Stop your blockchains and delete previous configuration files: + + ```bash + rm -rf ~/.ignite/relayer + ``` + +If existing relayer configurations do not exist, the command returns `no matches found` and no action is taken. + +### Configure and start the relayer + +First, configure the relayer. 
Use the Ignite CLI `configure` command with the `--advanced` option: + +```bash +ignite relayer configure -a \ + --source-rpc "http://0.0.0.0:26657" \ + --source-faucet "http://0.0.0.0:4500" \ + --source-port "blog" \ + --source-version "blog-1" \ + --source-gasprice "0.0000025stake" \ + --source-prefix "cosmos" \ + --source-gaslimit 300000 \ + --target-rpc "http://0.0.0.0:26659" \ + --target-faucet "http://0.0.0.0:4501" \ + --target-port "blog" \ + --target-version "blog-1" \ + --target-gasprice "0.0000025stake" \ + --target-prefix "cosmos" \ + --target-gaslimit 300000 +``` + +When prompted, press Enter to accept the default values for `Source Account` and `Target Account`. + +The output looks like: + +``` +--------------------------------------------- +Setting up chains +--------------------------------------------- + +🔐 Account on "source" is "cosmos1xcxgzq75yrxzd0tu2kwmwajv7j550dkj7m00za" + + |· received coins from a faucet + |· (balance: 100000stake,5token) + +🔐 Account on "target" is "cosmos1nxg8e4mfp5v7sea6ez23a65rvy0j59kayqr8cx" + + |· received coins from a faucet + |· (balance: 100000stake,5token) + +⛓ Configured chains: earth-mars +``` + +In a new terminal window, start the relayer process: + +```bash +ignite relayer connect +``` + +Results: + +``` +------ +Paths +------ + +earth-mars: + earth > (port: blog) (channel: channel-0) + mars > (port: blog) (channel: channel-0) + +------ +Listening and relaying packets between chains... 
+------ +``` + +### Send packets + +You can now send packets and verify the received posts: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Hello" "Hello Mars, I'm Alice from Earth" --from alice --chain-id earth --home ~/.earth +``` + +To verify that the post has been received on Mars: + +```bash +planetd q blog list-post --node tcp://localhost:26659 +``` + +The packet has been received: + +```yaml +Post: +- content: Hello Mars, I'm Alice from Earth + creator: blog-channel-0-cosmos1aew8dk9cs3uzzgeldatgzvm5ca2k4m98xhy20x + id: "0" + title: Hello +pagination: + next_key: null + total: "1" +``` + +To check if the packet has been acknowledged on Earth: + +```bash +planetd q blog list-sent-post +``` + +Output: + +```yaml +SentPost: +- chain: blog-channel-0 + creator: cosmos1aew8dk9cs3uzzgeldatgzvm5ca2k4m98xhy20x + id: "0" + postID: "0" + title: Hello +pagination: + next_key: null + total: "1" +``` + +To test timeout, set the timeout time of a packet to 1 nanosecond, verify that the packet is timed out, and check the timed-out posts: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Sorry" "Sorry Mars, you will never see this post" --from alice --chain-id earth --home ~/.earth --packet-timeout-timestamp 1 +``` + +Check the timed-out posts: + +```bash +planetd q blog list-timedout-post +``` + +Results: + +```yaml +TimedoutPost: +- chain: blog-channel-0 + creator: cosmos1fhpcsxn0g8uask73xpcgwxlfxtuunn3ey5ptjv + id: "0" + title: Sorry +pagination: + next_key: null + total: "2" +``` + +You can also send a post from Mars: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Hello" "Hello Earth, I'm Alice from Mars" --from alice --chain-id mars --home ~/.mars --node tcp://localhost:26659 +``` + +List post on Earth: + +```bash +planetd q blog list-post +``` + +Results: + +```yaml +Post: +- content: Hello Earth, I'm Alice from Mars + creator: blog-channel-0-cosmos1xtpx43l826348s59au24p22pxg6q248638q2tf + id: "0" + title: Hello +pagination: + next_key: null 
+ total: "1" +``` + +## Congratulations 🎉 + +By completing this tutorial, you've learned to use the Inter-Blockchain Communication protocol (IBC). + +Here's what you accomplished in this tutorial: + +- Built two Hello blockchain apps as IBC modules +- Modified the generated code to add CRUD action logic +- Configured and used the Ignite CLI relayer to connect two blockchains with each other +- Transferred IBC packets from one blockchain to another diff --git a/docs/versioned_docs/version-v0.25/guide/08-interchange/00-introduction.md b/docs/versioned_docs/version-v0.25/guide/08-interchange/00-introduction.md new file mode 100644 index 0000000..cd05f34 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/08-interchange/00-introduction.md @@ -0,0 +1,38 @@ +--- +sidebar_position: 0 +slug: /guide/interchange +--- +# Introduction + +The Interchain Exchange is a module to create buy and sell orders between blockchains. + +In this tutorial, you learn how to create a Cosmos SDK module that can create order pairs, buy orders, and sell orders. You create order books and buy and sell orders across blockchains, which in turn enables you to swap token from one blockchain to another. + +**Note:** The code in this tutorial is written specifically for this tutorial and is intended only for educational purposes. This tutorial code is not intended to be used in production. + +If you want to see the end result, see the example implementation in the [interchange repo](https://github.com/tendermint/interchange). + +**You will learn how to:** + +- Create a blockchain with Ignite CLI +- Create a Cosmos SDK IBC module +- Create an order book that hosts buy and sell orders with a module +- Send IBC packets from one blockchain to another +- Deal with timeouts and acknowledgements of IBC packets + +## How the Interchange Exchange Module Works + +To build an exchange that works with two or more blockchains, follow the steps in this tutorial to create a Cosmos SDK module called `dex`. 
The new `dex` module allows you to open an exchange order book for a pair of tokens: a token from one blockchain and a token from another blockchain. The blockchains are required to have the `dex` module available.
By using IBC, the module can create order books so that multiple blockchains can interact and exchange their token. + +You create an order book pair with a token from one blockchain and another token from another blockchain. In this tutorial, call the module you create the `dex` module. + +> When a user exchanges a token with the `dex` module, a `voucher` of that token is received on the other blockchain. This voucher is similar to how an `ibc-transfer` is constructed. Since a blockchain module does not have the rights to mint new token of a blockchain into existence, the token on the target chain is locked up, and the buyer receives a `voucher` of that token. + +This process can be reversed when the `voucher` gets burned to unlock the original token. This exchange process is explained in more detail throughout the tutorial. + +## Assumption of the Design + +An order book can be created for the exchange of any tokens between any pair of chains. + +- Both blockchains require the `dex` module to be installed and running. +- There can only be one order book for a pair of token at the same time. + +<!-- There is no condition to check for open channels between two chains. --> + +A specific chain cannot mint new coins of its native token. + +<!-- The module is trustless, there is no condition to check when opening a channel between two chains. Any pair of tokens can be exchanged between any pair of chains. --> + +This module is inspired by the [`ibc transfer`](https://github.com/cosmos/ibc-go/tree/main/modules/apps/transfer) module on the Cosmos SDK. The `dex` module you create in this tutorial has similarities, like the `voucher` creation. 
+ +However, the new `dex` module you are creating is more complex because it supports creation of: + +- Several types of packets to send +- Several types of acknowledgments to treat +- More complex logic on how to treat a packet on receipt, on timeout, and more + +## Interchain Exchange Overview + +Assume you have two blockchains: Venus and Mars. + +- The native token on Venus is `venuscoin`. +- The native token on Mars is `marscoin`. + +When a token is exchanged from Mars to Venus: + + - The Venus blockchain has an IBC `voucher` token with a denom that looks like `ibc/B5CB286...A7B21307F`. +- The long string of characters after `ibc/` is a denom trace hash of a token that was transferred using IBC. + +Using the blockchain's API you can get a denom trace from that hash. The denom trace consists of a `base_denom` and a `path`. In our example: + +- The `base_denom` is `marscoin`. +- The `path` contains pairs of ports and channels through which the token has been transferred. + +For a single-hop transfer, the `path` is identified by `transfer/channel-0`. + +Learn more about token paths in [ICS 20 Fungible Token Transfer](https://github.com/cosmos/ibc/tree/master/spec/app/ics-020-fungible-token-transfer). + +**Note:** This token `ibc/Venus/marscoin` cannot be sold back using the same order book. If you want to "reverse" the exchange and receive the Mars token back, you must create and use a new order book for the `ibc/Venus/marscoin` to `marscoin` transfer. + +## The Design of the Order Books + +As a typical exchange, a new pair implies the creation of an order book with orders to sell `marscoin` or orders to buy `venuscoin`. Here, you have two chains and this data structure must be split between Mars and Venus. + +- Users from chain Mars sell `marscoin`. +- Users from chain Venus buy `marscoin`. + +Therefore, we represent: + +- All orders to sell `marscoin` on chain Mars. +- All orders to buy `marscoin` on chain Venus. 
+ +In this example, blockchain Mars holds the sell orders and blockchain Venus holds the buy orders. + +## Exchanging Tokens Back + +Like `ibc-transfer`, each blockchain keeps a trace of the token voucher that was created on the other blockchain. + +If blockchain Mars sells `marscoin` to chain Venus and `ibc/Venus/marscoin` is minted on Venus then, if `ibc/Venus/marscoin` is sold back to Mars, the token is unlocked and the token that is received is `marscoin`. + +## Features + +The features supported by the interchain exchange module are: + +- Create an exchange order book for a token pair between two chains +- Send sell orders on source chain +- Send buy orders on target chain +- Cancel sell or buy orders diff --git a/docs/versioned_docs/version-v0.25/guide/08-interchange/02-init.md b/docs/versioned_docs/version-v0.25/guide/08-interchange/02-init.md new file mode 100644 index 0000000..4f61535 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/08-interchange/02-init.md @@ -0,0 +1,177 @@ +--- +sidebar_position: 2 +description: Create the blockchain for the interchain exchange app. +--- + +# App Init + +## Initialize the Blockchain + +In this chapter, you create the basic blockchain module for the interchain exchange app. You scaffold the blockchain, the module, the transaction, the IBC packets, and messages. In later chapters, you integrate more code into each of the transaction handlers. + +## Create the Blockchain + +Scaffold a new blockchain called `interchange`: + +```bash +ignite scaffold chain interchange --no-module +``` + +A new directory named `interchange` is created. + +Change into this directory where you can scaffold modules, types, and maps: + +```bash +cd interchange +``` + +The `interchange` directory contains a working blockchain app. + +A local GitHub repository has been created for you with the initial scaffold. + +Next, create a new IBC module. 
+ +## Create the dex Module + +Scaffold a module inside your blockchain named `dex` with IBC capabilities. + +The dex module contains the logic to create and maintain order books and route them through IBC to the second blockchain. + +```bash +ignite scaffold module dex --ibc --ordering unordered --dep bank +``` + +## Create CRUD logic for Buy and Sell Order Books + +Scaffold two types with create, read, update, and delete (CRUD) actions. + +Run the following Ignite CLI `type` commands to create `sellOrderBook` and `buyOrderBook` types: + +```bash +ignite scaffold map sell-order-book amountDenom priceDenom --no-message --module dex +ignite scaffold map buy-order-book amountDenom priceDenom --no-message --module dex +``` + +The values are: + +- `amountDenom`: the token to be sold and in which quantity +- `priceDenom`: the token selling price + +The `--no-message` flag specifies to skip the message creation. Custom messages will be created in the next steps. + +The `--module dex` flag specifies to scaffold the type in the `dex` module. + +## Create the IBC Packets + +Create three packets for IBC: + +- An order book pair `createPair` +- A sell order `sellOrder` +- A buy order `buyOrder` + +```bash +ignite scaffold packet create-pair sourceDenom targetDenom --module dex +ignite scaffold packet sell-order amountDenom amount:int priceDenom price:int --ack remainingAmount:int,gain:int --module dex +ignite scaffold packet buy-order amountDenom amount:int priceDenom price:int --ack remainingAmount:int,purchase:int --module dex +``` + +The optional `--ack` flag defines field names and types of the acknowledgment returned after the packet has been received by the target chain. The value of the `--ack` flag is a comma-separated list of names (no spaces). Append optional types after a colon (`:`). + +## Cancel messages + +Cancelling orders is done locally in the network, there is no packet to send. 
+ +Use the `message` command to create a message to cancel a sell or buy order: + +```bash +ignite scaffold message cancel-sell-order port channel amountDenom priceDenom orderID:int --desc "Cancel a sell order" --module dex +ignite scaffold message cancel-buy-order port channel amountDenom priceDenom orderID:int --desc "Cancel a buy order" --module dex +``` + +Use the optional `--desc` flag to define a description of the CLI command that is used to broadcast a transaction with the message. + +## Trace the Denom + +The token denoms must have the same behavior as described in the `ibc-transfer` module: + +- An external token received from a chain has a unique `denom`, referred to as `voucher`. +- When a token is sent to a blockchain and then sent back and received, the chain can resolve the voucher and convert it back to the original token denomination. + +`Voucher` tokens are represented as hashes, therefore you must store which original denomination is related to a voucher. You can do this with an indexed type. + +For a `voucher` you store, define the source port ID, source channel ID, and the original denom: + +```bash +ignite scaffold map denom-trace port channel origin --no-message --module dex +``` + +## Create the Configuration for Two Blockchains + +Add two config files `mars.yml` and `venus.yml` to test two blockchain networks with specific token for each. + +Add the config files in the `interchange` folder. + +The native denoms for Mars are `marscoin`, and for Venus `venuscoin`. 
+ +Create the `mars.yml` file with your content: + +```yaml +# mars.yml +accounts: + - name: alice + coins: ["1000token", "100000000stake", "1000marscoin"] + - name: bob + coins: ["500token", "1000marscoin", "100000000stake"] +validator: + name: alice + staked: "100000000stake" +faucet: + name: bob + coins: ["5token", "100000stake"] +genesis: + chain_id: "mars" +init: + home: "$HOME/.mars" +``` + +Create the `venus.yml` file with your content: + +```yaml +# venus.yml +accounts: + - name: alice + coins: ["1000token", "1000000000stake", "1000venuscoin"] + - name: bob + coins: ["500token", "1000venuscoin", "100000000stake"] +validator: + name: alice + staked: "100000000stake" +faucet: + host: ":4501" + name: bob + coins: ["5token", "100000stake"] +host: + rpc: ":26659" + p2p: ":26658" + prof: ":6061" + grpc: ":9092" + grpc-web: ":9093" + api: ":1318" +genesis: + chain_id: "venus" +init: + home: "$HOME/.venus" +``` + +In the `venus.yml` file, you can see the specific `host` parameter that you can use to change the ports for various running services (rpc, p2p, prof, grpc, api, frontend, and dev-ui). This `host` parameter can be used later so you can run two blockchains in parallel and prevent conflicts when the chains are using the same ports. + +You can also use the `host` parameter to use specific ports for any of the services. + +After scaffolding, now is a good time to make a commit to the local GitHub repository that was created for you. + +```bash +git add . +git commit -m "Scaffold module, maps, packages and messages for the dex" +``` + +Implement the code for the order book in the next chapter. 
diff --git a/docs/versioned_docs/version-v0.25/guide/08-interchange/03-walkthrough.md b/docs/versioned_docs/version-v0.25/guide/08-interchange/03-walkthrough.md new file mode 100644 index 0000000..4637252 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/08-interchange/03-walkthrough.md @@ -0,0 +1,377 @@ +--- +sidebar_position: 3 +description: Walkthrough of commands to use the interchain exchange module. +--- + +# Use the Interchain Exchange + +In this chapter, you learn details about the order book and commands to: + +- Create an exchange order book for a token pair between two chains +- Send sell orders on source chain +- Send buy orders on target chain +- Cancel sell or buy orders + +The next chapter contains the code for the implementation. + +## Order Book + +To use the exchange, start by creating an order book for a pair of tokens: + +```bash +# Create pair broadcasted to the source blockchain +# interchanged tx dex send-create-pair [src-port] [src-channel] [sourceDenom] [targetDenom] +interchanged tx dex send-create-pair dex channel-0 marscoin venuscoin +``` + +Define a pair of token with two denominations: + +- Source denom (in this example, `marscoin`) +- Target denom (`venuscoin`) + +Creating an order book affects state on the source blockchain to which the transaction was broadcast and the target blockchain. 
+ +On the source blockchain, the `send-create-pair` command creates an empty sell order book: + +```yaml +# Created a sell order book on the source blockchain +SellOrderBook: +- amountDenom: marscoin + creator: "" + index: dex-channel-0-marscoin-venuscoin + orderIDTrack: 0 + orders: [] + priceDenom: venuscoin +``` + +On the target blockchain, the same `send-createPair` command creates a buy order book: + +```yaml +# Created a buy order book on the target blockchain +BuyOrderBook: +- amountDenom: marscoin + creator: "" + index: dex-channel-0-marscoin-venuscoin + orderIDTrack: 1 + orders: [] + priceDenom: venuscoin +``` + +To make an exchange possible, the `createPair` transaction sends an IBC packet to the target chain. + +- When the target chain receives a packet, the target chain creates a buy order book and sends an acknowledgement back to the source chain. +- When the source chain receives an acknowledgement, the source chain creates a sell order book. + +Sending an IBC packet requires a user to specify a port and a channel through which a packet is transferred. 
+ +## Sell Order + +After an order book is created, the next step is to create a sell order: + +```bash +# Sell order broadcasted to the source blockchain +# interchanged tx dex send-sell-order [src-port] [src-channel] [amountDenom] [amount] [priceDenom] [price] +interchanged tx dex send-sell-order dex channel-0 marscoin 10 venuscoin 15 +``` + +The `send-sellOrder` command broadcasts a message that locks token on the source blockchain and creates a sell order on the source blockchain: + +```yaml +# Source blockchain +balances: +- amount: "990" # decreased from 1000 + denom: marscoin +SellOrderBook: +- amountDenom: marscoin + creator: "" + index: dex-channel-0-marscoin-venuscoin + orderIDTrack: 2 + orders: # a new sell order is created + - amount: 10 + creator: cosmos1v3p3j7c64c4ls32pcjct333e8vqe45gwwa289q + id: 0 + price: 15 + priceDenom: venuscoin +``` + +## Buy Order + +A buy order has the same arguments, the amount of token to be purchased and a price: + +```bash +# Buy order broadcasted to the target blockchain +# interchanged tx dex send-buy-order [src-port] [src-channel] [amountDenom] [amount] [priceDenom] [price]` +interchanged tx dex send-buy-order dex channel-0 marscoin 10 venuscoin 5 +``` + +The `send-buy-order` command locks token on the target blockchain: + +```yaml +# Target blockchain +balances: +- amount: "950" # decreased from 1000 + denom: venuscoin +BuyOrderBook: +- amountDenom: marscoin + creator: "" + index: dex-channel-0-marscoin-venuscoin + orderIDTrack: 3 + orders: # a new buy order is created + - amount: 10 + creator: cosmos1qlrz3peenc6s3xjv9k97e8ef72nk3qn3a0xax2 + id: 1 + price: 5 + priceDenom: venuscoin +``` + +## Perform an Exchange with a Sell Order + +You now have two orders open for marscoin: + +- A sell order on the source chain (for 10marscoin at 15venuscoin) +- A buy order on the target chain (for 5marscoin at 5venuscoin) + +Now, perform an exchange by sending a sell order to the source chain: + +```bash +# Sell order broadcasted to 
the source chain +interchanged tx dex send-sell-order dex channel-0 marscoin 5 venuscoin 3 +``` + +The sell order (for 5marscoin at 3venuscoin) is filled on the target chain by the buy order. + +The amount of the buy order on the target chain is decreased by 5marscoin: + +```yaml +# Target blockchain +BuyOrderBook: +- amountDenom: marscoin + creator: "" + index: dex-channel-0-marscoin-venuscoin + orderIDTrack: 5 + orders: + - amount: 5 # decreased from 10 + creator: cosmos1qlrz3peenc6s3xjv9k97e8ef72nk3qn3a0xax2 + id: 3 + price: 5 + priceDenom: venuscoin +``` + +The sender of the filled sell order exchanged 5marscoin for 25 venuscoin vouchers. + +25 vouchers is a product of the amount of the sell order (5marscoin) and price of the buy order (5venuscoin): + +```yaml +# Source blockchain +balances: +- amount: "25" # increased from 0 + denom: ibc/50D70B7748FB8AA69F09114EC9E5615C39E07381FE80E628A1AF63A6F5C79833 # venuscoin voucher +- amount: "985" # decreased from 990 + denom: marscoin +``` + +The counterparty (the sender of the buy marscoin order) receives 5 marscoin vouchers: + +```yaml +# Target blockchain +balances: +- amount: "5" # increased from 0 + denom: ibc/99678A10AF684E33E88959727F2455AE42CCC64CD76ECFA9691E1B5A32342D33 # marscoin voucher +``` + +The venuscoin balance hasn't changed because the correct amount of venuscoin (50) was locked at the creation of the buy order during the previous step. + +## Perform an Exchange with a Buy Order + +Now, send an order to buy 5marscoin for 15venuscoin: + +```bash +# Buy order broadcasted to the target chain +interchanged tx dex send-buy-order dex channel-0 marscoin 5 venuscoin 15 +``` + +A buy order is immediately filled on the source chain and the sell order creator receives 75 venuscoin vouchers. 
+ +The sell order amount is decreased by the amount of the filled buy order (by 5marscoin): + +```yaml +# Source blockchain +balances: +- amount: "100" # increased from 25 + denom: ibc/50D70B7748FB8AA69F09114EC9E5615C39E07381FE80E628A1AF63A6F5C79833 # venuscoin voucher +SellOrderBook: +- amountDenom: marscoin + creator: "" + index: dex-channel-0-marscoin-venuscoin + orderIDTrack: 4 + orders: + - amount: 5 # decreased from 10 + creator: cosmos1v3p3j7c64c4ls32pcjct333e8vqe45gwwa289q + id: 2 + price: 15 + priceDenom: venuscoin +``` + +The creator of the buy order receives 5 marscoin vouchers for 75 venuscoin (5marscoin * 15venuscoin): + +```yaml +# Target blockchain +balances: +- amount: "10" # increased from 5 + denom: ibc/99678A10AF684E33E88959727F2455AE42CCC64CD76ECFA9691E1B5A32342D33 # marscoin vouchers +- amount: "875" # decreased from 950 + denom: venuscoin +``` + +## Complete Exchange with a Partially Filled Sell Order + +Send an order to sell 10marscoin for 3venuscoin: + +```bash +# Source blockchain +interchanged tx dex send-sell-order dex channel-0 marscoin 10 venuscoin 3 +``` + +The sell amount is 10marscoin, but the opened buy order amount is only 5marscoin. The buy order gets filled completely and removed from the order book. The author of the previously created buy order receives 10 marscoin vouchers from the exchange: + +```yaml +# Target blockchain +balances: +- amount: "15" # increased from 5 + denom: ibc/99678A10AF684E33E88959727F2455AE42CCC64CD76ECFA9691E1B5A32342D33 # marscoin voucher +BuyOrderBook: +- amountDenom: marscoin + creator: "" + index: dex-channel-0-marscoin-venuscoin + orderIDTrack: 5 + orders: [] # buy order with amount 5marscoin has been closed + priceDenom: venuscoin +``` + +The author of the sell order successfully exchanged 5 marscoin and received 25 venuscoin vouchers. 
The other 5marscoin created a sell order:

```yaml
# Source blockchain
balances:
- amount: "125" # increased from 100
  denom: ibc/50D70B7748FB8AA69F09114EC9E5615C39E07381FE80E628A1AF63A6F5C79833 # venuscoin vouchers
- amount: "975" # decreased from 985
  denom: marscoin
SellOrderBook:
- amountDenom: marscoin
  creator: ""
  index: dex-channel-0-marscoin-venuscoin
  orderIDTrack: 6
  orders:
  - amount: 5 # hasn't changed
    creator: cosmos1v3p3j7c64c4ls32pcjct333e8vqe45gwwa289q
    id: 2
    price: 15
  - amount: 5 # new order is created
    creator: cosmos1v3p3j7c64c4ls32pcjct333e8vqe45gwwa289q
    id: 4
    price: 3
```

## Complete Exchange with a Partially Filled Buy Order

Create an order to buy 10 marscoin for 5 venuscoin:

```bash
# Target blockchain
interchanged tx dex send-buy-order dex channel-0 marscoin 10 venuscoin 5
```

The buy order is partially filled for 5marscoin. An existing sell order for 5 marscoin (with a price of 3 venuscoin) on the source chain is completely filled and is removed from the order book. The author of the closed sell order receives 15 venuscoin vouchers (product of 5marscoin and 3venuscoin):

```yaml
# Source blockchain
balances:
- amount: "140" # increased from 125
  denom: ibc/50D70B7748FB8AA69F09114EC9E5615C39E07381FE80E628A1AF63A6F5C79833 # venuscoin vouchers
SellOrderBook:
- amountDenom: marscoin
  creator: ""
  index: dex-channel-0-marscoin-venuscoin
  orderIDTrack: 6
  orders:
  - amount: 5 # order hasn't changed
    creator: cosmos1v3p3j7c64c4ls32pcjct333e8vqe45gwwa289q
    id: 2
    price: 15
  # a sell order for 5 marscoin has been closed
  priceDenom: venuscoin
```

The author of the buy order receives 5 marscoin vouchers which locks 50 venuscoin of their token.
The 5marscoin amount that is not filled by the sell order creates a buy order on the target chain:

```yaml
# Target blockchain
balances:
- amount: "20" # increased from 15
  denom: ibc/99678A10AF684E33E88959727F2455AE42CCC64CD76ECFA9691E1B5A32342D33 # marscoin vouchers
- amount: "825" # decreased from 875
  denom: venuscoin
BuyOrderBook:
- amountDenom: marscoin
  creator: ""
  index: dex-channel-0-marscoin-venuscoin
  orderIDTrack: 7
  orders:
  - amount: 5 # new buy order is created
    creator: cosmos1qlrz3peenc6s3xjv9k97e8ef72nk3qn3a0xax2
    id: 5
    price: 5
  priceDenom: venuscoin
```

## Cancel an Order

After these exchanges, you still have two orders open:

- A sell order on the source chain (5marscoin for 15venuscoin)
- A buy order on the target chain (5marscoin for 5venuscoin)

To cancel a sell order:

```bash
# Source blockchain
interchanged tx dex cancel-sell-order dex channel-0 marscoin venuscoin 2
```

The balance of marscoin is increased:

```yaml
# Source blockchain
balances:
- amount: "980" # increased from 975
  denom: marscoin
```

The sell order book on the source blockchain is now empty.

To cancel a buy order:

```bash
# Target blockchain
interchanged tx dex cancel-buy-order dex channel-0 marscoin venuscoin 5
```

The amount of venuscoin is increased:

```yaml
# Target blockchain
balances:
- amount: "850" # increased from 825
  denom: venuscoin
```

The buy order book on the target blockchain is now empty.
+ +This walkthrough of the interchain exchange showed you how to: + +- Create an exchange order book for a token pair between two chains +- Send sell orders on source chain +- Send buy orders on target chain +- Cancel sell or buy orders + diff --git a/docs/versioned_docs/version-v0.25/guide/08-interchange/04-creating-order-books.md b/docs/versioned_docs/version-v0.25/guide/08-interchange/04-creating-order-books.md new file mode 100644 index 0000000..c58427c --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/08-interchange/04-creating-order-books.md @@ -0,0 +1,465 @@ +--- +sidebar_position: 4 +description: Implement logic to create order books. +--- + +# Implement the Order Books + +In this chapter, you implement the logic to create order books. + +In the Cosmos SDK, the state is stored in a key-value store. Each order book is stored under a unique key that is composed of four values: + +- Port ID +- Channel ID +- Source denom +- Target denom + +For example, an order book for marscoin and venuscoin could be stored under `dex-channel-4-marscoin-venuscoin`. + +First, define a function that returns an order book store key: + +```go +// x/dex/types/keys.go +import "fmt" + +// ... +func OrderBookIndex(portID string, channelID string, sourceDenom string, targetDenom string) string { + return fmt.Sprintf("%s-%s-%s-%s", portID, channelID, sourceDenom, targetDenom) +} +``` + +The `send-create-pair` command is used to create order books. This command: + +- Creates and broadcasts a transaction with a message of type `SendCreatePair`. +- The message gets routed to the `dex` module. +- Finally, a `SendCreatePair` keeper method is called. + +You need the `send-create-pair` command to do the following: + +* When processing `SendCreatePair` message on the source chain: + * Check that an order book with the given pair of denoms does not yet exist. + * Transmit an IBC packet with information about port, channel, source denoms, and target denoms. 
+* After the packet is received on the target chain: + * Check that an order book with the given pair of denoms does not yet exist on the target chain. + * Create a new order book for buy orders. + * Transmit an IBC acknowledgement back to the source chain. +* After the acknowledgement is received on the source chain: + * Create a new order book for sell orders. + +## Message Handling in SendCreatePair + +The `SendCreatePair` function was created during the IBC packet scaffolding. The function creates an IBC packet, populates it with source and target denoms, and transmits this packet over IBC. + +Now, add the logic to check for an existing order book for a particular pair of denoms: + +```go +// x/dex/keeper/msg_server_create_pair.go + +import ( + "errors" + + //... +) + +func (k msgServer) SendCreatePair(goCtx context.Context, msg *types.MsgSendCreatePair) (*types.MsgSendCreatePairResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Get an order book index + pairIndex := types.OrderBookIndex(msg.Port, msg.ChannelID, msg.SourceDenom, msg.TargetDenom) + + // If an order book is found, return an error + _, found := k.GetSellOrderBook(ctx, pairIndex) + if found { + return &types.MsgSendCreatePairResponse{}, errors.New("the pair already exist") + } + + // Construct the packet + var packet types.CreatePairPacketData + + packet.SourceDenom = msg.SourceDenom + packet.TargetDenom = msg.TargetDenom + + // Transmit the packet + err := k.TransmitCreatePairPacket( + ctx, + packet, + msg.Port, + msg.ChannelID, + clienttypes.ZeroHeight(), + msg.TimeoutTimestamp, + ) + if err != nil { + return nil, err + } + + return &types.MsgSendCreatePairResponse{}, nil +} +``` + +## Lifecycle of an IBC Packet + +During a successful transmission, an IBC packet goes through these stages: + +1. Message processing before packet transmission on the source chain +2. Reception of a packet on the target chain +3. Acknowledgment of a packet on the source chain +4. 
Timeout of a packet on the source chain + +In the following section, implement the packet reception logic in the `OnRecvCreatePairPacket` function and the packet acknowledgement logic in the `OnAcknowledgementCreatePairPacket` function. + +Leave the Timeout function empty. + +## Receive an IBC packet + +The protocol buffer definition defines the data that an order book contains. + +Add the `OrderBook` and `Order` messages to the `order.proto` file. + +First, add the proto buffer files to build the Go code files. You can modify these files for the purpose of your app. + +Create a new `order.proto` file in the `proto/dex` directory and add the content: + +```protobuf +// proto/dex/order.proto + +syntax = "proto3"; + +package interchange.dex; + +option go_package = "interchange/x/dex/types"; + +message OrderBook { + int32 idCount = 1; + repeated Order orders = 2; +} + +message Order { + int32 id = 1; + string creator = 2; + int32 amount = 3; + int32 price = 4; +} +``` + +Modify the `buy_order_book.proto` file to have the fields for creating a buy order on the order book. +Don't forget to add the import as well. + +**Tip:** Don't forget to add the import as well. + +```protobuf +// proto/dex/buy_order_book.proto + +// ... +import "dex/order.proto"; + +message BuyOrderBook { + // ... + OrderBook book = 4; +} +``` + +Modify the `sell_order_book.proto` file to add the order book into the buy order book. + +The proto definition for the `SellOrderBook` looks like: + +```protobuf +// proto/dex/sell_order_book.proto + +// ... +import "dex/order.proto"; + +message SellOrderBook { + // ... + OrderBook book = 4; +} +``` + +Now, use Ignite CLI to build the proto files for the `send-create-pair` command: + +```bash +ignite generate proto-go --yes +``` + +Start enhancing the functions for the IBC packets. + +Create a new file `x/dex/types/order_book.go`. 
+ +Add the new order book function to the corresponding Go file: + +```go +// x/dex/types/order_book.go + +package types + +func NewOrderBook() OrderBook { + return OrderBook{ + IdCount: 0, + } +} +``` + +To create a new buy order book type, define `NewBuyOrderBook` in a new file `x/dex/types/buy_order_book.go` : + +```go +// x/dex/types/buy_order_book.go + +package types + +func NewBuyOrderBook(AmountDenom string, PriceDenom string) BuyOrderBook { + book := NewOrderBook() + return BuyOrderBook{ + AmountDenom: AmountDenom, + PriceDenom: PriceDenom, + Book: &book, + } +} +``` + +When an IBC packet is received on the target chain, the module must check whether a book already exists. If not, then create a buy order book for the specified denoms. + + +```go +// x/dex/keeper/create_pair.go + +func (k Keeper) OnRecvCreatePairPacket(ctx sdk.Context, packet channeltypes.Packet, data types.CreatePairPacketData) (packetAck types.CreatePairPacketAck, err error) { + // ... + + // Get an order book index + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.SourceDenom, data.TargetDenom) + + // If an order book is found, return an error + _, found := k.GetBuyOrderBook(ctx, pairIndex) + if found { + return packetAck, errors.New("the pair already exist") + } + + // Create a new buy order book for source and target denoms + book := types.NewBuyOrderBook(data.SourceDenom, data.TargetDenom) + + // Assign order book index + book.Index = pairIndex + + // Save the order book to the store + k.SetBuyOrderBook(ctx, book) + return packetAck, nil +} +``` + +## Receive an IBC Acknowledgement + + +When an IBC acknowledgement is received on the source chain, the module must check whether a book already exists. If not, create a sell order book for the specified denoms. + +Create a new file `x/dex/types/sell_order_book.go`. +Insert the `NewSellOrderBook` function which creates a new sell order book. 
+ +```go +// x/dex/types/sell_order_book.go + +package types + +func NewSellOrderBook(AmountDenom string, PriceDenom string) SellOrderBook { + book := NewOrderBook() + return SellOrderBook{ + AmountDenom: AmountDenom, + PriceDenom: PriceDenom, + Book: &book, + } +} +``` + +Modify the Acknowledgement function in the `x/dex/keeper/create_pair.go` file: + +```go +// x/dex/keeper/create_pair.go + +func (k Keeper) OnAcknowledgementCreatePairPacket(ctx sdk.Context, packet channeltypes.Packet, data types.CreatePairPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.CreatePairPacketAck + if err := types.ModuleCdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + // Set the sell order book + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.SourceDenom, data.TargetDenom) + book := types.NewSellOrderBook(data.SourceDenom, data.TargetDenom) + book.Index = pairIndex + k.SetSellOrderBook(ctx, book) + + return nil + default: + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("invalid acknowledgment format") + } +} +``` + +In this section, you implemented the logic behind the new `send-create-pair` command: + +- When an IBC packet is received on the target chain, `send-create-pair` command creates a buy order book. +- When an IBC acknowledgement is received on the source chain, the `send-create-pair` command creates a sell order book. 
+ +### Implement the appendOrder Function to Add Orders to the Order Book + +```go +// x/dex/types/order_book.go + +package types + +import ( + "errors" + "sort" +) + +func NewOrderBook() OrderBook { + return OrderBook{ + IdCount: 0, + } +} + +const ( + MaxAmount = int32(100000) + MaxPrice = int32(100000) +) + +type Ordering int + +const ( + Increasing Ordering = iota + Decreasing +) + +var ( + ErrMaxAmount = errors.New("max amount reached") + ErrMaxPrice = errors.New("max price reached") + ErrZeroAmount = errors.New("amount is zero") + ErrZeroPrice = errors.New("price is zero") + ErrOrderNotFound = errors.New("order not found") +) +``` + +The `AppendOrder` function initializes and appends a new order to an order book from the order information: + +```go +// x/dex/types/order_book.go + +func (book *OrderBook) appendOrder(creator string, amount int32, price int32, ordering Ordering) (int32, error) { + if err := checkAmountAndPrice(amount, price); err != nil { + return 0, err + } + + // Initialize the order + var order Order + order.Id = book.GetNextOrderID() + order.Creator = creator + order.Amount = amount + order.Price = price + + // Increment ID tracker + book.IncrementNextOrderID() + + // Insert the order + book.insertOrder(order, ordering) + return order.Id, nil +} +``` + +#### Implement the checkAmountAndPrice Function For an Order + +The `checkAmountAndPrice` function checks for the correct amount or price: + +```go +// x/dex/types/order_book.go + +func checkAmountAndPrice(amount int32, price int32) error { + if amount == int32(0) { + return ErrZeroAmount + } + if amount > MaxAmount { + return ErrMaxAmount + } + + if price == int32(0) { + return ErrZeroPrice + } + if price > MaxPrice { + return ErrMaxPrice + } + + return nil +} +``` + +#### Implement the GetNextOrderID Function + +The `GetNextOrderID` function gets the ID of the next order to append: + +```go +// x/dex/types/order_book.go + +func (book OrderBook) GetNextOrderID() int32 { + return book.IdCount 
+} +``` + +#### Implement the IncrementNextOrderID Function + +The `IncrementNextOrderID` function updates the ID count for orders: + +```go +// x/dex/types/order_book.go + +func (book *OrderBook) IncrementNextOrderID() { + // Even numbers to have different ID than buy orders + book.IdCount++ +} +``` + +#### Implement the insertOrder Function + +The `insertOrder` function inserts the order in the book with the provided order: + +```go +// x/dex/types/order_book.go + +func (book *OrderBook) insertOrder(order Order, ordering Ordering) { + if len(book.Orders) > 0 { + var i int + + // get the index of the new order depending on the provided ordering + if ordering == Increasing { + i = sort.Search(len(book.Orders), func(i int) bool { return book.Orders[i].Price > order.Price }) + } else { + i = sort.Search(len(book.Orders), func(i int) bool { return book.Orders[i].Price < order.Price }) + } + + // insert order + orders := append(book.Orders, &order) + copy(orders[i+1:], orders[i:]) + orders[i] = &order + book.Orders = orders + } else { + book.Orders = append(book.Orders, &order) + } +} +``` + + +This completes the order book setup. + +Now is a good time to save the state of your implementation. +Because your project is in a local repository, you can use git. Saving your current state lets you jump back and forth in case you introduce errors or need a break. + + +```bash +git add . +git commit -m "Create Order Books" +``` + + +In the next chapter, you learn how to deal with vouchers by minting and burning vouchers and locking and unlocking native blockchain token in your app. 
diff --git a/docs/versioned_docs/version-v0.25/guide/08-interchange/05-mint-and-burn-voucher.md b/docs/versioned_docs/version-v0.25/guide/08-interchange/05-mint-and-burn-voucher.md new file mode 100644 index 0000000..9d1cf10 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/08-interchange/05-mint-and-burn-voucher.md @@ -0,0 +1,311 @@ +--- +order: 5 +description: Mint vouchers and lock and unlock native token from a blockchain. +--- + +# Mint and Burn Vouchers + +In this chapter, you learn about vouchers. The `dex` module implementation mints vouchers and locks and unlocks native token from a blockchain. + +There is a lot to learn from this `dex` module implementation: + +- You work with the `bank` keeper and use several methods it offers. +- You interact with another module and use the module account to lock tokens. + +This implementation can teach you how to use various interactions with module accounts or minting, locking or burning tokens. + + +## Create the SafeBurn Function to Burn Vouchers or Lock Tokens + +The `SafeBurn` function burns tokens if they are IBC vouchers (have an `ibc/` prefix) and locks tokens if they are native to the chain. 
+ +Create a new `x/dex/keeper/mint.go` file: + +```go +// x/dex/keeper/mint.go + +package keeper + +import ( + "fmt" + "strings" + + sdkmath "cosmossdk.io/math" + sdk "github.com/cosmos/cosmos-sdk/types" + ibctransfertypes "github.com/cosmos/ibc-go/v5/modules/apps/transfer/types" + + "interchange/x/dex/types" +) + +// isIBCToken checks if the token came from the IBC module +// Each IBC token starts with an ibc/ denom, the check is rather simple +func isIBCToken(denom string) bool { + return strings.HasPrefix(denom, "ibc/") +} + +func (k Keeper) SafeBurn(ctx sdk.Context, port string, channel string, sender sdk.AccAddress, denom string, amount int32) error { + if isIBCToken(denom) { + // Burn the tokens + if err := k.BurnTokens(ctx, sender, sdk.NewCoin(denom, sdkmath.NewInt(int64(amount)))); err != nil { + return err + } + } else { + // Lock the tokens + if err := k.LockTokens(ctx, port, channel, sender, sdk.NewCoin(denom, sdkmath.NewInt(int64(amount)))); err != nil { + return err + } + } + + return nil +} +``` + +If the token comes from another blockchain as an IBC token, the burning method actually burns those IBC tokens on one chain and unlocks them on the other chain. The native token are locked away. + +Now, implement the `BurnTokens` keeper method as used in the previous function. The `bankKeeper` has a useful function for this: + +```go +// x/dex/keeper/mint.go + +func (k Keeper) BurnTokens(ctx sdk.Context, sender sdk.AccAddress, tokens sdk.Coin) error { + // transfer the coins to the module account and burn them + if err := k.bankKeeper.SendCoinsFromAccountToModule(ctx, sender, types.ModuleName, sdk.NewCoins(tokens)); err != nil { + return err + } + + if err := k.bankKeeper.BurnCoins( + ctx, types.ModuleName, sdk.NewCoins(tokens), + ); err != nil { + // NOTE: should not happen as the module account was + // retrieved on the step above and it has enough balance + // to burn. 
+ panic(fmt.Sprintf("cannot burn coins after a successful send to a module account: %v", err)) + } + + return nil +} +``` + +Implement the `LockTokens` keeper method. + +To lock token from a native chain, you can send the native token to the Escrow Address: + +```go +// x/dex/keeper/mint.go + +func (k Keeper) LockTokens(ctx sdk.Context, sourcePort string, sourceChannel string, sender sdk.AccAddress, tokens sdk.Coin) error { + // create the escrow address for the tokens + escrowAddress := ibctransfertypes.GetEscrowAddress(sourcePort, sourceChannel) + + // escrow source tokens. It fails if balance insufficient + if err := k.bankKeeper.SendCoins( + ctx, sender, escrowAddress, sdk.NewCoins(tokens), + ); err != nil { + return err + } + + return nil +} +``` + +`BurnTokens` and `LockTokens` use `SendCoinsFromAccountToModule`, `BurnCoins`, and `SendCoins` keeper methods of the `bank` module. + +To start using these function from the `dex` module, first add them to the `BankKeeper` interface in the `x/dex/types/expected_keepers.go` file. + +```go +// x/dex/types/expected_keepers.go + +package types + +import sdk "github.com/cosmos/cosmos-sdk/types" + +// BankKeeper defines the expected bank keeper +type BankKeeper interface { + //... + SendCoinsFromAccountToModule(ctx sdk.Context, senderAddr sdk.AccAddress, recipientModule string, amt sdk.Coins) error + BurnCoins(ctx sdk.Context, moduleName string, amt sdk.Coins) error + SendCoins(ctx sdk.Context, fromAddr sdk.AccAddress, toAddr sdk.AccAddress, amt sdk.Coins) error +} +``` + +## SaveVoucherDenom + +The `SaveVoucherDenom` function saves the voucher denom to be able to convert it back later. 
+ +Create a new `x/dex/keeper/denom.go` file: + +```go +// x/dex/keeper/denom.go + +package keeper + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + ibctransfertypes "github.com/cosmos/ibc-go/v5/modules/apps/transfer/types" + + "interchange/x/dex/types" +) + +func (k Keeper) SaveVoucherDenom(ctx sdk.Context, port string, channel string, denom string) { + voucher := VoucherDenom(port, channel, denom) + + // Store the origin denom + _, saved := k.GetDenomTrace(ctx, voucher) + if !saved { + k.SetDenomTrace(ctx, types.DenomTrace{ + Index: voucher, + Port: port, + Channel: channel, + Origin: denom, + }) + } +} +``` + +Finally, the last function to implement is the `VoucherDenom` function that returns the voucher of the denom from the port ID and channel ID: + +```go +// x/dex/keeper/denom.go + +func VoucherDenom(port string, channel string, denom string) string { + // since SendPacket did not prefix the denomination, we must prefix denomination here + sourcePrefix := ibctransfertypes.GetDenomPrefix(port, channel) + + // NOTE: sourcePrefix contains the trailing "/" + prefixedDenom := sourcePrefix + denom + + // construct the denomination trace from the full raw denomination + denomTrace := ibctransfertypes.ParseDenomTrace(prefixedDenom) + voucher := denomTrace.IBCDenom() + return voucher[:16] +} +``` + +### Implement an OriginalDenom Function + +The `OriginalDenom` function returns back the original denom of the voucher. 
+ +False is returned if the port ID and channel ID provided are not the origins of the voucher: + +```go +// x/dex/keeper/denom.go + +func (k Keeper) OriginalDenom(ctx sdk.Context, port string, channel string, voucher string) (string, bool) { + trace, exist := k.GetDenomTrace(ctx, voucher) + if exist { + // Check if original port and channel + if trace.Port == port && trace.Channel == channel { + return trace.Origin, true + } + } + + // Not the original chain + return "", false +} +``` + +### Implement a SafeMint Function + +If a token is an IBC token (has an `ibc/` prefix), the `SafeMint` function mints IBC token with `MintTokens`. Otherwise, it unlocks native token with `UnlockTokens`. + +Go back to the `x/dex/keeper/mint.go` file and add the following code: + +```go +// x/dex/keeper/mint.go + +func (k Keeper) SafeMint(ctx sdk.Context, port string, channel string, receiver sdk.AccAddress, denom string, amount int32) error { + if isIBCToken(denom) { + // Mint IBC tokens + if err := k.MintTokens(ctx, receiver, sdk.NewCoin(denom, sdkmath.NewInt(int64(amount)))); err != nil { + return err + } + } else { + // Unlock native tokens + if err := k.UnlockTokens( + ctx, + port, + channel, + receiver, + sdk.NewCoin(denom, sdkmath.NewInt(int64(amount))), + ); err != nil { + return err + } + } + + return nil +} +``` + +#### Implement a `MintTokens` Function + +You can use the `bankKeeper` function again to MintCoins. 
These token will then be sent to the receiver account: + +```go +// x/dex/keeper/mint.go + +func (k Keeper) MintTokens(ctx sdk.Context, receiver sdk.AccAddress, tokens sdk.Coin) error { + // mint new tokens if the source of the transfer is the same chain + if err := k.bankKeeper.MintCoins( + ctx, types.ModuleName, sdk.NewCoins(tokens), + ); err != nil { + return err + } + + // send to receiver + if err := k.bankKeeper.SendCoinsFromModuleToAccount( + ctx, types.ModuleName, receiver, sdk.NewCoins(tokens), + ); err != nil { + panic(fmt.Sprintf("unable to send coins from module to account despite previously minting coins to module account: %v", err)) + } + + return nil +} +``` + +Finally, add the function to unlock token after they are sent back to the native blockchain: + +```go +// x/dex/keeper/mint.go + +func (k Keeper) UnlockTokens(ctx sdk.Context, sourcePort string, sourceChannel string, receiver sdk.AccAddress, tokens sdk.Coin) error { + // create the escrow address for the tokens + escrowAddress := ibctransfertypes.GetEscrowAddress(sourcePort, sourceChannel) + + // escrow source tokens. It fails if balance insufficient + if err := k.bankKeeper.SendCoins( + ctx, escrowAddress, receiver, sdk.NewCoins(tokens), + ); err != nil { + return err + } + + return nil +} +``` + +The `MintTokens` function uses two keeper methods from the `bank` module: `MintCoins` and `SendCoinsFromModuleToAccount`. +To import these methods, add their signatures to the `BankKeeper` interface in the `x/dex/types/expected_keepers.go` file: + +```go +// x/dex/types/expected_keepers.go + +type BankKeeper interface { + // ... + MintCoins(ctx sdk.Context, moduleName string, amt sdk.Coins) error + SendCoinsFromModuleToAccount(ctx sdk.Context, senderModule string, recipientAddr sdk.AccAddress, amt sdk.Coins) error +} +``` + + +## Summary + +You finished the mint and burn voucher logic. + +It is a good time to make another git commit to save the state of your work: + + +```bash +git add . 
+git commit -m "Add Mint and Burn Voucher" +``` + +In the next chapter, you look into creating sell orders. diff --git a/docs/versioned_docs/version-v0.25/guide/08-interchange/06-creating-sell-orders.md b/docs/versioned_docs/version-v0.25/guide/08-interchange/06-creating-sell-orders.md new file mode 100644 index 0000000..9341afc --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/08-interchange/06-creating-sell-orders.md @@ -0,0 +1,374 @@ +--- +sidebar_position: 6 +description: Implement logic to create sell orders. +--- + +# Create Sell Orders + +In this chapter, you implement the logic for creating sell orders. + +The packet proto file for a sell order is already generated. Add the seller information: + +```protobuf +// proto/dex/packet.proto + +message SellOrderPacketData { + // ... + string seller = 5; +} +``` + +Now, use Ignite CLI to build the proto files for the `send-sell-order` command. You used this command in a previous chapter. + +```bash +ignite generate proto-go --yes +``` + +## Message Handling in SendSellOrder + +Sell orders are created using the `send-sell-order` command. This command creates a transaction with a `SendSellOrder` message that triggers the `SendSellOrder` keeper method. + +The `SendSellOrder` command: + +* Checks that an order book for a specified denom pair exists. +* Safely burns or locks token. + * If the token is an IBC token, burn the token. + * If the token is a native token, lock the token. +* Saves the voucher that is received on the target chain to later resolve a denom. +* Transmits an IBC packet to the target chain. 
+ +```go +// x/dex/keeper/msg_server_sell_order.go + +package keeper + +import ( + "context" + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + clienttypes "github.com/cosmos/ibc-go/v5/modules/core/02-client/types" + + "interchange/x/dex/types" +) + +func (k msgServer) SendSellOrder(goCtx context.Context, msg *types.MsgSendSellOrder) (*types.MsgSendSellOrderResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // If an order book doesn't exist, throw an error + pairIndex := types.OrderBookIndex(msg.Port, msg.ChannelID, msg.AmountDenom, msg.PriceDenom) + _, found := k.GetSellOrderBook(ctx, pairIndex) + if !found { + return &types.MsgSendSellOrderResponse{}, errors.New("the pair doesn't exist") + } + + // Get sender's address + sender, err := sdk.AccAddressFromBech32(msg.Creator) + if err != nil { + return &types.MsgSendSellOrderResponse{}, err + } + + // Use SafeBurn to ensure no new native tokens are minted + if err := k.SafeBurn(ctx, msg.Port, msg.ChannelID, sender, msg.AmountDenom, msg.Amount); err != nil { + return &types.MsgSendSellOrderResponse{}, err + } + + // Save the voucher received on the other chain, to have the ability to resolve it into the original denom + k.SaveVoucherDenom(ctx, msg.Port, msg.ChannelID, msg.AmountDenom) + + var packet types.SellOrderPacketData + packet.Seller = msg.Creator + packet.AmountDenom = msg.AmountDenom + packet.Amount = msg.Amount + packet.PriceDenom = msg.PriceDenom + packet.Price = msg.Price + + // Transmit the packet + err = k.TransmitSellOrderPacket(ctx, packet, msg.Port, msg.ChannelID, clienttypes.ZeroHeight(), msg.TimeoutTimestamp) + if err != nil { + return nil, err + } + + return &types.MsgSendSellOrderResponse{}, nil +} +``` + +## On Receiving a Sell Order + +When a "sell order" packet is received on the target chain, you want the module to: + +* Update the sell order book +* Distribute sold token to the buyer +* Send the sell order to chain A after the fill attempt + +```go +// 
x/dex/keeper/sell_order.go + +func (k Keeper) OnRecvSellOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.SellOrderPacketData) (packetAck types.SellOrderPacketAck, err error) { + if err := data.ValidateBasic(); err != nil { + return packetAck, err + } + + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.AmountDenom, data.PriceDenom) + book, found := k.GetBuyOrderBook(ctx, pairIndex) + if !found { + return packetAck, errors.New("the pair doesn't exist") + } + + // Fill sell order + remaining, liquidated, gain, _ := book.FillSellOrder(types.Order{ + Amount: data.Amount, + Price: data.Price, + }) + + // Return remaining amount and gains + packetAck.RemainingAmount = remaining.Amount + packetAck.Gain = gain + + // Before distributing sales, we resolve the denom + // First we check if the denom received comes from this chain originally + finalAmountDenom, saved := k.OriginalDenom(ctx, packet.DestinationPort, packet.DestinationChannel, data.AmountDenom) + if !saved { + // If it was not from this chain we use voucher as denom + finalAmountDenom = VoucherDenom(packet.SourcePort, packet.SourceChannel, data.AmountDenom) + } + + // Dispatch liquidated buy orders + for _, liquidation := range liquidated { + liquidation := liquidation + addr, err := sdk.AccAddressFromBech32(liquidation.Creator) + if err != nil { + return packetAck, err + } + + if err := k.SafeMint(ctx, packet.DestinationPort, packet.DestinationChannel, addr, finalAmountDenom, liquidation.Amount); err != nil { + return packetAck, err + } + } + + // Save the new order book + k.SetBuyOrderBook(ctx, book) + + return packetAck, nil +} +``` + +### Implement a FillBuyOrder Function + +The `FillBuyOrder` function tries to fill the sell order with the order book and returns all the side effects: + +```go +// x/dex/types/sell_order_book.go + +func (s *SellOrderBook) FillBuyOrder(order Order) ( + remainingBuyOrder Order, + liquidated []Order, + purchase int32, + filled 
bool,
) {
	var liquidatedList []Order
	totalPurchase := int32(0)
	remainingBuyOrder = order

	// Liquidate as long as there is match
	for {
		var match bool
		var liquidation Order
		remainingBuyOrder, liquidation, purchase, match, filled = s.LiquidateFromBuyOrder(
			remainingBuyOrder,
		)
		if !match {
			break
		}

		// Update gains
		totalPurchase += purchase

		// Update liquidated
		liquidatedList = append(liquidatedList, liquidation)

		if filled {
			break
		}
	}

	return remainingBuyOrder, liquidatedList, totalPurchase, filled
}
```

### Implement a LiquidateFromBuyOrder Function

The `LiquidateFromBuyOrder` function liquidates the first sell order of the book from the buy order. If no match is found, return false for match:

```go
// x/dex/types/sell_order_book.go

func (s *SellOrderBook) LiquidateFromBuyOrder(order Order) (
	remainingBuyOrder Order,
	liquidatedSellOrder Order,
	purchase int32,
	match bool,
	filled bool,
) {
	remainingBuyOrder = order

	// No match if no order
	orderCount := len(s.Book.Orders)
	if orderCount == 0 {
		return order, liquidatedSellOrder, purchase, false, false
	}

	// Check if match
	lowestAsk := s.Book.Orders[orderCount-1]
	if order.Price < lowestAsk.Price {
		return order, liquidatedSellOrder, purchase, false, false
	}

	liquidatedSellOrder = *lowestAsk

	// Check if buy order can be entirely filled
	if lowestAsk.Amount >= order.Amount {
		remainingBuyOrder.Amount = 0
		liquidatedSellOrder.Amount = order.Amount
		purchase = order.Amount

		// Remove lowest ask if it has been entirely liquidated
		lowestAsk.Amount -= order.Amount
		if lowestAsk.Amount == 0 {
			s.Book.Orders = s.Book.Orders[:orderCount-1]
		} else {
			s.Book.Orders[orderCount-1] = lowestAsk
		}

		return remainingBuyOrder, liquidatedSellOrder, purchase, true, true
	}

	// Not entirely filled
	purchase = lowestAsk.Amount
	s.Book.Orders = s.Book.Orders[:orderCount-1]
	remainingBuyOrder.Amount -= 
lowestAsk.Amount + + return remainingBuyOrder, liquidatedSellOrder, purchase, true, false +} +``` + +### Implement the OnAcknowledgement Function for Sell Order Packets + +After an IBC packet is processed on the target chain, an acknowledgement is returned to the source chain and processed by the `OnAcknowledgementSellOrderPacket` function. + +The dex module on the source chain: + +- Stores the remaining sell order in the sell order book. +- Distributes sold tokens to the buyers. +- Distributes the price of the amount sold to the seller. +- On error, mints the burned tokens. + +```go +// x/dex/keeper/sell_order.go + +func (k Keeper) OnAcknowledgementSellOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.SellOrderPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + // In case of error we mint back the native token + receiver, err := sdk.AccAddressFromBech32(data.Seller) + if err != nil { + return err + } + + if err := k.SafeMint(ctx, packet.SourcePort, packet.SourceChannel, receiver, data.AmountDenom, data.Amount); err != nil { + return err + } + + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.SellOrderPacketAck + if err := types.ModuleCdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + // Get the sell order book + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.AmountDenom, data.PriceDenom) + book, found := k.GetSellOrderBook(ctx, pairIndex) + if !found { + panic("sell order book must exist") + } + + // Append the remaining amount of the order + if packetAck.RemainingAmount > 0 { + _, err := book.AppendOrder(data.Seller, packetAck.RemainingAmount, data.Price) + if err != nil { + return err + } + + 
// Save the new order book + k.SetSellOrderBook(ctx, book) + } + + // Mint the gains + if packetAck.Gain > 0 { + receiver, err := sdk.AccAddressFromBech32(data.Seller) + if err != nil { + return err + } + + finalPriceDenom, saved := k.OriginalDenom(ctx, packet.SourcePort, packet.SourceChannel, data.PriceDenom) + if !saved { + // If it was not from this chain we use voucher as denom + finalPriceDenom = VoucherDenom(packet.DestinationPort, packet.DestinationChannel, data.PriceDenom) + } + + if err := k.SafeMint(ctx, packet.SourcePort, packet.SourceChannel, receiver, finalPriceDenom, packetAck.Gain); err != nil { + return err + } + } + + return nil + default: + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("invalid acknowledgment format") + } +} +``` + +```go +// x/dex/types/sell_order_book.go + +func (s *SellOrderBook) AppendOrder(creator string, amount int32, price int32) (int32, error) { + return s.Book.appendOrder(creator, amount, price, Decreasing) +} +``` + +### Add the OnTimeout of a Sell Order Packet Function + +If a timeout occurs, mint back the native token: + +```go +// x/dex/keeper/sell_order.go + +func (k Keeper) OnTimeoutSellOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.SellOrderPacketData) error { + // In case of error we mint back the native token + receiver, err := sdk.AccAddressFromBech32(data.Seller) + if err != nil { + return err + } + + if err := k.SafeMint(ctx, packet.SourcePort, packet.SourceChannel, receiver, data.AmountDenom, data.Amount); err != nil { + return err + } + + return nil +} +``` + +## Summary + +Great, you have completed the sell order logic. + +It is a good time to make another git commit again to save the state of your work: + +```bash +git add . 
+git commit -m "Add Sell Orders" +``` diff --git a/docs/versioned_docs/version-v0.25/guide/08-interchange/07-creating-buy-orders.md b/docs/versioned_docs/version-v0.25/guide/08-interchange/07-creating-buy-orders.md new file mode 100644 index 0000000..92ed5ef --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/08-interchange/07-creating-buy-orders.md @@ -0,0 +1,396 @@ +--- +sidebar_position: 7 +description: Implement the buy order logic. +--- + +# Creating Buy Orders + +In this chapter, you implement the creation of buy orders. The logic is very similar to the sell order logic you implemented in the previous chapter. + +## Modify the Proto Definition + +Add the buyer to the proto file definition: + +```protobuf +// proto/dex/packet.proto + +message BuyOrderPacketData { + // ... + string buyer = 5; +} +``` + +Now, use Ignite CLI to build the proto files for the `send-buy-order` command. You used this command in previous chapters. + +```bash +ignite generate proto-go --yes +``` + +## IBC Message Handling in SendBuyOrder + +* Check if the pair exists on the order book +* If the token is an IBC token, burn the tokens +* If the token is a native token, lock the tokens +* Save the voucher received on the target chain to later resolve a denom + +```go +// x/dex/keeper/msg_server_buy_order.go + +package keeper + +import ( + "context" + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "interchange/x/dex/types" +) + +func (k msgServer) SendBuyOrder(goCtx context.Context, msg *types.MsgSendBuyOrder) (*types.MsgSendBuyOrderResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Cannot send a order if the pair doesn't exist + pairIndex := types.OrderBookIndex(msg.Port, msg.ChannelID, msg.AmountDenom, msg.PriceDenom) + _, found := k.GetBuyOrderBook(ctx, pairIndex) + if !found { + return &types.MsgSendBuyOrderResponse{}, errors.New("the pair doesn't exist") + } + + // Lock the token to send + sender, err := sdk.AccAddressFromBech32(msg.Creator) + if err != 
nil { + return &types.MsgSendBuyOrderResponse{}, err + } + + // Use SafeBurn to ensure no new native tokens are minted + if err := k.SafeBurn(ctx, msg.Port, msg.ChannelID, sender, msg.PriceDenom, msg.Amount*msg.Price); err != nil { + return &types.MsgSendBuyOrderResponse{}, err + } + + // Save the voucher received on the other chain, to have the ability to resolve it into the original denom + k.SaveVoucherDenom(ctx, msg.Port, msg.ChannelID, msg.PriceDenom) + + // Construct the packet + var packet types.BuyOrderPacketData + packet.Buyer = msg.Creator + + // Transmit an IBC packet... + return &types.MsgSendBuyOrderResponse{}, nil +} +``` + +## On Receiving a Buy Order + +* Update the buy order book +* Distribute sold token to the buyer +* Send to chain A the sell order after the fill attempt + +```go +// x/dex/keeper/buy_order.go + +func (k Keeper) OnRecvBuyOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.BuyOrderPacketData) (packetAck types.BuyOrderPacketAck, err error) { + // validate packet data upon receiving + if err := data.ValidateBasic(); err != nil { + return packetAck, err + } + + // Check if the sell order book exists + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.AmountDenom, data.PriceDenom) + book, found := k.GetSellOrderBook(ctx, pairIndex) + if !found { + return packetAck, errors.New("the pair doesn't exist") + } + + // Fill buy order + remaining, liquidated, purchase, _ := book.FillBuyOrder(types.Order{ + Amount: data.Amount, + Price: data.Price, + }) + + // Return remaining amount and gains + packetAck.RemainingAmount = remaining.Amount + packetAck.Purchase = purchase + + // Before distributing gains, we resolve the denom + // First we check if the denom received comes from this chain originally + finalPriceDenom, saved := k.OriginalDenom(ctx, packet.DestinationPort, packet.DestinationChannel, data.PriceDenom) + if !saved { + // If it was not from this chain we use voucher as denom + 
finalPriceDenom = VoucherDenom(packet.SourcePort, packet.SourceChannel, data.PriceDenom)
	}

	// Dispatch liquidated buy order
	for _, liquidation := range liquidated {
		liquidation := liquidation
		addr, err := sdk.AccAddressFromBech32(liquidation.Creator)
		if err != nil {
			return packetAck, err
		}

		if err := k.SafeMint(
			ctx,
			packet.DestinationPort,
			packet.DestinationChannel,
			addr,
			finalPriceDenom,
			liquidation.Amount*liquidation.Price,
		); err != nil {
			return packetAck, err
		}
	}

	// Save the new order book
	k.SetSellOrderBook(ctx, book)

	return packetAck, nil
}
```

### Implement the FillSellOrder Function

The `FillSellOrder` function tries to fill the sell order with the order book and returns all the side effects:

```go
// x/dex/types/buy_order_book.go

func (b *BuyOrderBook) FillSellOrder(order Order) (
	remainingSellOrder Order,
	liquidated []Order,
	gain int32,
	filled bool,
) {
	var liquidatedList []Order
	totalGain := int32(0)
	remainingSellOrder = order

	// Liquidate as long as there is match
	for {
		var match bool
		var liquidation Order
		remainingSellOrder, liquidation, gain, match, filled = b.LiquidateFromSellOrder(
			remainingSellOrder,
		)
		if !match {
			break
		}

		// Update gains
		totalGain += gain

		// Update liquidated
		liquidatedList = append(liquidatedList, liquidation)

		if filled {
			break
		}
	}

	return remainingSellOrder, liquidatedList, totalGain, filled
}
```

### Implement The LiquidateFromSellOrder Function

The `LiquidateFromSellOrder` function liquidates the first buy order of the book from the sell order. 
If no match is found, return false for match:

```go
// x/dex/types/buy_order_book.go

func (b *BuyOrderBook) LiquidateFromSellOrder(order Order) (
	remainingSellOrder Order,
	liquidatedBuyOrder Order,
	gain int32,
	match bool,
	filled bool,
) {
	remainingSellOrder = order

	// No match if no order
	orderCount := len(b.Book.Orders)
	if orderCount == 0 {
		return order, liquidatedBuyOrder, gain, false, false
	}

	// Check if match
	highestBid := b.Book.Orders[orderCount-1]
	if order.Price > highestBid.Price {
		return order, liquidatedBuyOrder, gain, false, false
	}

	liquidatedBuyOrder = *highestBid

	// Check if sell order can be entirely filled
	if highestBid.Amount >= order.Amount {
		remainingSellOrder.Amount = 0
		liquidatedBuyOrder.Amount = order.Amount
		gain = order.Amount * highestBid.Price

		// Remove highest bid if it has been entirely liquidated
		highestBid.Amount -= order.Amount
		if highestBid.Amount == 0 {
			b.Book.Orders = b.Book.Orders[:orderCount-1]
		} else {
			b.Book.Orders[orderCount-1] = highestBid
		}

		return remainingSellOrder, liquidatedBuyOrder, gain, true, true
	}

	// Not entirely filled
	gain = highestBid.Amount * highestBid.Price
	b.Book.Orders = b.Book.Orders[:orderCount-1]
	remainingSellOrder.Amount -= highestBid.Amount

	return remainingSellOrder, liquidatedBuyOrder, gain, true, false
}
```

## Receiving a Buy Order Acknowledgment


After a buy order acknowledgement is received, chain `Mars`:

* Stores the remaining buy order in the buy order book.
* Distributes sold `marscoin` to the buyers.
* Distributes to the seller the price of the amount sold.
* On error, mints back the burned tokens. 
+ +```go +// x/dex/keeper/buy_order.go + +func (k Keeper) OnAcknowledgementBuyOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.BuyOrderPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + // In case of error we mint back the native token + receiver, err := sdk.AccAddressFromBech32(data.Buyer) + if err != nil { + return err + } + + if err := k.SafeMint( + ctx, + packet.SourcePort, + packet.SourceChannel, + receiver, + data.PriceDenom, + data.Amount*data.Price, + ); err != nil { + return err + } + + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.BuyOrderPacketAck + + if err := types.ModuleCdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + // Get the sell order book + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.AmountDenom, data.PriceDenom) + book, found := k.GetBuyOrderBook(ctx, pairIndex) + if !found { + panic("buy order book must exist") + } + + // Append the remaining amount of the order + if packetAck.RemainingAmount > 0 { + _, err := book.AppendOrder( + data.Buyer, + packetAck.RemainingAmount, + data.Price, + ) + if err != nil { + return err + } + + // Save the new order book + k.SetBuyOrderBook(ctx, book) + } + + // Mint the purchase + if packetAck.Purchase > 0 { + receiver, err := sdk.AccAddressFromBech32(data.Buyer) + if err != nil { + return err + } + + finalAmountDenom, saved := k.OriginalDenom(ctx, packet.SourcePort, packet.SourceChannel, data.AmountDenom) + if !saved { + // If it was not from this chain we use voucher as denom + finalAmountDenom = VoucherDenom(packet.DestinationPort, packet.DestinationChannel, data.AmountDenom) + } + + if err := k.SafeMint( + ctx, + 
packet.SourcePort, + packet.SourceChannel, + receiver, + finalAmountDenom, + packetAck.Purchase, + ); err != nil { + return err + } + } + + return nil + default: + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("invalid acknowledgment format") + } +} +``` + +`AppendOrder` appends an order in the buy order book. +Add the following function to the `x/dex/types/buy_order_book.go` file in the `types` directory. + +```go +// x/dex/types/buy_order_book.go + +func (b *BuyOrderBook) AppendOrder(creator string, amount int32, price int32) (int32, error) { + return b.Book.appendOrder(creator, amount, price, Increasing) +} +``` + +## OnTimeout of a Buy Order Packet + +If a timeout occurs, mint back the native token: + +```go +// x/dex/keeper/buy_order.go + +func (k Keeper) OnTimeoutBuyOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.BuyOrderPacketData) error { + // In case of error we mint back the native token + receiver, err := sdk.AccAddressFromBech32(data.Buyer) + if err != nil { + return err + } + + if err := k.SafeMint( + ctx, + packet.SourcePort, + packet.SourceChannel, + receiver, + data.PriceDenom, + data.Amount*data.Price, + ); err != nil { + return err + } + + return nil +} +``` + +## Summary + +Congratulations, you implemented the buy order logic. + +Again, it's a good time to save your current state to your local GitHub repository: + +```bash +git add . +git commit -m "Add Buy Orders" +``` diff --git a/docs/versioned_docs/version-v0.25/guide/08-interchange/08-cancelling-orders.md b/docs/versioned_docs/version-v0.25/guide/08-interchange/08-cancelling-orders.md new file mode 100644 index 0000000..28ba3d1 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/08-interchange/08-cancelling-orders.md @@ -0,0 +1,193 @@ +--- +sidebar_position: 8 +description: Enable cancelling of buy and sell orders. +--- + +# Cancelling Orders + +You have implemented order books, buy and sell orders. 
In this chapter, you enable cancelling of buy and sell orders. + +## Cancel a Sell Order + +To cancel a sell order, you have to get the ID of the specific sell order. Then you can use the function `RemoveOrderFromID` to remove the specific order from the order book and update the keeper accordingly. + +Move to the keeper directory and edit the `x/dex/keeper/msg_server_cancel_sell_order.go` file: + +```go +// x/dex/keeper/msg_server_cancel_sell_order.go + +package keeper + +import ( + "context" + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "interchange/x/dex/types" +) + +func (k msgServer) CancelSellOrder(goCtx context.Context, msg *types.MsgCancelSellOrder) (*types.MsgCancelSellOrderResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Retrieve the book + pairIndex := types.OrderBookIndex(msg.Port, msg.Channel, msg.AmountDenom, msg.PriceDenom) + s, found := k.GetSellOrderBook(ctx, pairIndex) + if !found { + return &types.MsgCancelSellOrderResponse{}, errors.New("the pair doesn't exist") + } + + // Check order creator + order, err := s.Book.GetOrderFromID(msg.OrderID) + if err != nil { + return &types.MsgCancelSellOrderResponse{}, err + } + + if order.Creator != msg.Creator { + return &types.MsgCancelSellOrderResponse{}, errors.New("canceller must be creator") + } + + // Remove order + if err := s.Book.RemoveOrderFromID(msg.OrderID); err != nil { + return &types.MsgCancelSellOrderResponse{}, err + } + + k.SetSellOrderBook(ctx, s) + + // Refund seller with remaining amount + seller, err := sdk.AccAddressFromBech32(order.Creator) + if err != nil { + return &types.MsgCancelSellOrderResponse{}, err + } + + if err := k.SafeMint(ctx, msg.Port, msg.Channel, seller, msg.AmountDenom, order.Amount); err != nil { + return &types.MsgCancelSellOrderResponse{}, err + } + + return &types.MsgCancelSellOrderResponse{}, nil +} +``` + +### Implement the GetOrderFromID Function + +The `GetOrderFromID` function gets an order from the book from its ID. 
+ +Add this function to the `x/dex/types/order_book.go` function in the `types` directory: + +```go +// x/dex/types/order_book.go + +func (book OrderBook) GetOrderFromID(id int32) (Order, error) { + for _, order := range book.Orders { + if order.Id == id { + return *order, nil + } + } + + return Order{}, ErrOrderNotFound +} +``` + +### Implement the RemoveOrderFromID Function + +The `RemoveOrderFromID` function removes an order from the book and keeps it ordered: + +```go +// x/dex/types/order_book.go + +func (book *OrderBook) RemoveOrderFromID(id int32) error { + for i, order := range book.Orders { + if order.Id == id { + book.Orders = append(book.Orders[:i], book.Orders[i+1:]...) + return nil + } + } + + return ErrOrderNotFound +} +``` + +## Cancel a Buy Order + +To cancel a buy order, you have to get the ID of the specific buy order. Then you can use the function `RemoveOrderFromID` to remove the specific order from the order book and update the keeper accordingly: + +```go +// x/dex/keeper/msg_server_cancel_buy_order.go + +package keeper + +import ( + "context" + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "interchange/x/dex/types" +) + +func (k msgServer) CancelBuyOrder(goCtx context.Context, msg *types.MsgCancelBuyOrder) (*types.MsgCancelBuyOrderResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Retrieve the book + pairIndex := types.OrderBookIndex(msg.Port, msg.Channel, msg.AmountDenom, msg.PriceDenom) + b, found := k.GetBuyOrderBook(ctx, pairIndex) + if !found { + return &types.MsgCancelBuyOrderResponse{}, errors.New("the pair doesn't exist") + } + + // Check order creator + order, err := b.Book.GetOrderFromID(msg.OrderID) + if err != nil { + return &types.MsgCancelBuyOrderResponse{}, err + } + + if order.Creator != msg.Creator { + return &types.MsgCancelBuyOrderResponse{}, errors.New("canceller must be creator") + } + + // Remove order + if err := b.Book.RemoveOrderFromID(msg.OrderID); err != nil { + return 
&types.MsgCancelBuyOrderResponse{}, err + } + + k.SetBuyOrderBook(ctx, b) + + // Refund buyer with remaining price amount + buyer, err := sdk.AccAddressFromBech32(order.Creator) + if err != nil { + return &types.MsgCancelBuyOrderResponse{}, err + } + + if err := k.SafeMint( + ctx, + msg.Port, + msg.Channel, + buyer, + msg.PriceDenom, + order.Amount*order.Price, + ); err != nil { + return &types.MsgCancelBuyOrderResponse{}, err + } + + return &types.MsgCancelBuyOrderResponse{}, nil +} +``` + +## Summary + +You have completed implementing the functions that are required for the `dex` module. In this chapter, you have implemented the design for cancelling specific buy or sell orders. + +To test if your Ignite CLI blockchain builds correctly, use the `chain build` command: + +```bash +ignite chain build +``` + +Again, it is a good time (a great time!) to add your state to the local GitHub repository: + +```bash +git add . +git commit -m "Add Cancelling Orders" +``` + +Finally, it's now time to write test files. diff --git a/docs/versioned_docs/version-v0.25/guide/08-interchange/09-tests.md b/docs/versioned_docs/version-v0.25/guide/08-interchange/09-tests.md new file mode 100644 index 0000000..8d3d933 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/08-interchange/09-tests.md @@ -0,0 +1,729 @@ +--- +sidebar_position: 9 +description: Add test files. +--- + +# Write Test Files + +To test your application, add the test files to your code. + +After you add the test files, change into the `interchange` directory with your terminal, then run: + +```bash +go test -timeout 30s ./x/dex/types +``` + +## Order Book Tests + +Create a new `x/dex/types/order_book_test.go` file in the `types` directory. 
+ +Add the following testsuite: + +```go +// x/dex/types/order_book_test.go + +package types_test + +import ( + "math/rand" + "testing" + + "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/stretchr/testify/require" + + "interchange/x/dex/types" +) + +func GenString(n int) string { + alpha := []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ") + + buf := make([]rune, n) + for i := range buf { + buf[i] = alpha[rand.Intn(len(alpha))] + } + + return string(buf) +} + +func GenAddress() string { + pk := ed25519.GenPrivKey().PubKey() + addr := pk.Address() + return sdk.AccAddress(addr).String() +} + +func GenAmount() int32 { + return int32(rand.Intn(int(types.MaxAmount)) + 1) +} + +func GenPrice() int32 { + return int32(rand.Intn(int(types.MaxPrice)) + 1) +} + +func GenPair() (string, string) { + return GenString(10), GenString(10) +} + +func GenOrder() (string, int32, int32) { + return GenLocalAccount(), GenAmount(), GenPrice() +} + +func GenLocalAccount() string { + return GenAddress() +} + +func MockAccount(str string) string { + return str +} + +func OrderListToOrderBook(list []types.Order) types.OrderBook { + listCopy := make([]*types.Order, len(list)) + for i, order := range list { + order := order + listCopy[i] = &order + } + + return types.OrderBook{ + IdCount: 0, + Orders: listCopy, + } +} + +func TestRemoveOrderFromID(t *testing.T) { + inputList := []types.Order{ + {Id: 3, Creator: MockAccount("3"), Amount: 2, Price: 10}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + + book := OrderListToOrderBook(inputList) + expectedList := []types.Order{ + {Id: 3, Creator: MockAccount("3"), Amount: 2, Price: 10}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + expectedBook := 
OrderListToOrderBook(expectedList) + err := book.RemoveOrderFromID(2) + require.NoError(t, err) + require.Equal(t, expectedBook, book) + + book = OrderListToOrderBook(inputList) + expectedList = []types.Order{ + {Id: 3, Creator: MockAccount("3"), Amount: 2, Price: 10}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + } + expectedBook = OrderListToOrderBook(expectedList) + err = book.RemoveOrderFromID(0) + require.NoError(t, err) + require.Equal(t, expectedBook, book) + + book = OrderListToOrderBook(inputList) + expectedList = []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + expectedBook = OrderListToOrderBook(expectedList) + err = book.RemoveOrderFromID(3) + require.NoError(t, err) + require.Equal(t, expectedBook, book) + + book = OrderListToOrderBook(inputList) + err = book.RemoveOrderFromID(4) + require.ErrorIs(t, err, types.ErrOrderNotFound) +} +``` + +## Buy Order Tests + +Create a new `x/dex/types/buy_order_book_test.go` file in the `types` directory to add the tests for the Buy Order Book: + +```go +// x/dex/types/buy_order_book_test.go + +package types_test + +import ( + "sort" + "testing" + + "github.com/stretchr/testify/require" + + "interchange/x/dex/types" +) + +func OrderListToBuyOrderBook(list []types.Order) types.BuyOrderBook { + listCopy := make([]*types.Order, len(list)) + for i, order := range list { + order := order + listCopy[i] = &order + } + + book := types.BuyOrderBook{ + AmountDenom: "foo", + PriceDenom: "bar", + Book: &types.OrderBook{ + IdCount: 0, + Orders: listCopy, + }, + } + return book +} + +func TestAppendOrder(t *testing.T) { + buyBook := types.NewBuyOrderBook(GenPair()) + + // Prevent zero amount + seller, amount, price := GenOrder() + _, err := buyBook.AppendOrder(seller, 0, price) + require.ErrorIs(t, 
err, types.ErrZeroAmount) + + // Prevent big amount + _, err = buyBook.AppendOrder(seller, types.MaxAmount+1, price) + require.ErrorIs(t, err, types.ErrMaxAmount) + + // Prevent zero price + _, err = buyBook.AppendOrder(seller, amount, 0) + require.ErrorIs(t, err, types.ErrZeroPrice) + + // Prevent big price + _, err = buyBook.AppendOrder(seller, amount, types.MaxPrice+1) + require.ErrorIs(t, err, types.ErrMaxPrice) + + // Can append buy orders + for i := 0; i < 20; i++ { + // Append a new order + creator, amount, price := GenOrder() + newOrder := types.Order{ + Id: buyBook.Book.IdCount, + Creator: creator, + Amount: amount, + Price: price, + } + orderID, err := buyBook.AppendOrder(creator, amount, price) + + // Checks + require.NoError(t, err) + require.Contains(t, buyBook.Book.Orders, &newOrder) + require.Equal(t, newOrder.Id, orderID) + } + + require.Len(t, buyBook.Book.Orders, 20) + require.True(t, sort.SliceIsSorted(buyBook.Book.Orders, func(i, j int) bool { + return buyBook.Book.Orders[i].Price < buyBook.Book.Orders[j].Price + })) +} + +type liquidateSellRes struct { + Book []types.Order + Remaining types.Order + Liquidated types.Order + Gain int32 + Match bool + Filled bool +} + +func simulateLiquidateFromSellOrder( + t *testing.T, + inputList []types.Order, + inputOrder types.Order, + expected liquidateSellRes, +) { + book := OrderListToBuyOrderBook(inputList) + expectedBook := OrderListToBuyOrderBook(expected.Book) + + require.True(t, sort.SliceIsSorted(book.Book.Orders, func(i, j int) bool { + return book.Book.Orders[i].Price < book.Book.Orders[j].Price + })) + require.True(t, sort.SliceIsSorted(expectedBook.Book.Orders, func(i, j int) bool { + return expectedBook.Book.Orders[i].Price < expectedBook.Book.Orders[j].Price + })) + + remaining, liquidated, gain, match, filled := book.LiquidateFromSellOrder(inputOrder) + + require.Equal(t, expectedBook, book) + require.Equal(t, expected.Remaining, remaining) + require.Equal(t, expected.Liquidated, liquidated) 
+ require.Equal(t, expected.Gain, gain) + require.Equal(t, expected.Match, match) + require.Equal(t, expected.Filled, filled) +} + +func TestLiquidateFromSellOrder(t *testing.T) { + // No match for empty book + inputOrder := types.Order{Id: 10, Creator: MockAccount("1"), Amount: 100, Price: 30} + book := OrderListToBuyOrderBook([]types.Order{}) + _, _, _, match, _ := book.LiquidateFromSellOrder(inputOrder) + require.False(t, match) + + // Buy book + inputBook := []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + + // Test no match if highest bid too low (25 < 30) + book = OrderListToBuyOrderBook(inputBook) + _, _, _, match, _ = book.LiquidateFromSellOrder(inputOrder) + require.False(t, match) + + // Entirely filled (30 < 50) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 22} + expected := liquidateSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 20, Price: 25}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 22}, + Liquidated: types.Order{Id: 0, Creator: MockAccount("0"), Amount: 30, Price: 25}, + Gain: int32(30 * 25), + Match: true, + Filled: true, + } + simulateLiquidateFromSellOrder(t, inputBook, inputOrder, expected) + + // Entirely filled and liquidated ( 50 = 50) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 50, Price: 15} + expected = liquidateSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 15}, + Liquidated: types.Order{Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + 
Gain: int32(50 * 25), + Match: true, + Filled: true, + } + simulateLiquidateFromSellOrder(t, inputBook, inputOrder, expected) + + // Not filled and entirely liquidated (60 > 50) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 10} + expected = liquidateSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 10, Price: 10}, + Liquidated: types.Order{Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + Gain: int32(50 * 25), + Match: true, + Filled: false, + } + simulateLiquidateFromSellOrder(t, inputBook, inputOrder, expected) +} + +type fillSellRes struct { + Book []types.Order + Remaining types.Order + Liquidated []types.Order + Gain int32 + Filled bool +} + +func simulateFillSellOrder( + t *testing.T, + inputList []types.Order, + inputOrder types.Order, + expected fillSellRes, +) { + book := OrderListToBuyOrderBook(inputList) + expectedBook := OrderListToBuyOrderBook(expected.Book) + + require.True(t, sort.SliceIsSorted(book.Book.Orders, func(i, j int) bool { + return book.Book.Orders[i].Price < book.Book.Orders[j].Price + })) + require.True(t, sort.SliceIsSorted(expectedBook.Book.Orders, func(i, j int) bool { + return expectedBook.Book.Orders[i].Price < expectedBook.Book.Orders[j].Price + })) + + remaining, liquidated, gain, filled := book.FillSellOrder(inputOrder) + + require.Equal(t, expectedBook, book) + require.Equal(t, expected.Remaining, remaining) + require.Equal(t, expected.Liquidated, liquidated) + require.Equal(t, expected.Gain, gain) + require.Equal(t, expected.Filled, filled) +} + +func TestFillSellOrder(t *testing.T) { + var inputBook []types.Order + + // Empty book + inputOrder := types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 30} + expected := fillSellRes{ + Book: []types.Order{}, + Remaining: inputOrder, + Liquidated: 
[]types.Order(nil), + Gain: int32(0), + Filled: false, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) + + // No match + inputBook = []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + expected = fillSellRes{ + Book: inputBook, + Remaining: inputOrder, + Liquidated: []types.Order(nil), + Gain: int32(0), + Filled: false, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) + + // First order liquidated, not filled + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 22} + expected = fillSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 10, Price: 22}, + Liquidated: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + }, + Gain: int32(50 * 25), + Filled: false, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) + + // Filled with two orders + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 18} + expected = fillSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 190, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 18}, + Liquidated: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 10, Price: 20}, + }, + Gain: int32(50*25 + 10*20), + Filled: true, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) + + // Not filled, buy order book liquidated + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 300, Price: 10} + expected = fillSellRes{ + Book: []types.Order{}, + Remaining: types.Order{Id: 10, 
Creator: MockAccount("1"), Amount: 20, Price: 10}, + Liquidated: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + }, + Gain: int32(50*25 + 200*20 + 30*15), + Filled: false, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) +} +``` + +## Sell Order Tests + +Create a new testsuite for Sell Orders in a new file `x/dex/types/sell_order_book_test.go`: + +```go +// x/dex/types/sell_order_book_test.go + +package types_test + +import ( + "sort" + "testing" + + "github.com/stretchr/testify/require" + + "interchange/x/dex/types" +) + +func OrderListToSellOrderBook(list []types.Order) types.SellOrderBook { + listCopy := make([]*types.Order, len(list)) + for i, order := range list { + order := order + listCopy[i] = &order + } + + book := types.SellOrderBook{ + AmountDenom: "foo", + PriceDenom: "bar", + Book: &types.OrderBook{ + IdCount: 0, + Orders: listCopy, + }, + } + return book +} + +func TestSellOrderBook_AppendOrder(t *testing.T) { + sellBook := types.NewSellOrderBook(GenPair()) + + // Prevent zero amount + seller, amount, price := GenOrder() + _, err := sellBook.AppendOrder(seller, 0, price) + require.ErrorIs(t, err, types.ErrZeroAmount) + + // Prevent big amount + _, err = sellBook.AppendOrder(seller, types.MaxAmount+1, price) + require.ErrorIs(t, err, types.ErrMaxAmount) + + // Prevent zero price + _, err = sellBook.AppendOrder(seller, amount, 0) + require.ErrorIs(t, err, types.ErrZeroPrice) + + // Prevent big price + _, err = sellBook.AppendOrder(seller, amount, types.MaxPrice+1) + require.ErrorIs(t, err, types.ErrMaxPrice) + + // Can append sell orders + for i := 0; i < 20; i++ { + // Append a new order + creator, amount, price := GenOrder() + newOrder := types.Order{ + Id: sellBook.Book.IdCount, + Creator: creator, + Amount: amount, + Price: price, + } + orderID, err := sellBook.AppendOrder(creator, 
amount, price) + + // Checks + require.NoError(t, err) + require.Contains(t, sellBook.Book.Orders, &newOrder) + require.Equal(t, newOrder.Id, orderID) + } + require.Len(t, sellBook.Book.Orders, 20) + require.True(t, sort.SliceIsSorted(sellBook.Book.Orders, func(i, j int) bool { + return sellBook.Book.Orders[i].Price > sellBook.Book.Orders[j].Price + })) +} + +type liquidateBuyRes struct { + Book []types.Order + Remaining types.Order + Liquidated types.Order + Purchase int32 + Match bool + Filled bool +} + +func simulateLiquidateFromBuyOrder( + t *testing.T, + inputList []types.Order, + inputOrder types.Order, + expected liquidateBuyRes, +) { + book := OrderListToSellOrderBook(inputList) + expectedBook := OrderListToSellOrderBook(expected.Book) + require.True(t, sort.SliceIsSorted(book.Book.Orders, func(i, j int) bool { + return book.Book.Orders[i].Price > book.Book.Orders[j].Price + })) + require.True(t, sort.SliceIsSorted(expectedBook.Book.Orders, func(i, j int) bool { + return expectedBook.Book.Orders[i].Price > expectedBook.Book.Orders[j].Price + })) + + remaining, liquidated, purchase, match, filled := book.LiquidateFromBuyOrder(inputOrder) + + require.Equal(t, expectedBook, book) + require.Equal(t, expected.Remaining, remaining) + require.Equal(t, expected.Liquidated, liquidated) + require.Equal(t, expected.Purchase, purchase) + require.Equal(t, expected.Match, match) + require.Equal(t, expected.Filled, filled) +} + +func TestLiquidateFromBuyOrder(t *testing.T) { + // No match for empty book + inputOrder := types.Order{Id: 10, Creator: MockAccount("1"), Amount: 100, Price: 10} + book := OrderListToSellOrderBook([]types.Order{}) + _, _, _, match, _ := book.LiquidateFromBuyOrder(inputOrder) + require.False(t, match) + + // Sell book + inputBook := []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + } + + // Test no 
match if lowest ask too high (15 > 10) + book = OrderListToSellOrderBook(inputBook) + _, _, _, match, _ = book.LiquidateFromBuyOrder(inputOrder) + require.False(t, match) + + // Entirely filled (30 > 15) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 20, Price: 30} + expected := liquidateBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 2, Creator: MockAccount("2"), Amount: 10, Price: 15}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 30}, + Liquidated: types.Order{Id: 2, Creator: MockAccount("2"), Amount: 20, Price: 15}, + Purchase: int32(20), + Match: true, + Filled: true, + } + simulateLiquidateFromBuyOrder(t, inputBook, inputOrder, expected) + + // Entirely filled (30 = 30) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 30} + expected = liquidateBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 30}, + Liquidated: types.Order{Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + Purchase: int32(30), + Match: true, + Filled: true, + } + simulateLiquidateFromBuyOrder(t, inputBook, inputOrder, expected) + + // Not filled and entirely liquidated (60 > 30) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 30} + expected = liquidateBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 30}, + Liquidated: types.Order{Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + Purchase: int32(30), + Match: true, + Filled: false, + } + simulateLiquidateFromBuyOrder(t, 
inputBook, inputOrder, expected) +} + +type fillBuyRes struct { + Book []types.Order + Remaining types.Order + Liquidated []types.Order + Purchase int32 + Filled bool +} + +func simulateFillBuyOrder( + t *testing.T, + inputList []types.Order, + inputOrder types.Order, + expected fillBuyRes, +) { + book := OrderListToSellOrderBook(inputList) + expectedBook := OrderListToSellOrderBook(expected.Book) + + require.True(t, sort.SliceIsSorted(book.Book.Orders, func(i, j int) bool { + return book.Book.Orders[i].Price > book.Book.Orders[j].Price + })) + require.True(t, sort.SliceIsSorted(expectedBook.Book.Orders, func(i, j int) bool { + return expectedBook.Book.Orders[i].Price > expectedBook.Book.Orders[j].Price + })) + + remaining, liquidated, purchase, filled := book.FillBuyOrder(inputOrder) + + require.Equal(t, expectedBook, book) + require.Equal(t, expected.Remaining, remaining) + require.Equal(t, expected.Liquidated, liquidated) + require.Equal(t, expected.Purchase, purchase) + require.Equal(t, expected.Filled, filled) +} + +func TestFillBuyOrder(t *testing.T) { + var inputBook []types.Order + + // Empty book + inputOrder := types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 10} + expected := fillBuyRes{ + Book: []types.Order{}, + Remaining: inputOrder, + Liquidated: []types.Order(nil), + Purchase: int32(0), + Filled: false, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) + + // No match + inputBook = []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + } + expected = fillBuyRes{ + Book: inputBook, + Remaining: inputOrder, + Liquidated: []types.Order(nil), + Purchase: int32(0), + Filled: false, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) + + // First order liquidated, not filled + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 18} + expected = 
fillBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 18}, + Liquidated: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + }, + Purchase: int32(30), + Filled: false, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) + + // Filled with two orders + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 22} + expected = fillBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 170, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 22}, + Liquidated: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 30, Price: 20}, + }, + Purchase: int32(30 + 30), + Filled: true, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) + + // Not filled, sell order book liquidated + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 300, Price: 30} + expected = fillBuyRes{ + Book: []types.Order{}, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 20, Price: 30}, + Liquidated: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + }, + Purchase: int32(30 + 200 + 50), + Filled: false, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) +} +``` + +## Successful Test Output + +When the tests are successful, your output is: + +``` +ok interchange/x/dex/types 0.550s +``` diff --git a/docs/versioned_docs/version-v0.25/guide/08-interchange/_category_.json b/docs/versioned_docs/version-v0.25/guide/08-interchange/_category_.json new file mode 100644 index 
0000000..f427e86 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/08-interchange/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Advanced Module: Interchange", + "position": 8, + "link": null + } \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.25/guide/_category_.json b/docs/versioned_docs/version-v0.25/guide/_category_.json new file mode 100644 index 0000000..04bc676 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/guide/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Developer Tutorials", + "position": 2, + "link": null + } \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.25/guide/images/api.png b/docs/versioned_docs/version-v0.25/guide/images/api.png new file mode 100644 index 0000000..081df8f Binary files /dev/null and b/docs/versioned_docs/version-v0.25/guide/images/api.png differ diff --git a/docs/versioned_docs/version-v0.25/guide/images/packet_sendpost.png b/docs/versioned_docs/version-v0.25/guide/images/packet_sendpost.png new file mode 100644 index 0000000..0bb080c Binary files /dev/null and b/docs/versioned_docs/version-v0.25/guide/images/packet_sendpost.png differ diff --git a/docs/versioned_docs/version-v0.25/index.md b/docs/versioned_docs/version-v0.25/index.md new file mode 100644 index 0000000..8a3176f --- /dev/null +++ b/docs/versioned_docs/version-v0.25/index.md @@ -0,0 +1,63 @@ +--- +sidebar_position: 1 +slug: / +--- + +# Ignite CLI + +[Ignite CLI](https://github.com/ignite/cli) offers everything you need to build, test, and launch your blockchain with a decentralized worldwide community. Ignite CLI is built on top of [Cosmos SDK](https://docs.cosmos.network), the world’s most popular blockchain framework. Ignite CLI accelerates chain development by scaffolding everything you need so you can focus on business logic. + +## What is Ignite CLI? + +Ignite CLI is an easy-to-use CLI tool for creating and maintaining sovereign application-specific blockchains. 
Blockchains created with Ignite CLI use Cosmos SDK and Tendermint. Ignite CLI and the Cosmos SDK modules are written in the Go programming language. The scaffolded blockchain that is created with Ignite CLI includes a command line interface that lets you manage keys, create validators, and send tokens. + +With just a few commands, you can use Ignite CLI to: + +* Create a modular blockchain written in Go +* Scaffold modules, messages, types with CRUD operations, IBC packets, and more +* Start a blockchain node in development with live reloading +* Connect to other blockchains with a built-in IBC relayer +* Use automatically generated TypeScript/Vuex clients to interact with your blockchain +* Use the Vue.js web app template with a set of components and Vuex modules + +## Install Ignite CLI + +To install the `ignite` binary in `/usr/local/bin` run the following command: + +``` +curl https://get.ignite.com/cli! | bash +``` + +## Bounty program + +Our [Ignite CLI bounty program](06-bounty.md) provides incentives for your participation and pays rewards. + +## Projects using Tendermint and Cosmos SDK + +Many projects already showcase the Tendermint BFT consensus engine and the Cosmos SDK. Explore the [Cosmos ecosystem](https://cosmos.network/ecosystem/apps) to discover a wide variety of apps, blockchains, wallets, and explorers that are built in the Cosmos ecosystem. 
+ +## Projects building with Ignite CLI + +* [Sifchain: omni-chain solution for DEXs](https://github.com/Sifchain/sifnode) +* [Kyve](https://www.kyve.network) +* [crypto.org chain](https://github.com/crypto-org-chain/chain-main) ([initialized with Ignite CLI](https://github.com/crypto-org-chain/chain-main/commit/37b2ecb49a9aae7c581270a4f2dbecfcd8e8a6e9)) +* [Cronos](https://github.com/crypto-org-chain/cronos) +* [Plugchain](https://github.com/oracleNetworkProtocol/plugchain) +* [BitCanna](https://github.com/BitCannaGlobal/bcna) +* [Panacea Core](https://github.com/medibloc/panacea-core) +* [Rook](https://github.com/cmwaters/rook) +* [PI Bridge](https://github.com/pchain-org/pi-bridge) +* [Polynetwork](https://github.com/Switcheo/polynetwork-cosmos) +* [OmniFlix Hub](https://github.com/OmniFlix/omniflixhub) +* [Cudos](https://github.com/CudoVentures/cudos-node) +* [Zenchain](https://github.com/zenchainprotocol/zenchain) +* [Onomy Protocol](https://github.com/onomyprotocol/ochain) +* [Interchain accounts demo](https://github.com/cosmos/interchain-accounts) +* [Celestia](https://github.com/celestiaorg/celestia-app) +* [Umee: decentralized universal capital facility](https://github.com/umee-network/umee) +* [Juno interoperable smart contract hub](https://github.com/CosmosContracts/Juno) +* [Affondra](https://github.com/EG-easy/affondra) +* [Finding Imposter](https://github.com/chantmk/Finding-imposter) +* [Flares payment network](https://github.com/wangfeiping/flares) +* [FirmaChain](https://github.com/firmachain/firmachain) +* [Sonr](https://github.com/sonr-io/sonr) diff --git a/docs/versioned_docs/version-v0.25/kb/00-kb.md b/docs/versioned_docs/version-v0.25/kb/00-kb.md new file mode 100644 index 0000000..67b2a67 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/kb/00-kb.md @@ -0,0 +1,10 @@ +--- +sidebar_position: 1 +slug: /kb +--- + +# Knowledge Base + +Knowledge base articles cover different aspects of Ignite CLI. 
This online library includes reference content on supported types, protocol buffer files, chain simulation as well as an overview of scaffolding a chain all the way to starting an IBC relayer. + +If you're new to Ignite CLI or want to go through a series of tutorials, visit the [Developer Tutorials](/guide). diff --git a/docs/versioned_docs/version-v0.25/kb/01-scaffold-chain.md b/docs/versioned_docs/version-v0.25/kb/01-scaffold-chain.md new file mode 100644 index 0000000..e7b2e42 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/kb/01-scaffold-chain.md @@ -0,0 +1,68 @@ +--- +sidebar_position: 1 +description: High-level overview of a new Cosmos SDK blockchain project built with Ignite CLI. +--- + +# Scaffold a chain + +The `ignite scaffold chain` command scaffolds a new Cosmos SDK blockchain project. + +## Build a blockchain app + +To build the planet application: + +```bash +ignite scaffold chain planet +``` + +## Directory structure + +The `ignite scaffold chain planet` command creates a directory called `planet` that contains all the files for your project and initializes a local git repository. The `planet` argument is a string that is used for the Go module path. The repository name (`planet`, in this case) is used as the project's name. + +The project directory structure: + +- `app`: files that wire the blockchain together +- `cmd`: binary for the blockchain node +- `docs`: static `openapi.yml` API doc for the blockchain node +- `proto`: protocol buffer files for custom modules +- `x`: modules +- `vue`: scaffolded web application (optional) +- `config.yml`: configuration file + +### Application-specific logic + +Most of the logic of your application-specific blockchain is written in custom modules. Each module effectively encapsulates an independent piece of functionality. Following the Cosmos SDK convention, custom modules are stored inside the `x` directory. 
By default, `ignite scaffold chain` scaffolds a module with a name that matches the name of the project. In this example, the module name is `x/planet`. + +### Proto files + +Every Cosmos SDK module has protocol buffer files that define data structures, messages, queries, RPCs, and so on. The `proto` directory contains a directory with proto files for each custom module in the `x` directory. + +### Global settings + +Global changes to your blockchain are defined in files inside the `app` directory. These changes include importing third-party modules, defining relationships between modules, and configuring blockchain-wide settings. + +### Configuration + +The `config.yml` file contains configuration options that Ignite CLI uses to build, initialize, and start your blockchain node in development. + +## Address prefix + +Account addresses on Cosmos SDK-based blockchains have string prefixes. For example, the Cosmos Hub blockchain uses the default `cosmos` prefix, so that addresses look like this: `cosmos12fjzdtqfrrve7zyg9sv8j25azw2ua6tvu07ypf`. + +### Change prefix on new blockchains + +When you create a new blockchain, pass a prefix as a value to the `--address-prefix` flag: + +```bash +ignite scaffold chain planet --address-prefix moonlight +``` + +Using the `moonlight` prefix, account addresses on your blockchain look like this: `moonlight12fjzdtqfrrve7zyg9sv8j25azw2ua6tvu07ypf`. + +### Change prefix on existing blockchains + +To change the prefix after the blockchain has been scaffolded, modify the `AccountAddressPrefix` in the `app/app.go` file. + +## Cosmos SDK version + +By default, the `ignite scaffold chain` command creates a Cosmos SDK blockchain using the latest stable version of the Cosmos SDK. 
diff --git a/docs/versioned_docs/version-v0.25/kb/02-serve.md b/docs/versioned_docs/version-v0.25/kb/02-serve.md new file mode 100644 index 0000000..616ef2a --- /dev/null +++ b/docs/versioned_docs/version-v0.25/kb/02-serve.md @@ -0,0 +1,91 @@ +--- +order: 2 +description: Use the Ignite CLI serve command to start your blockchain. +--- + +# Start a blockchain + +Blockchains are decentralized applications. + +- In production, blockchains often run the same software on many validator nodes that are run by different people and entities. To launch a blockchain in production, the validator entities coordinate the launch process to start their nodes simultaneously. +- During development, a blockchain can be started locally on a single validator node. This convenient process lets you restart a chain quickly and iterate faster. Starting a chain on a single node in development is similar to starting a traditional web application on a local server. + +## Start a blockchain node in development + +Switch to the directory that contains a blockchain that was scaffolded with Ignite CLI. To start the blockchain node, run the following command: + +```bash +ignite chain serve +``` + +This command initializes a chain, builds the code, starts a single validator node, and starts watching for file changes. + +Whenever a file is changed, the chain is automatically reinitialized, rebuilt, and started again. The chain's state is preserved if the changes to the source code are compatible with the previous state. This state preservation is beneficial for development purposes. + +Because the `ignite chain serve` command is a development tool, it should not be used in a production environment. Read on to learn the process of running a blockchain in production. + +## The Magic of `ignite chain serve` + +The `ignite chain serve` command starts a fully operational blockchain. 
+ +The `ignite chain serve` command performs the following tasks: + +- Installs dependencies +- Imports state, if possible +- Builds protocol buffer files +- Optionally generates TypeScript clients and Vuex stores +- Builds a compiled blockchain binary +- Creates accounts +- Initializes a blockchain node +- Starts the following processes: + - Tendermint RPC + - Cosmos SDK API + - Faucet, optional +- Watches for file changes and restarts +- Exports state + +You can use flags to configure how the blockchain runs. + +## Define how your blockchain starts + +Flags for the `ignite chain serve` command determine how your blockchain starts. All flags are optional. + +`--config` + +Custom configuration file. Using unique configuration files is required to launch two blockchains on the same machine from the same source code. When omitted, the default is `config.yml`. + +`--reset-once` + +Reset the state only once. Use this flag to resume a failed reset or to initialize a blockchain from an empty state. The default state persistence imports the existing state and resumes the blockchain. + +`--force-reset` + +Reset state on every file change. Do not import state and turn off state persistence. + +`--verbose` + +Enter verbose detailed mode with extensive logging. + +`--home` + +Specify a custom home directory. + +## Start a blockchain node in production + +The `ignite chain serve` and `ignite chain build` commands compile the source code of the chain in a binary file and install the binary in `~/go/bin`. By default, the binary name is the name of the repository appended with `d`. For example, if you scaffold a chain using `ignite scaffold chain mars`, then the binary is named `marsd`. 
+ +You can customize the binary name in `config.yml`: + +```yaml +build: + binary: "newchaind" +``` + +Or also add custom `ldflags` into your app binary: + +```yaml +build: + ldflags: [ "-X main.Env=prod", "-X main.Version=1.0.1" ] +``` + +Learn more about how to use the binary to [run a chain in production](https://docs.cosmos.network/main/run-node/run-node). diff --git a/docs/versioned_docs/version-v0.25/kb/03-config.md b/docs/versioned_docs/version-v0.25/kb/03-config.md new file mode 100644 index 0000000..63a2349 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/kb/03-config.md @@ -0,0 +1,185 @@ +--- +sidebar_position: 3 +description: Primary configuration file to describe the development environment for your blockchain. +title: config.yml reference +--- + +# config.yml reference + +The `config.yml` file generated in your blockchain folder uses key-value pairs to describe the development environment for your blockchain. + +Only a default set of parameters is provided. If more nuanced configuration is required, you can add these parameters to the `config.yml` file. + +## accounts + +A list of user accounts created during genesis of the blockchain. + +| Key | Required | Type | Description | +| -------- | -------- | --------------- | ------------------------------------------------------------------------------------------------------------------------------- | +| name | Y | String | Local name of a key pair. An account name must be listed to gain access to the account tokens after the blockchain is launched. | +| coins | Y | List of Strings | Initial coins with denominations. For example, "1000token" | +| address | N | String | Account address in Bech32 address format. | +| mnemonic | N | String | Mnemonic used to generate an account. This field is ignored if `address` is specified. | + +Note that you can only use `address` OR `mnemonic` for an account. You can't use both, because an address is derived from a mnemonic. 
+ +If an account is a validator account (`alice` by default), it cannot have an `address` field. + +**accounts example** + +```yaml +accounts: + - name: alice + coins: ["1000token", "100000000stake"] + - name: bob + coins: ["500token"] + address: cosmos1adn9gxjmrc3hrsdx5zpc9sj2ra7kgqkmphf8yw +``` + +## build + +| Key | Required | Type | Description | +|----------|----------|------------------|--------------------------------------------------------------------------------------------------------------| +| main | N | String | When an app contains more than one main Go package, required to define the path of the chain's main package. | +| binary | N | String | Name of the node binary that is built, typically ends with `d`. | +| ldflags | N | List of Strings | ldflags to set version information for go applications. | + +**build example** + +```yaml +build: + binary: "mychaind" + ldflags: [ "-X main.Version=development", "-X main.Date=01/05/2022T19:54" ] +``` + +### build.proto + +| Key | Required | Type | Description | +| ----------------- | -------- | --------------- | ------------------------------------------------------------------------------------------ | +| path | N | String | Path to protocol buffer files. Default: `"proto"`. | + +## client + +Configures and enables client code generation. To prevent Ignite CLI from regenerating the client, remove the `client` property. + +### client.vuex + +```yaml +client: + vuex: + path: "vue/src/store" +``` + +Generates Vuex stores for the blockchain in `path` on `serve` and `build` commands. + +### client.typescript + +```yaml +client: + typescript: + path: "vue/src/generated" +``` + +Generates TypeScript clients for the blockchain in `path` on `serve` and `build` commands. + +### client.openapi + +```yaml +client: + openapi: + path: "docs/static/openapi.yml" +``` + +Generates OpenAPI YAML file in `path`. By default this file is embedded in the node's binary. + +## faucet + +The faucet service sends tokens to addresses. 
The default address for the web user interface is <http://localhost:4500>. + +| Key | Required | Type | Description | +| ----------------- | -------- | --------------- | ----------------------------------------------------------- | +| name | Y | String | Name of a key pair. The `name` key pair must be in `accounts`. | +| coins | Y | List of Strings | One or more coins with denominations sent per request. | +| coins_max | N | List of Strings | One or more maximum amounts of tokens sent for each address. | +| host | N | String | Host and port number. Default: `:4500`. Cannot be higher than 65536 | +| rate_limit_window | N | String | Time after which the token limit is reset (in seconds). | + +**faucet example** + +```yaml +faucet: + name: faucet + coins: ["100token", "5foo"] + coins_max: ["2000token", "1000foo"] + port: 4500 +``` + +## validator + +A blockchain requires one or more validators. + +| Key | Required | Type | Description | +| ------ | -------- | ------ | ----------------------------------------------------------------------------------------------- | +| name | Y | String | The account that is used to initialize the validator. The `name` key pair must be in `accounts`. | +| staked | Y | String | Amount of coins to bond. Must be less than or equal to the amount of coins in the account. | + +**validator example** + +```yaml +accounts: + - name: alice + coins: ["1000token", "100000000stake"] +validator: + name: user1 + staked: "100000000stake" +``` + +## init.home + +The path to the data directory that stores blockchain data and blockchain configuration. + +**init example** + +```yaml +init: + home: "~/.myblockchain" +``` + +## init.config + +Overwrites properties in `config/config.toml` in the data directory. + +## init.app + +Overwrites properties in `config/app.toml` in the data directory. + +## init.client + +Overwrites properties in `config/client.toml` in the data directory. 
+ +**init.client example** + +```yaml +init: + client: + keyring-backend: "os" +``` + +## host + +Configuration of host names and ports for processes started by Ignite CLI. Port numbers can't exceed 65536. + +**host example** + +```yaml +host: + rpc: ":26659" + p2p: ":26658" + prof: ":6061" + grpc: ":9091" + api: ":1318" +``` + +## genesis + +Use to overwrite values in `genesis.json` in the data directory to test different values in development environments. See [Genesis Overwrites for Development](../kb/04-genesis.md). diff --git a/docs/versioned_docs/version-v0.25/kb/04-genesis.md b/docs/versioned_docs/version-v0.25/kb/04-genesis.md new file mode 100644 index 0000000..0a7e85d --- /dev/null +++ b/docs/versioned_docs/version-v0.25/kb/04-genesis.md @@ -0,0 +1,45 @@ +--- +sidebar_position: 4 +description: Test different scenarios after the blockchain is created. +--- + +# Genesis overwrites for development + +The `genesis.json` file for all new blockchains is automatically created from the `config.yml` file to define the initial state upon genesis of the blockchain. + +In development environments, it is useful to test different scenarios after the blockchain is created. The `genesis.json` file for the blockchain is overwritten by the top-level `genesis` parameter in `config.yml`. + +To set and test different values, add the `genesis` parameter to `config.yml`. + +## Change the value of a single parameter + +To change the value of one parameter, add the key-value pair under the `genesis` parameter. For example, change the value of `chain-id`: + +```yaml +genesis: + chain_id: "foobar" +``` + +## Change values in modules + +You can change one or more parameters of different modules. 
For example, in the `staking` module you can add a key-value pair to `bond_denom` to change which token gets staked: + +```yaml +genesis: + app_state: + staking: + params: + bond_denom: "denom" +``` + +## Genesis file + +For genesis file details and field definitions, see Cosmos Hub documentation for the [Genesis File](https://hub.cosmos.network/main/resources/genesis). + +## Genesis block summary + +- The genesis block is the first block of a blockchain. + +- The `genesis.json` file for the blockchain is overwritten by the top-level genesis parameter in `config.yml`. + +- After the blockchain is created, add the `genesis` parameter and key-value pairs to set and test different values in your development environment. diff --git a/docs/versioned_docs/version-v0.25/kb/05-types.md b/docs/versioned_docs/version-v0.25/kb/05-types.md new file mode 100644 index 0000000..a70c5e4 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/kb/05-types.md @@ -0,0 +1,58 @@ +--- +sidebar_position: 5 +description: Reference list of supported types. +--- + +# Ignite CLI Supported Types + +Types with CRUD operations are scaffolded with the `ignite scaffold` command. + +## Built-in types + +| Type | Alias | Index | Code Type | Description | +| ------------ | -------- | ----- | ----------- | ------------------------------- | +| string | - | yes | string | Text type | +| array.string | strings | no | []string | List of text type | +| bool | - | yes | bool | Boolean type | +| int | - | yes | int32 | Integer type | +| array.int | ints | no | []int32 | List of integers types | +| uint | - | yes | uint64 | Unsigned integer type | +| array.uint | uints | no | []uint64 | List of unsigned integers types | +| coin | - | no | sdk.Coin | Cosmos SDK coin type | +| array.coin | coins | no | sdk.Coins | List of Cosmos SDK coin types | + +Some types cannot be used an index, like the map and list indexes and module params. 
+ +## Custom types + +You can create custom types and then use the custom type later. + +For example, you can create a `list` type called `user` and then use the `user` type in a subsequent `ignite scaffold` command. + +Here's an example of how to scaffold a new `CoordinatorDescription` type that is reusable in the future: + +```bash +ignite scaffold list coordinator-description description:string --no-message +``` + +Now you can scaffold a message using the `CoordinatorDescription` type: + +```bash +ignite scaffold message add-coordinator address:string description:CoordinatorDescription +``` + +Run the chain and then send the message using the CLI. + +To pass the custom type in JSON format: + +```bash +ignite chain serve +marsd tx mars add-coordinator cosmos1t4jkut0yfnsmqle9vxk3adfwwm9vj9gsj98vqf '{"description":"coordinator description"}' true --from alice --chain-id mars +``` + +If you try to use a type that is not created yet, the follow error occurs: + +```bash +ignite scaffold message validator validator:ValidatorDescription address:string +-> the field type ValidatorDescription doesn't exist +``` diff --git a/docs/versioned_docs/version-v0.25/kb/06-proto.md b/docs/versioned_docs/version-v0.25/kb/06-proto.md new file mode 100644 index 0000000..5de4e9e --- /dev/null +++ b/docs/versioned_docs/version-v0.25/kb/06-proto.md @@ -0,0 +1,22 @@ +--- +description: Protocol buffer file support in Ignite CLI +sidebar_position: 6 +--- + +# Protocol buffer files + +Protocol buffer files define the data structures used by Cosmos SDK modules. + +## Files and directories + +Inside the `proto` directory, a directory for each custom module contains `query.proto`, `tx.proto`, `genesis.proto`, and other files. + +The `ignite chain serve` command automatically generates Go code from proto files on every file change. + +## Third-party proto files + +Third-party proto files, including those of Cosmos SDK and Tendermint, are bundled with Ignite CLI. 
To import third-party proto files in your custom proto files: + +```protobuf +import "cosmos/base/query/v1beta1/pagination.proto"; +``` diff --git a/docs/versioned_docs/version-v0.25/kb/07-frontend.md b/docs/versioned_docs/version-v0.25/kb/07-frontend.md new file mode 100644 index 0000000..c15d84d --- /dev/null +++ b/docs/versioned_docs/version-v0.25/kb/07-frontend.md @@ -0,0 +1,41 @@ +--- +description: Details on the Vue frontend app created by Ignite CLI. +sidebar_position: 7 +--- + +# Frontend overview + +A Vue frontend app is created in the `vue` directory when a blockchain is scaffolded. To start the frontend app run `npm i && npm run dev` in the `vue` directory. + +The frontend app is built using the `@starport/vue` and `@starport/vuex` packages. For details, see the [monorepo for Ignite CLI front-end development](https://github.com/ignite/web). + +## Client code generation + +A TypeScript (TS) client and associated Vuex stores are automatically generated for your blockchain for custom and standard Cosmos SDK modules. + +To enable client code generation, add the `client` entries to `config.yml`: + +```yaml +client: + typescript: + path: "ts-client" + vuex: + path: "vue/src/store" +``` + +A TS client is generated in the `ts-client` directory (see: [TypeScript client information](/clients/typescript)) and Vuex store modules making use of this client are generated in the `vue/src/store` directory. + +## Client code regeneration + +By default, the filesystem is watched and the clients are regenerated automatically. Clients for standard Cosmos SDK modules are generated after you scaffold a blockchain. + +To regenerate all clients for custom and standard Cosmos SDK modules, run this command: + +```bash +ignite generate vuex +``` + +(Note: this command also runs the typescript client generation and you do not need to run `ignite generate ts-client` separately.) 
+## Preventing client code regeneration + +To prevent regenerating the client, remove the `client:vuex` property from `config.yml`. diff --git a/docs/versioned_docs/version-v0.25/kb/08-relayer.md b/docs/versioned_docs/version-v0.25/kb/08-relayer.md new file mode 100644 index 0000000..8ed17a7 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/kb/08-relayer.md @@ -0,0 +1,51 @@ +--- +description: IBC relayer to connect local and remote blockchains. +sidebar_position: 8 +--- + +# IBC relayer + +A built-in IBC relayer in Ignite CLI lets you connect blockchains that run on your local computer to blockchains that run on remote computers. The Ignite CLI relayer uses the [TypeScript relayer](https://github.com/confio/ts-relayer). + +## Configure connections + +The `configure` command configures a connection between two blockchains: + +`ignite relayer configure` + +You are prompted for the required RPC endpoints and optional faucet endpoints. Accounts used by the relayer are created on both blockchains and faucets are used, if available, to automatically fetch tokens. + +If the relayer fails to receive tokens from a faucet, you must manually send tokens to addresses. + +By default, a connection for token transfers is set up for the `ibc-transfer` module. + +The optional `--advanced` flag lets you configure port and version for the custom IBC module. + +By default, relayer configuration is stored in `$HOME/.relayer/`. + +## Remove existing relayers + +If you previously used the Ignite CLI relayer, follow these steps to remove existing relayer and Ignite CLI configurations: + +1. Stop your blockchain or blockchains. +2. Delete previous configuration files: + + ```bash + rm -rf ~/.ignite/relayer + ``` + +3. Restart your blockchains. + +## Relayer configure example + +All values can be passed with flags. 
+ +```bash +ignite relayer configure --advanced --source-rpc "http://0.0.0.0:26657" --source-faucet "http://0.0.0.0:4500" --source-port "blog" --source-version "blog-1" --target-rpc "http://0.0.0.0:26659" --target-faucet "http://0.0.0.0:4501" --target-port "blog" --target-version "blog-1" +``` + +## Connect blockchains and watch for IBC packets + +The `ignite relayer connect` command connects configured blockchains and watches for IBC packets to relay. + +**Tip:** You can observe the relayer packets on the terminal window where you connected your relayer. diff --git a/docs/versioned_docs/version-v0.25/kb/09-docker.md b/docs/versioned_docs/version-v0.25/kb/09-docker.md new file mode 100644 index 0000000..259af18 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/kb/09-docker.md @@ -0,0 +1,103 @@ +--- +description: Run Ignite CLI using a Docker container. +sidebar_position: 9 +--- + +# Run Ignite CLI in Docker + +You can run Ignite CLI inside a Docker container without installing the Ignite CLI binary directly on your machine. + +Running Ignite CLI in Docker can be useful for various reasons; isolating your test environment, running Ignite CLI on an unsupported operating system, or experimenting with a different version of Ignite CLI without installing it. + +Docker containers are like virtual machines because they provide an isolated environment to programs that runs inside them. In this case, you can run Ignite CLI in an isolated environment. + +Experimentation and file system impact is limited to the Docker instance. The host machine is not impacted by changes to the container. + +## Prerequisites + +Docker must be installed. See [Get Started with Docker](https://www.docker.com/get-started). + +## Ignite CLI Commands in Docker + +After you scaffold and start a chain in your Docker container, all Ignite CLI commands are available. Just type the commands after `docker run -ti ignitehq/cli`. 
For example: + +```bash +docker run -ti ignitehq/cli -h +docker run -ti ignitehq/cli scaffold chain github.com/test/planet +docker run -ti ignitehq/cli chain serve +``` + +## Scaffolding a chain + +When Docker is installed, you can build a blockchain with a single command. + +Ignite CLI, and the chains you serve with Ignite CLI, persist some files. +When using the CLI binary directly, those files are located in `$HOME/.ignite` +and `$HOME/.cache`, but in the context of Docker it's better to use a directory different than `$HOME`, so we use `$HOME/sdh`. This folder should be created +manually prior to the docker commands below, or else Docker creates it with the +root user. + +```bash +mkdir $HOME/sdh +``` + +To scaffold a blockchain `planet` in the `/apps` directory in the container, run this command in a terminal window: + +```bash +docker run -ti -v $HOME/sdh:/home/tendermint -v $PWD:/apps ignitehq/cli:0.16.0 scaffold chain github.com/hello/planet +``` + +Be patient, this command takes a minute or two to run because it does everything for you: + +- Creates a container that runs from the `ignitehq/cli:0.16.0` image. +- Executes the Ignite CLI binary inside the image. +- `-v $HOME/sdh:/home/tendermint` maps the `$HOME/sdh` directory in your local computer (the host machine) to the home directory `/home/tendermint` inside the container. +- `-v $PWD:/apps` maps the current directory in the terminal window on the host machine to the `/apps` directory in the container. You can optionally specify an absolute path instead of `$PWD`. + + Using `-w` and `-v` together provides file persistence on the host machine. The application source code on the Docker container is mirrored to the file system of the host machine. + + **Note:** The directory name for the `-w` and `-v` flags can be a name other then `/app`, but the same directory must be specified for both flags. 
If you omit `-w` and `-v`, the changes are made in the container only and are lost when that container is shut down. + +## Starting a blockchain + +To start the blockchain node in the Docker container you just created, run this command: + +```bash +docker run -ti -v $HOME/sdh:/home/tendermint -v $PWD:/apps -p 1317:1317 -p 26657:26657 ignitehq/cli:0.16.0 chain serve -p planet +``` + +This command does the following: + +- `-v $HOME/sdh:/home/tendermint` maps the `$HOME/sdh` directory in your local computer (the host machine) to the home directory `/home/tendermint` inside the container. +- `-v $PWD:/apps` persists the scaffolded app in the container to the host machine at current working directory. +- `serve -p planet` specifies to use the `planet` directory that contains the source code of the blockchain. +- `-p 1317:1317` maps the API server port (cosmos-sdk) to the host machine to forward port 1317 listening inside the container to port 1317 on the host machine. +- `-p 26657:26657` maps RPC server port 26657 (tendermint) on the host machine to port 26657 in Docker. +- After the blockchain is started, open `http://localhost:26657` to see the Tendermint API. +- The `-v` flag specifies for the container to access the application's source code from the host machine so it can build and run it. + +## Versioning + +You can specify which version of Ignite CLI to install and run in your Docker container. + +### Latest version + +- By default, `ignitehq/cli` resolves to `ignitehq/cli:latest`. +- The `latest` image tag is always the latest stable [Ignite CLI release](https://github.com/ignite/cli/releases). + +For example, if latest release is [v0.15.1](https://github.com/ignite/cli/releases/tag/v0.19.2), the `latest` tag points to the `0.19.2` tag. + +### Specific version + +You can specify to use a specific version of Ignite CLI. All available tags are in the [ignitehq/cli image](https://hub.docker.com/r/ignitehq/cli/tags?page=1&ordering=last_updated) on Docker Hub. 
+ +For example: + +- Use `ignitehq/cli:0.19.2` (without the `v` prefix) to use version 0.15.1. +- Use `ignitehq/cli:main` to use the `main` branch so you can experiment with the next version. + +To get the latest image, run `docker pull`. + +```bash +docker pull ignitehq/cli:main +``` diff --git a/docs/versioned_docs/version-v0.25/kb/10-band.md b/docs/versioned_docs/version-v0.25/kb/10-band.md new file mode 100644 index 0000000..f301bee --- /dev/null +++ b/docs/versioned_docs/version-v0.25/kb/10-band.md @@ -0,0 +1,364 @@ +--- +sidebar_position: 10 +description: IBC oracle integration with BandChain +--- + +# BandChain oracle + +The BandChain oracle communication module has built-in compliance using IBC protocol that can query data points of various types from BandChain. + +Other chains can query this oracle module for real-time information. + +BandChain has multiple scripts deployed into the network. You can request any data using the script id. + +## High-level overview + +Steps to scaffold an IBC BandChain query oracle to request real-time data from BandChain scripts in a specific IBC-enabled Cosmos SDK module. + +## IBC module packet scaffold + +BandChain oracle queries can be scaffolded only in IBC modules. + +The basic syntax to scaffold a band oracle module is: + +```bash +ignite scaffold band [queryName] --module [moduleName] +``` + +Customize your band oracle with flags: + +- --module string - name of the new IBC Module to add the packets to +- --path string - path of the app, default is the current directory (`"."`) +- --signer string - signer label, default is `creator` + +### Acknowledgement messages + +The BandChain oracle returns the ack messages with the request id. The last request id is saved for future queries. + +## Files and directories + +When you scaffold a BandChain oracle module, the following files and directories are created and modified: + +- `proto`: oracle request and response data. 
+- `x/module_name/keeper`: IBC hooks, gRPC message server. +- `x/module_name/types`: message types, IBC events. +- `x/module_name/client/cli`: CLI command to broadcast a transaction containing a message with a packet. +- `x/module_name/oracle.go`: BandChain oracle packet handlers. + +## Scaffold a BandChain oracle chain + +First, scaffold a chain but don't scaffold a default module: + +```bash +ignite scaffold chain oracle --no-module +``` + +Next, change to the new `oracle` directory and scaffold an IBC-enabled module named `consuming`: + +```bash +cd oracle +ignite scaffold module consuming --ibc +``` + +Finally, scaffold a BandChain query oracle that can request real-time data: + +```bash +ignite scaffold band coinRates --module consuming +``` + +So far, you have scaffolded: + +- A new `oracle` chain without a default module +- A new IBC-enabled `consuming` module +- A new `coinRates` BandChain query oracle + +Now it's time to change the data. + +## Update version + +The output of the `ignite scaffold band coinRates --module consuming` command prompts you to update the `keys.go` file. + +In the `x/consuming/types/keys.go` file, update the `Version` variable in the `const` block to the required version that the IBC module supports: + +```go +const ( + // ... + + // Version defines the current version the IBC module supports + Version = "bandchain-1" + + // ... +) +``` + +## Start your chain in development + +To run the chain from the `oracle` directory: + +```bash +ignite chain serve +``` + +Keep this terminal window open. + +## Configure and connect the Ignite CLI relayer + +If you previously used the Ignite CLI relayer, it is a good idea to remove existing relayer and Ignite CLI configurations: + +1. Stop your blockchains. +2. Delete previous configuration files: + + ```bash + rm -rf ~/.ignite/relayer + ``` + +3. Restart your blockchains. 
+ +In another terminal tab, configure the [Ignite CLI relayer](./08-relayer.md): + +```bash +ignite relayer configure -a \ +--source-rpc "http://rpc-laozi-testnet4.bandchain.org:80" \ +--source-faucet "https://laozi-testnet4.bandchain.org/faucet" \ +--source-port "oracle" \ +--source-gasprice "0uband" \ +--source-gaslimit 5000000 \ +--source-prefix "band" \ +--source-version "bandchain-1" \ +--target-rpc "http://localhost:26657" \ +--target-faucet "http://localhost:4500" \ +--target-port "consuming" \ +--target-gasprice "0.0stake" \ +--target-gaslimit 300000 \ +--target-prefix "cosmos" \ +--target-version "bandchain-1" +``` + +When prompted, press Enter to accept the default source and target accounts. + +The command output confirms the relayer is successfully configured: + +``` +? Source Account default +? Target Account default + +🔐 Account on "source" is default(band1dscvlx0mhpys9fazuk7ej9z4cq7qknzn09pjpq) + + |· received coins from a faucet + |· (balance: 10000000uband) + +🔐 Account on "target" is default(cosmos1dscvlx0mhpys9fazuk7ej9z4cq7qknznk2pseg) + + |· received coins from a faucet + |· (balance: 100000stake,5token) + +⛓ Configured chains: band-laozi-testnet4-oracle +``` + +Connect the relayer: + +```bash +ignite relayer connect +``` + +You can see the paths of the `oracle` port on the testnet and the `consuming` port on your local oracle module in the relayer connection status that is output to the terminal: + +``` +------ +Paths +------ + +band-laozi-testnet4-oracle: + band-laozi-testnet4 > (port: oracle) (channel: channel-405) + oracle > (port: consuming) (channel: channel-0) + +------ +Listening and relaying packets between chains... +------ +``` + +Leave this terminal tab open so you can monitor the relayer. + +## Make a request transaction + +In another terminal tab, use the `oracled` binary to make a request transaction. Because BandChain has multiple scripts already deployed into the network, you can request any data using the BandChain script id. 
In this case, use script 37 for Coin Rates: + +```bash +# Coin Rates (script 37 into the testnet) +oracled tx consuming coin-rates-data 37 4 3 --channel channel-0 --symbols "BTC,ETH,XRP,BCH" --multiplier 1000000 --fee-limit 30uband --prepare-gas 600000 --execute-gas 600000 --from alice --chain-id oracle +``` + +You can check the last request id that was returned by ack: + +```bash +oracled query consuming last-coin-rates-id +# output: request_id: "101276" +``` + +Now you can check the data by request id to receive the data packet: + +```bash +oracled query consuming coin-rates-result 101276 +``` + +### Multiple oracles + +You can scaffold multiples oracles by module. After scaffold, you must change the `Calldata` and `Result` parameters in the proto file `moduleName.proto` and then adapt the request in the `cli/client/tx_module_name.go` file. + +To create an example for the [gold price](https://laozi-testnet6.cosmoscan.io/oracle-script/33#bridge) bridge: + +```bash +ignite scaffold band goldPrice --module consuming +``` + +In the `proto/consuming/gold_price.proto` file: + +```protobuf +syntax = "proto3"; +package oracle.consuming; + +option go_package = "oracle/x/consuming/types"; + +message GoldPriceCallData { + uint64 multiplier = 2; +} + +message GoldPriceResult { + uint64 price = 1; +} +``` + +In the `x/consuming/cli/client/tx_gold_price.go` file: + +```go +package cli + +import ( + "strconv" + + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/client/tx" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/spf13/cobra" + + "oracle/x/consuming/types" +) + +// CmdRequestGoldPriceData creates and broadcast a GoldPrice request transaction +func CmdRequestGoldPriceData() *cobra.Command { + cmd := &cobra.Command{ + Use: "gold-price-data [oracle-script-id] [requested-validator-count] [sufficient-validator-count]", + Short: "Make a new GoldPrice query request via an existing BandChain oracle 
script", + Args: cobra.ExactArgs(3), + RunE: func(cmd *cobra.Command, args []string) error { + // retrieve the oracle script id. + uint64OracleScriptID, err := strconv.ParseUint(args[0], 10, 64) + if err != nil { + return err + } + oracleScriptID := types.OracleScriptID(uint64OracleScriptID) + + // retrieve the requested validator count. + askCount, err := strconv.ParseUint(args[1], 10, 64) + if err != nil { + return err + } + + // retrieve the sufficient(minimum) validator count. + minCount, err := strconv.ParseUint(args[2], 10, 64) + if err != nil { + return err + } + + channel, err := cmd.Flags().GetString(flagChannel) + if err != nil { + return err + } + + // retrieve the multiplier for the symbols' price. + multiplier, err := cmd.Flags().GetUint64(flagMultiplier) + if err != nil { + return err + } + + calldata := &types.GoldPriceCallData{ + Multiplier: multiplier, + } + + // retrieve the amount of coins allowed to be paid for oracle request fee from the pool account. + coinStr, err := cmd.Flags().GetString(flagFeeLimit) + if err != nil { + return err + } + feeLimit, err := sdk.ParseCoinsNormalized(coinStr) + if err != nil { + return err + } + + // retrieve the amount of gas allowed for the prepare step of the oracle script. + prepareGas, err := cmd.Flags().GetUint64(flagPrepareGas) + if err != nil { + return err + } + + // retrieve the amount of gas allowed for the execute step of the oracle script. 
+ executeGas, err := cmd.Flags().GetUint64(flagExecuteGas) + if err != nil { + return err + } + + clientCtx, err := client.GetClientTxContext(cmd) + if err != nil { + return err + } + + msg := types.NewMsgGoldPriceData( + clientCtx.GetFromAddress().String(), + oracleScriptID, + channel, + calldata, + askCount, + minCount, + feeLimit, + prepareGas, + executeGas, + ) + if err := msg.ValidateBasic(); err != nil { + return err + } + return tx.GenerateOrBroadcastTxCLI(clientCtx, cmd.Flags(), msg) + }, + } + + cmd.Flags().String(flagChannel, "", "The channel id") + cmd.MarkFlagRequired(flagChannel) + cmd.Flags().Uint64(flagMultiplier, 1000000, "Multiplier used in calling the oracle script") + cmd.Flags().String(flagFeeLimit, "", "the maximum tokens that will be paid to all data source providers") + cmd.Flags().Uint64(flagPrepareGas, 200000, "Prepare gas used in fee counting for prepare request") + cmd.Flags().Uint64(flagExecuteGas, 200000, "Execute gas used in fee counting for execute request") + flags.AddTxFlagsToCmd(cmd) + + return cmd +} +``` + +Make the request transaction: + +```bash +# Gold Price (script 33 into the testnet) +oracled tx consuming gold-price-data 33 4 3 --channel channel-0 --multiplier 1000000 --fee-limit 30uband --prepare-gas 600000 --execute-gas 600000 --from alice --chain-id oracle +``` + +Check the last request id that was returned by ack: + +```bash +oracled query consuming last-gold-price-id +# output: request_id: "101290" +``` + +Request the package data: + +```bash +oracled query consuming gold-price-result 101290 +``` diff --git a/docs/versioned_docs/version-v0.25/kb/11-simapp.md b/docs/versioned_docs/version-v0.25/kb/11-simapp.md new file mode 100644 index 0000000..70f2ef4 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/kb/11-simapp.md @@ -0,0 +1,131 @@ +--- +sidebar_position: 11 +description: Test different scenarios for your chain. 
+ +--- + +# Chain simulation + +The Ignite CLI chain simulator can help you to run your chain based in randomized inputs for you can make fuzz testing and also benchmark test for your chain, simulating the messages, blocks, and accounts. You can scaffold a template to perform simulation testing in each module along with a boilerplate simulation methods for each scaffolded message. + +## Module simulation + +Every new module that is scaffolded with Ignite CLI implements the Cosmos SDK [Module Simulation](https://docs.cosmos.network/main/building-modules/simulator). + +- Each new message creates a file with the simulation methods required for the tests. +- Scaffolding a `CRUD` type like a `list` or `map` creates a simulation file with `create`, `update`, and `delete` simulation methods in the `x/<module>/simulation` folder and registers these methods in `x/<module>/module_simulation.go`. +- Scaffolding a single message creates an empty simulation method to be implemented by the user. + +We recommend that you maintain the simulation methods for each new modification into the message keeper methods. + +Every simulation is weighted because the sender of the operation is assigned randomly. The weight defines how much the simulation calls the message. + +For better randomizations, you can define a random seed. The simulation with the same random seed is deterministic with the same output. + +## Scaffold a simulation + +To create a new chain: + +```bash +ignite scaffold chain mars +``` + +Review the empty `x/mars/simulation` folder and the `x/mars/module_simulation.go` file to see that a simulation is not registered. + +Now, scaffold a new message: + +```bash +ignite scaffold list user address balance:uint state +``` + +A new file `x/mars/simulation/user.go` is created and is registered with the weight in the `x/mars/module_simulation.go` file. + +Be sure to define the proper simulation weight with a minimum weight of 0 and a maximum weight of 100. 
+ +For this example, change the `defaultWeightMsgDeleteUser` to 30 and the `defaultWeightMsgUpdateUser` to 50. + +Run the `BenchmarkSimulation` method into `app/simulation_test.go` to run simulation tests for all modules: + +```bash +ignite chain simulate +``` + +You can also define flags that are provided by the simulation. Flags are defined by the method `simapp.GetSimulatorFlags()`: + +```bash +ignite chain simulate -v --numBlocks 200 --blockSize 50 --seed 33 +``` + +Wait for the entire simulation to finish and check the result of the messages. + +The default `go test` command works to run the simulation: + +```bash +go test -v -benchmem -run=^$ -bench ^BenchmarkSimulation -cpuprofile cpu.out ./app -Commit=true +``` + +### Skip message + +Use logic to avoid sending a message without returning an error. Return only `simtypes.NoOpMsg(...)` into the simulation message handler. + +## Params + +Scaffolding a module with params automatically adds the module in the `module_simulaton.go` file: + +```bash +ignite s module earth --params channel:string,minLaunch:uint,maxLaunch:int +``` + +After the parameters are scaffolded, change the `x/<module>/module_simulation.go` file to set the random parameters into the `RandomizedParams` method. The simulation will change the params randomly according to call the function. + +## Invariants + +Simulating a chain can help you prevent [chain invariants errors](https://docs.cosmos.network/main/building-modules/invariants). An invariant is a function called by the chain to check if something broke, invalidating the chain data. +To create a new invariant and check the chain integrity, you must create a method to validate the invariants and register all invariants. 
+ +For example, in `x/earth/keeper/invariants.go`: + +```go +package keeper + +import ( + "fmt" + + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/tendermint/spn/x/launch/types" +) + +const zeroLaunchTimestampRoute = "zero-launch-timestamp" + +// RegisterInvariants registers all module invariants +func RegisterInvariants(ir sdk.InvariantRegistry, k Keeper) { + ir.RegisterRoute(types.ModuleName, zeroLaunchTimestampRoute, + ZeroLaunchTimestampInvariant(k)) +} + +// ZeroLaunchTimestampInvariant invariant that checks if the +// `LaunchTimestamp is zero +func ZeroLaunchTimestampInvariant(k Keeper) sdk.Invariant { + return func(ctx sdk.Context) (string, bool) { + all := k.GetAllChain(ctx) + for _, chain := range all { + if chain.LaunchTimestamp == 0 { + return sdk.FormatInvariant( + types.ModuleName, zeroLaunchTimestampRoute, + "LaunchTimestamp is not set while LaunchTriggered is set", + ), true + } + } + return "", false + } +} +``` + +Now, register the keeper invariants into the `x/earth/module.go` file: + +```go +// RegisterInvariants registers the capability module's invariants. +func (am AppModule) RegisterInvariants(ir sdk.InvariantRegistry) { + keeper.RegisterInvariants(ir, am.keeper) +} +``` diff --git a/docs/versioned_docs/version-v0.25/kb/12-params.md b/docs/versioned_docs/version-v0.25/kb/12-params.md new file mode 100644 index 0000000..9fd24ec --- /dev/null +++ b/docs/versioned_docs/version-v0.25/kb/12-params.md @@ -0,0 +1,31 @@ +--- +sidebar_position: 12 +description: Scaffold module parameters to be accessible to the module. +--- + +# Module parameters + +Sometimes you need to set default parameters for a module. The Cosmos SDK [params package](https://docs.cosmos.network/main/modules/params) provides a globally available parameter that is saved into the key-value store. + +Params are managed and centralized by the Cosmos SDK `params` module and are updated with a governance proposal. 
+ +You can use Ignite CLI to scaffold parameters to be accessible for the module. Parameters have default values that can be changed when the chain is live. Since the parameters are managed and centralized by the Cosmos SDK params module, they can be easily updated using a governance proposal. + +To scaffold a module with params using the `--params` flag: + +```bash +ignite scaffold module launch --params minLaunch:uint,maxLaunch:int +``` + +After the parameters are scaffolded, change the `x/<module>/types/params.go` file to set the default values and validate the fields. + +The params module supports all [built-in Ignite CLI types](./05-types.md). + +## Params types + +| Type | Code type | Description | +| ------ | --------- | ----------------------- | +| string | string | Text type | +| bool | bool | Boolean type | +| int | int32 | Integer number | +| uint | uint64 | Unsigned integer number | diff --git a/docs/versioned_docs/version-v0.25/kb/_category_.json b/docs/versioned_docs/version-v0.25/kb/_category_.json new file mode 100644 index 0000000..50fc2dd --- /dev/null +++ b/docs/versioned_docs/version-v0.25/kb/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Knowledge Base", + "position": 3, + "link": null + } \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.25/migration/_category_.json b/docs/versioned_docs/version-v0.25/migration/_category_.json new file mode 100644 index 0000000..14c3056 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/migration/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Migration", + "position": 4, + "link": null + } \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.25/migration/readme.md b/docs/versioned_docs/version-v0.25/migration/readme.md new file mode 100644 index 0000000..978e90e --- /dev/null +++ b/docs/versioned_docs/version-v0.25/migration/readme.md @@ -0,0 +1,14 @@ +--- +sidebar_position: 0 +--- + +# Migration Guides + +Welcome to the section on upgrading to a newer version of 
Ignite CLI! If you're +looking to update to the latest version, you'll want to start by checking the +documentation to see if there are any special considerations or instructions you +need to follow. + +If there is no documentation for the latest version of Ignite CLI, it's +generally safe to assume that there were no breaking changes, and you can +proceed with using the latest version with your project. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.25/migration/v0.18.md b/docs/versioned_docs/version-v0.25/migration/v0.18.md new file mode 100644 index 0000000..5e10036 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/migration/v0.18.md @@ -0,0 +1,448 @@ +--- +sidebar_position: 999 +title: v0.18.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.18, changes are required to use Ignite CLI v0.18. +--- + +# Upgrading a Blockchain to use Ignite CLI v0.18 + +Ignite CLI v0.18 comes with Cosmos SDK v0.44. This version of Cosmos SDK introduced changes that are not compatible with chains that were scaffolded with Ignite CLI versions lower than v0.18. + +**Important:** After upgrading from Ignite CLI v0.17.3 to Ignite CLI v0.18, you must update the default blockchain template to use blockchains that were scaffolded with earlier versions. + +These instructions are written for a blockchain that was scaffolded with the following command: + +``` +ignite scaffold chain github.com/username/mars +``` + +If you used a different module path, replace `username` and `mars` with the correct values for your blockchain. + +## Blockchain + +For each file listed, make the required changes to the source code of the blockchain template. 
+ +### go.mod + +``` +module github.com/username/mars + +go 1.16 + +require ( + github.com/cosmos/cosmos-sdk v0.44.0 + github.com/cosmos/ibc-go v1.2.0 + github.com/gogo/protobuf v1.3.3 + github.com/google/go-cmp v0.5.6 // indirect + github.com/gorilla/mux v1.8.0 + github.com/grpc-ecosystem/grpc-gateway v1.16.0 + github.com/spf13/cast v1.3.1 + github.com/spf13/cobra v1.1.3 + github.com/stretchr/testify v1.7.0 + github.com/tendermint/spm v0.1.6 + github.com/tendermint/tendermint v0.34.13 + github.com/tendermint/tm-db v0.6.4 + google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83 + google.golang.org/grpc v1.40.0 +) + +replace ( + github.com/99designs/keyring => github.com/cosmos/keyring v1.1.7-0.20210622111912-ef00f8ac3d76 + github.com/gogo/protobuf => github.com/regen-network/protobuf v1.3.3-alpha.regen.1 + google.golang.org/grpc => google.golang.org/grpc v1.33.2 +) +``` + +### app/app.go + +```go +import ( + //... + // Add the following packages: + "github.com/cosmos/cosmos-sdk/x/feegrant" + feegrantkeeper "github.com/cosmos/cosmos-sdk/x/feegrant/keeper" + feegrantmodule "github.com/cosmos/cosmos-sdk/x/feegrant/module" + + "github.com/cosmos/ibc-go/modules/apps/transfer" + ibctransferkeeper "github.com/cosmos/ibc-go/modules/apps/transfer/keeper" + ibctransfertypes "github.com/cosmos/ibc-go/modules/apps/transfer/types" + ibc "github.com/cosmos/ibc-go/modules/core" + ibcclient "github.com/cosmos/ibc-go/modules/core/02-client" + ibcporttypes "github.com/cosmos/ibc-go/modules/core/05-port/types" + ibchost "github.com/cosmos/ibc-go/modules/core/24-host" + ibckeeper "github.com/cosmos/ibc-go/modules/core/keeper" + + // Remove the following packages: + // transfer "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer" + // ibctransferkeeper "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer/keeper" + // ibctransfertypes "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer/types" + // ibc "github.com/cosmos/cosmos-sdk/x/ibc/core" + // ibcclient 
"github.com/cosmos/cosmos-sdk/x/ibc/core/02-client" + // porttypes "github.com/cosmos/cosmos-sdk/x/ibc/core/05-port/types" + // ibchost "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" + // ibckeeper "github.com/cosmos/cosmos-sdk/x/ibc/core/keeper" +) + +var ( + //... + ModuleBasics = module.NewBasicManager( + //... + slashing.AppModuleBasic{}, + // Add feegrantmodule.AppModuleBasic{}, + feegrantmodule.AppModuleBasic{}, // <-- + ibc.AppModuleBasic{}, + //... + ) + //... +) + +type App struct { + //... + // Replace codec.Marshaler with codec.Codec + appCodec codec.Codec // <-- + // Add FeeGrantKeeper + FeeGrantKeeper feegrantkeeper.Keeper // <-- +} + +func New( /*...*/ ) { + //bApp.SetAppVersion(version.Version) + bApp.SetVersion(version.Version) // <-- + + keys := sdk.NewKVStoreKeys( + //... + upgradetypes.StoreKey, + // Add feegrant.StoreKey + feegrant.StoreKey, // <-- + evidencetypes.StoreKey, + //... + ) + + app.FeeGrantKeeper = feegrantkeeper.NewKeeper(appCodec, keys[feegrant.StoreKey], app.AccountKeeper) // <-- + // Add app.BaseApp as the last argument to upgradekeeper.NewKeeper + app.UpgradeKeeper = upgradekeeper.NewKeeper(skipUpgradeHeights, keys[upgradetypes.StoreKey], appCodec, homePath, app.BaseApp) + + app.IBCKeeper = ibckeeper.NewKeeper( + // Add app.UpgradeKeeper + appCodec, keys[ibchost.StoreKey], app.GetSubspace(ibchost.ModuleName), app.StakingKeeper, app.UpgradeKeeper, scopedIBCKeeper, + ) + + govRouter.AddRoute(govtypes.RouterKey, govtypes.ProposalHandler). + //... + // Replace NewClientUpdateProposalHandler with NewClientProposalHandler + AddRoute(ibchost.RouterKey, ibcclient.NewClientProposalHandler(app.IBCKeeper.ClientKeeper)) + + // Replace porttypes with ibcporttypes + ibcRouter := ibcporttypes.NewRouter() + + app.mm.SetOrderBeginBlockers( + upgradetypes.ModuleName, + // Add capabilitytypes.ModuleName, + capabilitytypes.ModuleName, + minttypes.ModuleName, + //... 
+ // Add feegrant.ModuleName, + feegrant.ModuleName, + ) + + // Add app.appCodec as an argument to module.NewConfigurator: + app.mm.RegisterServices(module.NewConfigurator(app.appCodec, app.MsgServiceRouter(), app.GRPCQueryRouter())) + + // Replace: + // app.SetAnteHandler( + // ante.NewAnteHandler( + // app.AccountKeeper, app.BankKeeper, ante.DefaultSigVerificationGasConsumer, + // encodingConfig.TxConfig.SignModeHandler(), + // ), + // ) + + // With the following: + anteHandler, err := ante.NewAnteHandler( + ante.HandlerOptions{ + AccountKeeper: app.AccountKeeper, + BankKeeper: app.BankKeeper, + SignModeHandler: encodingConfig.TxConfig.SignModeHandler(), + FeegrantKeeper: app.FeeGrantKeeper, + SigGasConsumer: ante.DefaultSigVerificationGasConsumer, + }, + ) + if err != nil { + panic(err) + } + app.SetAnteHandler(anteHandler) + + // Remove the following: + // ctx := app.BaseApp.NewUncachedContext(true, tmproto.Header{}) + // app.CapabilityKeeper.InitializeAndSeal(ctx) +} + +func (app *App) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci.ResponseInitChain { + var genesisState GenesisState + if err := tmjson.Unmarshal(req.AppStateBytes, &genesisState); err != nil { + panic(err) + } + // Add the following: + app.UpgradeKeeper.SetModuleVersionMap(ctx, app.mm.GetVersionMap()) + return app.mm.InitGenesis(ctx, app.appCodec, genesisState) +} + +// Replace Marshaler with Codec +func (app *App) AppCodec() codec.Codec { + return app.appCodec +} + +// Replace BinaryMarshaler with BinaryCodec +func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey sdk.StoreKey) paramskeeper.Keeper { + //... +} +``` + +### app/genesis.go + +```go +// Replace codec.JSONMarshaler with codec.JSONCodec +func NewDefaultGenesisState(cdc codec.JSONCodec) GenesisState { + //... 
+} +``` + +### testutil/keeper/mars.go + +Add the following code: + +```go +package keeper + +import ( + "testing" + + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/store" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/stretchr/testify/require" + "github.com/tendermint/tendermint/libs/log" + tmproto "github.com/tendermint/tendermint/proto/tendermint/types" + tmdb "github.com/tendermint/tm-db" + "github.com/username/mars/x/mars/keeper" + "github.com/username/mars/x/mars/types" +) + +func MarsKeeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + + db := tmdb.NewMemDB() + stateStore := store.NewCommitMultiStore(db) + stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + require.NoError(t, stateStore.LoadLatestVersion()) + + registry := codectypes.NewInterfaceRegistry() + k := keeper.NewKeeper( + codec.NewProtoCodec(registry), + storeKey, + memStoreKey, + ) + + ctx := sdk.NewContext(stateStore, tmproto.Header{}, false, log.NewNopLogger()) + return k, ctx +} +``` + +If `mars` is an IBC-enabled module, add the following code, instead: + +```go +package keeper + +import ( + "testing" + + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/store" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + capabilitykeeper "github.com/cosmos/cosmos-sdk/x/capability/keeper" + typesparams "github.com/cosmos/cosmos-sdk/x/params/types" + ibckeeper "github.com/cosmos/ibc-go/modules/core/keeper" + "github.com/stretchr/testify/require" + "github.com/tendermint/tendermint/libs/log" + tmproto 
"github.com/tendermint/tendermint/proto/tendermint/types" + tmdb "github.com/tendermint/tm-db" + "github.com/username/test/x/mars/keeper" + "github.com/username/test/x/mars/types" +) + +func MarsKeeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + logger := log.NewNopLogger() + + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + + db := tmdb.NewMemDB() + stateStore := store.NewCommitMultiStore(db) + stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + require.NoError(t, stateStore.LoadLatestVersion()) + + registry := codectypes.NewInterfaceRegistry() + appCodec := codec.NewProtoCodec(registry) + capabilityKeeper := capabilitykeeper.NewKeeper(appCodec, storeKey, memStoreKey) + + amino := codec.NewLegacyAmino() + ss := typesparams.NewSubspace(appCodec, + amino, + storeKey, + memStoreKey, + "MarsSubSpace", + ) + IBCKeeper := ibckeeper.NewKeeper( + appCodec, + storeKey, + ss, + nil, + nil, + capabilityKeeper.ScopeToModule("MarsIBCKeeper"), + ) + + k := keeper.NewKeeper( + codec.NewProtoCodec(registry), + storeKey, + memStoreKey, + IBCKeeper.ChannelKeeper, + &IBCKeeper.PortKeeper, + capabilityKeeper.ScopeToModule("MarsScopedKeeper"), + ) + + ctx := sdk.NewContext(stateStore, tmproto.Header{}, false, logger) + return k, ctx +} +``` + +### testutil/network/network.go + +```go +func DefaultConfig() network.Config { + //... + return network.Config{ + //... + // Add sdk.DefaultPowerReduction + AccountTokens: sdk.TokensFromConsensusPower(1000, sdk.DefaultPowerReduction), + StakingTokens: sdk.TokensFromConsensusPower(500, sdk.DefaultPowerReduction), + BondedTokens: sdk.TokensFromConsensusPower(100, sdk.DefaultPowerReduction), + //... 
+ } +} +``` + +### testutil/sample/sample.go + +Add the following code: + +```go +package sample + +import ( + "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" + sdk "github.com/cosmos/cosmos-sdk/types" +) + +// AccAddress returns a sample account address +func AccAddress() string { + pk := ed25519.GenPrivKey().PubKey() + addr := pk.Address() + return sdk.AccAddress(addr).String() +} +``` + +### BandChain Support + +If your module includes integration with BandChain, added manually or scaffolded with `ignite scaffold band`, upgrade the `github.com/bandprotocol/bandchain-packet` package to `v0.0.2` in `go.mod`. + +## Module + +### x/mars/keeper/keeper.go + +```go +type ( + Keeper struct { + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec + //... + } +) + +func NewKeeper( + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec, + //... +) *Keeper { + // ... +} +``` + +### x/mars/keeper/msg_server_test.go + +```go +package keeper_test + +import ( + //... + // Add the following: + keepertest "github.com/username/mars/testutil/keeper" + "github.com/username/mars/x/mars/keeper" +) + +func setupMsgServer(t testing.TB) (types.MsgServer, context.Context) { + // Replace + // keeper, ctx := setupKeeper(t) + // return NewMsgServerImpl(*keeper), sdk.WrapSDKContext(ctx) + + // With the following: + k, ctx := keepertest.MarsKeeper(t) + return keeper.NewMsgServerImpl(*k), sdk.WrapSDKContext(ctx) +} +``` + +### x/mars/module.go + +```go +type AppModuleBasic struct { + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec +} + +// Replace Marshaler with BinaryCodec +func NewAppModuleBasic(cdc codec.BinaryCodec) AppModuleBasic { + return AppModuleBasic{cdc: cdc} +} + +// Replace JSONMarshaler with JSONCodec +func (AppModuleBasic) DefaultGenesis(cdc codec.JSONCodec) json.RawMessage { + return cdc.MustMarshalJSON(types.DefaultGenesis()) +} + +// Replace JSONMarshaler with JSONCodec +func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config 
client.TxEncodingConfig, bz json.RawMessage) error { + //... +} + +// Replace codec.Marshaler with codec.Codec +func NewAppModule(cdc codec.Codec, keeper keeper.Keeper) AppModule { + //... +} + +// Replace JSONMarshaler with JSONCodec +func (am AppModule) InitGenesis(ctx sdk.Context, cdc codec.JSONCodec, gs json.RawMessage) []abci.ValidatorUpdate { + //... +} + +// Replace JSONMarshaler with JSONCodec +func (am AppModule) ExportGenesis(ctx sdk.Context, cdc codec.JSONCodec) json.RawMessage { + //... +} + +// Add the following +func (AppModule) ConsensusVersion() uint64 { return 2 } +``` diff --git a/docs/versioned_docs/version-v0.25/migration/v0.19.2.md b/docs/versioned_docs/version-v0.25/migration/v0.19.2.md new file mode 100644 index 0000000..a46fed3 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/migration/v0.19.2.md @@ -0,0 +1,22 @@ +--- +sidebar_position: 998 +title: v0.19.2 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.19.2, changes are required to use Ignite CLI v0.19.2. +--- + +# Upgrading a blockchain to use Ignite CLI v0.19.2 + +Ignite CLI v0.19.2 comes with IBC v2.0.2. + +With Ignite CLI v0.19.2, the contents of the deprecated Ignite CLI Modules `tendermint/spm` repo are moved to the official Ignite CLI repo which introduces breaking changes. + +To migrate your chain that was scaffolded with Ignite CLI versions lower than v0.19.2: + +1. IBC upgrade: Use the [IBC migration documents](https://github.com/cosmos/ibc-go/blob/v6.2.0/docs/migrations/v1-to-v2.md) + +2. In your chain's `go.mod` file, remove `tendermint/spm` and add the v0.19.2 version of `tendermint/starport`. 
If your chain uses these packages, change the import paths as shown: + + - `github.com/tendermint/spm/ibckeeper` moved to `github.com/tendermint/starport/starport/pkg/cosmosibckeeper` + - `github.com/tendermint/spm/cosmoscmd` moved to `github.com/tendermint/starport/starport/pkg/cosmoscmd` + - `github.com/tendermint/spm/openapiconsole` moved to `github.com/tendermint/starport/starport/pkg/openapiconsole` + - `github.com/tendermint/spm/testutil/sample` moved to `github.com/tendermint/starport/starport/pkg/cosmostestutil/sample` diff --git a/docs/versioned_docs/version-v0.25/migration/v0.20.0.md b/docs/versioned_docs/version-v0.25/migration/v0.20.0.md new file mode 100644 index 0000000..f920a15 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/migration/v0.20.0.md @@ -0,0 +1,11 @@ +--- +sidebar_position: 997 +title: v0.20.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.20.0, changes are required to use Ignite CLI v0.20.0. +--- + +# Upgrading a blockchain to use Ignite CLI v0.20.0 + +1. Upgrade your Cosmos SDK version to [v0.45.3](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.45.3). + +2. Update your `SetOrderBeginBlockers` and `SetOrderEndBlockers` in your `app/app.go` to explicitly add entries for all the modules you use in your chain. diff --git a/docs/versioned_docs/version-v0.25/migration/v0.22.0.md b/docs/versioned_docs/version-v0.25/migration/v0.22.0.md new file mode 100644 index 0000000..c9a8f77 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/migration/v0.22.0.md @@ -0,0 +1,30 @@ +--- +sidebar_position: 996 +title: v0.22.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.22.0, changes are required to use Ignite CLI v0.22.0. +--- + +# Upgrading a blockchain to use Ignite CLI v0.22.0 + +Ignite CLI v0.22.2 changed the GitHub username from "ignite-hq" to "ignite", which means the imports must be fixed to reflect this change. + +1. 
In your `go.mod` file find the require line for Ignite CLI that starts with `github.com/ignite-hq/cli` and is followed by a version. + It looks something like `github.com/ignite-hq/cli v0.22.0`, and replace it by `github.com/ignite/cli v0.22.2`. + +2. Make a bulk find and replace in the import statements for `github.com/ignite-hq/cli` to be replaced by `github.com/ignite/cli`. + +3. Finally run `go mod tidy` and ensure there's no mention of `ignite-hq/cli` in your `go.sum` file. + +This update includes an upgrade to the `ibc-go` packages. Please make the corresponding changes: + +1. Upgrade your IBC version to [v3](https://github.com/cosmos/ibc-go/releases/tag/v3.0.0). + + 1. Search for `github.com/cosmos/ibc-go/v2` in the import statements of your `.go` files and replace `v2` in the end with `v3` + + 2. Open your `app.go`, + + - Update your transfer keeper by adding another `app.IBCKeeper.ChannelKeeper` as an argument after `app.IBCKeeper.ChannelKeeper` + + - Define `var transferIBCModule = transfer.NewIBCModule(app.TransferKeeper)` in your `New()` func, and update your existing IBC router to use it: `ibcRouter.AddRoute(ibctransfertypes.ModuleName, transferIBCModule)` + + 3. Open your `go.mod` and change the IBC line with `github.com/cosmos/ibc-go/v3 v3.0.0` diff --git a/docs/versioned_docs/version-v0.25/migration/v0.24.0.md b/docs/versioned_docs/version-v0.25/migration/v0.24.0.md new file mode 100644 index 0000000..523639a --- /dev/null +++ b/docs/versioned_docs/version-v0.25/migration/v0.24.0.md @@ -0,0 +1,330 @@ +--- +sidebar_position: 995 +title: v0.24.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.24, changes are required to use Ignite CLI v0.24. +--- + +## Cosmos SDK v0.46 upgrade notes + +### Update dependencies + +Cosmos SDK v0.46 is compatible with the latest version of IBC Go v5. If you have a chain that is using an older version, +update the dependencies in your project. 
+ +Throughout the code you might see the following dependencies: + +```go +package pkg_name + +import ( + "github.com/cosmos/ibc-go/v3/..." +) +``` + +Where `v3` is the version of IBC Go and `...` are different IBC Go packages. + +To upgrade the version to `v5`, a global find-and-replace should work. Replace `cosmos/ibc-go/v3` (or whichever version +you're using) with `cosmos/ibc-go/v5` only in `*.go` files (to exclude unwanted changes to files like `go.sum`). + +### Module keeper + +Add an import: + +```go +// x/{moduleName}/keeper/keeper.go + +package keeper + +// ... + +import ( + //... + storetypes "github.com/cosmos/cosmos-sdk/store/types" +) +``` + +In the `Keeper` struct replace `sdk.StoreKey` with `storetypes.StoreKey`: + +```go +// x/{moduleName}/keeper/keeper.go + +package keeper + +// ... + +type ( + Keeper struct { + cdc codec.BinaryCodec + storeKey storetypes.StoreKey + memKey storetypes.StoreKey + paramstore paramtypes.Subspace + } +) +``` + +In the argument list of the `NewKeeper` function definition: + +```go +package keeper + +// ... + +// x/{moduleName}/keeper/keeper.go + +func NewKeeper( + //... + memKey storetypes.StoreKey, +) +``` + +Store type aliases have been removed from the Cosmos SDK `types` package and now have to be imported from `store/types`, +instead. + +In the `testutil/keeper/{moduleName}.go` replace `types.StoreKey` with `storetypes.StoreKey` and `types.MemStoreKey` +with `storetypes.MemStoreKey`. + +```go +// testutil/keeper/{moduleName}.go + +package keeper + +// ... + +func {moduleName}Keeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(storetypes.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(storetypes.MemStoreKey) + //... +} +``` + +### Testutil network package + +Add the `require` package for testing and `pruningtypes` and remove `storetypes`: + +```go +// testutil/network/network.go + +package network + +// ... 
+ +import ( + pruningtypes "github.com/cosmos/cosmos-sdk/pruning/types" + "github.com/stretchr/testify/require" + // storetypes "github.com/cosmos/cosmos-sdk/store/types" <-- remove this line +) +``` + +In the `DefaultConfig` function replace `storetypes.NewPruningOptionsFromString` +with `pruningtypes.NewPruningOptionsFromString` + +```go +// testutil/network/network.go + +package network + +// ... + +func DefaultConfig() network.Config { + //... + return network.Config{ + AppConstructor: func(val network.Validator) servertypes.Application { + return app.New( + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.AppConfig.Pruning)), + //... + ) + }, + //... + } +} +``` + +The `New` function in the Cosmos SDK `testutil/network` package now +accepts [three arguments](https://github.com/cosmos/cosmos-sdk/blob/v0.46.0/testutil/network/network.go#L206) instead of +two. + +In the `New` function add `t.TempDir()` as the second argument to `network.New()` and test that no error is thrown +with `require.NoError(t, err)`: + +```go +// testutil/network/network.go + +package network + +// ... + +func New(t *testing.T, configs ...network.Config) *network.Network { + //... + net, err := network.New(t, t.TempDir(), cfg) + require.NoError(t, err) + //... +} +``` + +### Testutil keeper package + +In the `{moduleName}Keeper` function make the following replacements: + +- `storetypes.StoreKey` → `types.StoreKey` +- `storetypes.MemStoreKey` → `types.MemStoreKey` +- `sdk.StoreTypeIAVL` → `storetypes.StoreTypeIAVL` +- `sdk.StoreTypeMemory` → `storetypes.StoreTypeMemory` + +```go +// testutil/keeper/{moduleName}.go + +package keeper + +// ... + +func {moduleName}Keeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + //... 
+ stateStore.MountStoreWithDB(storeKey, storetypes.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, storetypes.StoreTypeMemory, nil) + //... +} +``` + +### IBC modules + +If you have IBC-enabled modules (for example, added with `ignite scaffold module ... --ibc` or created manually), make +the following changes to the source code. + +Cosmos SDK expects IBC modules +to [implement the `IBCModule` interface](https://ibc.cosmos.network/main/ibc/apps/ibcmodule/). Create a `IBCModule` +type that embeds the module's keeper and a method that returns a new `IBCModule`. Methods in this file will be defined +on this type. + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +type IBCModule struct { + keeper keeper.Keeper +} + +func NewIBCModule(k keeper.Keeper) IBCModule { + return IBCModule{ + keeper: k, + } +} +``` + +Replace receivers for all methods in this file from `(am AppModule)` to `(im IBCModule)`. Replace all instances of `am.` +with `im.` to fix the errors. + +`OnChanOpenInit` now returns to values: a `string` and an `error`: + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +func (im IBCModule) OnChanOpenInit( /*...*/ ) (string, error) +``` + +Ensure that all return statements (five, in the default template) in `OnChanOpenInit` return two values. For example: + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +func (im IBCModule) OnChanOpenInit( /*...*/ ) (string, error) { + //... + return "", sdkerrors.Wrapf(porttypes.ErrInvalidPort, "invalid port: %s, expected %s", portID, boundPort) + //... +} +``` + +Error acknowledgments returned from Transfer `OnRecvPacket` now include a deterministic ABCI code and error message. +Remove the `.Error()` call: + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +func (im IBCModule) OnRecvPacket( /*...*/ ) { + //... 
+ if err := modulePacketData.Unmarshal(modulePacket.GetData()); err != nil { + // return channeltypes.NewErrorAcknowledgement(sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "cannot unmarshal packet data: %s", err.Error()).Error()) + return channeltypes.NewErrorAcknowledgement(sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "cannot unmarshal packet data: %s", err.Error())) + } + + // ... + + // Dispatch packet + switch packet := modulePacketData.Packet.(type) { + // ... + default: + // errMsg := fmt.Sprintf("unrecognized %s packet type: %T", types.ModuleName, packet) + // return channeltypes.NewErrorAcknowledgement(errMsg) + err := fmt.Errorf("unrecognized %s packet type: %T", types.ModuleName, packet) + return channeltypes.NewErrorAcknowledgement(err) + } +} +``` + +After switching to using both `AppModule` and `IBCModule`, modifying the following line: + +```go +// x/{moduleName}/module.go + +package module_name + +// ... + +var ( + //... + _ porttypes.IBCModule = IBCModule{} // instead of "= AppModule{}" +) +``` + +### Main + +The `Execute` function in Cosmos SDK `server/cmd` package now +accepts [three arguments](https://github.com/cosmos/cosmos-sdk/blob/v0.46.0/server/cmd/execute.go#L20) instead of two. + +```go +// cmd/{{projectName}}d/main.go + +package projectNamed + +// ... + +func main() { + //... + if err := svrcmd.Execute(rootCmd, "", app.DefaultNodeHome); err != nil { + os.Exit(1) + } +} +``` + +### Handler + +Cosmos SDK v0.46 no longer needs a `NewHandler` function that was used to handle messages and call appropriate keeper +methods based on message types. Feel free to remove `x/{moduleName}/handler.go` file. + +Since there is no `NewHandler` now, modify the deprecated `Route` function to return `sdk.Route{}`: + +```go +// x/{moduleName}/module.go + +package module_name + +// ... 
+ +func (am AppModule) Route() sdk.Route { return sdk.Route{} } +``` diff --git a/docs/versioned_docs/version-v0.25/migration/v0.25.0.md b/docs/versioned_docs/version-v0.25/migration/v0.25.0.md new file mode 100644 index 0000000..ee1c5b8 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/migration/v0.25.0.md @@ -0,0 +1,67 @@ +--- +sidebar_position: 994 +title: v0.25.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.25.0, changes are required to use Ignite CLI v0.25.0. +--- + +## Protobuf directory migration + +`v0.25.0` changes the location of scaffolded `.proto` files. Previously, `.proto` files were located in `./proto/{moduleName}/`, +where `moduleName` is the same name of the Cosmos SDK module found in `./x/{moduleName}/`. This new version of `ignite` +modifies the scaffolded protobuf files so that they are now generated in `./proto/{appName}/{moduleName}`. + +The only change needed is to create an `appName` folder in the `proto` directory, and then place the +sub-directories within it. 
An example below demonstrates this change: + +### Previous Directory Structure + +This example shows a chain that was generated using `ignite` with `v0.24.0` using the following command: + +```bash +ignite s chain github.com/cosmos/planet --no-module +ignite s module mars +``` + +```bash +├── app +├── cmd +├── docs +├── proto +│ ├── mars +├── x +│ ├── mars +├── README.md +├── config.yml +├── go.mod +├── go.sum +└── .gitignore +``` + +### `v0.25.0` Directory Structure + +This example shows a chain that was generated using `ignite` with `v0.25.0` using the following command: + +```bash +ignite s chain github.com/cosmos/planet --no-module +ignite s module mars +``` + +```bash +├── app +├── cmd +├── docs +├── proto +│ ├── planet +│ │ ├── mars +├── x +│ ├── mars +├── README.md +├── config.yml +├── go.mod +├── go.sum +└── .gitignore +``` + +The only difference is the additional directory `planet` which is the name of the application. The name of the app can +be verified by checking the package in the `go.mod` file. In this example, the package is `github.com/cosmos/planet` where +`planet` is the app name. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.25/network/01-introduction.md b/docs/versioned_docs/version-v0.25/network/01-introduction.md new file mode 100644 index 0000000..9881063 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/network/01-introduction.md @@ -0,0 +1,69 @@ +--- +sidebar_position: 1 +description: Introduction to Ignite Network commands. +--- + +# Ignite Network commands + +The `ignite network` commands allow to coordinate the launch of sovereign Cosmos blockchains by interacting with the Ignite Chain. + +To launch a Cosmos blockchain you need someone to be a coordinator and others to be validators. These are just roles, anyone can be a coordinator or a validator. + +- A coordinator publishes information about a chain to be launched on the Ignite blockchain, approves validator requests and coordinates the launch. 
+- Validators send requests to join a chain and start their nodes when a blockchain is ready for launch. + +## Launching a chain on Ignite + +Launching with the CLI can be as simple as a few short commands using the `ignite network` command namespace. + +> **NOTE:** `ignite n` can also be used as a shortcut for `ignite network`. + +To publish the information about your chain as a coordinator, run the following command (the URL should point to a repository with a Cosmos SDK chain): + +``` +ignite network chain publish github.com/ignite/example +``` + +This command will return the launch identifier you will be using in the following +commands. Let's say this identifier is 42. +Next, ask validators to initialize their nodes and request to join the network. +For a testnet you can use the default values suggested by the +CLI. + +``` +ignite network chain init 42 +ignite network chain join 42 --amount 95000000stake +``` + +As a coordinator, list all validator requests: + +``` +ignite network request list 42 +``` + +Approve validator requests: + +``` +ignite network request approve 42 1,2 +``` + +Once you've approved all validators you need in the validator set, announce that +the chain is ready for launch: + +``` +ignite network chain launch 42 +``` + +Validators can now prepare their nodes for launch: + +``` +ignite network chain prepare 42 +``` + +The output of this command will show a command that a validator would use to +launch their node, for example `exampled --home ~/.example`. After enough +validators launch their nodes, a blockchain will be live. + +--- + +The next two sections provide more information on the process of coordinating a chain launch from a coordinator and participating in a chain launch as a validator. 
diff --git a/docs/versioned_docs/version-v0.25/network/02-coordinator.md b/docs/versioned_docs/version-v0.25/network/02-coordinator.md new file mode 100644 index 0000000..20149c1 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/network/02-coordinator.md @@ -0,0 +1,138 @@ +--- +sidebar_position: 2 +description: Ignite Network commands for coordinators. +--- + +# Coordinator Guide + +Coordinators organize and launch new chains on Ignite Chain. + +--- + +## Publish a chain + +The first step in the process of a chain launch is for the coordinator to publish the intention of launching a chain. +The `publish` command publishes the intention of launching a chain on Ignite from a project git repository. + +```shell +ignite n chain publish https://github.com/ignite/example +``` + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Chain's binary built +✔ Blockchain initialized +✔ Genesis initialized +✔ Network published +⋆ Launch ID: 3 +``` + +`LaunchID` identifies the published blockchain on the Ignite blockchain. + +### Specify an initial genesis + +During coordination, new genesis accounts and genesis validators are added into the chain genesis. +The initial genesis where these accounts are added is by default the default genesis generated by the chain binary. + +The coordinator can specify a custom initial genesis for the chain launch with the `--genesis` flag. This custom initial genesis can contain additional default genesis accounts and custom params for the chain modules. + +A URL must be provided for the `--genesis` flag. This can either directly point to a JSON genesis file or a tarball containing a genesis file. + +```shell +ignite n chain publish https://github.com/ignite/example --genesis https://example.com/genesis.tar.gz +``` + +## Approve validator requests + +When coordinating for a chain launch, validators send requests. These represent requests to be part of the genesis as a validator for the chain. 
+ +The coordinator can list these requests: + +``` +ignite n request list 3 +``` + +> **NOTE:** here "3" is specifying the `LaunchID`. + +**Output** + +``` +Id Status Type Content +1 APPROVED Add Genesis Account spn1daefnhnupn85e8vv0yc5epmnkcr5epkqncn2le, 100000000stake +2 APPROVED Add Genesis Validator e3d3ca59d8214206839985712282967aaeddfb01@84.118.211.157:26656, spn1daefnhnupn85e8vv0yc5epmnkcr5epkqncn2le, 95000000stake +3 PENDING Add Genesis Account spn1daefnhnupn85e8vv0yc5epmnkcr5epkqncn2le, 95000000stake +4 PENDING Add Genesis Validator b10f3857133907a14dca5541a14df9e8e3389875@84.118.211.157:26656, spn1daefnhnupn85e8vv0yc5epmnkcr5epkqncn2le, 95000000stake +``` + +The coordinator can either approve or reject these requests. + +To approve the requests: + +``` +ignite n request approve 3 3,4 +``` + +> **NOTE:** when selecting a list of requests, both syntaxes can be used: `1,2,3,4` and `1-3,4`. + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Requests format verified +✔ Blockchain initialized +✔ Genesis initialized +✔ Genesis built +✔ The network can be started +✔ Request(s) #3, #4 verified +✔ Request(s) #3, #4 approved +``` + +Ignite CLI automatically verifies that the requests can be applied for the genesis, the approved requests don't generate an invalid genesis. + +To reject the requests: + +``` +ignite n request reject 3 3,4 +``` + +**Output** + +``` +✔ Request(s) #3, #4 rejected +``` + +--- + +## Initiate the launch of a chain + +When enough validators are approved for the genesis and the coordinator deems the chain ready to be launched, the coordinator can initiate the launch of the chain. + +This action will finalize the genesis of chain, meaning that no new requests can be approved for the chain. + +This action also sets the launch time (or genesis time) for the chain, the time when the blockchain network will go live. 
+ +``` +ignite n chain launch 3 +``` + +**Output** + +``` +✔ Chain 3 will be launched on 2022-10-01 09:00:00.000000 +0200 CEST +``` + +This example output shows the launch time of the chain on the network. + +### Set a custom launch time + +By default, the launch time will be set to the earliest date possible. In practice, the validators should have time to prepare their node for the network launch. If a validator fails to be online, they can get jailed for inactivity in the validator set. + +The coordinator can specify a custom time with the `--launch-time` flag. + +``` +ignite n chain launch --launch-time 2022-01-01T00:00:00Z +``` diff --git a/docs/versioned_docs/version-v0.25/network/03-validator.md b/docs/versioned_docs/version-v0.25/network/03-validator.md new file mode 100644 index 0000000..e594f01 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/network/03-validator.md @@ -0,0 +1,159 @@ +--- +sidebar_position: 3 +description: Ignite Network commands for validators. +--- + +# Validator Guide + +Validators join as genesis validators for chain launches on Ignite Chain. + +--- + +## List all published chains + +Validators can list and explore published chains to be launched on Ignite. + +``` +ignite n chain list +``` + +**Output** + +``` +Launch Id Chain Id Source Phase + +3 example-1 https://github.com/ignite/example coordinating +2 spn-10 https://github.com/tendermint/spn launched +1 example-20 https://github.com/tendermint/spn launching +``` + +- `Launch ID` is the unique identifier of the chain on Ignite. This is the ID used to interact with the chain launch. +- `Chain ID` represents the identifer of the chain network once it will be launched. It should be a unique identifier in practice but doesn't need to be unique on Ignite. +- `Source` is the repository URL of the project. +- `Phase` is the current phase of the chain launch. 
A chain can have 3 different phases: + - `coordinating`: means the chain is open to receive requests from validators + - `launching`: means the chain no longer receives requests but it hasn't been launched yet + - `launched`: means the chain network has been launched + +--- + +## Request network participation + +When the chain is in the coordination phase, validators can request to be a genesis validator for the chain. +Ignite CLI supports an automatic workflow that can setup a node for the validator and a workflow for advanced users with a specific setup for their node. + +### Simple Flow + +`ignite` can handle validator setup automatically. Initialize the node and generate a gentx file with default values: + +``` +ignite n chain init 3 +``` + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Blockchain initialized +✔ Genesis initialized +? Staking amount 95000000stake +? Commission rate 0.10 +? Commission max rate 0.20 +? Commission max change rate 0.01 +⋆ Gentx generated: /Users/lucas/spn/3/config/gentx/gentx.json +``` + +Now, create and broadcast a request to join a chain as a validator: + +``` +ignite n chain join 3 --amount 100000000stake +``` + +The join command accepts a `--amount` flag with a comma-separated list of tokens. If the flag is provided, the +command will broadcast a request to add the validator’s address as an account to the genesis with the specific amount. + +**Output** + +``` +? Peer's address 192.168.0.1:26656 +✔ Source code fetched +✔ Blockchain set up +✔ Account added to the network by the coordinator! +✔ Validator added to the network by the coordinator! +``` + +--- + +### Advanced Flow + +Using a more advanced setup (e.g. 
custom `gentx`), validators must provide an additional flag to their command +to point to the custom file: + +``` +ignite n chain join 3 --amount 100000000stake --gentx ~/chain/config/gentx/gentx.json +``` + +--- + +## Launch the network + +### Simple Flow + +Generate the final genesis and config of the node: + +``` +ignite n chain prepare 3 +``` + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Chain's binary built +✔ Genesis initialized +✔ Genesis built +✔ Chain is prepared for launch +``` + +Next, start the node: + +``` +exampled start --home ~/spn/3 +``` + +--- + +### Advanced Flow + +Fetch the final genesis for the chain: + +``` +ignite n chain show genesis 3 +``` + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Blockchain initialized +✔ Genesis initialized +✔ Genesis built +⋆ Genesis generated: ./genesis.json +``` + +Next, fetch the persistent peer list: + +``` +ignite n chain show peers 3 +``` + +**Output** + +``` +⋆ Peer list generated: ./peers.txt +``` + +The fetched genesis file and peer list can be used for a manual node setup. 
diff --git a/docs/versioned_docs/version-v0.25/network/_category_.json b/docs/versioned_docs/version-v0.25/network/_category_.json new file mode 100644 index 0000000..8f23039 --- /dev/null +++ b/docs/versioned_docs/version-v0.25/network/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Network", + "position": 7, + "link": null + } diff --git a/docs/versioned_docs/version-v0.25/packages/_category_.json b/docs/versioned_docs/version-v0.25/packages/_category_.json new file mode 100644 index 0000000..3b05bba --- /dev/null +++ b/docs/versioned_docs/version-v0.25/packages/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Packages", + "position": 5, + "link": null + } diff --git a/docs/versioned_docs/version-v0.25/packages/cosmostxcollector.md b/docs/versioned_docs/version-v0.25/packages/cosmostxcollector.md new file mode 100644 index 0000000..df0fccd --- /dev/null +++ b/docs/versioned_docs/version-v0.25/packages/cosmostxcollector.md @@ -0,0 +1,200 @@ +--- +sidebar_position: 0 +title: cosmostxcollector +slug: /packages/cosmostxcollector +--- + +# cosmostxcollector + +The package implements support for collecting transactions and events from Cosmos blockchains +into a data backend and it also adds support for querying the collected data. + +## Transaction and event data collecting + +Transactions and events can be collected using the `cosmostxcollector.Collector` type. This +type uses a `cosmosclient.Client` instance to fetch the data from each block and a data backend +adapter to save the data. + +### Data backend adapters + +Data backend adapters are used to query and save the collected data into different types of data +backends and must implement the `cosmostxcollector.adapter.Adapter` interface. + +An adapter for PostgreSQL is already implemented in `cosmostxcollector.adapter.postgres.Adapter`. +This is the one used in the examples. 
+ +### Example: Data collection + +The data collection example assumes that there is a PostgreSQL database running in the local +environment containing an empty database named "cosmos". + +The required database tables will be created automatically by the collector the first time it is run. + +When the application is run it will fetch all the transactions and events starting from one of the +recent blocks until the current block height and populate the database: + +```go +package main + +import ( + "context" + "log" + + "github.com/ignite/cli/ignite/pkg/clictx" + "github.com/ignite/cli/ignite/pkg/cosmosclient" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/adapter/postgres" +) + +const ( + // Name of a local PostgreSQL database + dbName = "cosmos" + + // Cosmos RPC address + rpcAddr = "https://rpc.cosmos.network:443" +) + +func collect(ctx context.Context, db postgres.Adapter) error { + // Make sure that the data backend schema is up to date + if err := db.Init(ctx); err != nil { + return err + } + + // Init the Cosmos client + client, err := cosmosclient.New(ctx, cosmosclient.WithNodeAddress(rpcAddr)) + if err != nil { + return err + } + + // Get the latest block height + latestHeight, err := client.LatestBlockHeight(ctx) + if err != nil { + return err + } + + // Collect transactions and events starting from a block height. + // The collector stops at the latest height available at the time of the call. 
+ collector := cosmostxcollector.New(db, client) + if err := collector.Collect(ctx, latestHeight-50); err != nil { + return err + } + + return nil +} + +func main() { + ctx := clictx.From(context.Background()) + + // Init an adapter for a local PostgreSQL database running with the default values + params := map[string]string{"sslmode": "disable"} + db, err := postgres.NewAdapter(dbName, postgres.WithParams(params)) + if err != nil { + log.Fatal(err) + } + + if err := collect(ctx, db); err != nil { + log.Fatal(err) + } +} +``` + +## Queries + +Collected data can be queried through the data backend adapters using event queries or +cursor-based queries. + +Queries support sorting, paging and filtering by using different options during creation. +The cursor-based ones also support the selection of specific fields or properties and also +passing arguments in cases where the query is a function. + +By default no sorting, filtering nor paging is applied to the queries. + +### Event queries + +The event queries return events and their attributes as `[]cosmostxcollector.query.Event`. + +### Example: Query events + +The example reads transfer events from Cosmos' bank module and paginates the results. 
+ +```go +import ( + "context" + + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/adapter/postgres" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/query" +) + +func queryBankTransferEvents(ctx context.Context, db postgres.Adapter) ([]query.Event, error) { + // Create an event query that returns events of type "transfer" + qry := query.NewEventQuery( + query.WithFilters( + // Filter transfer events from Cosmos' bank module + postgres.FilterByEventType(banktypes.EventTypeTransfer), + ), + query.WithPageSize(10), + query.AtPage(1), + ) + + // Execute the query + return db.QueryEvents(ctx, qry) +} +``` + +### Cursor-based queries + +This type of queries is meant to be used in contexts where the Event queries are not +useful. + +Cursor-based queries can query a single "entity" which can be a table, view or function +in relational databases or a collection or function in non relational data backends. + +The result of these types of queries is a cursor that implements the `cosmostxcollector.query.Cursor` +interface. 
+ +### Example: Query events using cursors + +```go +import ( + "context" + + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/adapter/postgres" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/query" +) + +func queryBankTransferEventIDs(ctx context.Context, db postgres.Adapter) (ids []int64, err error) { + // Create a query that returns the IDs for events of type "transfer" + qry := query.New( + "event", + query.Fields("id"), + query.WithFilters( + // Filter transfer events from Cosmos' bank module + postgres.NewFilter("type", banktypes.EventTypeTransfer), + ), + query.WithPageSize(10), + query.AtPage(1), + query.SortByFields(query.SortOrderAsc, "id"), + ) + + // Execute the query + cr, err := db.Query(ctx, qry) + if err != nil { + return nil, err + } + + // Read the results + for cr.Next() { + var eventID int64 + + if err := cr.Scan(&eventID); err != nil { + return nil, err + } + + ids = append(ids, eventID) + } + + return ids, nil +} +``` diff --git a/docs/versioned_docs/version-v0.26/01-welcome/01-index.md b/docs/versioned_docs/version-v0.26/01-welcome/01-index.md new file mode 100644 index 0000000..e80d9f7 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/01-welcome/01-index.md @@ -0,0 +1,63 @@ +--- +slug: /welcome +--- + +import ProjectsTable from '@site/src/components/ProjectsTable'; + +# Introduction to Ignite + +[Ignite CLI](https://github.com/ignite/cli) offers everything you need to build, test, and launch your blockchain with a +decentralized worldwide community. Ignite CLI is built on top of [Cosmos SDK](https://docs.cosmos.network), the world’s +most popular blockchain framework. Ignite CLI accelerates chain development by scaffolding everything you need so you +can focus on business logic. + +## What is Ignite CLI? + +Ignite CLI is an easy-to-use CLI tool for creating and maintaining sovereign application-specific blockchains. 
+Blockchains created with Ignite CLI use Cosmos SDK and Tendermint. Ignite CLI and the Cosmos SDK modules are written in +the Go programming language. The scaffolded blockchain that is created with Ignite CLI includes a command line interface +that lets you manage keys, create validators, and send tokens. + +With just a few commands, you can use Ignite CLI to: + +- Create a modular blockchain written in Go +- Scaffold modules, messages, types with CRUD operations, IBC packets, and more +- Start a blockchain node in development with live reloading +- Connect to other blockchains with a built-in IBC relayer +- Use generated TypeScript/Vuex clients to interact with your blockchain +- Use the Vue.js web app template with a set of components and Vuex modules + +## Install Ignite CLI + +To install the `ignite` binary in `/usr/local/bin` run the following command: + +``` +curl https://get.ignite.com/cli | bash +``` + +## Projects using Tendermint and Cosmos SDK + +Many projects already showcase the Tendermint BFT consensus engine and the Cosmos SDK. Explore +the [Cosmos ecosystem](https://cosmos.network/ecosystem/apps) to discover a wide variety of apps, blockchains, wallets, +and explorers that are built in the Cosmos ecosystem. 
+ +## Projects building with Ignite CLI + +<ProjectsTable data={[ + { name: "Stride Labs", logo: "img/logo/stride.svg"}, + { name: "KYVE Network", logo: "img/logo/kyve.svg"}, + { name: "Umee", logo: "img/logo/umee.svg"}, + { name: "MediBloc Core", logo: "img/logo/medibloc.svg"}, + { name: "Cudos", logo: "img/logo/cudos.svg"}, + { name: "Firma Chain", logo: "img/logo/firmachain.svg"}, + { name: "BitCanna", logo: "img/logo/bitcanna.svg"}, + { name: "Source Protocol", logo: "img/logo/source.svg"}, + { name: "Sonr", logo: "img/logo/sonr.svg"}, + { name: "Neutron", logo: "img/logo/neutron.svg"}, + { name: "OKP4 Blockchain", logo: "img/logo/okp4.svg"}, + { name: "Dymension Hub", logo: "img/logo/dymension.svg"}, + { name: "Electra Blockchain", logo: "img/logo/electra.svg"}, + { name: "OLLO Station", logo: "img/logo/ollostation.svg"}, + { name: "Mun", logo: "img/logo/mun.svg"}, + { name: "Aura Network", logo: "img/logo/aura.svg"}, +]}/> diff --git a/docs/versioned_docs/version-v0.26/01-welcome/02-install.md b/docs/versioned_docs/version-v0.26/01-welcome/02-install.md new file mode 100644 index 0000000..12f041a --- /dev/null +++ b/docs/versioned_docs/version-v0.26/01-welcome/02-install.md @@ -0,0 +1,114 @@ +--- +sidebar_position: 1 +description: Steps to install Ignite CLI on your local computer. +--- + +# Install Ignite CLI + +You can run [Ignite CLI](https://github.com/ignite/cli) in a web-based Gitpod IDE or you can install Ignite CLI on your +local computer. + +## Prerequisites + +Be sure you have met the prerequisites before you install and use Ignite CLI. + +### Operating systems + +Ignite CLI is supported for the following operating systems: + +- GNU/Linux +- macOS +- Windows Subsystem for Linux (WSL) + +### Go + +Ignite CLI is written in the Go programming language. 
To use Ignite CLI on a local system: + +- Install [Go](https://golang.org/doc/install) (**version 1.19** or higher) +- Ensure the Go environment variables are [set properly](https://golang.org/doc/gopath_code#GOPATH) on your system + +## Verify your Ignite CLI version + +To verify the version of Ignite CLI you have installed, run the following command: + +```bash +ignite version +``` + +## Installing Ignite CLI + +To install the latest version of the `ignite` binary use the following command. + +```bash +curl https://get.ignite.com/cli! | bash +``` + +This command invokes `curl` to download the installation script and pipes the output to `bash` to perform the +installation. The `ignite` binary is installed in `/usr/local/bin`. + +To learn more or customize the installation process, see the [installer docs](https://github.com/ignite/installer) on +GitHub. + +### Write permission + +Ignite CLI installation requires write permission to the `/usr/local/bin/` directory. If the installation fails because +you do not have write permission to `/usr/local/bin/`, run the following command: + +```bash +curl https://get.ignite.com/cli | bash +``` + +Then run this command to move the `ignite` executable to `/usr/local/bin/`: + +```bash +sudo mv ignite /usr/local/bin/ +``` + +On some machines, a permissions error occurs: + +```bash +mv: rename ./ignite to /usr/local/bin/ignite: Permission denied +============ +Error: mv failed +``` + +In this case, use sudo before `curl` and before `bash`: + +```bash +sudo curl https://get.ignite.com/cli | sudo bash +``` + +## Upgrading your Ignite CLI installation {#upgrade} + +Before you install a new version of Ignite CLI, remove all existing Ignite CLI installations. + +To remove the current Ignite CLI installation: + +1. On your terminal window, press `Ctrl+C` to stop the chain that you started with `ignite chain serve`. +2. Remove the Ignite CLI binary with `rm $(which ignite)`. 
+ Depending on your user permissions, run the command with or without `sudo`. +3. Repeat this step until all `ignite` installations are removed from your system. + +After all existing Ignite CLI installations are removed, follow the [Installing Ignite CLI](#installing-ignite-cli) +instructions. + +For details on version features and changes, see +the [changelog.md](https://github.com/ignite/cli/blob/main/changelog.md) +in the repo. + +## Build from source + +To experiment with the source code, you can build from source: + +```bash +git clone https://github.com/ignite/cli --depth=1 +cd cli && make install +``` + +## Summary + +- Verify the prerequisites. +- To set up a local development environment, install Ignite CLI locally on your computer. +- Install Ignite CLI by fetching the binary using cURL or by building from source. +- The latest version is installed by default. You can install previous versions of the precompiled `ignite` binary. +- Stop the chain and remove existing versions before installing a new version. diff --git a/docs/versioned_docs/version-v0.26/01-welcome/_category_.json b/docs/versioned_docs/version-v0.26/01-welcome/_category_.json new file mode 100644 index 0000000..ac625fc --- /dev/null +++ b/docs/versioned_docs/version-v0.26/01-welcome/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Welcome", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/00-introduction.md b/docs/versioned_docs/version-v0.26/02-guide/00-introduction.md new file mode 100644 index 0000000..31dd77b --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/00-introduction.md @@ -0,0 +1,36 @@ +--- +sidebar_position: 0 +title: Introduction +slug: /guide +--- + +# Introduction + +Developer tutorials provide step-by-step instructions to help you build blockchain developer skills. 
+ +By following these developer tutorials you will learn how to: + +* Install Ignite CLI on your local machine +* Create a new blockchain and start a node locally for development +* Make your blockchain say "Hello, World!" + * Scaffold a Cosmos SDK query + * Modify a keeper method to return a static string + * Use the blockchain CLI to make a query +* Write and read blog posts to your chain in the Blog tutorial + * Scaffold a Cosmos SDK message + * Define new types in protocol buffer files + * Write keeper methods to write data to the store + * Read data from the store and return it as a result a query + * Use the blockchain CLI to broadcast transactions +* Build a blockchain for buying and selling names in the Nameservice tutorial + * Scaffold CRUD logic with `map` + * Use other module methods in your custom module + * Send tokens between addresses +* Build a guessing game with rewards + * Use an escrow account to store tokens +* Use the Inter-Blockchain Communication (IBC) protocol + * Scaffold an IBC-enabled module + * Send and receive IBC packets + * Configure and run a built-in IBC relayer +* Build a decentralized order-book token exchange + * Build an advanced IBC-enabled module diff --git a/docs/versioned_docs/version-v0.26/02-guide/02-getting-started.md b/docs/versioned_docs/version-v0.26/02-guide/02-getting-started.md new file mode 100644 index 0000000..fb2d68f --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/02-getting-started.md @@ -0,0 +1,150 @@ +--- +sidebar_position: 2 +--- + +# Getting started + +In this tutorial, we will be using Ignite CLI to create a new blockchain. Ignite +CLI is a command line interface that allows users to quickly and easily create +blockchain networks. By using Ignite CLI, we can quickly create a new blockchain +without having to manually set up all the necessary components. + +Once we have created our blockchain with Ignite CLI, we will take a look at the +directory structure and files that were created. 
This will give us an +understanding of how the blockchain is organized and how the different +components of the blockchain interact with each other. + +By the end of this tutorial, you will have a basic understanding of how to use +Ignite CLI to create a new blockchain, and you will have a high-level +understanding of the directory structure and files that make up a blockchain. +This knowledge will be useful as you continue to explore the world of blockchain +development. + +## Creating a new blockchain + +To create a new blockchain project with Ignite, you will need to run the +following command: + +``` +ignite scaffold chain example +``` + +The `ignite scaffold chain` command will create a new blockchain in a new +directory `example`. + +The new blockchain is built using the Cosmos SDK framework and imports several +standard modules to provide a range of functionality. These modules include +`staking`, which enables a delegated Proof-of-Stake consensus mechanism, `bank` +for facilitating fungible token transfers between accounts, and `gov` for +on-chain governance. In addition to these modules, the blockchain also imports +other modules from the Cosmos SDK framework. + +The `example` directory contains the generated files and directories that make +up the structure of a Cosmos SDK blockchain. This directory includes files for +the chain's configuration, application logic, and tests, among others. It +provides a starting point for developers to quickly set up a new Cosmos SDK +blockchain and build their desired functionality on top of it. + +By default, Ignite creates a new empty custom module with the same name as the +blockchain being created (in this case, `example`) in the `x/` directory. This +module doesn't have any functionality by itself, but can serve as a starting +point for building out the features of your application. If you don't want to +create this module, you can use the `--no-module` flag to skip it. 
+ +## Directory structure + +In order to understand what the Ignite CLI has generated for your project, you +can inspect the contents of the `example/` directory. + +The `app/` directory contains the files that connect the different parts of the +blockchain together. The most important file in this directory is `app.go`, +which includes the type definition of the blockchain and functions for creating +and initializing it. This file is responsible for wiring together the various +components of the blockchain and defining how they will interact with each +other. + +The `cmd/` directory contains the main package responsible for the command-line +interface (CLI) of the compiled binary. This package defines the commands that +can be run from the CLI and how they should be executed. It is an important part +of the blockchain project as it provides a way for developers and users to +interact with the blockchain and perform various tasks, such as querying the +blockchain state or sending transactions. + +The `docs/` directory is used for storing project documentation. By default, +this directory includes an OpenAPI specification file, which is a +machine-readable format for defining the API of a software project. The OpenAPI +specification can be used to automatically generate human-readable documentation +for the project, as well as provide a way for other tools and services to +interact with the API. The `docs/` directory can be used to store any additional +documentation that is relevant to the project. + +The `proto/` directory contains protocol buffer files, which are used to +describe the data structure of the blockchain. Protocol buffers are a language- +and platform-neutral mechanism for serializing structured data, and are often +used in the development of distributed systems, such as blockchain networks. 
The +protocol buffer files in the `proto/` directory define the data structures and +messages that are used by the blockchain, and are used to generate code for +various programming languages that can be used to interact with the blockchain. +In the context of the Cosmos SDK, protocol buffer files are used to define the +specific types of data that can be sent and received by the blockchain, as well +as the specific RPC endpoints that can be used to access the blockchain's +functionality. + +The `testutil/` directory contains helper functions that are used for testing. +These functions provide a convenient way to perform common tasks that are needed +when writing tests for the blockchain, such as creating test accounts, +generating transactions, and checking the state of the blockchain. By using the +helper functions in the `testutil/` directory, developers can write tests more +quickly and efficiently, and can ensure that their tests are comprehensive and +effective. + +The `x/` directory contains custom Cosmos SDK modules that have been added to +the blockchain. Standard Cosmos SDK modules are pre-built components that +provide common functionality for Cosmos SDK-based blockchains, such as support +for staking and governance. Custom modules, on the other hand, are modules that +have been developed specifically for the blockchain project and provide +project-specific functionality. + +The `config.yml` file is a configuration file that can be used to customize the +blockchain during development. This file includes settings that control various +aspects of the blockchain, such as the network's ID, account balances, and the +node parameters. + +The `.github` directory contains a GitHub Actions workflow that can be used to +automatically build and release a blockchain binary. GitHub Actions is a tool +that allows developers to automate their software development workflows, +including building, testing, and deploying their projects. 
The workflow in the +`.github` directory is used to automate the process of building the blockchain +binary and releasing it, which can save time and effort for developers. + +The `readme.md` file is a readme file that provides an overview of the +blockchain project. This file typically includes information such as the +project's name and purpose, as well as instructions on how to build and run the +blockchain. By reading the `readme.md` file, developers and users can quickly +understand the purpose and capabilities of the blockchain project and get +started using it. + +## Starting a blockchain node + +To start a blockchain node in development, you can run the following command: + +``` +ignite chain serve +``` + +The `ignite chain serve` command is used to start a blockchain node in +development mode. It first compiles and installs the binary using the +`ignite chain build` command, then initializes the blockchain's data directory +for a single validator using the `ignite chain init` command. After that, it +starts the node locally and enables automatic code reloading so that changes to +the code can be reflected in the running blockchain without having to restart +the node. This allows for faster development and testing of the blockchain. + +Congratulations! 🥳 You have successfully created a brand-new Cosmos blockchain +using the Ignite CLI. This blockchain uses the delegated proof of stake (DPoS) +consensus algorithm, and comes with a set of standard modules for token +transfers, governance, and inflation. Now that you have a basic understanding of +your Cosmos blockchain, it's time to start building custom functionality. In the +following tutorials, you will learn how to build custom modules and add new +features to your blockchain, allowing you to create a unique and powerful +decentralized application. 
diff --git a/docs/versioned_docs/version-v0.26/02-guide/03-hello/00-express.md b/docs/versioned_docs/version-v0.26/02-guide/03-hello/00-express.md new file mode 100644 index 0000000..08bfc2f --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/03-hello/00-express.md @@ -0,0 +1,141 @@ +--- +description: Step-by-step guidance to build your first blockchain and your first Cosmos SDK module. +title: Express tutorial +--- + +# "Hello, World!" in 5 minutes + +In this tutorial, you will create a simple blockchain with a custom query that +responds with `"Hello, %s!"`, where `%s` is a name provided in the query. To do +this, you will use the Ignite CLI to generate most of the code, and then modify +the query to return the desired response. After completing the tutorial, you +will have a better understanding of how to create custom queries in a +blockchain. + +First, create a new `hello` blockchain with Ignite CLI: + +``` +ignite scaffold chain hello +``` + +Let's add a query to the blockchain we just created. + +In the Cosmos SDK, a query is a request for information from the blockchain. +Queries are used to retrieve data from the blockchain, such as the current state +of the ledger or the details of a specific transaction. The Cosmos SDK provides +a number of built-in query methods that can be used to retrieve data from the +blockchain, and developers can also create custom queries to access specific +data or perform complex operations. Queries are processed by the blockchain's +nodes and the results are returned to the querying client. + +## Create a query with Ignite + +To add a query, run the following command inside the `hello` directory: + +``` +ignite scaffold query say-hello name --response name +``` + +The `ignite scaffold query` command is a tool used to quickly create new +queries. When you run this command, it makes changes to your source code to add +the new query and make it available in your API. 
This command accepts a query +name (`"say-hello"`) and a list of request fields (in our case only `name`). The +optional `--response` flag specifies the return values of the query. + +This command made the following changes to the source code. + +The `proto/hello/hello/query.proto` file was modified to define the request and +response for a query, as well as to add the `SayHello` query in the `Query` +service. + +The `x/hello/client/cli/query_say_hello.go` file was created and added to the +project. This file contains a CLI command `CmdSayHello` that allows users to +submit a "say hello" query to the blockchain. This command allows users to +interact with the blockchain in a more user-friendly way, allowing them to +easily submit queries and receive responses from the blockchain. + +The `x/hello/client/cli/query.go` was modified to add the `CmdSayHello` command +to the CLI of the blockchain. + +The `x/hello/keeper/query_say_hello.go` file was created with a keeper method +called `SayHello`. This method is responsible for handling the "say hello" +query, which can be called by a client using the command-line interface (CLI) or +an API. When the "say hello" query is executed, the `SayHello` method is called +to perform the necessary actions and return a response to the client. The +`SayHello` method may retrieve data from the application's database, process the +data, and return a result to the client in a specific format, such as a string +of text or a data structure. + +To change the source code so that the query returns the `"Hello, %s!"` string, +modify the return statement in `query_say_hello.go` to return +`fmt.Sprintf("Hello, %s!", req.Name)`. 
+ +```go title="x/hello/keeper/query_say_hello.go" +func (k Keeper) SayHello(goCtx context.Context, req *types.QuerySayHelloRequest) (*types.QuerySayHelloResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + ctx := sdk.UnwrapSDKContext(goCtx) + + // TODO: Process the query + _ = ctx + // highlight-next-line + return &types.QuerySayHelloResponse{Name: fmt.Sprintf("Hello, %s!", req.Name)}, nil +} +``` + +The function now returns a `QuerySayHelloResponse` struct with the `Name` field +set to the string `"Hello, %s!"` with `req.Name` as the value for the `%s` +placeholder. It also returns a nil error to indicate success. + +Now that you have added a query to your blockchain and modified it to return the value +you want, you can start your blockchain with Ignite: + +``` +ignite chain serve +``` + +After starting your blockchain, you can use its command-line interface (CLI) to +interact with it and perform various actions such as querying the blockchain's +state, sending transactions, and more. + +You can use the `hellod` binary to run the `say-hello` query: + +``` +hellod q hello say-hello bob +``` + +Once you run this command, the `hellod` binary will send a `say-hello` query to +your blockchain with the argument `bob`. The blockchain will process the query +and return the result, which will be printed by the `hellod` binary. In this +case, the expected result is a string containing the message `Hello, bob!`. + +``` +name: Hello, bob! +``` + +Congratulations! 🎉 You have successfully created a new Cosmos SDK module called +`hello` with a custom query functionality. This allows users to query the +blockchain and receive a response with a personalized greeting. This tutorial +demonstrated how to use Ignite CLI to create a custom query in a blockchain. + +Ignite is an incredibly convenient tool for developers because it automatically +generates much of the code required for a project. 
This saves developers time +and effort by reducing the amount of code they need to write manually. With +Ignite, developers can quickly and easily set up the basic structure of their +project, allowing them to focus on the more complex and unique aspects of their +work. + +However, it is also important for developers to understand how the code +generated by Ignite works under the hood. One way to do this is to implement the +same functionality manually, without using Ignite. For example, in this tutorial +Ignite was used to generate query functionality, now you could try implementing the +same functionality manually to see how it works and gain a deeper understanding +of the code. + +Implementing the same functionality manually can be time-consuming and +challenging, but it can also be a valuable learning experience. By seeing how +the code works at a low level, developers can gain a better understanding of how +different components of their project fit together and how they can be +customized and optimized. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/03-hello/01-scaffolding.md b/docs/versioned_docs/version-v0.26/02-guide/03-hello/01-scaffolding.md new file mode 100644 index 0000000..7c245e7 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/03-hello/01-scaffolding.md @@ -0,0 +1,236 @@ +--- +title: In-depth tutorial +--- + +# In-depth "Hello, World!" tutorial + +In this tutorial you will implement "Hello, World!" functionality from +scratch. The functionality of the application you will be building will be +identical to the one you created in the "Express tutorial" section, but +here you will be doing it manually in order to gain a deeper understanding of +the process. + +To begin, let's start with a fresh `hello` blockchain. You can either roll back +the changes you made in the previous section or create a new blockchain using +Ignite. 
Either way, you will have a blank blockchain that is ready for you to +work with. + +``` +ignite scaffold chain hello +``` + +## `SayHello` RPC + +In Cosmos SDK blockchains, queries are defined as remote procedure calls (RPCs) +in a `Query` service in protocol buffer files. To add a new query, you can add +the following code to the `query.proto` file of your module: + +```protobuf title="proto/hello/hello/query.proto" +service Query { + // highlight-start + rpc SayHello(QuerySayHelloRequest) returns (QuerySayHelloResponse) { + option (google.api.http).get = "/hello/hello/say_hello/{name}"; + } + // highlight-end +} +``` + +The RPC accepts a request argument of type `QuerySayHelloRequest` and returns a +value of type `QuerySayHelloResponse`. To define these types, you can add the +following code to the `query.proto` file: + +```protobuf title="proto/hello/hello/query.proto" +message QuerySayHelloRequest { + string name = 1; +} + +message QuerySayHelloResponse { + string name = 1; +} +``` + +To use the types defined in `query.proto`, you must transpile the protocol +buffer files into Go source code. This can be done by running `ignite chain +serve`, which will build and initialize the blockchain and automatically +generate the Go source code from the protocol buffer files. Alternatively, you +can run `ignite generate proto-go` to only generate the Go source code from the +protocol buffer files, without building and initializing the blockchain. + +## `SayHello` keeper method + +After defining the query, request, and response types in the `query.proto` file, +you will need to implement the logic for the query in your code. This typically +involves writing a function that processes the request and returns the +appropriate response. 
Create a new file `query_say_hello.go` with the following +contents: + +```go title="x/hello/keeper/query_say_hello.go" +package keeper + +import ( + "context" + "fmt" + + sdk "github.com/cosmos/cosmos-sdk/types" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "hello/x/hello/types" +) + +func (k Keeper) SayHello(goCtx context.Context, req *types.QuerySayHelloRequest) (*types.QuerySayHelloResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + ctx := sdk.UnwrapSDKContext(goCtx) + // TODO: Process the query + _ = ctx + return &types.QuerySayHelloResponse{Name: fmt.Sprintf("Hello, %s!", req.Name)}, nil +} +``` + +This code defines a `SayHello` function that accepts a request of type +`QuerySayHelloRequest` and returns a value of type `QuerySayHelloResponse`. The +function first checks if the request is valid, and then processes the query by +returning the response message with the provided name as the value for the `%s` +placeholder. You can add additional logic to the function as needed, such as +retrieving data from the blockchain or performing complex operations, to handle +the query and return the appropriate response. + +## `CmdSayHello` command + +After implementing the query logic, you will need to make the query available to +clients so that they can call it and receive the response. This typically +involves adding the query to the blockchain's application programming interface +(API) and providing a command-line interface (CLI) command that allows users to +easily submit the query and receive the response. + +To provide a CLI command for the query, you can create the `query_say_hello.go` +file and implement a `CmdSayHello` command that calls the `SayHello` function +and prints the response to the console. 
+ +```go title="x/hello/client/cli/query_say_hello.go" +package cli + +import ( + "strconv" + + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/spf13/cobra" + + "hello/x/hello/types" +) + +var _ = strconv.Itoa(0) + +func CmdSayHello() *cobra.Command { + cmd := &cobra.Command{ + Use: "say-hello [name]", + Short: "Query say-hello", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) (err error) { + reqName := args[0] + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } + queryClient := types.NewQueryClient(clientCtx) + params := &types.QuerySayHelloRequest{ + Name: reqName, + } + res, err := queryClient.SayHello(cmd.Context(), params) + if err != nil { + return err + } + return clientCtx.PrintProto(res) + }, + } + flags.AddQueryFlagsToCmd(cmd) + return cmd +} +``` + +The code defines a `CmdSayHello` command. The command is defined using the +`cobra` library, which is a popular framework for building command-line +applications in Go. The command accepts a `name` as an argument and uses it to +create a `QuerySayHelloRequest` struct that is passed to the `SayHello` function +from the `types.QueryClient`. The `SayHello` function is used to send the +`say-hello` query to the blockchain, and the response is stored in the `res` +variable. + +The `QuerySayHelloRequest` struct is defined in the `query.proto` file, which is +a Protocol Buffer file that defines the request and response types for the +query. The `QuerySayHelloRequest` struct includes a `Name` field of type +`string`, which is used to provide the name to be included in the response +message. + +After the query has been sent and the response has been received, the code uses +the `clientCtx.PrintProto` function to print the response to the console. 
The +`clientCtx` variable is obtained using the `client.GetClientQueryContext` +function, which provides access to the client context, including the client's +configuration and connection information. The `PrintProto` function is used to +print the response using the Protocol Buffer format, which allows for efficient +serialization and deserialization of the data. + +The `flags.AddQueryFlagsToCmd` function is used to add query-related flags to +the command. This allows users to specify additional options when calling the +command, such as the node URL and other query parameters. These flags are used +to configure the query and provide the necessary information to the `SayHello` +function, allowing it to connect to the blockchain and send the query. + +To make the `CmdSayHello` command available to users, you will need to add it to +the chain's binary. This is typically done by modifying the +`x/hello/client/cli/query.go` file and adding the +`cmd.AddCommand(CmdSayHello())` statement. This adds the `CmdSayHello` command +to the list of available commands, allowing users to call it from the +command-line interface (CLI). + +```go title="x/hello/client/cli/query.go" +func GetQueryCmd(queryRoute string) *cobra.Command { + cmd := &cobra.Command{ + Use: types.ModuleName, + Short: fmt.Sprintf("Querying commands for the %s module", types.ModuleName), + DisableFlagParsing: true, + SuggestionsMinimumDistance: 2, + RunE: client.ValidateCmd, + } + cmd.AddCommand(CmdQueryParams()) + // highlight-next-line + cmd.AddCommand(CmdSayHello()) + return cmd +} +``` + +Once you have provided a CLI command, users will be able to call the `say-hello` +query and receive the appropriate response. 
+ +Save all the changes you made to the source code of your project and run the +following command to start a blockchain node: + +``` +ignite chain serve +``` + +Use the following command to submit the query and receive the response: + +``` +hellod q hello say-hello bob +``` + +This command will send a "say-hello" query to the blockchain with the name "bob" +and print the response of "Hello, bob!" to the console. You can modify the query +and response as needed to suit your specific requirements and provide the +desired functionality. + +Congratulations on completing the "Hello, World!" tutorial! In this tutorial, +you learned how to define a new query in a protocol buffer file, implement the +logic for the query in your code, and make the query available to clients +through the blockchain's API and CLI. By following the steps outlined in the +tutorial, you were able to create a functional query that can be used to +retrieve data from your blockchain or perform other operations as needed. + +Now that you have completed the tutorial, you can continue to build on your +knowledge of the Cosmos SDK and explore the many features and capabilities it +offers. You may want to try implementing more complex queries or experiment with +other features of the SDK to see what you can create. 
\ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/03-hello/_category_.json b/docs/versioned_docs/version-v0.26/02-guide/03-hello/_category_.json new file mode 100644 index 0000000..ab71abd --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/03-hello/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Hello, World!", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/04-blog/00-express.md b/docs/versioned_docs/version-v0.26/02-guide/04-blog/00-express.md new file mode 100644 index 0000000..0bb8bac --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/04-blog/00-express.md @@ -0,0 +1,309 @@ +--- +description: Learn module basics by writing and reading blog posts to your chain. +title: Express tutorial +--- + +# "Build a blog" in 5 minutes + +In this tutorial, we will create a blockchain with a module that allows us to +write and read data from the blockchain. This module will implement the ability +to create and read blog posts, similar to a blogging application. The end user +will be able to submit new blog posts and view a list of existing posts on the +blockchain. This tutorial will guide you through the process of creating and +using this module to interact with the blockchain. + +The goal of this tutorial is to provide step-by-step instructions for creating a +feedback loop that allows you to submit data to the blockchain and read that +data back from the blockchain. By the end of this tutorial, you will have +implemented a complete feedback loop and will be able to use it to interact with +the blockchain. + +First, create a new `blog` blockchain with Ignite CLI: + +``` +ignite scaffold chain blog +``` + +In order to create a blog application that uses a blockchain, we need to define +the requirements for our application. We want the application to store objects +of type `Post` on the blockchain. These objects should have two properties: a +`title` and a `body`. 
+ +In addition to storing posts on the blockchain, we also want to provide users +with the ability to perform CRUD (create, read, update, and delete) operations +on these posts. This will allow users to create new posts, read existing posts, +update the contents of existing posts, and delete posts that are no longer +needed. + +One of the features of the Ignite CLI is the ability to generate code that +implements basic CRUD functionality. This is accomplished through the use of +scaffolding commands, which can be used to quickly generate the necessary code +for creating, reading, updating, and deleting data in your application. + +The Ignite CLI is capable of generating code for data that is stored in +different types of data structures. This includes lists, which are collections +of data indexed by an incrementing integer, maps, which are collections indexed +by a custom key, and singles, which are single instances of data. By using these +different data structures, you can customize your application to fit your +specific needs. For example, if you are building a blog application, you may +want to use a list to store all posts, with each post indexed by an integer. +Alternatively, you could use a map to index each post by its unique title, or a +single to store a single post. The choice of data structure will depend on the +specific requirements of your application. + +In addition to the data structure you choose, the Ignite CLI also requires you +to provide the name of the type of data that it will generate code for, as well +as fields that describe the type of data. For example, if you are creating a +blog application, you may want to create a type called "Post" that has fields +for the "title" and "body" of the post. The Ignite CLI will use this information +to generate the necessary code for creating, reading, updating, and deleting +data of this type in your application. 
+ +Switch to the `blog` directory and run the `ignite scaffold list` command: + +``` +cd blog +ignite scaffold list post title body +``` + +Now that you have used the Ignite CLI to generate code for your application, +let's review what it has created. The Ignite CLI will have generated code for +the data structure and data type that you specified, as well as code for the +basic CRUD operations that are needed to manipulate this data. This code will +provide a solid foundation for your application, and you can customize it +further to fit your specific needs. By reviewing the code generated by the +ignite CLI, you can ensure that it meets your requirements and get a better +understanding of how to build your application using this tool. + +The Ignite CLI has generated several files and modifications in the +`proto/blog/blog` directory. These include: + +* `post.proto`: This is a protocol buffer file that defines the `Post` type, + with fields for the `title`, `body`, `id`, and `creator`. +* `tx.proto`: This file has been modified to include three RPCs (remote + procedure calls): `CreatePost`, `UpdatePost`, and `DeletePost`. Each of these + RPCs corresponds to a Cosmos SDK message that can be used to perform the + corresponding CRUD operation on a post. +* `query.proto`: This file has been modified to include two queries: `Post` and + `PostAll`. The `Post` query can be used to retrieve a single post by its ID, + while the `PostAll` query can be used to retrieve a paginated list of posts. +* `genesis.proto`: This file has been modified to include posts in the genesis + state of the module, which defines the initial state of the blockchain when it + is first started. + +The Ignite CLI has also generated several new files in the `x/blog/keeper` +directory that implement the CRUD-specific logic for your application. These +include: + +* `msg_server_post.go`: This file implements keeper methods for the + `CreatePost`, `UpdatePost`, and `DeletePost` messages. 
These methods are + called when a corresponding message is processed by the module, and they + handle the specific logic for each of the CRUD operations. +* `query_post.go`: This file implements the `Post` and `PostAll` queries, which + are used to retrieve individual posts by ID or a paginated list of posts, + respectively. +* `post.go`: This file implements the underlying functions that the keeper + methods depend on. These functions include appending (adding) posts to the + store, getting individual posts, getting the post count, and other operations + that are needed to manage the posts in the application. + +Overall, these files provide the necessary implementation for the CRUD +functionality of your blog application. They handle the specific logic for each +of the CRUD operations, as well as the underlying functions that these +operations depend on. + +Files were created and modified in the `x/blog/types` directory. + +* `messages_post.go`: This new file contains Cosmos SDK message constructors and + associated methods such as `Route()`, `Type()`, `GetSigners()`, + `GetSignBytes()`, and `ValidateBasic()`. +* `keys.go`: This file was modified to include key prefixes for storing blog + posts. By using key prefixes, we can ensure that the data for our blog posts + is kept separate from other types of data in the database, and that it can be + easily accessed when needed. +* `genesis.go`: This file was modified to define the initial (genesis) state of + the blog module, as well as the `Validate()` function for validating this + initial state. This is an important step in setting up our blockchain, as it + defines the initial data and ensures that it is valid according to the rules + of our application. +* `codec.go`: This file was modified to register our message types with the + encoder, allowing them to be properly serialized and deserialized when + transmitted over the network. 
+ +Additionally, `*.pb.go` files were generated from `*.proto` files, and they +contain type definitions for messages, RPCs, and queries used by our +application. These files are automatically generated from the `*.proto` files +using the Protocol Buffers (protobuf) tool, which allows us to define the +structure of our data in a language-agnostic way. + +The Ignite CLI has added functionality to the `x/blog/client/cli` directory by +creating and modifying several files. +* `tx_post.go`: This file was created to implement CLI commands for broadcasting + transactions containing messages for the blog module. These commands allow + users to easily send messages to the blockchain using the Ignite CLI. +* `query_post.go`: This file was created to implement CLI commands for querying + the blog module. These commands allow users to retrieve information from the + blockchain, such as a list of blog posts. +* `tx.go`: This file was modified to add the CLI commands for broadcasting + transactions to the chain's binary. +* `query.go`: This file was also modified to add the CLI commands for querying + the chain to the chain's binary. + +As you can see, the `ignite scaffold list` command has generated and modified a +number of source code files. These files define the types of messages, logic +that gets executed when a message is processed, and the wiring that connects +everything together. This includes the logic for creating, updating, and +deleting blog posts, as well as the queries needed to retrieve this information. + +To see the generated code in action, we will need to start the blockchain. We +can do this by using the `ignite chain serve` command, which will build, +initialize, and start the blockchain for us: + +``` +ignite chain serve +``` + +Once the blockchain is running, we can use the binary to interact with it and +see how the code handles creating, updating, and deleting blog posts. We can +also see how it processes and responds to queries. 
This will give us a better +understanding of how our application works and allow us to test its +functionality. + +While `ignite chain serve` is running in one terminal window, open another +terminal and use the chain's binary to create a new blog post on the blockchain: + +``` +blogd tx blog create-post 'Hello, World!' 'This is a blog post' --from alice +``` + +When using the `--from` flag to specify the account that will be used to sign a +transaction, it's important to ensure that the specified account is available +for use. In a development environment, you can see a list of available accounts +in the output of the `ignite chain serve` command, or in the `config.yml` file. + +It's also worth noting that the `--from` flag is required when broadcasting +transactions. This flag specifies the account that will be used to sign the +transaction, which is a crucial step in the transaction process. Without a valid +signature, the transaction will not be accepted by the blockchain. Therefore, +it's important to ensure that the account specified with the `--from` flag is +available. + +After the transaction has been broadcasted successfully, you can query the +blockchain for the list of blog posts. To do this, you can use the `blogd q blog +list-post` command, which will return a paginated list of all the blog posts +that have been added to the blockchain. + +``` +blogd q blog list-post + +Post: +- body: This is a blog post + creator: cosmos1xz770h6g55rrj8vc9ll9krv6mr964tzhqmsu2v + id: "0" + title: Hello, World! +pagination: + next_key: null + total: "0" +``` + +By querying the blockchain, you can verify that your transaction was processed +successfully and that the blog post has been added to the chain. Additionally, +you can use other query commands to retrieve information about other data on the +blockchain, such as accounts, balances, and governance proposals. + +Let's modify the blog post that we just created by changing the `body` content. 
+To do this, we can use the `blogd tx blog update-post` command, which allows us +to update an existing blog post on the blockchain. When running this command, we +will need to specify the ID of the blog post that we want to modify, as well as +the new body content that we want to use. After running this command, the +transaction will be broadcasted to the blockchain and the blog post will be +updated with the new body content. + +``` +blogd tx blog update-post 0 'Hello, World!' 'This is a blog post from Alice' --from alice +``` + +Now that we have updated the blog post with new content, let's query the +blockchain again to see the changes. To do this, we can use the `blogd q blog +list-post` command, which will return a list of all the blog posts on the +blockchain. By running this command again, we can see the updated blog post in +the list, and we can verify that the changes we made have been successfully +applied to the blockchain. + + +``` +blogd q blog list-post + +Post: +- body: This is a blog post from Alice + creator: cosmos1xz770h6g55rrj8vc9ll9krv6mr964tzhqmsu2v + id: "0" + title: Hello, World! +pagination: + next_key: null + total: "0" +``` + +Let's try to delete one of the blog posts using Bob's account. However, since +the blog post was created using Alice's account, we can expect the blockchain to +check whether the user is authorized to delete the post. In this case, since Bob +is not the author of the post, his transaction should be rejected by the +blockchain. + +To delete a blog post, we can use the `blogd tx blog delete-post` command, which +allows us to delete an existing blog post on the blockchain. When running this +command, we will need to specify the ID of the blog post that we want to delete, +as well as the account that we want to use for signing the transaction. In this +case, we will use Bob's account to sign the transaction. + +After running this command, the transaction will be broadcasted to the +blockchain. 
However, since Bob is not the author of the post, the blockchain +should reject his transaction and the blog post will not be deleted. This is an +example of how the blockchain can enforce rules and permissions, and it shows +that only authorized users are able to make changes to the blockchain. + +``` +blogd tx blog delete-post 0 --from bob + +raw_log: 'failed to execute message; message index: 0: incorrect owner: unauthorized' +``` + +Now, let's try to delete the blog post again, but this time using Alice's +account. Since Alice is the author of the blog post, she should be authorized to +delete it. + +``` +blogd tx blog delete-post 0 --from alice +``` + +To check whether the blog post has been successfully deleted by Alice, we can +query the blockchain for a list of posts again. + +``` +blogd q blog list-post + +Post: [] +pagination: + next_key: null + total: "0" +``` + +Congratulations on successfully completing the tutorial on building a blog with +Ignite CLI! By following the instructions, you have learned how to create a new +blockchain, generate code for a "post" type with CRUD functionality, start a +local blockchain, and test out the functionality of your blog. + +Now that you have a working example of a simple application, you can experiment +with the code generated by Ignite and see how changes affect the behavior of the +application. This is a valuable skill to have, as it will allow you to customize +your application to fit your specific needs and improve the functionality of +your application. You can try making changes to the data structure or data type, +or add additional fields or functionality to the code. + +In the following tutorials, we will take a closer look at the code that Ignite +generates in order to better understand how to build blockchains. By writing +some of the code ourselves, we can gain a deeper understanding of how Ignite +works and how it can be used to create applications on a blockchain. 
This will +help us learn more about the capabilities of Ignite CLI and how it can be used +to build robust and powerful applications. Keep an eye out for these tutorials +and get ready to dive deeper into the world of blockchains with Ignite! \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/04-blog/01-intro.md b/docs/versioned_docs/version-v0.26/02-guide/04-blog/01-intro.md new file mode 100644 index 0000000..16cbc38 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/04-blog/01-intro.md @@ -0,0 +1,17 @@ +--- +title: In-depth tutorial +--- + +# In-depth blog tutorial + +In this tutorial, you will learn how to create a blog application as a Cosmos +SDK blockchain using the Ignite CLI by building it from scratch. This means that +you will be responsible for setting up the necessary types, messages, and +queries and writing the logic to create, read, update, and delete blog posts on +the blockchain. + +The functionality of the application you will be building will be identical to +what is generated by the Ignite CLI command `ignite scaffold list post title +body`, but you will be doing it manually in order to gain a deeper understanding +of the process. Through this tutorial, you will learn how to build a blog +application on a Cosmos SDK blockchain using the Ignite CLI in a hands-on way. diff --git a/docs/versioned_docs/version-v0.26/02-guide/04-blog/02-scaffolding.md b/docs/versioned_docs/version-v0.26/02-guide/04-blog/02-scaffolding.md new file mode 100644 index 0000000..f597a47 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/04-blog/02-scaffolding.md @@ -0,0 +1,124 @@ +# Creating the structure + +Create a new blockchain with the following command: + +``` +ignite scaffold chain blog +``` + +This will create a new directory called `blog/` containing the necessary files +and directories for your [blockchain +application](https://docs.cosmos.network/main/basics/app-anatomy). 
Next, +navigate to the newly created directory by running: + +``` +cd blog +``` + +Since your app will be storing and operating with blog posts, you will need to +create a `Post` type to represent these posts. You can do this using the +following Ignite CLI command: + +``` +ignite scaffold type post title body creator id:uint +``` + +This will create a `Post` type with four fields: `title`, `body`, `creator`, all +of type `string`, and `id` of type `uint`. + +It is a good practice to commit your changes to a version control system like +Git after using Ignite's code scaffolding commands. This will allow you to +differentiate between changes made automatically by Ignite and changes made +manually by developers, and also allow you to roll back changes if necessary. +You can commit your changes to Git with the following commands: + +``` +git add . +git commit -am "ignite scaffold type post title body" +``` + +### Creating messages + +Next, you will be implementing CRUD (create, read, update, and delete) +operations for your blog posts. Since create, update, and delete operations +change the state of the application, they are considered write operations. In +Cosmos SDK blockchains, state is changed by broadcasting +[transactions](https://docs.cosmos.network/main/basics/tx-lifecycle) that +contain messages that trigger state transitions. To create the logic for +broadcasting and handling transactions with a "create post" message, you can use +the following Ignite CLI command: + +``` +ignite scaffold message create-post title body --response id:uint +``` + +This will create a "create post" message with two fields: `title` and `body`, +both of which are of type `string`. Posts will be stored in the key-value store +in a list-like data structure, where they are indexed by an incrementing integer +ID. When a new post is created, it will be assigned an ID integer. The +`--response` flag is used to return `id` of type `uint` as a response to the +"create post" message. 
+ +To update a specific blog post in your application, you will need to create a +message called "update post" that accepts three arguments: `title`, `body`, and +`id`. The `id` argument of type `uint` is necessary to specify which blog post +you want to update. You can create this message using the Ignite CLI command: + +``` +ignite scaffold message update-post title body id:uint +``` + +To delete a specific blog post in your application, you will need to create a +message called "delete post" that accepts only the `id` of the post to be +deleted. You can create this message using the Ignite CLI command: + +``` +ignite scaffold message delete-post id:uint +``` + +### Creating queries + +[Queries](https://docs.cosmos.network/main/basics/query-lifecycle) allow users +to retrieve information from the blockchain state. In your application, you will +have two queries: "show post" and "list post". The "show post" query will allow +users to retrieve a specific post by its ID, while the "list post" query will +return a paginated list of all stored posts. + +To create the "show post" query, you can use the following Ignite CLI command: + +``` +ignite scaffold query show-post id:uint --response post:Post +``` + +This query will accept `id` of type `uint` as an argument, and will return a +`post` of type `Post` as a response. + +To create the "list post" query, you can use the following Ignite CLI command: + +``` +ignite scaffold query list-post --response post:Post --paginated +``` + +This query will return a post of type Post in a paginated output. The +`--paginated` flag indicates that the query should return its results in a +paginated format, allowing users to retrieve a specific page of results at a +time. + +## Summary + +Congratulations on completing the initial setup of your blockchain application! 
+You have successfully created a "post" data type and generated the necessary +code for handling three types of messages (create, update, and delete) and two +types of queries (list and show posts). + +However, at this point, the messages you have created will not trigger any state +transitions, and the queries you have created will not return any results. This +is because Ignite only generates the boilerplate code for these features, and it +is up to you to implement the necessary logic to make them functional. + +In the next chapters of the tutorial, you will learn how to implement the +message handling and query logic to complete your blockchain application. This +will involve writing code to process the messages and queries you have created +and use them to modify or retrieve data from the blockchain's state. By the end +of this process, you will have a fully functional blog application on a Cosmos +SDK blockchain. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/04-blog/03-create.md b/docs/versioned_docs/version-v0.26/02-guide/04-blog/03-create.md new file mode 100644 index 0000000..f83372b --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/04-blog/03-create.md @@ -0,0 +1,319 @@ +# Creating posts + +In this chapter, we will be focusing on the process of handling a "create post" +message. This involves the use of a special type of function known as a keeper +method. [Keeper](https://docs.cosmos.network/main/building-modules/keeper) +methods are responsible for interacting with the blockchain and modifying its +state based on the instructions provided in a message. + +When a "create post" message is received, the corresponding keeper method will +be called and passed the message as an argument. The keeper method can then use +the various getter and setter functions provided by the store object to retrieve +and modify the current state of the blockchain. 
This allows the keeper method to +effectively process the "create post" message and make the necessary updates to +the blockchain. + +In order to keep the code for accessing and modifying the store object clean and +separate from the logic implemented in the keeper methods, we will create a new +file called `post.go`. This file will contain functions that are specifically +designed to handle operations related to creating and managing posts within the +blockchain. + +## Appending posts to the store + +```go title="x/blog/keeper/post.go" +package keeper + +import ( + "encoding/binary" + + "blog/x/blog/types" + + "github.com/cosmos/cosmos-sdk/store/prefix" + sdk "github.com/cosmos/cosmos-sdk/types" +) + +func (k Keeper) AppendPost(ctx sdk.Context, post types.Post) uint64 { + count := k.GetPostCount(ctx) + post.Id = count + store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.PostKey)) + appendedValue := k.cdc.MustMarshal(&post) + store.Set(GetPostIDBytes(post.Id), appendedValue) + k.SetPostCount(ctx, count+1) + return count +} +``` + +This code defines a function called `AppendPost` which belongs to a `Keeper` +type. The `Keeper` type is responsible for interacting with the blockchain and +modifying its state in response to various messages. + +The `AppendPost` function takes in two arguments: a `Context` object and a +`Post` object. The [`Context`](https://docs.cosmos.network/main/core/context) +object is a standard parameter in many functions in the Cosmos SDK and is used +to provide contextual information about the current state of the blockchain, +such as the current block height. The `Post` object represents a post that will +be added to the blockchain. + +The function begins by retrieving the current post count using the +`GetPostCount` method. You will implement this method in the next step as it has +not been implemented yet. This method is called on the `Keeper` object and takes +in a `Context` object as an argument. 
It returns the current number of posts +that have been added to the blockchain. + +Next, the function sets the ID of the new post to be the current post count, so +that each post has a unique identifier. It does this by assigning the value of +count to the `Id` field of the `Post` object. + +The function then creates a new +[store](https://docs.cosmos.network/main/core/store) object using the +`prefix.NewStore` function. The `prefix.NewStore` function takes in two +arguments: the `KVStore` associated with the provided context and a key prefix +for the `Post` objects. The `KVStore` is a key-value store that is used to +persist data on the blockchain, and the key prefix is used to differentiate the +`Post` objects from other types of objects that may be stored in the same +`KVStore`. + +The function then serializes the `Post` object using the `cdc.MustMarshal` +function and stores it in the blockchain using the `Set` method of the store +object. The `cdc.MustMarshal` function is part of the Cosmos SDK's +[encoding/decoding](https://docs.cosmos.network/main/core/encoding) library and +is used to convert the `Post` object into a byte slice that can be stored in the +`KVStore`. The `Set` method is called on the store object and takes in two +arguments: a key and a value. In this case, the key is a byte slice generated by +the `GetPostIDBytes` function and the value is the serialized `Post` object. You +will implement this method in the next step as it has not been implemented yet. + +Finally, the function increments the post count by one and updates the +blockchain state using the `SetPostCount` method. You will implement this method +in the next step as it has not been implemented yet. This method is called on +the Keeper object and takes in a `Context` object and a new post count as +arguments. It updates the current post count in the blockchain to be the new +post count provided. 
+ +The function then returns the ID of the newly created post, which is the current +post count before it was incremented. This allows the caller of the function to +know the ID of the post that was just added to the blockchain. + +To complete the implementation of `AppendPost`, the following tasks need to be +performed: + +* Define `PostKey`, which will be used to store and retrieve posts from the + database. +* Implement `GetPostCount`, which will retrieve the current number of posts + stored in the database. +* Implement `GetPostIDBytes`, which will convert a post ID to a byte array. +* Implement `SetPostCount`, which will update the post count stored in the + database. + +### Post key prefix + +In the file `keys.go`, let's define the `PostKey` prefix as follows: + +```go title="x/blog/types/keys.go" +const ( + PostKey = "Post/value/" +) +``` + +This prefix will be used to uniquely identify a post within the system. It will +be used as the beginning of the key for each post, followed by the ID of the +post to create a unique key for each post. + +### Getting the post count + +In the file `post.go`, let's define the `GetPostCount` function as follows: + +```go title="x/blog/keeper/post.go" +func (k Keeper) GetPostCount(ctx sdk.Context) uint64 { + store := prefix.NewStore(ctx.KVStore(k.storeKey), []byte{}) + byteKey := types.KeyPrefix(types.PostCountKey) + bz := store.Get(byteKey) + if bz == nil { + return 0 + } + return binary.BigEndian.Uint64(bz) +} +``` + +This code defines a function named `GetPostCount` that belongs to the `Keeper` +struct. The function takes in a single argument, a context object `ctx` of type +`sdk.Context`, and returns a value of type `uint64`. + +The function begins by creating a new store using the key-value store in the +context and an empty byte slice as the prefix. It then defines a byte slice +`byteKey` using the `KeyPrefix` function from the `types` package, which takes +in the `PostCountKey`. 
You will define `PostCountKey` in the next step. + +The function then retrieves the value at the key `byteKey` in the store using +the `Get` method and stores it in a variable `bz`. + +Next, the function checks if the value at `byteKey` is `nil` using an if +statement. If it is `nil`, meaning that the key does not exist in the store, the +function returns 0. This indicates that there are no elements or posts +associated with the key. + +If the value at `byteKey` is not nil, the function uses the `binary` package's +`BigEndian` type to parse the bytes in `bz` and returns the resulting `uint64` +value. The `BigEndian` type is used to interpret the bytes in `bz` as a +big-endian encoded unsigned 64-bit integer. The `Uint64` method converts the +bytes to a `uint64` value and returns it. + +`GetPostCount` function is used to retrieve the total number of posts stored in +the key-value store, represented as a `uint64` value. + +In the file `keys.go`, let's define the `PostCountKey` as follows: + +```go title="x/blog/types/keys.go" +const ( + PostCountKey = "Post/count/" +) +``` + +This key will be used to keep track of the ID of the latest post added to the +store. + +### Converting post ID to bytes + +Now, let's implement `GetPostIDBytes`, which will convert a post ID to a byte +array. + +```go title="x/blog/keeper/post.go" +func GetPostIDBytes(id uint64) []byte { + bz := make([]byte, 8) + binary.BigEndian.PutUint64(bz, id) + return bz +} +``` + +`GetPostIDBytes` takes in a value `id` of type `uint64` and returns a value of +type `[]byte`. + +The function starts by creating a new byte slice `bz` with a length of 8 using +the `make` built-in function. It then uses the `binary` package's `BigEndian` +type to encode the value of `id` as a big-endian encoded unsigned integer and +store the result in `bz` using the `PutUint64` method. Finally, the function +returns the resulting byte slice `bz`. 
+ +This function can be used to convert a post ID, represented as a `uint64`, to a +byte slice that can be used as a key in a key-value store. The +`binary.BigEndian.PutUint64` function encodes the `uint64` value of `id` as a +big-endian encoded unsigned integer and stores the resulting bytes in the +`[]byte` slice `bz`. The resulting byte slice can then be used as a key in the +store. + +### Updating the post count + +Implement `SetPostCount` in `post.go`, which will update the post count stored +in the database. + +```go title="x/blog/keeper/post.go" +func (k Keeper) SetPostCount(ctx sdk.Context, count uint64) { + store := prefix.NewStore(ctx.KVStore(k.storeKey), []byte{}) + byteKey := types.KeyPrefix(types.PostCountKey) + bz := make([]byte, 8) + binary.BigEndian.PutUint64(bz, count) + store.Set(byteKey, bz) +} +``` + +This code defines a function `SetPostCount` in the `Keeper` struct. The function +takes in a context `ctx` of type `sdk.Context` and a value `count` of type +`uint64`, and does not return a value. + +The function first creates a new store by calling the `NewStore` function from +the prefix package and passing in the key-value store from the context and an +empty byte slice as the prefix. It stores the resulting store in a variable +named `store`. + +Next, the function defines a byte slice `byteKey` using the `KeyPrefix` function +from the `types` package and passing in the `PostCountKey`. The `KeyPrefix` +function returns a byte slice with the given key as a prefix. + +The function then creates a new byte slice `bz` with a length of 8 using the +`make` built-in function. It then uses the `binary` package's `BigEndian` type +to encode the value of count as a big-endian encoded unsigned integer and store +the result in `bz` using the `PutUint64` method. + +Finally, the function calls the `Set` method on the `store` variable, passing in +`byteKey` and `bz` as arguments. This sets the value at the key `byteKey` in the +store to the value `bz`. 
+ +This function can be used to update the count of posts stored in the database. +It does this by converting the `uint64` value of count to a byte slice using the +`binary.BigEndian.PutUint64` function, and then storing the resulting byte slice +at the key `byteKey` in the store using the `Set` method. + +Now that you have implemented the code for creating blog posts, you can proceed +to implement the keeper method that is invoked when the "create post" message is +processed. + +## Handling the "create post" message + +```go title="x/blog/keeper/msg_server_create_post.go" +package keeper + +import ( + "context" + + "blog/x/blog/types" + + sdk "github.com/cosmos/cosmos-sdk/types" +) + +func (k msgServer) CreatePost(goCtx context.Context, msg *types.MsgCreatePost) (*types.MsgCreatePostResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + var post = types.Post{ + Creator: msg.Creator, + Title: msg.Title, + Body: msg.Body, + } + id := k.AppendPost( + ctx, + post, + ) + return &types.MsgCreatePostResponse{ + Id: id, + }, nil +} +``` + +The `CreatePost` function is a message handler for the `MsgCreatePost` message +type. It is responsible for creating a new post on the blockchain based on the +information provided in the `MsgCreatePost` message. + +The function first retrieves the Cosmos SDK context from the Go context using +the `sdk.UnwrapSDKContext` function. It then creates a new `Post` object using +the `Creator`, `Title`, and `Body` fields from the MsgCreatePost message. + +Next, the function calls the `AppendPost` method on the `msgServer` object +(which is of the Keeper type) and passes in the Cosmos SDK context and the new +`Post` object as arguments. The `AppendPost` method is responsible for adding +the new post to the blockchain and returning the ID of the new post. + +Finally, the function returns a `MsgCreatePostResponse` object that contains the +ID of the new post. It also returns a nil error, indicating that the operation +was successful. 
+ +## Summary + +Great job! You have successfully implemented the logic for writing blog posts to +the blockchain store and the keeper method that will be called when a "create +post" message is processed. + +The `AppendPost` keeper method retrieves the current post count, sets the ID of +the new post to be the current post count, serializes the post object, and +stores it in the blockchain using the `Set` method of the `store` object. The +key for the post in the store is a byte slice generated by the `GetPostIDBytes` +function and the value is the serialized post object. The function then +increments the post count by one and updates the blockchain state using the +`SetPostCount` method. + +The `CreatePost` handler method receives a `MsgCreatePost` message containing +the data for the new post, creates a new `Post` object using this data, and +passes it to the `AppendPost` keeper method to be added to the blockchain. It +then returns a `MsgCreatePostResponse` object containing the ID of the newly +created post. + +By implementing these methods, you have successfully implemented the necessary +logic for handling "create post" messages and adding posts to the blockchain. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/04-blog/04-update.md b/docs/versioned_docs/version-v0.26/02-guide/04-blog/04-update.md new file mode 100644 index 0000000..82048bd --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/04-blog/04-update.md @@ -0,0 +1,127 @@ +# Updating posts + +In this chapter, we will be focusing on the process of handling an "update post" +message. + +To update a post, you need to retrieve the specific post from the store using +the "Get" operation, modify the values, and then write the updated post back to +the store using the "Set" operation. + +Let's first implement a getter and a setter logic. 
+ +## Getting posts + +Implement the `GetPost` keeper method in `post.go`: + +```go title="x/blog/keeper/post.go" +func (k Keeper) GetPost(ctx sdk.Context, id uint64) (val types.Post, found bool) { + store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.PostKey)) + b := store.Get(GetPostIDBytes(id)) + if b == nil { + return val, false + } + k.cdc.MustUnmarshal(b, &val) + return val, true +} +``` + +`GetPost` takes in two arguments: a context `ctx` and an `id` of type `uint64` +representing the ID of the post to be retrieved. It returns a `types.Post` +struct containing the values of the post, and a boolean value indicating whether +the post was found in the database. + +The function first creates a `store` using the `prefix.NewStore` method, passing +in the key-value store from the context and the `types.KeyPrefix` function +applied to the `types.PostKey` constant as arguments. It then attempts to +retrieve the post from the store using the `store.Get` method, passing in the ID +of the post as a byte slice. If the post is not found in the store, it returns +an empty `types.Post` struct and a boolean value of false. + +If the post is found in the store, the function unmarshals the retrieved byte +slice into a `types.Post` struct using the `cdc.MustUnmarshal` method, passing +in a pointer to the val variable as an argument. It then returns the val struct +and a boolean value of true to indicate that the post was found in the database. + +## Setting posts + +Implement the `SetPost` keeper method in `post.go`: + +```go title="x/blog/keeper/post.go" +func (k Keeper) SetPost(ctx sdk.Context, post types.Post) { + store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.PostKey)) + b := k.cdc.MustMarshal(&post) + store.Set(GetPostIDBytes(post.Id), b) +} +``` + +`SetPost` takes in two arguments: a context `ctx` and a `types.Post` struct +containing the updated values for the post. The function does not return +anything. 
+ +The function first creates a store using the `prefix.NewStore` method, passing +in the key-value store from the context and the `types.KeyPrefix` function +applied to the `types.PostKey` constant as arguments. It then marshals the +updated post struct into a byte slice using the `cdc.MustMarshal` method, +passing in a pointer to the post struct as an argument. Finally, it updates the +post in the store using the `store.Set` method, passing in the ID of the post as +a byte slice and the marshaled post struct as arguments. + + +## Update posts + +```go title="x/blog/keeper/msg_server_update_post.go" +package keeper + +import ( + "context" + "fmt" + + "blog/x/blog/types" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + +func (k msgServer) UpdatePost(goCtx context.Context, msg *types.MsgUpdatePost) (*types.MsgUpdatePostResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + var post = types.Post{ + Creator: msg.Creator, + Id: msg.Id, + Title: msg.Title, + Body: msg.Body, + } + val, found := k.GetPost(ctx, msg.Id) + if !found { + return nil, sdkerrors.Wrap(sdkerrors.ErrKeyNotFound, fmt.Sprintf("key %d doesn't exist", msg.Id)) + } + if msg.Creator != val.Creator { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "incorrect owner") + } + k.SetPost(ctx, post) + return &types.MsgUpdatePostResponse{}, nil +} +``` + +`UpdatePost` takes in a context and a message `MsgUpdatePost` as input, and +returns a response `MsgUpdatePostResponse` and an `error`. The function first +retrieves the current values of the post from the database using the provided +`msg.Id`, and checks if the post exists and if the `msg.Creator` is the same as +the current owner of the post. If either of these checks fail, it returns an +error. If both checks pass, it updates the post in the database with the new +values provided in `msg`, and returns a response without an error. + +## Summary + +Well done! 
You have successfully implemented a number of important methods for +managing posts within a store. + +The `GetPost` method allows you to retrieve a specific post from the store based +on its unique identification number, or post ID. This can be useful for +displaying a specific post to a user, or for updating it. + +The `SetPost` method enables you to update an existing post in the store. This +can be useful for correcting mistakes or updating the content of a post as new +information becomes available. + +Finally, you implemented the `UpdatePost` method, which is called whenever the +blockchain processes a message requesting an update to a post. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/04-blog/05-delete.md b/docs/versioned_docs/version-v0.26/02-guide/04-blog/05-delete.md new file mode 100644 index 0000000..86c91a8 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/04-blog/05-delete.md @@ -0,0 +1,74 @@ +# Deleting posts + +In this chapter, we will be focusing on the process of handling a "delete post" +message. + +## Removing posts + +```go title="x/blog/keeper/post.go" +func (k Keeper) RemovePost(ctx sdk.Context, id uint64) { + store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.PostKey)) + store.Delete(GetPostIDBytes(id)) +} +``` + +`RemovePost` function takes in two arguments: a context object `ctx` and an +unsigned integer `id`. The function removes a post from a key-value store by +deleting the key-value pair associated with the given `id`. The key-value store +is accessed using the `store` variable, which is created by using the `prefix` +package to create a new store using the context's key-value store and a prefix +based on the `PostKey` constant. The `Delete` method is then called on the +`store` object, using the `GetPostIDBytes` function to convert the `id` to a +byte slice as the key to delete. 
+ +## Deleting posts + +```go title="x/blog/keeper/msg_server_delete_post.go" +package keeper + +import ( + "context" + "fmt" + + "blog/x/blog/types" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + +func (k msgServer) DeletePost(goCtx context.Context, msg *types.MsgDeletePost) (*types.MsgDeletePostResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + val, found := k.GetPost(ctx, msg.Id) + if !found { + return nil, sdkerrors.Wrap(sdkerrors.ErrKeyNotFound, fmt.Sprintf("key %d doesn't exist", msg.Id)) + } + if msg.Creator != val.Creator { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "incorrect owner") + } + k.RemovePost(ctx, msg.Id) + return &types.MsgDeletePostResponse{}, nil +} +``` + +`DeletePost` takes in two arguments: a context `goCtx` of type `context.Context` +and a pointer to a message of type `*types.MsgDeletePost`. The function returns +a pointer to a message of type `*types.MsgDeletePostResponse` and an `error`. + +Inside the function, the context is unwrapped using the `sdk.UnwrapSDKContext` +function and the value of the post with the ID specified in the message is +retrieved using the `GetPost` function. If the post is not found, an error is +returned using the `sdkerrors.Wrap` function. If the creator of the message does +not match the creator of the post, another error is returned. If both of these +checks pass, the `RemovePost` function is called with the context and the ID of +the post to delete the post. Finally, the function returns a response message +with no data and a `nil` error. + +In short, `DeletePost` handles a request to delete a post, ensuring that the +requester is the creator of the post before deleting it. + +## Summary + +Congratulations on completing the implementation of the `RemovePost` and +`DeletePost` methods in the keeper package! These methods provide functionality +for removing a post from a store and handling a request to delete a post, +respectively. 
\ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/04-blog/06-show.md b/docs/versioned_docs/version-v0.26/02-guide/04-blog/06-show.md new file mode 100644 index 0000000..63ea056 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/04-blog/06-show.md @@ -0,0 +1,81 @@ +# Show a post + +In this chapter, you will implement a feature in your blogging application that +enables users to retrieve individual blog posts by their unique ID. This ID is +assigned to each blog post when it is created and stored on the blockchain. By +adding this querying functionality, users will be able to easily retrieve +specific blog posts by specifying their ID. + +## Show post + +Let's implement the `ShowPost` keeper method that will be called when a user +makes a query to the blockchain application, specifying the ID of the desired +post. + +```go title="x/blog/keeper/query_show_post.go" +package keeper + +import ( + "context" + + "blog/x/blog/types" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func (k Keeper) ShowPost(goCtx context.Context, req *types.QueryShowPostRequest) (*types.QueryShowPostResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + ctx := sdk.UnwrapSDKContext(goCtx) + post, found := k.GetPost(ctx, req.Id) + if !found { + return nil, sdkerrors.ErrKeyNotFound + } + + return &types.QueryShowPostResponse{Post: post}, nil +} +``` + +`ShowPost` is a function for retrieving a single post object from the +blockchain's state. It takes in two arguments: a `context.Context` object called +`goCtx` and a pointer to a `types.QueryShowPostRequest` object called `req`. It +returns a pointer to a `types.QueryShowPostResponse` object and an `error`. + +The function first checks if the `req` argument is `nil`. 
If it is, it returns +an `error` with the code `InvalidArgument` and the message "invalid request" +using the `status.Error` function from the `google.golang.org/grpc/status` +package. + +If the `req` argument is not `nil`, the function unwraps the `sdk.Context` +object from the `context.Context` object using the `sdk.UnwrapSDKContext` +function. It then retrieves a post object with the specified `Id` from the +blockchain's state using the `GetPost` function, and checks if the post was +found by checking the value of the `found` boolean variable. If the post was not +found, it returns an error with the type `sdkerrors.ErrKeyNotFound`. + +If the post was found, the function creates a new `types.QueryShowPostResponse` +object with the retrieved post object as a field, and returns a pointer to this +object and a `nil` error. + +## Modify `QueryShowPostResponse` + +Include the option `[(gogoproto.nullable) = false]` in the `post` field in the +`QueryShowPostResponse` message to generate the field without a pointer. + +```proto title="proto/blog/blog/query.proto" +message QueryShowPostResponse { + // highlight-next-line + Post post = 1 [(gogoproto.nullable) = false]; +} +``` + +Run the command to generate Go files from proto: + +``` +ignite generate proto-go +``` \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/04-blog/07-list.md b/docs/versioned_docs/version-v0.26/02-guide/04-blog/07-list.md new file mode 100644 index 0000000..a41e56d --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/04-blog/07-list.md @@ -0,0 +1,97 @@ +# List posts + +In this chapter, you will develop a feature that enables users to retrieve all +of the blog posts stored on your blockchain application. The feature will allow +users to perform a query and receive a paginated response, which means that the +output will be divided into smaller chunks or "pages" of data. 
This will allow +users to more easily navigate and browse through the list of posts, as they will +be able to view a specific number of posts at a time rather than having to +scroll through a potentially lengthy list all at once. + +## List posts + +Let's implement the `ListPost` keeper method that will be called when a user +makes a query to the blockchain application, requesting a paginated list of all +the posts stored on chain. + +```go title="x/blog/keeper/query_list_post.go" +package keeper + +import ( + "context" + + "blog/x/blog/types" + + "github.com/cosmos/cosmos-sdk/store/prefix" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/query" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func (k Keeper) ListPost(goCtx context.Context, req *types.QueryListPostRequest) (*types.QueryListPostResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + var posts []types.Post + ctx := sdk.UnwrapSDKContext(goCtx) + + store := ctx.KVStore(k.storeKey) + postStore := prefix.NewStore(store, types.KeyPrefix(types.PostKey)) + + pageRes, err := query.Paginate(postStore, req.Pagination, func(key []byte, value []byte) error { + var post types.Post + if err := k.cdc.Unmarshal(value, &post); err != nil { + return err + } + + posts = append(posts, post) + return nil + }) + + if err != nil { + return nil, status.Error(codes.Internal, err.Error()) + } + + return &types.QueryListPostResponse{Post: posts, Pagination: pageRes}, nil +} +``` + +`ListPost` takes in two arguments: a context object and a request object of type +`QueryListPostRequest`. It returns a response object of type +`QueryListPostResponse` and an error. + +The function first checks if the request object is `nil` and returns an error +with a `InvalidArgument` code if it is. It then initializes an empty slice of +`Post` objects and unwraps the context object. 
+ +It retrieves a key-value store from the context using the `storeKey` field of +the keeper struct and creates a new store using a prefix of the `PostKey`. It +then calls the `Paginate` function from the `query` package on the store and the +pagination information in the request object. The function passed as an argument +to Paginate iterates over the key-value pairs in the store and unmarshals the +values into `Post` objects, which are then appended to the `posts` slice. + +If an error occurs during pagination, the function returns an `Internal error` +with the error message. Otherwise, it returns a `QueryListPostResponse` object +with the list of posts and pagination information. + +## Modify `QueryListPostResponse` + +Add a `repeated` keyword to return a list of posts and include the option +`[(gogoproto.nullable) = false]` to generate the field without a pointer. + +```proto title="proto/blog/blog/query.proto" +message QueryListPostResponse { + // highlight-next-line + repeated Post post = 1 [(gogoproto.nullable) = false]; + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} +``` + +Run the command to generate Go files from proto: + +``` +ignite generate proto-go +``` \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/04-blog/08-play.md b/docs/versioned_docs/version-v0.26/02-guide/04-blog/08-play.md new file mode 100644 index 0000000..1d90d82 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/04-blog/08-play.md @@ -0,0 +1,97 @@ +# Play + +## Create a blog post by Alice + +``` +blogd tx blog create-post hello world --from alice +``` + +## Show a blog post + +``` +blogd q blog show-post 0 +``` + +```yml +post: + body: world + creator: cosmos1x33ummgkjdd6h2frlugt3tft7vnc0nxyfxnx9h + id: "0" + title: hello +``` + +## Create a blog post by Bob + +``` +blogd tx blog create-post foo bar --from bob +``` + +## List all blog posts with pagination + +``` +blogd q blog list-post +``` + +```yml +pagination: + next_key: 
null + total: "2" +post: +- body: world + creator: cosmos1x33ummgkjdd6h2frlugt3tft7vnc0nxyfxnx9h + id: "0" + title: hello +- body: bar + creator: cosmos1ysl9ws3fdamrrj4fs9ytzrrzw6ul3veddk7gz3 + id: "1" + title: foo +``` + +## Update a blog post + +``` +blogd tx blog update-post hello cosmos 0 --from alice +``` + +``` +blogd q blog show-post 0 +``` + +```yml +post: + body: cosmos + creator: cosmos1x33ummgkjdd6h2frlugt3tft7vnc0nxyfxnx9h + id: "0" + title: hello +``` + +## Delete a blog post + +``` +blogd tx blog delete-post 0 --from alice +``` + +``` +blogd q blog list-post +``` + +```yml +pagination: + next_key: null + total: "1" +post: +- body: bar + creator: cosmos1ysl9ws3fdamrrj4fs9ytzrrzw6ul3veddk7gz3 + id: "1" + title: foo +``` + +## Delete a blog post unsuccessfully + +``` +blogd tx blog delete-post 1 --from alice +``` + +```yml +raw_log: 'failed to execute message; message index: 0: incorrect owner: unauthorized' +``` \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/04-blog/09-summary.md b/docs/versioned_docs/version-v0.26/02-guide/04-blog/09-summary.md new file mode 100644 index 0000000..1bcd996 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/04-blog/09-summary.md @@ -0,0 +1,22 @@ +# Summary + +Congratulations on completing the Blog tutorial and building your first +functional application-specific blockchain using Ignite and Cosmos SDK! This is +a significant accomplishment, and you should be proud of the hard work and +dedication you put into it. + +One of the great things about using Ignite is that it allows you to quickly +generate most of the code for your app with just a few commands. This not only +saves you time, but also provides a solid structure for you to build upon as you +develop your app further. In this tutorial, you were able to create code for +handling four types of messages and two types of queries, which are important +building blocks for any blockchain application. 
+ +You also tackled the task of implementing business-specific logic for creating, +updating, and deleting blog posts, as well as fetching individual blog posts by +ID and paginated lists of posts. You should now have a good understanding of how +to implement this sort of functionality in a blockchain context. + +Overall, completing this tutorial is a major accomplishment, and you should feel +confident in your ability to continue developing and expanding upon your app. +Keep up the great work, and keep learning and growing as a developer! \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/04-blog/_category_.json b/docs/versioned_docs/version-v0.26/02-guide/04-blog/_category_.json new file mode 100644 index 0000000..21c2246 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/04-blog/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Module basics: Blog", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/05-loan/00-intro.md b/docs/versioned_docs/version-v0.26/02-guide/05-loan/00-intro.md new file mode 100644 index 0000000..d90dcc5 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/05-loan/00-intro.md @@ -0,0 +1,86 @@ +# DeFi Loan + +Decentralized finance (DeFi) is a rapidly growing sector of the blockchain +ecosystem that is transforming the way we think about financial instruments and +services. DeFi offers a wide range of innovative financial products and +services, including lending, borrowing, spot trading, margin trading, and flash +loans, that are accessible to anyone with an internet connection and a digital +wallet. + +One of the key benefits of DeFi is that it allows end users to access financial +instruments and services quickly and easily, without the need for complex +onboarding processes or the submission of personal documents such as passports +or background checks. 
This makes DeFi an attractive alternative to traditional +banking systems, which can be slow, costly, and inconvenient. + +In this tutorial, you will learn how to create a DeFi platform that enables +users to lend and borrow digital assets from each other. The platform you will +build will be powered by a blockchain, which provides a decentralized and +immutable record of all transactions. This ensures that the platform is +transparent, secure, and resistant to fraud. + +A loan is a financial transaction in which one party, the borrower, receives a +certain amount of assets, such as money or digital tokens, and agrees to pay +back the loan amount plus a fee to the lender by a predetermined deadline. To +secure the loan, the borrower provides collateral, which may be seized by the +lender if the borrower fails to pay back the loan as agreed. + +A loan has several properties that define its terms and conditions. + +The `id` is a unique identifier that is used to identify the loan on a +blockchain. + +The `amount` is the amount of assets that are being lent to the borrower. + +The `fee` is the cost that the borrower must pay to the lender for the loan. + +The `collateral` is the asset or assets that the borrower provides to the lender +as security for the loan. + +The `deadline` is the date by which the borrower must pay back the loan. If the +borrower fails to pay back the loan by the deadline, the lender may choose to +liquidate the loan and seize the collateral. + +The `state` of a loan describes the current status of the loan and can take on +several values, such as `requested`, `approved`, `paid`, `cancelled`, or +`liquidated`. A loan is in the `requested` state when the borrower first submits +a request for the loan. If the lender approves the request, the loan moves to +the `approved` state. When the borrower repays the loan, the loan moves to the +`paid` state. If the borrower cancels the loan before it is approved, the loan +moves to the `cancelled` state. 
If the borrower is unable to pay back the loan
+by the deadline, the lender may choose to liquidate the loan and seize the
+collateral. In this case, the loan moves to the `liquidated` state.
+
+In a loan transaction, there are two parties involved: the borrower and the
+lender. The borrower is the party that requests the loan and agrees to pay back
+the loan amount plus a fee to the lender by a predetermined deadline. The lender
+is the party that approves the loan request and provides the borrower with the
+loan amount.
+
+As a borrower, you should be able to perform several actions on the loan
+platform. These actions may include:
+
+* requesting a loan,
+* canceling a loan,
+* repaying a loan.
+
+Requesting a loan allows you to specify the terms and conditions of the loan,
+including the amount, the fee, the collateral, and the deadline for repayment.
+If you cancel a loan, you can withdraw your request for the loan before it is
+approved or funded. Repaying a loan allows you to pay back the loan amount plus
+the fee to the lender in accordance with the loan terms.
+
+As a lender, you should be able to perform two actions on the platform:
+
+* approving a loan,
+* liquidating a loan.
+
+Approving a loan allows you to accept the terms and conditions of the loan and
+send the loan amount to the borrower. Liquidating a loan allows the lender to
+seize the collateral if the borrower is unable to pay back the loan by the
+deadline.
+
+By performing these actions, lenders and borrowers can interact with each other
+and facilitate the lending and borrowing of digital assets on the platform. The
+platform enables users to access financial instruments and services that allow
+them to manage their assets and achieve their financial goals in a secure and
+transparent manner.
\ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/05-loan/01-init.md b/docs/versioned_docs/version-v0.26/02-guide/05-loan/01-init.md new file mode 100644 index 0000000..f09531a --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/05-loan/01-init.md @@ -0,0 +1,72 @@ +# Creating a structure of the application + +To create a structure for a blockchain application that enables users to lend +and borrow digital assets from each other, use the Ignite CLI to generate the +necessary code. + +First, create a new blockchain called `loan` by running the following command: + +``` +ignite scaffold chain loan --no-module +``` + +The `--no-module` flag tells Ignite not to create a default module. Instead, you +will create the module yourself in the next step. + +Next, change the directory to `loan/`: + +``` +cd loan +``` + +Create a module with a dependency on the standard Cosmos SDK `bank` module by +running the following command: + +``` +ignite scaffold module loan --dep bank +``` + +Create a `loan` model with a list of properties. + +``` +ignite scaffold list loan amount fee collateral deadline state borrower lender --no-message +``` + +The `--no-message` flag tells Ignite not to generate Cosmos SDK messages for +creating, updating, and deleting loans. Instead, you will generate the code for +custom messages. + + +To generate the code for handling the messages for requesting, approving, +repaying, liquidating, and cancelling loans, run the following commands: + +``` +ignite scaffold message request-loan amount fee collateral deadline +``` + +``` +ignite scaffold message approve-loan id:uint +``` + +``` +ignite scaffold message repay-loan id:uint +``` + +``` +ignite scaffold message liquidate-loan id:uint +``` + +``` +ignite scaffold message cancel-loan id:uint +``` + +Great job! By using a few simple commands with Ignite CLI, you have successfully +set up the foundation for your blockchain application. 
You have created a loan +model and included keeper methods to allow interaction with the store. In +addition, you have also implemented message handlers for five custom messages. + +Now that the basic structure is in place, it's time to move on to the next phase +of development. In the coming sections, you will be focusing on implementing the +business logic within the message handlers you have created. This will involve +writing code to define the specific actions and processes that should be carried +out when each message is received. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/05-loan/02-bank.md b/docs/versioned_docs/version-v0.26/02-guide/05-loan/02-bank.md new file mode 100644 index 0000000..652a408 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/05-loan/02-bank.md @@ -0,0 +1,32 @@ +# Importing methods from the Bank keeper + +In the previous step you have created the `loan` module with `ignite scaffold +module` using `--dep bank`. This command created a new module and added the +`bank` keeper to the `loan` module, which allows you to add and use bank's +keeper methods in loan's keeper methods. + +To see the changes made by `--dep bank`, review the following files: +`x/loan/keeper/keeper.go` and `x/loan/module.go`. + +Ignite takes care of adding the `bank` keeper, but you still need to tell the +`loan` module which `bank` methods you will be using. You will be using three +methods: `SendCoins`, `SendCoinsFromAccountToModule`, and +`SendCoinsFromModuleToAccount`. 
You can do that by adding method signatures to +the `BankKeeper` interface: + +```go title="x/loan/types/expected_keepers.go" +package types + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" +) + +type BankKeeper interface { + SpendableCoins(ctx sdk.Context, addr sdk.AccAddress) sdk.Coins + // highlight-start + SendCoins(ctx sdk.Context, fromAddr sdk.AccAddress, toAddr sdk.AccAddress, amt sdk.Coins) error + SendCoinsFromAccountToModule(ctx sdk.Context, senderAddr sdk.AccAddress, recipientModule string, amt sdk.Coins) error + SendCoinsFromModuleToAccount(ctx sdk.Context, senderModule string, recipientAddr sdk.AccAddress, amt sdk.Coins) error + // highlight-end +} +``` \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/05-loan/03-request.md b/docs/versioned_docs/version-v0.26/02-guide/05-loan/03-request.md new file mode 100644 index 0000000..5640ff8 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/05-loan/03-request.md @@ -0,0 +1,117 @@ +# Request a loan + +Implement `RequestLoan` keeper method that will be called whenever a user +requests a loan. `RequestLoan` creates a new loan with the provided data, sends +the collateral from the borrower's account to a module account, and adds the +loan to the blockchain's store. 
+ +## Keeper method + +```go title="x/loan/keeper/msg_server_request_loan.go" +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "loan/x/loan/types" +) + +func (k msgServer) RequestLoan(goCtx context.Context, msg *types.MsgRequestLoan) (*types.MsgRequestLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + var loan = types.Loan{ + Amount: msg.Amount, + Fee: msg.Fee, + Collateral: msg.Collateral, + Deadline: msg.Deadline, + State: "requested", + Borrower: msg.Creator, + } + borrower, err := sdk.AccAddressFromBech32(msg.Creator) + if err != nil { + panic(err) + } + collateral, err := sdk.ParseCoinsNormalized(loan.Collateral) + if err != nil { + panic(err) + } + sdkError := k.bankKeeper.SendCoinsFromAccountToModule(ctx, borrower, types.ModuleName, collateral) + if sdkError != nil { + return nil, sdkError + } + k.AppendLoan(ctx, loan) + return &types.MsgRequestLoanResponse{}, nil +} +``` + +The function takes in two arguments: a `context.Context` object and a pointer to +a `types.MsgRequestLoan` struct. It returns a pointer to a +`types.MsgRequestLoanResponse` struct and an `error` object. + +The first thing the function does is create a new `types.Loan` struct with the +data from the input `types.MsgRequestLoan` struct. It sets the `State` field of +`the types.Loan` struct to "requested". + +Next, the function gets the borrower's address from the `msg.Creator` field of +the input `types.MsgRequestLoan` struct. It then parses the `loan.Collateral` +field (which is a string) into `sdk.Coins` using the `sdk.ParseCoinsNormalized` +function. + +The function then sends the collateral from the borrower's account to a module +account using the `k.bankKeeper.SendCoinsFromAccountToModule` function. Finally, +it adds the new loan to a keeper using the `k.AppendLoan` function. The function +returns a `types.MsgRequestLoanResponse` struct and a `nil` error if all goes +well. 
+ +## Basic message validation + +When a loan is created, a certain message input validation is required. You want +to throw error messages in case the end user tries impossible inputs. + +```go title="x/loan/types/message_request_loan.go" +package types + +import ( + // highlight-next-line + "strconv" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + +func (msg *MsgRequestLoan) ValidateBasic() error { + _, err := sdk.AccAddressFromBech32(msg.Creator) + if err != nil { + return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + } + // highlight-start + amount, _ := sdk.ParseCoinsNormalized(msg.Amount) + if !amount.IsValid() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "amount is not a valid Coins object") + } + if amount.Empty() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "amount is empty") + } + fee, _ := sdk.ParseCoinsNormalized(msg.Fee) + if !fee.IsValid() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "fee is not a valid Coins object") + } + deadline, err := strconv.ParseInt(msg.Deadline, 10, 64) + if err != nil { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "deadline is not an integer") + } + if deadline <= 0 { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "deadline should be a positive integer") + } + collateral, _ := sdk.ParseCoinsNormalized(msg.Collateral) + if !collateral.IsValid() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "collateral is not a valid Coins object") + } + if collateral.Empty() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "collateral is empty") + } + // highlight-end + return nil +} +``` diff --git a/docs/versioned_docs/version-v0.26/02-guide/05-loan/04-approve.md b/docs/versioned_docs/version-v0.26/02-guide/05-loan/04-approve.md new file mode 100644 index 0000000..b0f0e6c --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/05-loan/04-approve.md @@ -0,0 +1,97 @@ +# Approve a 
loan + +After a loan request has been made, it is possible for another account to +approve the loan and accept the terms proposed by the borrower. This process +involves the transfer of the requested funds from the lender to the borrower. + +To be eligible for approval, a loan must have a status of "requested." This +means that the borrower has made a request for a loan and is waiting for a +lender to agree to the terms and provide the funds. Once a lender has decided to +approve the loan, they can initiate the transfer of the funds to the borrower. + +Upon loan approval, the status of the loan is changed to "approved." This +signifies that the funds have been successfully transferred and that the loan +agreement is now in effect. + +## Keeper method + +```go title="x/loan/keeper/msg_server_approve_loan.go" +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "loan/x/loan/types" +) + +func (k msgServer) ApproveLoan(goCtx context.Context, msg *types.MsgApproveLoan) (*types.MsgApproveLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + loan, found := k.GetLoan(ctx, msg.Id) + if !found { + return nil, sdkerrors.Wrapf(sdkerrors.ErrKeyNotFound, "key %d doesn't exist", msg.Id) + } + if loan.State != "requested" { + return nil, sdkerrors.Wrapf(types.ErrWrongLoanState, "%v", loan.State) + } + lender, _ := sdk.AccAddressFromBech32(msg.Creator) + borrower, _ := sdk.AccAddressFromBech32(loan.Borrower) + amount, err := sdk.ParseCoinsNormalized(loan.Amount) + if err != nil { + return nil, sdkerrors.Wrap(types.ErrWrongLoanState, "Cannot parse coins in loan amount") + } + err = k.bankKeeper.SendCoins(ctx, lender, borrower, amount) + if err != nil { + return nil, err + } + loan.Lender = msg.Creator + loan.State = "approved" + k.SetLoan(ctx, loan) + return &types.MsgApproveLoanResponse{}, nil +} +``` + +`ApproveLoan` takes a context and a message of type `*types.MsgApproveLoan` as 
+input, and returns a pointer to a `types.MsgApproveLoanResponse` and an `error`. + +The function first retrieves a loan object by calling `k.GetLoan(ctx, msg.Id)`, +where `ctx` is a context object, `k` is the `msgServer` object, `GetLoan` is a +method on `k`, and `msg.Id` is a field of the msg object passed as an argument. +If the loan is not found, it returns `nil` and an error wrapped with +`sdkerrors.ErrKeyNotFound`. + +Next, the function checks if the loan's state is `"requested"`. If it is not, it +returns `nil` and an error wrapped with `types.ErrWrongLoanState`. + +If the loan's state is `"requested"`, the function parses the addresses of the +lender and borrower from bech32 strings, and then parses the `amount` of the +loan from a string. If there is an error parsing the coins in the loan amount, +it returns `nil` and an error wrapped with `types.ErrWrongLoanState`. + +Otherwise, the function calls the `SendCoins` method on the `k.bankKeeper` +object, passing it the context, the lender and borrower addresses, and the +amount of the loan. It then updates the lender field of the loan object and sets +its state to `"approved"`. Finally, it stores the updated loan object by calling +`k.SetLoan(ctx, loan)`. + +At the end, the function returns a `types.MsgApproveLoanResponse` object and +`nil` for the error. 
+ +## Register a custom error + +To register the custom error `ErrWrongLoanState` that is used in the +`ApproveLoan` function, modify the "errors.go" file: + +```go title="x/loan/types/errors.go" +package types + +import ( + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + +var ( + ErrWrongLoanState = sdkerrors.Register(ModuleName, 2, "wrong loan state") +) +``` \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/05-loan/05-repay.md b/docs/versioned_docs/version-v0.26/02-guide/05-loan/05-repay.md new file mode 100644 index 0000000..23e97ec --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/05-loan/05-repay.md @@ -0,0 +1,98 @@ +# Repay a loan + +The `RepayLoan` method is responsible for handling the repayment of a loan. This +involves transferring the borrowed funds, along with any agreed upon fees, from +the borrower to the lender. In addition, the collateral that was provided as +part of the loan agreement will be released from the escrow account and returned +to the borrower. + +It is important to note that the `RepayLoan` method can only be called under +certain conditions. Firstly, the transaction must be signed by the borrower of +the loan. This ensures that only the borrower has the ability to initiate the +repayment process. Secondly, the loan must be in an approved status. This means +that the loan has received approval and is ready to be repaid. + +To implement the `RepayLoan` method, we must ensure that these conditions are +met before proceeding with the repayment process. Once the necessary checks have +been performed, the method can then handle the transfer of funds and the release +of the collateral from the escrow account. 
+ +## Keeper method + +```go title="x/loan/keeper/msg_server_repay_loan.go" +package keeper + +import ( + "context" + "fmt" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "loan/x/loan/types" +) + +func (k msgServer) RepayLoan(goCtx context.Context, msg *types.MsgRepayLoan) (*types.MsgRepayLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + loan, found := k.GetLoan(ctx, msg.Id) + if !found { + return nil, sdkerrors.Wrapf(sdkerrors.ErrKeyNotFound, "key %d doesn't exist", msg.Id) + } + if loan.State != "approved" { + return nil, sdkerrors.Wrapf(types.ErrWrongLoanState, "%v", loan.State) + } + lender, _ := sdk.AccAddressFromBech32(loan.Lender) + borrower, _ := sdk.AccAddressFromBech32(loan.Borrower) + if msg.Creator != loan.Borrower { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "Cannot repay: not the borrower") + } + amount, _ := sdk.ParseCoinsNormalized(loan.Amount) + fee, _ := sdk.ParseCoinsNormalized(loan.Fee) + collateral, _ := sdk.ParseCoinsNormalized(loan.Collateral) + err := k.bankKeeper.SendCoins(ctx, borrower, lender, amount) + if err != nil { + return nil, err + } + err = k.bankKeeper.SendCoins(ctx, borrower, lender, fee) + if err != nil { + return nil, err + } + err = k.bankKeeper.SendCoinsFromModuleToAccount(ctx, types.ModuleName, borrower, collateral) + if err != nil { + return nil, err + } + loan.State = "repayed" + k.SetLoan(ctx, loan) + return &types.MsgRepayLoanResponse{}, nil +} +``` + +`RepayLoan` takes in two arguments: a context and a pointer to a +`types.MsgRepayLoan` type. It returns a pointer to a +`types.MsgRepayLoanResponse` type and an `error`. + +The method first retrieves a loan from storage by passing the provided loan ID +to the `k.GetLoan` method. If the loan cannot be found, the method returns an +error wrapped in a `sdkerrors.ErrKeyNotFound` error. + +The method then checks that the state of the loan is "approved". 
If it is not, +the method returns an error wrapped in a `types.ErrWrongLoanState` error. + +Next, the method converts the lender and borrower addresses stored in the loan +struct to `sdk.AccAddress` types using the `sdk.AccAddressFromBech32` function. +It then checks that the transaction is signed by the borrower of the loan by +comparing the `msg.Creator` field to the borrower address stored in the loan +struct. If these do not match, the method returns an error wrapped in a +`sdkerrors.ErrUnauthorized` error. + +The method then parses the loan amount, fee, and collateral stored in the loan +struct as `sdk.Coins` using the `sdk.ParseCoinsNormalized` function. It then +uses the `k.bankKeeper.SendCoins` function to transfer the loan amount and fee +from the borrower to the lender. It then uses the +`k.bankKeeper.SendCoinsFromModuleToAccount` function to transfer the collateral +from the escrow account to the borrower. + +Finally, the method updates the state of the loan to "repayed" and stores the +updated loan in storage using the `k.SetLoan` method. The method returns a +`types.MsgRepayLoanResponse` and a `nil` error to indicate that the repayment +process was successful. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/05-loan/06-liquidate.md b/docs/versioned_docs/version-v0.26/02-guide/05-loan/06-liquidate.md new file mode 100644 index 0000000..0e8de14 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/05-loan/06-liquidate.md @@ -0,0 +1,90 @@ +# Liquidate loan + +The `LiquidateLoan` method is a function that allows the lender to sell off the +collateral belonging to the borrower in the event that the borrower has failed +to repay the loan by the specified deadline. This process is known as +"liquidation" and is typically carried out as a way for the lender to recoup +their losses in the event that the borrower is unable to fulfill their repayment +obligations. 
+ +During the liquidation process, the collateral tokens that have been pledged by +the borrower as security for the loan are transferred from the borrower's +account to the lender's account. This transfer is initiated by the lender and is +typically triggered when the borrower fails to repay the loan by the agreed upon +deadline. Once the collateral has been transferred, the lender can then sell it +off in order to recoup their losses and compensate for the unpaid loan. + +## Keeper method + +```go title="x/loan/keeper/msg_server_liquidate_loan.go" +package keeper + +import ( + "context" + "fmt" + "strconv" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "loan/x/loan/types" +) + +func (k msgServer) LiquidateLoan(goCtx context.Context, msg *types.MsgLiquidateLoan) (*types.MsgLiquidateLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + loan, found := k.GetLoan(ctx, msg.Id) + if !found { + return nil, sdkerrors.Wrapf(sdkerrors.ErrKeyNotFound, "key %d doesn't exist", msg.Id) + } + if loan.Lender != msg.Creator { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "Cannot liquidate: not the lender") + } + if loan.State != "approved" { + return nil, sdkerrors.Wrapf(types.ErrWrongLoanState, "%v", loan.State) + } + lender, _ := sdk.AccAddressFromBech32(loan.Lender) + collateral, _ := sdk.ParseCoinsNormalized(loan.Collateral) + deadline, err := strconv.ParseInt(loan.Deadline, 10, 64) + if err != nil { + panic(err) + } + if ctx.BlockHeight() < deadline { + return nil, sdkerrors.Wrap(types.ErrDeadline, "Cannot liquidate before deadline") + } + err = k.bankKeeper.SendCoinsFromModuleToAccount(ctx, types.ModuleName, lender, collateral) + if err != nil { + return nil, err + } + loan.State = "liquidated" + k.SetLoan(ctx, loan) + return &types.MsgLiquidateLoanResponse{}, nil +} +``` + +`LiquidateLoan` takes in a context and a `types.MsgLiquidateLoan` message as input and returns a types.MsgLiquidateLoanResponse 
message and an error as output.
+
+The function first retrieves a loan using the `GetLoan` method and the `Id` field of the input message. If the loan is not found, it returns an error using the `sdkerrors.Wrapf` function and the `sdkerrors.ErrKeyNotFound` error code.
+
+Next, the function checks that the `Creator` field of the input message is the same as the `Lender` field of the loan. If they are not the same, it returns an error using the `sdkerrors.Wrap` function and the `sdkerrors.ErrUnauthorized` error code.
+
+The function then checks that the State field of the loan is equal to "approved". If it is not, it returns an error using the `sdkerrors.Wrapf` function and the `types.ErrWrongLoanState` error code.
+
+The function then converts the Lender field of the loan to an address using the `sdk.AccAddressFromBech32` function and the `Collateral` field to coins using the `sdk.ParseCoinsNormalized` function. It also converts the `Deadline` field to an integer using the `strconv.ParseInt` function. If this function returns an error, it panics.
+
+Finally, the function checks that the current block height is greater than or equal to the deadline. If it is not, it returns an error using the `sdkerrors.Wrap` function and the `types.ErrDeadline` error code. If all checks pass, the function uses the `bankKeeper.SendCoinsFromModuleToAccount` method to transfer the collateral from the module account to the lender's account and updates the `State` field of the loan to `"liquidated"`. It then stores the updated loan using the `SetLoan` method and returns a `types.MsgLiquidateLoanResponse` message with no error.
+ +## Register a custom error + +```go title="x/loan/types/errors.go" +package types + +import ( + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + +var ( + ErrWrongLoanState = sdkerrors.Register(ModuleName, 2, "wrong loan state") + // highlight-next-line + ErrDeadline = sdkerrors.Register(ModuleName, 3, "deadline") +) +``` \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/05-loan/07-cancel.md b/docs/versioned_docs/version-v0.26/02-guide/05-loan/07-cancel.md new file mode 100644 index 0000000..06e7792 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/05-loan/07-cancel.md @@ -0,0 +1,73 @@ +# Cancel a loan + +As a borrower, you have the option to cancel a loan you have created if you no +longer want to proceed with it. However, this action is only possible if the +loan's current status is marked as "requested". + +If you decide to cancel the loan, the collateral tokens that were being held as +security for the loan will be transferred back to your account from the module +account. This means that you will regain possession of the collateral tokens you +had originally put up for the loan. 
+
+```go title="x/loan/keeper/msg_server_cancel_loan.go"
+package keeper
+
+import (
+	"context"
+
+	sdk "github.com/cosmos/cosmos-sdk/types"
+	sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
+
+	"loan/x/loan/types"
+)
+
+func (k msgServer) CancelLoan(goCtx context.Context, msg *types.MsgCancelLoan) (*types.MsgCancelLoanResponse, error) {
+	ctx := sdk.UnwrapSDKContext(goCtx)
+	loan, found := k.GetLoan(ctx, msg.Id)
+	if !found {
+		return nil, sdkerrors.Wrapf(sdkerrors.ErrKeyNotFound, "key %d doesn't exist", msg.Id)
+	}
+	if loan.Borrower != msg.Creator {
+		return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "Cannot cancel: not the borrower")
+	}
+	if loan.State != "requested" {
+		return nil, sdkerrors.Wrapf(types.ErrWrongLoanState, "%v", loan.State)
+	}
+	borrower, _ := sdk.AccAddressFromBech32(loan.Borrower)
+	collateral, _ := sdk.ParseCoinsNormalized(loan.Collateral)
+	err := k.bankKeeper.SendCoinsFromModuleToAccount(ctx, types.ModuleName, borrower, collateral)
+	if err != nil {
+		return nil, err
+	}
+	loan.State = "cancelled"
+	k.SetLoan(ctx, loan)
+	return &types.MsgCancelLoanResponse{}, nil
+}
+```
+
+`CancelLoan` takes in two arguments: a `context.Context` named `goCtx` and a
+pointer to a `types.MsgCancelLoan` named `msg`. It returns a pointer to a
+`types.MsgCancelLoanResponse` and an error.
+
+The function begins by using the `sdk.UnwrapSDKContext` method to get the
+`sdk.Context` from the `context.Context` object. It then uses the `GetLoan`
+method of the `msgServer` type to retrieve a loan identified by the `Id` field
+of the `msg` argument. If the loan is not found, the function returns an error
+using the `sdkerrors.ErrKeyNotFound` error wrapped with the `sdkerrors.Wrapf`
+method.
+
+Next, the function checks if the `Creator` field of the msg argument is the same
+as the `Borrower` field of the loan. If they are not the same, the function
+returns an error using the `sdkerrors.ErrUnauthorized` error wrapped with the
+`sdkerrors.Wrap` method.
+
+The function then checks if the `State` field of the loan is equal to the string
+`"requested"`. If it is not, the function returns an error using the
+`types.ErrWrongLoanState` error wrapped with the `sdkerrors.Wrapf` method.
+
+If the loan has the correct state and the creator of the message is the borrower
+of the loan, the function proceeds to send the collateral coins held in the
+`Collateral` field of the loan back to the borrower's account using the
+`SendCoinsFromModuleToAccount` method of the `bankKeeper`. The function then
+updates the State field of the loan to the string "cancelled" and sets the
+updated loan using the `SetLoan` method. Finally, the function returns a
+`types.MsgCancelLoanResponse` object and a nil error. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/05-loan/08-play.md b/docs/versioned_docs/version-v0.26/02-guide/05-loan/08-play.md new file mode 100644 index 0000000..f0b0bc9 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/05-loan/08-play.md @@ -0,0 +1,318 @@ +# Play
+
+Add `10000foocoin` to Alice's account. These tokens will be used as a loan
+collateral.
+
+```yml title="config.yml"
+version: 1
+accounts:
+  - name: alice
+    coins:
+      - 20000token
+      # highlight-next-line
+      - 10000foocoin
+      - 200000000stake
+  - name: bob
+    coins:
+      - 10000token
+      - 100000000stake
+client:
+  openapi:
+    path: docs/static/openapi.yml
+faucet:
+  name: bob
+  coins:
+    - 5token
+    - 100000stake
+validators:
+  - name: alice
+    bonded: 100000000stake
+```
+
+Start a blockchain node:
+
+```
+ignite chain serve
+```
+
+## Repaying a loan
+
+Request a loan of `1000token` with `100token` as a fee and `1000foocoin` as a
+collateral from Alice's account. 
The deadline is set to `500` blocks: + +``` +loand tx loan request-loan 1000token 100token 1000foocoin 500 --from alice +``` + +``` +loand q loan list-loan +``` + +```yml +Loan: +- amount: 1000token + borrower: cosmos153dk8qh56v4yg6e4uzrvvqjueu6d36fptlr2kw + collateral: 1000foocoin + deadline: "500" + fee: 100token + id: "0" + lender: "" + state: requested +``` + +Please be aware that the addresses displayed in your terminal window (such as those in the `borrower` field) will not match the ones provided in this tutorial. This is because Ignite generates new accounts each time a chain is started, unless an account has a mnemonic specified in the `config.yml` file. + +Approve the loan from Bob's account: + +``` +loand tx loan approve-loan 0 --from bob +``` + +``` +loand q loan list-loan +``` + +The `lender` field has been updated to Bob's address and the `state` field has +been updated to `approved`: + +```yml +Loan: +- amount: 1000token + borrower: cosmos153dk8qh56v4yg6e4uzrvvqjueu6d36fptlr2kw + collateral: 1000foocoin + deadline: "500" + fee: 100token + id: "0" + # highlight-start + lender: cosmos1qfzpxfhsu2qfy2exkukuanrkzrrexh9yeg2pr4 + state: approved + # highlight-end +``` + +``` +loand q bank balances $(loand keys show alice -a) +``` + +The `foocoin` balance has been updated to `9000`, because `1000foocoin` has been +transferred as collateral to the module account. 
The `token` balance has been +updated to `21000`, because `1000token` has been transferred from Bob's account +to Alice's account as a loan: + +```yml +balances: + # highlight-start +- amount: "9000" + denom: foocoin + # highlight-end +- amount: "100000000" + denom: stake + # highlight-start +- amount: "21000" + denom: token + # highlight-end +``` + +``` +loand q bank balances $(loand keys show bob -a) +``` + +The `token` balance has been updated to `9000`, because `1000token` has been +transferred from Bob's account to Alice's account as a loan: + +```yml +balances: +- amount: "100000000" + denom: stake + # highlight-start +- amount: "9000" + denom: token + # highlight-end +``` + +Repay the loan from Alice's account: + +``` +loand tx loan repay-loan 0 --from alice +``` + +``` +loand q loan list-loan +``` + +The `state` field has been updated to `repayed`: + +```yml +Loan: +- amount: 1000token + borrower: cosmos153dk8qh56v4yg6e4uzrvvqjueu6d36fptlr2kw + collateral: 1000foocoin + deadline: "500" + fee: 100token + id: "0" + lender: cosmos1qfzpxfhsu2qfy2exkukuanrkzrrexh9yeg2pr4 + # highlight-next-line + state: repayed +``` + +``` +loand q bank balances $(loand keys show alice -a) +``` + +The `foocoin` balance has been updated to `10000`, because `1000foocoin` has +been transferred from the module account to Alice's account. 
The `token` balance +has been updated to `19900`, because `1000token` has been transferred from +Alice's account to Bob's account as a repayment and `100token` has been +transferred from Alice's account to Bob's account as a fee: + +```yml +balances: + # highlight-start +- amount: "10000" + denom: foocoin + # highlight-end +- amount: "100000000" + denom: stake + # highlight-start +- amount: "19900" + denom: token + # highlight-end +``` + +``` +loand q bank balances $(loand keys show bob -a) +``` + +The `token` balance has been updated to `10100`, because `1000token` has been +transferred from Alice's account to Bob's account as a repayment and `100token` +has been transferred from Alice's account to Bob's account as a fee: + +```yml +balances: +- amount: "100000000" + denom: stake + # highlight-start +- amount: "10100" + denom: token + # highlight-end +``` + +## Liquidating a loan + +Request a loan of `1000token` with `100token` as a fee and `1000foocoin` as a +collateral from Alice's account. The deadline is set to `20` blocks. 
The +deadline is set to a very small value, so that the loan can be quickly +liquidated in the next step: + +``` +loand tx loan request-loan 1000token 100token 1000foocoin 20 --from alice +``` + +``` +loand q loan list-loan +``` + +A loan has been added to the list: + +```yml +Loan: +- amount: 1000token + borrower: cosmos153dk8qh56v4yg6e4uzrvvqjueu6d36fptlr2kw + collateral: 1000foocoin + deadline: "500" + fee: 100token + id: "0" + lender: cosmos1qfzpxfhsu2qfy2exkukuanrkzrrexh9yeg2pr4 + state: repayed + # highlight-start +- amount: 1000token + borrower: cosmos153dk8qh56v4yg6e4uzrvvqjueu6d36fptlr2kw + collateral: 1000foocoin + deadline: "20" + fee: 100token + id: "1" + lender: "" + state: requested + # highlight-end +``` + +Approve the loan from Bob's account: + +``` +loand tx loan approve-loan 1 --from bob +``` + +Liquidate the loan from Bob's account: + +``` +loand tx loan liquidate-loan 1 --from bob +``` + +``` +loand q loan list-loan +``` + +The loan has been liquidated: + +```yml +Loan: +- amount: 1000token + borrower: cosmos153dk8qh56v4yg6e4uzrvvqjueu6d36fptlr2kw + collateral: 1000foocoin + deadline: "500" + fee: 100token + id: "0" + lender: cosmos1qfzpxfhsu2qfy2exkukuanrkzrrexh9yeg2pr4 + state: repayed + # highlight-start +- amount: 1000token + borrower: cosmos153dk8qh56v4yg6e4uzrvvqjueu6d36fptlr2kw + collateral: 1000foocoin + deadline: "20" + fee: 100token + id: "1" + lender: cosmos1qfzpxfhsu2qfy2exkukuanrkzrrexh9yeg2pr4 + state: liquidated + # highlight-end +``` + +``` +loand q bank balances $(loand keys show alice -a) +``` + +The `foocoin` balance has been updated to `9000`, because `1000foocoin` has been +transferred from Alice's account to the module account as a collateral. 
Alice +has lost her collateral, but she has kept the loan amount: + +```yml +balances: + # highlight-start +- amount: "9000" + denom: foocoin + # highlight-end +- amount: "100000000" + denom: stake + # highlight-start +- amount: "20900" + denom: token + # highlight-end +``` + +``` +loand q bank balances $(loand keys show bob -a) +``` + +The `foocoin` balance has been updated to `1000`, because `1000foocoin` has been +transferred from the module account to Bob's account as a collateral. Bob has +gained the collateral, but he has lost the loan amount: + +```yml +balances: + # highlight-start +- amount: "1000" + denom: foocoin + # highlight-end +- amount: "100000000" + denom: stake +- amount: "9100" + denom: token +``` diff --git a/docs/versioned_docs/version-v0.26/02-guide/05-loan/_category_.json b/docs/versioned_docs/version-v0.26/02-guide/05-loan/_category_.json new file mode 100644 index 0000000..029e81b --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/05-loan/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Advanced Module: DeFi Loan", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/06-ibc.md b/docs/versioned_docs/version-v0.26/02-guide/06-ibc.md new file mode 100644 index 0000000..6ca5f2c --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/06-ibc.md @@ -0,0 +1,710 @@ +--- +sidebar_position: 7 +description: Build an understanding of how to create and send packets across blockchains and navigate between blockchains. +title: "Inter-Blockchain Communication: Basics" +--- + +# Inter-Blockchain Communication: Basics + +The Inter-Blockchain Communication protocol (IBC) is an important part of the +Cosmos SDK ecosystem. The Hello World tutorial is a time-honored tradition in +computer programming. This tutorial builds an understanding of how to create and +send packets across blockchain. This foundational knowledge helps you navigate +between blockchains with the Cosmos SDK. 
+
+**You will learn how to**
+
+- Use IBC to create and send packets between blockchains.
+- Navigate between blockchains using the Cosmos SDK and the Ignite CLI Relayer.
+- Create a basic blog post and save the post on another blockchain.
+
+## What is IBC?
+
+The Inter-Blockchain Communication protocol (IBC) allows blockchains to talk to
+each other. IBC handles transport across different sovereign blockchains. This
+end-to-end, connection-oriented, stateful protocol provides reliable, ordered,
+and authenticated communication between heterogeneous blockchains.
+
+The [IBC protocol in the Cosmos
+SDK](https://ibc.cosmos.network/main/ibc/overview) is the standard for the
+interaction between two blockchains. The IBC module interface defines how packets
+and messages are constructed to be interpreted by the sending and the receiving
+blockchain.
+
+The IBC relayer lets you connect between sets of IBC-enabled chains. This
+tutorial teaches you how to create two blockchains and then start and use the
+relayer with Ignite CLI to connect two blockchains.
+
+This tutorial covers essentials like modules, IBC packets, relayer, and the
+lifecycle of packets routed through IBC.
+
+## Create a blockchain
+
+Create a blockchain app with a blog module to write posts on other blockchains
+that contain the Hello World message. For this tutorial, you can write posts for
+the Cosmos SDK universe that contain Hello Mars, Hello Cosmos, and Hello Earth
+messages.
+
+For this simple example, create an app that contains a blog module that has a
+post transaction with title and text.
+
+After you define the logic, run two blockchains that have this module installed.
+
+- The chains can send posts between each other using IBC.
+
+- On the sending chain, save the `acknowledged` and `timed out` posts.
+
+After the transaction is acknowledged by the receiving chain, you know that the
+post is saved on both blockchains.
+
+- The sending chain has the additional data `postID`. 
+ +- Sent posts that are acknowledged and timed out contain the title and the + target chain of the post. These identifiers +- are visible on the parameter `chain`. The following chart shows the lifecycle + of a packet that travels through IBC. + +![The Lifecycle of an IBC packet](./images/packet_sendpost.png) + +## Build your blockchain app + +Use Ignite CLI to scaffold the blockchain app and the blog module. + +### Build a new blockchain + +To scaffold a new blockchain named `planet`: + +```bash +ignite scaffold chain planet --no-module +cd planet +``` + +A new directory named `planet` is created in your home directory. The `planet` +directory contains a working blockchain app. + +### Scaffold the blog module inside your blockchain + +Next, use Ignite CLI to scaffold a blog module with IBC capabilities. The blog +module contains the logic for creating blog posts and routing them through IBC +to the second blockchain. + +To scaffold a module named `blog`: + +```bash +ignite scaffold module blog --ibc +``` + +A new directory with the code for an IBC module is created in `planet/x/blog`. +Modules scaffolded with the `--ibc` flag include all the logic for the +scaffolded IBC module. + +### Generate CRUD actions for types + +Next, create the CRUD actions for the blog module types. + +Use the `ignite scaffold list` command to scaffold the boilerplate code for the +create, read, update, and delete (CRUD) actions. 
+ +These `ignite scaffold list` commands create CRUD code for the following +transactions: + +- Creating blog posts + + ```bash + ignite scaffold list post title content creator --no-message --module blog + ``` + +- Processing acknowledgments for sent posts + + ```bash + ignite scaffold list sentPost postID title chain creator --no-message --module blog + ``` + +- Managing post timeouts + + ```bash + ignite scaffold list timedoutPost title chain creator --no-message --module blog + ``` + +The scaffolded code includes proto files for defining data structures, messages, +messages handlers, keepers for modifying the state, and CLI commands. + +### Ignite CLI Scaffold List Command Overview + +``` +ignite scaffold list [typeName] [field1] [field2] ... [flags] +``` + +The first argument of the `ignite scaffold list [typeName]` command specifies +the name of the type being created. For the blog app, you created `post`, +`sentPost`, and `timedoutPost` types. + +The next arguments define the fields that are associated with the type. For the +blog app, you created `title`, `content`, `postID`, and `chain` fields. + +The `--module` flag defines which module the new transaction type is added to. +This optional flag lets you manage multiple modules within your Ignite CLI app. +When the flag is not present, the type is scaffolded in the module that matches +the name of the repo. + +When a new type is scaffolded, the default behavior is to scaffold messages that +can be sent by users for CRUD operations. The `--no-message` flag disables this +feature. Disable the messages option for the app since you want the posts to be +created upon reception of IBC packets and not directly created from a user's +messages. + +### Scaffold a sendable and interpretable IBC packet + +You must generate code for a packet that contains the title and the content of +the blog post. + +The `ignite packet` command creates the logic for an IBC packet that can be sent +to another blockchain. 
+ +- The `title` and `content` are stored on the target chain. + +- The `postID` is acknowledged on the sending chain. + +To scaffold a sendable and interpretable IBC packet: + +```bash +ignite scaffold packet ibcPost title content --ack postID --module blog +``` + +Notice the fields in the `ibcPost` packet match the fields in the `post` type +that you created earlier. + +- The `--ack` flag defines which identifier is returned to the sending + blockchain. + +- The `--module` flag specifies to create the packet in a particular IBC module. + +The `ignite packet` command also scaffolds the CLI command that is capable of +sending an IBC packet: + +```bash +planetd tx blog send-ibcPost [portID] [channelID] [title] [content] +``` + +## Modify the source code + +After you create the types and transactions, you must manually insert the logic +to manage updates in the database. Modify the source code to save the data as +specified earlier in this tutorial. + +### Add creator to the blog post packet + +Start with the proto file that defines the structure of the IBC packet. + +To identify the creator of the post in the receiving blockchain, add the +`creator` field inside the packet. This field was not specified directly in the +command because it would automatically become a parameter in the `SendIbcPost` +CLI command. + +```protobuf title="proto/planet/blog/packet.proto" +message IbcPostPacketData { + string title = 1; + string content = 2; + // highlight-next-line + string creator = 3; +} +``` + +To make sure the receiving chain has content on the creator of a blog post, add +the `msg.Creator` value to the IBC `packet`. + +- The content of the `sender` of the message is automatically included in + `SendIbcPost` message. +- The sender is verified as the signer of the message, so you can add the + `msg.Sender` as the creator to the new packet +- before it is sent over IBC. + +```go title="x/blog/keeper/msg_server_ibc_post.go" +package keeper + +import ( + // ... 
+ "planet/x/blog/types" +) + +func (k msgServer) SendIbcPost(goCtx context.Context, msg *types.MsgSendIbcPost) (*types.MsgSendIbcPostResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // TODO: logic before transmitting the packet + + // Construct the packet + var packet types.IbcPostPacketData + + packet.Title = msg.Title + packet.Content = msg.Content + // highlight-next-line + packet.Creator = msg.Creator + + // Transmit the packet + err := k.TransmitIbcPostPacket( + ctx, + packet, + msg.Port, + msg.ChannelID, + clienttypes.ZeroHeight(), + msg.TimeoutTimestamp, + ) + if err != nil { + return nil, err + } + + return &types.MsgSendIbcPostResponse{}, nil +} +``` + +### Receive the post + +The methods for primary transaction logic are in the `x/blog/keeper/ibc_post.go` +file. Use these methods to manage IBC packets: + +- `TransmitIbcPostPacket` is called manually to send the packet over IBC. This + method also defines the logic before the packet is sent over IBC to another + blockchain app. +- `OnRecvIbcPostPacket` hook is automatically called when a packet is received + on the chain. This method defines the packet reception logic. +- `OnAcknowledgementIbcPostPacket` hook is called when a sent packet is + acknowledged on the source chain. This method defines the logic when the + packet has been received. +- `OnTimeoutIbcPostPacket` hook is called when a sent packet times out. This + method defines the logic when the packet is not received on the target chain + +You must modify the source code to add the logic inside those functions so that +the data tables are modified accordingly. + +On reception of the post message, create a new post with the title and the +content on the receiving chain. 
+ +To identify the blockchain app that a message is originating from and who +created the message, use an identifier in the following format: + +`<portID>-<channelID>-<creatorAddress>` + +Finally, the Ignite CLI-generated AppendPost function returns the ID of the new +appended post. You can return this value to the source chain through +acknowledgment. + +Append the type instance as `PostID` on receiving the packet: + +- The context `ctx` is an [immutable data + structure](https://docs.cosmos.network/main/core/context#go-context-package) + that has header data from the transaction. See [how the context is + initiated](https://github.com/cosmos/cosmos-sdk/blob/main/types/context.go#L71) +- The identifier format that you defined earlier +- The `title` is the Title of the blog post +- The `content` is the Content of the blog post + +In the `x/blog/keeper/ibc_post.go` file, make sure to import `"strconv"` below +`"errors"`: + +```go title="x/blog/keeper/ibc_post.go" +import ( + //... + + "strconv" + +// ... +) +``` + +Then modify the `OnRecvIbcPostPacket` keeper function with the following code: + +```go +package keeper + +// ... + +func (k Keeper) OnRecvIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData) (packetAck types.IbcPostPacketAck, err error) { + // validate packet data upon receiving + if err := data.ValidateBasic(); err != nil { + return packetAck, err + } + + id := k.AppendPost( + ctx, + types.Post{ + Creator: packet.SourcePort + "-" + packet.SourceChannel + "-" + data.Creator, + Title: data.Title, + Content: data.Content, + }, + ) + + packetAck.PostID = strconv.FormatUint(id, 10) + + return packetAck, nil +} +``` + +### Receive the post acknowledgement + +On the sending blockchain, store a `sentPost` so you know that the post has been +received on the target chain. + +Store the title and the target to identify the post. 
+ +When a packet is scaffolded, the default type for the received acknowledgment +data is a type that identifies if the packet treatment has failed. The +`Acknowledgement_Error` type is set if `OnRecvIbcPostPacket` returns an error +from the packet. + +```go title="x/blog/keeper/ibc_post.go" +package keeper + +// ... + +// x/blog/keeper/ibc_post.go +func (k Keeper) OnAcknowledgementIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + // We will not treat acknowledgment error in this tutorial + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.IbcPostPacketAck + + if err := types.ModuleCdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + k.AppendSentPost( + ctx, + types.SentPost{ + Creator: data.Creator, + PostID: packetAck.PostID, + Title: data.Title, + Chain: packet.DestinationPort + "-" + packet.DestinationChannel, + }, + ) + + return nil + default: + return errors.New("the counter-party module does not implement the correct acknowledgment format") + } +} +``` + +### Store information about the timed-out packet + +Store posts that have not been received by target chains in `timedoutPost` +posts. This logic follows the same format as `sentPost`. + +```go title="x/blog/keeper/ibc_post.go" +func (k Keeper) OnTimeoutIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData) error { + k.AppendTimedoutPost( + ctx, + types.TimedoutPost{ + Creator: data.Creator, + Title: data.Title, + Chain: packet.DestinationPort + "-" + packet.DestinationChannel, + }, + ) + + return nil +} + +``` + +This last step completes the basic `blog` module setup. 
The blockchain is now +ready! + +## Use the IBC modules + +You can now spin up the blockchain and send a blog post from one blockchain app +to the other. Multiple terminal windows are required to complete these next +steps. + +### Test the IBC modules + +To test the IBC module, start two blockchain networks on the same machine. Both +blockchains use the same source code. Each blockchain has a unique chain ID. + +One blockchain is named `earth` and the other blockchain is named `mars`. + +The `earth.yml` and `mars.yml` files are required in the project directory: + +```yaml title="earth.yml" +version: 1 +build: + proto: + path: proto + third_party_paths: + - third_party/proto + - proto_vendor +accounts: +- name: alice + coins: + - 1000token + - 100000000stake +- name: bob + coins: + - 500token + - 100000000stake +faucet: + name: bob + coins: + - 5token + - 100000stake + host: 0.0.0.0:4500 +genesis: + chain_id: earth +validators: +- name: alice + bonded: 100000000stake + home: $HOME/.earth +``` + +```yaml title="mars.yml" +version: 1 +build: + proto: + path: proto + third_party_paths: + - third_party/proto + - proto_vendor +accounts: +- name: alice + coins: + - 1000token + - 1000000000stake +- name: bob + coins: + - 500token + - 100000000stake +faucet: + name: bob + coins: + - 5token + - 100000stake + host: :4501 +genesis: + chain_id: mars +validators: +- name: alice + bonded: 100000000stake + app: + api: + address: :1318 + grpc: + address: :9092 + grpc-web: + address: :9093 + config: + p2p: + laddr: :26658 + rpc: + laddr: :26659 + pprof_laddr: :6061 + home: $HOME/.mars +``` + +Open a terminal window and run the following command to start the `earth` +blockchain: + +```bash +ignite chain serve -c earth.yml +``` + +Open a different terminal window and run the following command to start the +`mars` blockchain: + +```bash +ignite chain serve -c mars.yml +``` + +### Remove Existing Relayer and Ignite CLI Configurations + +If you previously used the relayer, follow these 
steps to remove exiting relayer +and Ignite CLI configurations: + +- Stop your blockchains and delete previous configuration files: + + ```bash + rm -rf ~/.ignite/relayer + ``` + +If existing relayer configurations do not exist, the command returns `no matches +found` and no action is taken. + +### Configure and start the relayer + +First, configure the relayer. Use the Ignite CLI `configure` command with the +`--advanced` option: + +```bash +ignite relayer configure -a \ + --source-rpc "http://0.0.0.0:26657" \ + --source-faucet "http://0.0.0.0:4500" \ + --source-port "blog" \ + --source-version "blog-1" \ + --source-gasprice "0.0000025stake" \ + --source-prefix "cosmos" \ + --source-gaslimit 300000 \ + --target-rpc "http://0.0.0.0:26659" \ + --target-faucet "http://0.0.0.0:4501" \ + --target-port "blog" \ + --target-version "blog-1" \ + --target-gasprice "0.0000025stake" \ + --target-prefix "cosmos" \ + --target-gaslimit 300000 +``` + +When prompted, press Enter to accept the default values for `Source Account` and +`Target Account`. + +The output looks like: + +``` +--------------------------------------------- +Setting up chains +--------------------------------------------- + +🔐 Account on "source" is "cosmos1xcxgzq75yrxzd0tu2kwmwajv7j550dkj7m00za" + + |· received coins from a faucet + |· (balance: 100000stake,5token) + +🔐 Account on "target" is "cosmos1nxg8e4mfp5v7sea6ez23a65rvy0j59kayqr8cx" + + |· received coins from a faucet + |· (balance: 100000stake,5token) + +⛓ Configured chains: earth-mars +``` + +In a new terminal window, start the relayer process: + +```bash +ignite relayer connect +``` + +Results: + +``` +------ +Paths +------ + +earth-mars: + earth > (port: blog) (channel: channel-0) + mars > (port: blog) (channel: channel-0) + +------ +Listening and relaying packets between chains... 
+------ +``` + +### Send packets + +You can now send packets and verify the received posts: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Hello" "Hello Mars, I'm Alice from Earth" --from alice --chain-id earth --home ~/.earth +``` + +To verify that the post has been received on Mars: + +```bash +planetd q blog list-post --node tcp://localhost:26659 +``` + +The packet has been received: + +```yaml +Post: + - content: Hello Mars, I'm Alice from Earth + creator: blog-channel-0-cosmos1aew8dk9cs3uzzgeldatgzvm5ca2k4m98xhy20x + id: "0" + title: Hello +pagination: + next_key: null + total: "1" +``` + +To check if the packet has been acknowledged on Earth: + +```bash +planetd q blog list-sent-post +``` + +Output: + +```yaml +SentPost: + - chain: blog-channel-0 + creator: cosmos1aew8dk9cs3uzzgeldatgzvm5ca2k4m98xhy20x + id: "0" + postID: "0" + title: Hello +pagination: + next_key: null + total: "1" +``` + +To test timeout, set the timeout time of a packet to 1 nanosecond, verify that +the packet is timed out, and check the timed-out posts: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Sorry" "Sorry Mars, you will never see this post" --from alice --chain-id earth --home ~/.earth --packet-timeout-timestamp 1 +``` + +Check the timed-out posts: + +```bash +planetd q blog list-timedout-post +``` + +Results: + +```yaml +TimedoutPost: + - chain: blog-channel-0 + creator: cosmos1fhpcsxn0g8uask73xpcgwxlfxtuunn3ey5ptjv + id: "0" + title: Sorry +pagination: + next_key: null + total: "2" +``` + +You can also send a post from Mars: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Hello" "Hello Earth, I'm Alice from Mars" --from alice --chain-id mars --home ~/.mars --node tcp://localhost:26659 +``` + +List post on Earth: + +```bash +planetd q blog list-post +``` + +Results: + +```yaml +Post: + - content: Hello Earth, I'm Alice from Mars + creator: blog-channel-0-cosmos1xtpx43l826348s59au24p22pxg6q248638q2tf + id: "0" + title: Hello +pagination: + next_key: 
null + total: "1" +``` + +## Congratulations 🎉 + +By completing this tutorial, you've learned to use the Inter-Blockchain +Communication protocol (IBC). + +Here's what you accomplished in this tutorial: + +- Built two Hello blockchain apps as IBC modules +- Modified the generated code to add CRUD action logic +- Configured and used the Ignite CLI relayer to connect two blockchains with + each other +- Transferred IBC packets from one blockchain to another diff --git a/docs/versioned_docs/version-v0.26/02-guide/07-interchange/00-introduction.md b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/00-introduction.md new file mode 100644 index 0000000..5537e34 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/00-introduction.md @@ -0,0 +1,49 @@ +--- +sidebar_position: 0 +slug: /guide/interchange +--- + +# Introduction + +The Interchain Exchange is a module to create buy and sell orders between blockchains. + +In this tutorial, you learn how to create a Cosmos SDK module that can create order pairs, buy orders, and sell orders. +You create order books and buy and sell orders across blockchains, which in turn enables you to swap token from one +blockchain to another. + +**Note:** The code in this tutorial is written specifically for this tutorial and is intended only for educational +purposes. This tutorial code is not intended to be used in production. + +If you want to see the end result, see the example implementation in +the [interchange repo](https://github.com/tendermint/interchange). 
+
+**You will learn how to:**
+
+- Create a blockchain with Ignite CLI
+- Create a Cosmos SDK IBC module
+- Create an order book that hosts buy and sell orders with a module
+- Send IBC packets from one blockchain to another
+- Deal with timeouts and acknowledgements of IBC packets
+
+## How the Interchange Exchange Module Works
+
+To build an exchange that works with two or more blockchains, follow the steps in this tutorial to create a Cosmos SDK
+module called `dex`.
+
+The new `dex` module allows you to open an exchange order book for a pair of tokens: a token from one blockchain and a token
+on another blockchain. The blockchains are required to have the `dex` module available.
+
+Tokens can be bought or sold with limit orders on a simple order book. In this tutorial, there is no notion of a
+liquidity pool or automated market maker (AMM).
+
+The market is unidirectional:
+
+- The token sold on the source chain cannot be bought back as it is
+- The token bought from the target chain cannot be sold back using the same pair.
+
+If a token on a source chain is sold, it can only be bought back by creating a new pair on the order book.
+This workflow is due to the nature of the Inter-Blockchain Communication protocol (IBC) which creates a `voucher`
+token on the target blockchain. There is a difference between a native blockchain token and a `voucher` token that is minted
+on another blockchain. You must create a second order book pair in order to receive the native token back.
+
+In the next chapter, you learn details about the design of the interblockchain exchange. diff --git a/docs/versioned_docs/version-v0.26/02-guide/07-interchange/01-design.md b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/01-design.md new file mode 100644 index 0000000..448a0f5 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/01-design.md @@ -0,0 +1,108 @@ +--- +sidebar_position: 1 +description: Learn about the interchain exchange module design. 
+--- + +# App Design + +In this chapter, you learn how the interchain exchange module is designed. The module has order books, buy orders, and +sell orders. + +- First, create an order book for a pair of token. +- After an order book exists, you can create buy and sell orders for this pair of token. + +The module uses the Inter-Blockchain Communication +protocol [IBC](https://github.com/cosmos/ibc/blob/old/ibc/2_IBC_ARCHITECTURE.md). +By using IBC, the module can create order books so that multiple blockchains can interact and exchange their token. + +You create an order book pair with a token from one blockchain and another token from another blockchain. In this +tutorial, call the module you create the `dex` module. + +> When a user exchanges a token with the `dex` module, a `voucher` of that token is received on the other blockchain. +> This voucher is similar to how an `ibc-transfer` is constructed. Since a blockchain module does not have the rights +> to mint new token of a blockchain into existence, the token on the target chain is locked up, and the buyer receives +> a `voucher` of that token. + +This process can be reversed when the `voucher` gets burned to unlock the original token. This exchange process is +explained in more detail throughout the tutorial. + +## Assumption of the Design + +An order book can be created for the exchange of any tokens between any pair of chains. + +- Both blockchains require the `dex` module to be installed and running. +- There can only be one order book for a pair of token at the same time. + +<!-- There is no condition to check for open channels between two chains. --> + +A specific chain cannot mint new coins of its native token. + +<!-- The module is trustless, there is no condition to check when opening a channel between two chains. +Any pair of tokens can be exchanged between any pair of chains. 
--> + +This module is inspired by the [`ibc transfer`](https://github.com/cosmos/ibc-go/tree/main/modules/apps/transfer) +module on the Cosmos SDK. The `dex` module you create in this tutorial has similarities, like the `voucher` creation. + +However, the new `dex` module you are creating is more complex because it supports creation of: + +- Several types of packets to send +- Several types of acknowledgments to treat +- More complex logic on how to treat a packet on receipt, on timeout, and more + +## Interchain Exchange Overview + +Assume you have two blockchains: Venus and Mars. + +- The native token on Venus is `venuscoin`. +- The native token on Mars is `marscoin`. + +When a token is exchanged from Mars to Venus: + +- The Venus blockchain has an IBC `voucher` token with a denom that looks like `ibc/B5CB286...A7B21307F`. +- The long string of characters after `ibc/` is a denom trace hash of a token that was transferred using IBC. + +Using the blockchain's API you can get a denom trace from that hash. The denom trace consists of a `base_denom` and a +`path`. In our example: + +- The `base_denom` is `marscoin`. +- The `path` contains pairs of ports and channels through which the token has been transferred. + +For a single-hop transfer, the `path` is identified by `transfer/channel-0`. + +Learn more about token paths +in [ICS 20 Fungible Token Transfer](https://github.com/cosmos/ibc/tree/main/spec/app/ics-020-fungible-token-transfer). + +**Note:** This token `ibc/Venus/marscoin` cannot be sold back using the same order book. If you want to "reverse" the +exchange and receive the Mars token back, you must create and use a new order book for the `ibc/Venus/marscoin` to +`marscoin` transfer. + +## The Design of the Order Books + +As a typical exchange, a new pair implies the creation of an order book with orders to sell `marscoin` or orders to buy +`venuscoin`. Here, you have two chains and this data structure must be split between Mars and Venus. 
+ +- Users from chain Mars sell `marscoin`. +- Users from chain Venus buy `marscoin`. + +Therefore, we represent: + +- All orders to sell `marscoin` on chain Mars. +- All orders to buy `marscoin` on chain Venus. + +In this example, blockchain Mars holds the sell orders and blockchain Venus holds the buy orders. + +## Exchanging Tokens Back + +Like `ibc-transfer`, each blockchain keeps a trace of the token voucher that was created on the other blockchain. + +If blockchain Mars sells `marscoin` to chain Venus and `ibc/Venus/marscoin` is minted on Venus then, if +`ibc/Venus/marscoin` is sold back to Mars, the token is unlocked and the token that is received is `marscoin`. + +## Features + +The features supported by the interchain exchange module are: + +- Create an exchange order book for a token pair between two chains +- Send sell orders on source chain +- Send buy orders on target chain +- Cancel sell or buy orders diff --git a/docs/versioned_docs/version-v0.26/02-guide/07-interchange/02-init.md b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/02-init.md new file mode 100644 index 0000000..a0893f4 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/02-init.md @@ -0,0 +1,219 @@ +--- +sidebar_position: 2 +description: Create the blockchain for the interchain exchange app. +--- + +# App Init + +## Initialize the Blockchain + +In this chapter, you create the basic blockchain module for the interchain exchange app. You scaffold the blockchain, +the module, the transaction, the IBC packets, and messages. In later chapters, you integrate more code into each of the +transaction handlers. + +## Create the Blockchain + +Scaffold a new blockchain called `interchange`: + +```bash +ignite scaffold chain interchange --no-module +``` + +A new directory named `interchange` is created. 
+
+Change into this directory where you can scaffold modules, types, and maps:
+
+```bash
+cd interchange
+```
+
+The `interchange` directory contains a working blockchain app.
+
+A local Git repository has been created for you with the initial scaffold.
+
+Next, create a new IBC module.
+
+## Create the dex Module
+
+Scaffold a module inside your blockchain named `dex` with IBC capabilities.
+
+The dex module contains the logic to create and maintain order books and route them through IBC to the second
+blockchain.
+
+```bash
+ignite scaffold module dex --ibc --ordering unordered --dep bank
+```
+
+## Create CRUD logic for Buy and Sell Order Books
+
+Scaffold two types with create, read, update, and delete (CRUD) actions.
+
+Run the following Ignite CLI `type` commands to create `sellOrderBook` and `buyOrderBook` types:
+
+```bash
+ignite scaffold map sell-order-book amountDenom priceDenom --no-message --module dex
+ignite scaffold map buy-order-book amountDenom priceDenom --no-message --module dex
+```
+
+The values are:
+
+- `amountDenom`: the token to be sold and in which quantity
+- `priceDenom`: the token selling price
+
+The `--no-message` flag specifies to skip the message creation. Custom messages will be created in the next steps.
+
+The `--module dex` flag specifies to scaffold the type in the `dex` module.
+
+## Create the IBC Packets
+
+Create three packets for IBC:
+
+- An order book pair `createPair`
+- A sell order `sellOrder`
+- A buy order `buyOrder`
+
+```bash
+ignite scaffold packet create-pair sourceDenom targetDenom --module dex
+ignite scaffold packet sell-order amountDenom amount:int priceDenom price:int --ack remainingAmount:int,gain:int --module dex
+ignite scaffold packet buy-order amountDenom amount:int priceDenom price:int --ack remainingAmount:int,purchase:int --module dex
+```
+
+The optional `--ack` flag defines field names and types of the acknowledgment returned after the packet has been
+received by the target chain. 
The value of the `--ack` flag is a comma-separated list of names (no spaces). Append
optional types after a colon (`:`).
+
+## Cancel messages
+
+Cancelling orders is done locally on the network; there is no packet to send.
+
+Use the `message` command to create a message to cancel a sell or buy order:
+
+```bash
+ignite scaffold message cancel-sell-order port channel amountDenom priceDenom orderID:int --desc "Cancel a sell order" --module dex
+ignite scaffold message cancel-buy-order port channel amountDenom priceDenom orderID:int --desc "Cancel a buy order" --module dex
+```
+
+Use the optional `--desc` flag to define a description of the CLI command that is used to broadcast a transaction with
+the message.
+
+## Trace the Denom
+
+The token denoms must have the same behavior as described in the `ibc-transfer` module:
+
+- An external token received from a chain has a unique `denom`, referred to as `voucher`.
+- When a token is sent to a blockchain and then sent back and received, the chain can resolve the voucher and convert
+  it back to the original token denomination.
+
+`Voucher` tokens are represented as hashes, therefore you must store which original denomination is related to a
+voucher.
+You can do this with an indexed type.
+
+For a `voucher` you store, define the source port ID, source channel ID, and the original denom:
+
+```bash
+ignite scaffold map denom-trace port channel origin --no-message --module dex
+```
+
+## Create the Configuration for Two Blockchains
+
+Add two config files `mars.yml` and `venus.yml` to test two blockchain networks with specific tokens for each.
+
+Add the config files in the `interchange` folder.
+
+The native denoms for Mars are `marscoin`, and for Venus `venuscoin`. 
+
+Create the `mars.yml` file with the following content:
+
+```yaml title="mars.yml"
+version: 1
+build:
+  proto:
+    path: proto
+    third_party_paths:
+    - third_party/proto
+    - proto_vendor
+accounts:
+- name: alice
+  coins:
+  - 1000token
+  - 100000000stake
+  - 1000marscoin
+- name: bob
+  coins:
+  - 500token
+  - 1000marscoin
+  - 100000000stake
+faucet:
+  name: bob
+  coins:
+  - 5token
+  - 100000stake
+  host: 0.0.0.0:4500
+genesis:
+  chain_id: mars
+validators:
+- name: alice
+  bonded: 100000000stake
+  home: $HOME/.mars
+```
+
+Create the `venus.yml` file with the following content:
+
+```yaml title="venus.yml"
+version: 1
+build:
+  proto:
+    path: proto
+    third_party_paths:
+    - third_party/proto
+    - proto_vendor
+accounts:
+- name: alice
+  coins:
+  - 1000token
+  - 1000000000stake
+  - 1000venuscoin
+- name: bob
+  coins:
+  - 500token
+  - 1000venuscoin
+  - 100000000stake
+faucet:
+  name: bob
+  coins:
+  - 5token
+  - 100000stake
+  host: :4501
+genesis:
+  chain_id: venus
+validators:
+- name: alice
+  bonded: 100000000stake
+  app:
+    api:
+      address: :1318
+    grpc:
+      address: :9092
+    grpc-web:
+      address: :9093
+  config:
+    p2p:
+      laddr: :26658
+    rpc:
+      laddr: :26659
+      pprof_laddr: :6061
+  home: $HOME/.venus
+```
+
+In order to run two blockchains side by side on a single machine, you need to
+start them on different ports. `venus.yml` has a validators configuration that
+starts the HTTP API, gRPC, P2P, and RPC services on custom ports.
+
+After scaffolding, now is a good time to make a commit to the local Git repository that was created for you.
+
+```bash
+git add .
+git commit -m "Scaffold module, maps, packages and messages for the dex"
+```
+
+Implement the code for the order book in the next chapter. 
diff --git a/docs/versioned_docs/version-v0.26/02-guide/07-interchange/03-walkthrough.md b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/03-walkthrough.md new file mode 100644 index 0000000..198e222 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/03-walkthrough.md @@ -0,0 +1,687 @@ +--- +sidebar_position: 3 +description: Walkthrough of commands to use the interchain exchange module. +--- + +# Use the Interchain Exchange + +In this chapter, you will learn about the exchange and how it will function once +it is implemented. This will give you a better understanding of what you will be +building in the coming chapters. + +To achieve this, we will perform the following tasks: + +* Start two local blockchains +* Set up an IBC relayer between the two chains +* Create an exchange order book for a token pair on the two chains +* Submit sell orders on the Mars chain +* Submit buy orders on the Venus chain +* Cancel sell or buy orders + +Starting the two local blockchains and setting up the IBC relayer will allow us +to create an exchange order book between the two chains. This order book will +allow us to submit sell and buy orders, as well as cancel any orders that we no +longer want to maintain. + +It is important to note that the commands in this chapter will only work +properly if you have completed all the following chapters in this tutorial. By +the end of this chapter, you should have a good understanding of how the +exchange will operate. + +## Start blockchain nodes + +To start using the interchain exchange, you will need to start two separate +blockchains. This can be done by running the `ignite chain serve` command, +followed by the `-c` flag and the path to the configuration file for each +blockchain. 
For example, to start the `mars` blockchain, you would run: + +``` +ignite chain serve -c mars.yml +``` + +To start the `venus` blockchain, you would run a similar command, but with the +path to the `venus.yml` configuration file: + +``` +ignite chain serve -c venus.yml +``` + +Once both blockchains are running, you can proceed with configuring the relayer +to enable interchain exchange between the two chains. + +## Relayer + +Next, let's set up an IBC relayer between two chains. If you have used a relayer +in the past, reset the relayer configuration directory: + +``` +rm -rf ~/.ignite/relayer +``` + +Now you can use the `ignite relayer configure` command. This command allows you +to specify the source and target chains, along with their respective RPC +endpoints, faucet URLs, port numbers, versions, gas prices, and gas limits. + +``` +ignite relayer configure -a --source-rpc "http://0.0.0.0:26657" --source-faucet "http://0.0.0.0:4500" --source-port "dex" --source-version "dex-1" --source-gasprice "0.0000025stake" --source-prefix "cosmos" --source-gaslimit 300000 --target-rpc "http://0.0.0.0:26659" --target-faucet "http://0.0.0.0:4501" --target-port "dex" --target-version "dex-1" --target-gasprice "0.0000025stake" --target-prefix "cosmos" --target-gaslimit 300000 +``` + +To create a connection between the two chains, you can use the ignite relayer +connect command. This command will establish a connection between the source and +target chains, allowing you to transfer data and assets between them. + +``` +ignite relayer connect +``` + +Now that we have two separate blockchain networks up and running, and a relayer +connection established to facilitate communication between them, we are ready to +begin using the interchain exchange binary to interact with these networks. This +will allow us to create order books and buy/sell orders, enabling us to trade +assets between the two chains. 
+
+## Order Book
+
+To create an order book for a pair of tokens, you can use the following command:
+
+```
+interchanged tx dex send-create-pair dex channel-0 marscoin venuscoin --from alice --chain-id mars --home ~/.mars
+```
+
+This command will create an order book for the pair of tokens `marscoin` and
+`venuscoin`. The command will be executed by the user `alice` on the Mars
+blockchain. The `--home` parameter specifies the location of the configuration
+directory for the Mars blockchain.
+
+Creating an order book affects state on the Mars blockchain to which the
+transaction was broadcast and the Venus blockchain.
+
+On the Mars blockchain, the `send-create-pair` command creates an empty sell
+order book.
+
+```
+interchanged q dex list-sell-order-book
+```
+
+```yml
+sellOrderBook:
+- amountDenom: marscoin
+  book:
+    idCount: 0
+    orders: []
+  index: dex-channel-0-marscoin-venuscoin
+  priceDenom: venuscoin
+```
+
+On the Venus blockchain, the same `send-create-pair` command creates a buy order
+book:
+
+```
+interchanged q dex list-buy-order-book --node tcp://localhost:26659
+```
+
+```yml
+buyOrderBook:
+- amountDenom: marscoin
+  book:
+    idCount: 0
+    orders: []
+  index: dex-channel-0-marscoin-venuscoin
+  priceDenom: venuscoin
+```
+
+In the `send-create-pair` command on the Mars blockchain, an IBC packet is sent to
+the Venus chain. This packet contains information that is used to create a buy
+order book on the Venus chain.
+
+When the Venus chain receives the IBC packet, it processes the information
+contained in the packet and creates a buy order book. The Venus chain then sends
+an acknowledgement back to the Mars chain to confirm that the buy order book has
+been successfully created.
+
+Upon receiving the acknowledgement from the Venus chain, the Mars chain creates
+a sell order book. This sell order book is associated with the buy order book on
+the Venus chain, allowing users to trade assets between the two chains. 
+ +## Sell Order + +After creating an order book, the next step is to create a sell order. This can +be done using the `send-sell-order` command, which is used to broadcast a +transaction with a message that locks a specified amount of tokens and creates a +sell order on the Mars blockchain. + +``` +interchanged tx dex send-sell-order dex channel-0 marscoin 10 venuscoin 15 --from alice --chain-id mars --home ~/.mars +``` + +In the example provided, the `send-sell-order` command is used to create a sell +order for 10 `marscoin` token and 15 `venuscoin` token. This sell order will be +added to the order book on the Mars blockchain. + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +```yml +balances: +- amount: "990" # decreased from 1000 + denom: marscoin +- amount: "1000" + denom: token +``` + +``` +interchanged q dex list-sell-order-book +``` + +```yml +sellOrderBook: +- amountDenom: marscoin + book: + idCount: 1 + orders: # a new sell order is created + - amount: 10 + creator: cosmos14ntyzr6d2dx4ppds9tvenx53fn0xl5jcakrtm4 + id: 0 + price: 15 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +## Buy order + +After creating a sell order, the next step in the trading process is typically +to create a buy order. This can be done using the `send-buy-order` command, +which is used to lock a specified amount of tokens and create a buy order on the +Venus blockchain + +``` +interchanged tx dex send-buy-order dex channel-0 marscoin 10 venuscoin 5 --from alice --chain-id venus --home ~/.venus --node tcp://localhost:26659 +``` + +In the example provided, the `send-buy-order` command is used to create a buy +order for 10 `marscoin` token and 5 `venuscoin` token. This buy order will be +added to the order book on the Venus blockchain. 
+ +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +```yml +balances: +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "950" # decreased from 1000 + denom: venuscoin +``` + +``` +interchanged q dex list-buy-order-book --node tcp://localhost:26659 +``` + +```yml +buyOrderBook: +- amountDenom: marscoin + book: + idCount: 1 + orders: # a new buy order is created + - amount: 10 + creator: cosmos1mrrttwtdcp47pl4hq6sar3mwqpmtc7pcl9e6ss + id: 0 + price: 5 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +## Perform an Exchange with a Sell Order + +You currently have two open orders for `marscoin`: + +* A sell order on the Mars chain, where you are offering to sell 10 `marscoin` + for 15 `venuscoin`. +* A buy order on the Venus chain, where you are willing to buy 5 `marscoin` for + 5 `venuscoin`. + +To perform an exchange, you can send a sell order to the Mars chain using the +following command: + +``` +interchanged tx dex send-sell-order dex channel-0 marscoin 5 venuscoin 3 --from alice --home ~/.mars +``` + +This sell order, offering to sell 5 `marscoin` for 3 `venuscoin`, will be filled +on the Venus chain by the existing buy order. This will result in the amount of +the buy order on the Venus chain being reduced by 5 `marscoin`. + +``` +interchanged q dex list-buy-order-book --node tcp://localhost:26659 +``` + +```yml +buyOrderBook: +- amountDenom: marscoin + book: + idCount: 1 + orders: + - amount: 5 # decreased from 10 + creator: cosmos1mrrttwtdcp47pl4hq6sar3mwqpmtc7pcl9e6ss + id: 0 + price: 5 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +The sender of the filled sell order traded 5 `marscoin` for 25 `venuscoin` +tokens. This means that the amount of the sell order (5 `marscoin`) was +multiplied by the price of the buy order (5 `venuscoin`) to determine the value +of the exchange. 
In this case, the value of the exchange was 25 `venuscoin` +vouchers. + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +```yml +balances: +- amount: "25" # increased from 0 + denom: ibc/BB38C24E9877 +- amount: "985" # decreased from 990 + denom: marscoin +- amount: "1000" + denom: token +``` + +The counterparty, or the sender of the buy `marscoin` order, will receive 5 +`marscoin` as a result of the exchange. + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +```yml +balances: +- amount: "5" # increased from 0 + denom: ibc/745B473BFE24 # marscoin voucher +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "950" + denom: venuscoin +``` + +The `venuscoin` balance has remained unchanged because the appropriate amount of +`venuscoin` (50) was already locked at the time the buy order was created in the +previous step. + + +## Perform an Exchange with a Buy Order + +To perform an exchange with a buy order, send a transaction to the decentralized +exchange to buy 5 `marscoin` for 15 `venuscoin`. This is done by running the +following command: + +``` +interchanged tx dex send-buy-order dex channel-0 marscoin 5 venuscoin 15 --from alice --home ~/.venus --node tcp://localhost:26659 +``` + +This buy order will be immediately filled on the Mars chain, and the creator of +the sell order will receive 75 `venuscoin` vouchers as payment. + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +```yml +balances: +- amount: "100" # increased from 25 + denom: ibc/BB38C24E9877 # venuscoin voucher +- amount: "985" + denom: marscoin +- amount: "1000" + denom: token +``` + +The amount of the sell order will be decreased by the amount of the filled buy +order, so in this case it will be decreased by 5 `marscoin`. 
+ +``` +interchanged q dex list-sell-order-book +``` + +```yml +sellOrderBook: +- amountDenom: marscoin + book: + idCount: 1 + orders: + - amount: 5 # decreased from 10 + creator: cosmos14ntyzr6d2dx4ppds9tvenx53fn0xl5jcakrtm4 + id: 0 + price: 15 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +The creator of the buy order receives 5 marscoin vouchers for 75 venuscoin +(5marscoin * 15venuscoin): + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +```yml +balances: +- amount: "10" # increased from 5 + denom: ibc/745B473BFE24 # marscoin vouchers +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "875" # decreased from 950 + denom: venuscoin +``` + +## Complete Exchange with a Partially Filled Sell Order + +To complete the exchange with a partially filled sell order, send a transaction +to the decentralized exchange to sell 10 `marscoin` for 3 `venuscoin`. This is +done by running the following command: + +``` +interchanged tx dex send-sell-order dex channel-0 marscoin 10 venuscoin 3 --from alice --home ~/.mars +``` + +In this scenario, the sell amount is 10 `marscoin`, but there is an existing buy +order for only 5 `marscoin`. The buy order will be filled completely and removed +from the order book. The author of the previously created buy order will receive +10 `marscoin` vouchers from the exchange. 
+ +To check the balances, she can run the following command: + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +```yml +balances: +- amount: "15" # increased from 5 + denom: ibc/745B473BFE24 # marscoin voucher +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "875" + denom: venuscoin +``` + +``` +interchanged q dex list-buy-order-book --node tcp://localhost:26659 +``` + +```yml +buyOrderBook: +- amountDenom: marscoin + book: + idCount: 1 + orders: [] # buy order with amount 5marscoin has been closed + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +The author of the sell order successfully exchanged 5 marscoin and received 25 +venuscoin vouchers. The other 5marscoin created a sell order: + +```yml +balances: +- amount: "125" # increased from 100 + denom: ibc/BB38C24E9877 # venuscoin vouchers +- amount: "975" # decreased from 985 + denom: marscoin +- amount: "1000" + denom: token +``` + +``` +interchanged q dex list-sell-order-book +``` + +```yml +sellOrderBook: +- amountDenom: marscoin + book: + idCount: 2 + orders: + - amount: 5 # hasn't changed + creator: cosmos14ntyzr6d2dx4ppds9tvenx53fn0xl5jcakrtm4 + id: 0 + price: 15 + - amount: 5 # new order is created + creator: cosmos14ntyzr6d2dx4ppds9tvenx53fn0xl5jcakrtm4 + id: 1 + price: 3 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +## Complete Exchange with a Partially Filled Buy Order + +To complete the exchange with a partially filled buy order, send a transaction +to the decentralized exchange to buy 10 `marscoin` for 5 `venuscoin`. 
This is +done by running the following command: + +``` +interchanged tx dex send-buy-order dex channel-0 marscoin 10 venuscoin 5 --from alice --home ~/.venus --node tcp://localhost:26659 +``` + +In this scenario, the buy order is only partially filled for 5 `marscoin`. There +is an existing sell order for 5 `marscoin` (with a price of 3 `venuscoin`) on +the Mars chain, which is completely filled and removed from the order book. The +author of the closed sell order will receive 15 `venuscoin` vouchers as payment, +which is the product of 5 `marscoin` and 3 `venuscoin`. + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +```yml +balances: +- amount: "140" # increased from 125 + denom: ibc/BB38C24E9877 # venuscoin vouchers +- amount: "975" + denom: marscoin +- amount: "1000" + denom: token +``` + +``` +interchanged q dex list-sell-order-book +``` + +```yml +sellOrderBook: +- amountDenom: marscoin + book: + idCount: 2 + orders: + - amount: 5 # order hasn't changed + creator: cosmos14ntyzr6d2dx4ppds9tvenx53fn0xl5jcakrtm4 + id: 0 + price: 15 + # a sell order for 5 marscoin has been closed + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +In this scenario, the author of the buy order will receive 5 `marscoin` vouchers +as payment, which locks up 50 `venuscoin` of their token. The remaining 5 +`marscoin` that is not filled by the sell order will create a new buy order on +the Venus chain. This means that the author of the buy order is still interested +in purchasing 5 `marscoin`, and is willing to pay the specified price for it. +The new buy order will remain on the order book until it is filled by another +sell order, or it is cancelled by the buyer. 
+ +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +```yml +balances: +- amount: "20" # increased from 15 + denom: ibc/745B473BFE24 # marscoin vouchers +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "825" # decreased from 875 + denom: venuscoin +``` + +``` +interchanged q dex list-buy-order-book --node tcp://localhost:26659 +``` + +```yml +buyOrderBook: +- amountDenom: marscoin + book: + idCount: 2 + orders: + - amount: 5 # new buy order is created + creator: cosmos1mrrttwtdcp47pl4hq6sar3mwqpmtc7pcl9e6ss + id: 1 + price: 5 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +## Cancel an Order + +After the exchanges described, there are still two open orders: a sell order on +the Mars chain (5 `marscoin` for 15 `venuscoin`), and a buy order on the Venus +chain (5 `marscoin` for 5 `venuscoin`). + +To cancel an order on a blockchain, you can use the `cancel-sell-order` or +`cancel-buy-order` command, depending on the type of order you want to cancel. +The command takes several arguments, including the `channel-id` of the IBC +connection, the `amount-denom` and `price-denom` of the order, and the +`order-id` of the order you want to cancel. + +To cancel a sell order on the Mars chain, you would run the following command: + +``` +interchanged tx dex cancel-sell-order dex channel-0 marscoin venuscoin 0 --from alice --home ~/.mars +``` + +This will cancel the sell order and remove it from the order book. The balance +of Alice's `marscoin` will be increased by the amount of the cancelled sell +order. + +To check Alice's balances, including her updated `marscoin` balance, run the +following command: + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +This will return a list of Alice's balances, including her updated `marscoin` +balance. 
+ +```yml +balances: +- amount: "140" + denom: ibc/BB38C24E9877 +- amount: "980" # increased from 975 + denom: marscoin +- amount: "1000" + denom: token +``` + +After the sell order on the Mars chain has been cancelled, the sell order book +on that blockchain will be empty. This means that there are no longer any active +sell orders on the Mars chain, and anyone interested in purchasing `marscoin` +will need to create a new buy order. The sell order book will remain empty until +a new sell order is created and added to it. + +``` +interchanged q dex list-sell-order-book +``` + +```yml +sellOrderBook: +- amountDenom: marscoin + book: + idCount: 2 + orders: [] + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +To cancel a buy order on the `Venus` chain, you can run the following command: + +``` +interchanged tx dex cancel-buy-order dex channel-0 marscoin venuscoin 1 --from alice --home ~/.venus --node tcp://localhost:26659 +``` + +This will cancel the buy order and remove it from the order book. The balance of +Alice's `venuscoin` will be increased by the amount of the cancelled buy order. + +To check Alice's balances, including her updated `venuscoin` balance, you can +run the following command: + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +The amount of `venuscoin` is increased: + +```yml +balances: +- amount: "20" + denom: ibc/745B473BFE24 +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "850" # increased from 825 + denom: venuscoin +``` + +This will return a list of Alice's balances, including her updated `venuscoin` +balance. + +After canceling a buy order, the buy order book on the Venus blockchain will be +empty. This means that there are no longer any active buy orders on the chain, +and anyone interested in selling `marscoin` will need to create a new sell +order. 
The buy order book will remain empty until a new buy order is created and +added to it. + +``` +interchanged q dex list-buy-order-book --node tcp://localhost:26659 +``` + +```yml +buyOrderBook: +- amountDenom: marscoin + book: + idCount: 2 + orders: [] + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +In this walkthrough, we demonstrated how to set up an interchain exchange for +trading tokens between two different blockchain networks. This involved creating +an exchange order book for a specific token pair and establishing a fixed +exchange rate between the two. + +Once the exchange was set up, users could send sell orders on the Mars chain and +buy orders on the Venus chain. This allowed them to offer their tokens for sale +or purchase tokens from the exchange. In addition, users could also cancel their +orders if needed. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/07-interchange/04-creating-order-books.md b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/04-creating-order-books.md new file mode 100644 index 0000000..1c0de3f --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/04-creating-order-books.md @@ -0,0 +1,480 @@ +--- +sidebar_position: 4 +description: Implement logic to create order books. +--- + +# Implement the Order Books + +In this chapter, you implement the logic to create order books. + +In the Cosmos SDK, the state is stored in a key-value store. Each order book is stored under a unique key that is +composed of four values: + +- Port ID +- Channel ID +- Source denom +- Target denom + +For example, an order book for marscoin and venuscoin could be stored under `dex-channel-4-marscoin-venuscoin`. + +First, define a function that returns an order book store key: + +```go +// x/dex/types/keys.go +package types + +import "fmt" + +// ... 
+func OrderBookIndex(portID string, channelID string, sourceDenom string, targetDenom string) string { + return fmt.Sprintf("%s-%s-%s-%s", portID, channelID, sourceDenom, targetDenom) +} +``` + +The `send-create-pair` command is used to create order books. This command: + +- Creates and broadcasts a transaction with a message of type `SendCreatePair`. +- The message gets routed to the `dex` module. +- Finally, a `SendCreatePair` keeper method is called. + +You need the `send-create-pair` command to do the following: + +- When processing `SendCreatePair` message on the source chain: + - Check that an order book with the given pair of denoms does not yet exist. + - Transmit an IBC packet with information about port, channel, source denoms, and target denoms. +- After the packet is received on the target chain: + - Check that an order book with the given pair of denoms does not yet exist on the target chain. + - Create a new order book for buy orders. + - Transmit an IBC acknowledgement back to the source chain. +- After the acknowledgement is received on the source chain: + - Create a new order book for sell orders. + +## Message Handling in SendCreatePair + +The `SendCreatePair` function was created during the IBC packet scaffolding. The function creates an IBC packet, +populates it with source and target denoms, and transmits this packet over IBC. + +Now, add the logic to check for an existing order book for a particular pair of denoms: + +```go +// x/dex/keeper/msg_server_create_pair.go + +package keeper + +import ( + "errors" + // ... 
+) + +func (k msgServer) SendCreatePair(goCtx context.Context, msg *types.MsgSendCreatePair) (*types.MsgSendCreatePairResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Get an order book index + pairIndex := types.OrderBookIndex(msg.Port, msg.ChannelID, msg.SourceDenom, msg.TargetDenom) + + // If an order book is found, return an error + _, found := k.GetSellOrderBook(ctx, pairIndex) + if found { + return &types.MsgSendCreatePairResponse{}, errors.New("the pair already exist") + } + + // Construct the packet + var packet types.CreatePairPacketData + + packet.SourceDenom = msg.SourceDenom + packet.TargetDenom = msg.TargetDenom + + // Transmit the packet + err := k.TransmitCreatePairPacket( + ctx, + packet, + msg.Port, + msg.ChannelID, + clienttypes.ZeroHeight(), + msg.TimeoutTimestamp, + ) + if err != nil { + return nil, err + } + + return &types.MsgSendCreatePairResponse{}, nil +} +``` + +## Lifecycle of an IBC Packet + +During a successful transmission, an IBC packet goes through these stages: + +1. Message processing before packet transmission on the source chain +2. Reception of a packet on the target chain +3. Acknowledgment of a packet on the source chain +4. Timeout of a packet on the source chain + +In the following section, implement the packet reception logic in the `OnRecvCreatePairPacket` function and the packet +acknowledgement logic in the `OnAcknowledgementCreatePairPacket` function. + +Leave the Timeout function empty. + +## Receive an IBC packet + +The protocol buffer definition defines the data that an order book contains. + +Add the `OrderBook` and `Order` messages to the `order.proto` file. + +First, add the proto buffer files to build the Go code files. You can modify these files for the purpose of your app. 
+
+Create a new `order.proto` file in the `proto/interchange/dex` directory and add the content:
+
+```protobuf
+// proto/interchange/dex/order.proto
+
+syntax = "proto3";
+
+package interchange.dex;
+
+option go_package = "interchange/x/dex/types";
+
+message OrderBook {
+  int32 idCount = 1;
+  repeated Order orders = 2;
+}
+
+message Order {
+  int32 id = 1;
+  string creator = 2;
+  int32 amount = 3;
+  int32 price = 4;
+}
+```
+
+Modify the `buy_order_book.proto` file to have the fields for creating a buy order on the order book.
+
+**Tip:** Don't forget to add the import as well.
+
+```protobuf
+// proto/interchange/dex/buy_order_book.proto
+
+// ...
+
+import "interchange/dex/order.proto";
+
+message BuyOrderBook {
+  // ...
+  OrderBook book = 4;
+}
+```
+
+Modify the `sell_order_book.proto` file to add the order book into the sell order book.
+
+The proto definition for the `SellOrderBook` looks like:
+
+```protobuf
+// proto/interchange/dex/sell_order_book.proto
+
+// ...
+import "interchange/dex/order.proto";
+
+message SellOrderBook {
+  // ...
+  OrderBook book = 4;
+}
+```
+
+Now, use Ignite CLI to build the proto files for the `send-create-pair` command:
+
+```bash
+ignite generate proto-go --yes
+```
+
+Start enhancing the functions for the IBC packets.
+
+Create a new file `x/dex/types/order_book.go`.
+ +Add the new order book function to the corresponding Go file: + +```go +// x/dex/types/order_book.go + +package types + +func NewOrderBook() OrderBook { + return OrderBook{ + IdCount: 0, + } +} +``` + +To create a new buy order book type, define `NewBuyOrderBook` in a new file `x/dex/types/buy_order_book.go` : + +```go +// x/dex/types/buy_order_book.go + +package types + +func NewBuyOrderBook(AmountDenom string, PriceDenom string) BuyOrderBook { + book := NewOrderBook() + return BuyOrderBook{ + AmountDenom: AmountDenom, + PriceDenom: PriceDenom, + Book: &book, + } +} +``` + +When an IBC packet is received on the target chain, the module must check whether a book already exists. If not, then +create a buy order book for the specified denoms. + +```go +// x/dex/keeper/create_pair.go + +package keeper + +// ... + +func (k Keeper) OnRecvCreatePairPacket(ctx sdk.Context, packet channeltypes.Packet, data types.CreatePairPacketData) (packetAck types.CreatePairPacketAck, err error) { + // ... + + // Get an order book index + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.SourceDenom, data.TargetDenom) + + // If an order book is found, return an error + _, found := k.GetBuyOrderBook(ctx, pairIndex) + if found { + return packetAck, errors.New("the pair already exist") + } + + // Create a new buy order book for source and target denoms + book := types.NewBuyOrderBook(data.SourceDenom, data.TargetDenom) + + // Assign order book index + book.Index = pairIndex + + // Save the order book to the store + k.SetBuyOrderBook(ctx, book) + return packetAck, nil +} +``` + +## Receive an IBC Acknowledgement + +When an IBC acknowledgement is received on the source chain, the module must check whether a book already exists. If +not, +create a sell order book for the specified denoms. + +Create a new file `x/dex/types/sell_order_book.go`. +Insert the `NewSellOrderBook` function which creates a new sell order book. 
+ +```go +// x/dex/types/sell_order_book.go + +package types + +func NewSellOrderBook(AmountDenom string, PriceDenom string) SellOrderBook { + book := NewOrderBook() + return SellOrderBook{ + AmountDenom: AmountDenom, + PriceDenom: PriceDenom, + Book: &book, + } +} +``` + +Modify the Acknowledgement function in the `x/dex/keeper/create_pair.go` file: + +```go +// x/dex/keeper/create_pair.go + +package keeper + +// ... + +func (k Keeper) OnAcknowledgementCreatePairPacket(ctx sdk.Context, packet channeltypes.Packet, data types.CreatePairPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.CreatePairPacketAck + if err := types.ModuleCdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + // Set the sell order book + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.SourceDenom, data.TargetDenom) + book := types.NewSellOrderBook(data.SourceDenom, data.TargetDenom) + book.Index = pairIndex + k.SetSellOrderBook(ctx, book) + + return nil + default: + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("invalid acknowledgment format") + } +} +``` + +In this section, you implemented the logic behind the new `send-create-pair` command: + +- When an IBC packet is received on the target chain, `send-create-pair` command creates a buy order book. +- When an IBC acknowledgement is received on the source chain, the `send-create-pair` command creates a sell order book. 
+ +### Implement the appendOrder Function to Add Orders to the Order Book + +```go +// x/dex/types/order_book.go + +package types + +import ( + "errors" + "sort" +) + +func NewOrderBook() OrderBook { + return OrderBook{ + IdCount: 0, + } +} + +const ( + MaxAmount = int32(100000) + MaxPrice = int32(100000) +) + +type Ordering int + +const ( + Increasing Ordering = iota + Decreasing +) + +var ( + ErrMaxAmount = errors.New("max amount reached") + ErrMaxPrice = errors.New("max price reached") + ErrZeroAmount = errors.New("amount is zero") + ErrZeroPrice = errors.New("price is zero") + ErrOrderNotFound = errors.New("order not found") +) +``` + +The `AppendOrder` function initializes and appends a new order to an order book from the order information: + +```go +// x/dex/types/order_book.go + +func (book *OrderBook) appendOrder(creator string, amount int32, price int32, ordering Ordering) (int32, error) { + if err := checkAmountAndPrice(amount, price); err != nil { + return 0, err + } + + // Initialize the order + var order Order + order.Id = book.GetNextOrderID() + order.Creator = creator + order.Amount = amount + order.Price = price + + // Increment ID tracker + book.IncrementNextOrderID() + + // Insert the order + book.insertOrder(order, ordering) + return order.Id, nil +} +``` + +#### Implement the checkAmountAndPrice Function For an Order + +The `checkAmountAndPrice` function checks for the correct amount or price: + +```go +// x/dex/types/order_book.go + +func checkAmountAndPrice(amount int32, price int32) error { + if amount == int32(0) { + return ErrZeroAmount + } + if amount > MaxAmount { + return ErrMaxAmount + } + + if price == int32(0) { + return ErrZeroPrice + } + if price > MaxPrice { + return ErrMaxPrice + } + + return nil +} +``` + +#### Implement the GetNextOrderID Function + +The `GetNextOrderID` function gets the ID of the next order to append: + +```go +// x/dex/types/order_book.go + +func (book OrderBook) GetNextOrderID() int32 { + return book.IdCount 
+} +``` + +#### Implement the IncrementNextOrderID Function + +The `IncrementNextOrderID` function updates the ID count for orders: + +```go +// x/dex/types/order_book.go + +func (book *OrderBook) IncrementNextOrderID() { + // Even numbers to have different ID than buy orders + book.IdCount++ +} +``` + +#### Implement the insertOrder Function + +The `insertOrder` function inserts the order in the book with the provided order: + +```go +// x/dex/types/order_book.go + +func (book *OrderBook) insertOrder(order Order, ordering Ordering) { + if len(book.Orders) > 0 { + var i int + + // get the index of the new order depending on the provided ordering + if ordering == Increasing { + i = sort.Search(len(book.Orders), func(i int) bool { return book.Orders[i].Price > order.Price }) + } else { + i = sort.Search(len(book.Orders), func(i int) bool { return book.Orders[i].Price < order.Price }) + } + + // insert order + orders := append(book.Orders, &order) + copy(orders[i+1:], orders[i:]) + orders[i] = &order + book.Orders = orders + } else { + book.Orders = append(book.Orders, &order) + } +} +``` + +This completes the order book setup. + +Now is a good time to save the state of your implementation. +Because your project is in a local repository, you can use git. Saving your current state lets you jump back and forth +in case you introduce errors or need a break. + +```bash +git add . +git commit -m "Create Order Books" +``` + +In the next chapter, you learn how to deal with vouchers by minting and burning vouchers and locking and unlocking +native blockchain token in your app. 
diff --git a/docs/versioned_docs/version-v0.26/02-guide/07-interchange/05-mint-and-burn-voucher.md b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/05-mint-and-burn-voucher.md new file mode 100644 index 0000000..5b29c3c --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/05-mint-and-burn-voucher.md @@ -0,0 +1,351 @@ +--- +order: 5 +description: Mint vouchers and lock and unlock native token from a blockchain. +--- + +# Mint and Burn Vouchers + +In this chapter, you learn about vouchers. The `dex` module implementation mints vouchers and locks and unlocks native +token from a blockchain. + +There is a lot to learn from this `dex` module implementation: + +- You work with the `bank` keeper and use several methods it offers. +- You interact with another module and use the module account to lock tokens. + +This implementation can teach you how to use various interactions with module accounts or minting, locking or burning +tokens. + +## Create the SafeBurn Function to Burn Vouchers or Lock Tokens + +The `SafeBurn` function burns tokens if they are IBC vouchers (have an `ibc/` prefix) and locks tokens if they are +native to the chain. 
+ +Create a new `x/dex/keeper/mint.go` file: + +```go +// x/dex/keeper/mint.go + +package keeper + +import ( + "fmt" + "strings" + + sdkmath "cosmossdk.io/math" + sdk "github.com/cosmos/cosmos-sdk/types" + ibctransfertypes "github.com/cosmos/ibc-go/v6/modules/apps/transfer/types" + + "interchange/x/dex/types" +) + +// isIBCToken checks if the token came from the IBC module +// Each IBC token starts with an ibc/ denom, the check is rather simple +func isIBCToken(denom string) bool { + return strings.HasPrefix(denom, "ibc/") +} + +func (k Keeper) SafeBurn(ctx sdk.Context, port string, channel string, sender sdk.AccAddress, denom string, amount int32) error { + if isIBCToken(denom) { + // Burn the tokens + if err := k.BurnTokens(ctx, sender, sdk.NewCoin(denom, sdkmath.NewInt(int64(amount)))); err != nil { + return err + } + } else { + // Lock the tokens + if err := k.LockTokens(ctx, port, channel, sender, sdk.NewCoin(denom, sdkmath.NewInt(int64(amount)))); err != nil { + return err + } + } + + return nil +} +``` + +If the token comes from another blockchain as an IBC token, the burning method actually burns those IBC tokens on one +chain and unlocks them on the other chain. The native token are locked away. + +Now, implement the `BurnTokens` keeper method as used in the previous function. The `bankKeeper` has a useful function +for this: + +```go +// x/dex/keeper/mint.go + +package keeper + +// ... + +func (k Keeper) BurnTokens(ctx sdk.Context, sender sdk.AccAddress, tokens sdk.Coin) error { + // transfer the coins to the module account and burn them + if err := k.bankKeeper.SendCoinsFromAccountToModule(ctx, sender, types.ModuleName, sdk.NewCoins(tokens)); err != nil { + return err + } + + if err := k.bankKeeper.BurnCoins( + ctx, types.ModuleName, sdk.NewCoins(tokens), + ); err != nil { + // NOTE: should not happen as the module account was + // retrieved on the step above and it has enough balance + // to burn. 
+ panic(fmt.Sprintf("cannot burn coins after a successful send to a module account: %v", err)) + } + + return nil +} +``` + +Implement the `LockTokens` keeper method. + +To lock token from a native chain, you can send the native token to the Escrow Address: + +```go +// x/dex/keeper/mint.go + +package keeper + +// ... + +func (k Keeper) LockTokens(ctx sdk.Context, sourcePort string, sourceChannel string, sender sdk.AccAddress, tokens sdk.Coin) error { + // create the escrow address for the tokens + escrowAddress := ibctransfertypes.GetEscrowAddress(sourcePort, sourceChannel) + + // escrow source tokens. It fails if balance insufficient + if err := k.bankKeeper.SendCoins( + ctx, sender, escrowAddress, sdk.NewCoins(tokens), + ); err != nil { + return err + } + + return nil +} +``` + +`BurnTokens` and `LockTokens` use `SendCoinsFromAccountToModule`, `BurnCoins`, and `SendCoins` keeper methods of the +`bank` module. + +To start using these function from the `dex` module, first add them to the `BankKeeper` interface in the +`x/dex/types/expected_keepers.go` file. + +```go +// x/dex/types/expected_keepers.go + +package types + +import sdk "github.com/cosmos/cosmos-sdk/types" + +// BankKeeper defines the expected bank keeper +type BankKeeper interface { + //... + SendCoinsFromAccountToModule(ctx sdk.Context, senderAddr sdk.AccAddress, recipientModule string, amt sdk.Coins) error + BurnCoins(ctx sdk.Context, moduleName string, amt sdk.Coins) error + SendCoins(ctx sdk.Context, fromAddr sdk.AccAddress, toAddr sdk.AccAddress, amt sdk.Coins) error +} +``` + +## SaveVoucherDenom + +The `SaveVoucherDenom` function saves the voucher denom to be able to convert it back later. 
+ +Create a new `x/dex/keeper/denom.go` file: + +```go +// x/dex/keeper/denom.go + +package keeper + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + ibctransfertypes "github.com/cosmos/ibc-go/v6/modules/apps/transfer/types" + + "interchange/x/dex/types" +) + +func (k Keeper) SaveVoucherDenom(ctx sdk.Context, port string, channel string, denom string) { + voucher := VoucherDenom(port, channel, denom) + + // Store the origin denom + _, saved := k.GetDenomTrace(ctx, voucher) + if !saved { + k.SetDenomTrace(ctx, types.DenomTrace{ + Index: voucher, + Port: port, + Channel: channel, + Origin: denom, + }) + } +} +``` + +Finally, the last function to implement is the `VoucherDenom` function that returns the voucher of the denom from the +port ID and channel ID: + +```go +// x/dex/keeper/denom.go + +package keeper + +// ... + +func VoucherDenom(port string, channel string, denom string) string { + // since SendPacket did not prefix the denomination, we must prefix denomination here + sourcePrefix := ibctransfertypes.GetDenomPrefix(port, channel) + + // NOTE: sourcePrefix contains the trailing "/" + prefixedDenom := sourcePrefix + denom + + // construct the denomination trace from the full raw denomination + denomTrace := ibctransfertypes.ParseDenomTrace(prefixedDenom) + voucher := denomTrace.IBCDenom() + return voucher[:16] +} +``` + +### Implement an OriginalDenom Function + +The `OriginalDenom` function returns back the original denom of the voucher. + +False is returned if the port ID and channel ID provided are not the origins of the voucher: + +```go +// x/dex/keeper/denom.go + +package keeper + +// ... 
+ +func (k Keeper) OriginalDenom(ctx sdk.Context, port string, channel string, voucher string) (string, bool) { + trace, exist := k.GetDenomTrace(ctx, voucher) + if exist { + // Check if original port and channel + if trace.Port == port && trace.Channel == channel { + return trace.Origin, true + } + } + + // Not the original chain + return "", false +} +``` + +### Implement a SafeMint Function + +If a token is an IBC token (has an `ibc/` prefix), the `SafeMint` function mints IBC token with `MintTokens`. +Otherwise, it unlocks native token with `UnlockTokens`. + +Go back to the `x/dex/keeper/mint.go` file and add the following code: + +```go +// x/dex/keeper/mint.go + +package keeper + +// ... + +func (k Keeper) SafeMint(ctx sdk.Context, port string, channel string, receiver sdk.AccAddress, denom string, amount int32) error { + if isIBCToken(denom) { + // Mint IBC tokens + if err := k.MintTokens(ctx, receiver, sdk.NewCoin(denom, sdkmath.NewInt(int64(amount)))); err != nil { + return err + } + } else { + // Unlock native tokens + if err := k.UnlockTokens( + ctx, + port, + channel, + receiver, + sdk.NewCoin(denom, sdkmath.NewInt(int64(amount))), + ); err != nil { + return err + } + } + + return nil +} +``` + +#### Implement a `MintTokens` Function + +You can use the `bankKeeper` function again to MintCoins. These token will then be sent to the receiver account: + +```go +// x/dex/keeper/mint.go + +package keeper + +// ... 
+ +func (k Keeper) MintTokens(ctx sdk.Context, receiver sdk.AccAddress, tokens sdk.Coin) error { + // mint new tokens if the source of the transfer is the same chain + if err := k.bankKeeper.MintCoins( + ctx, types.ModuleName, sdk.NewCoins(tokens), + ); err != nil { + return err + } + + // send to receiver + if err := k.bankKeeper.SendCoinsFromModuleToAccount( + ctx, types.ModuleName, receiver, sdk.NewCoins(tokens), + ); err != nil { + panic(fmt.Sprintf("unable to send coins from module to account despite previously minting coins to module account: %v", err)) + } + + return nil +} +``` + +Finally, add the function to unlock token after they are sent back to the native blockchain: + +```go +// x/dex/keeper/mint.go + +package keeper + +// ... + +func (k Keeper) UnlockTokens(ctx sdk.Context, sourcePort string, sourceChannel string, receiver sdk.AccAddress, tokens sdk.Coin) error { + // create the escrow address for the tokens + escrowAddress := ibctransfertypes.GetEscrowAddress(sourcePort, sourceChannel) + + // escrow source tokens. It fails if balance insufficient + if err := k.bankKeeper.SendCoins( + ctx, escrowAddress, receiver, sdk.NewCoins(tokens), + ); err != nil { + return err + } + + return nil +} +``` + +The `MintTokens` function uses two keeper methods from the `bank` module: `MintCoins` and `SendCoinsFromModuleToAccount` +. +To import these methods, add their signatures to the `BankKeeper` interface in the `x/dex/types/expected_keepers.go` +file: + +```go +// x/dex/types/expected_keepers.go + +package types + +// ... + +type BankKeeper interface { + // ... + MintCoins(ctx sdk.Context, moduleName string, amt sdk.Coins) error + SendCoinsFromModuleToAccount(ctx sdk.Context, senderModule string, recipientAddr sdk.AccAddress, amt sdk.Coins) error +} +``` + +## Summary + +You finished the mint and burn voucher logic. + +It is a good time to make another git commit to save the state of your work: + +```bash +git add . 
+git commit -m "Add Mint and Burn Voucher" +``` + +In the next chapter, you look into creating sell orders. diff --git a/docs/versioned_docs/version-v0.26/02-guide/07-interchange/06-creating-sell-orders.md b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/06-creating-sell-orders.md new file mode 100644 index 0000000..46f527d --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/06-creating-sell-orders.md @@ -0,0 +1,402 @@ +--- +sidebar_position: 6 +description: Implement logic to create sell orders. +--- + +# Create Sell Orders + +In this chapter, you implement the logic for creating sell orders. + +The packet proto file for a sell order is already generated. Add the seller information: + +```protobuf +// proto/dex/packet.proto + +message SellOrderPacketData { + // ... + string seller = 5; +} +``` + +Now, use Ignite CLI to build the proto files for the `send-sell-order` command. You used this command in a previous +chapter. + +```bash +ignite generate proto-go --yes +``` + +## Message Handling in SendSellOrder + +Sell orders are created using the `send-sell-order` command. This command creates a transaction with a `SendSellOrder` +message that triggers the `SendSellOrder` keeper method. + +The `SendSellOrder` command: + +* Checks that an order book for a specified denom pair exists. +* Safely burns or locks token. + * If the token is an IBC token, burn the token. + * If the token is a native token, lock the token. +* Saves the voucher that is received on the target chain to later resolve a denom. +* Transmits an IBC packet to the target chain. 
+ +```go +// x/dex/keeper/msg_server_sell_order.go + +package keeper + +import ( + "context" + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + clienttypes "github.com/cosmos/ibc-go/v6/modules/core/02-client/types" + + "interchange/x/dex/types" +) + +func (k msgServer) SendSellOrder(goCtx context.Context, msg *types.MsgSendSellOrder) (*types.MsgSendSellOrderResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // If an order book doesn't exist, throw an error + pairIndex := types.OrderBookIndex(msg.Port, msg.ChannelID, msg.AmountDenom, msg.PriceDenom) + _, found := k.GetSellOrderBook(ctx, pairIndex) + if !found { + return &types.MsgSendSellOrderResponse{}, errors.New("the pair doesn't exist") + } + + // Get sender's address + sender, err := sdk.AccAddressFromBech32(msg.Creator) + if err != nil { + return &types.MsgSendSellOrderResponse{}, err + } + + // Use SafeBurn to ensure no new native tokens are minted + if err := k.SafeBurn(ctx, msg.Port, msg.ChannelID, sender, msg.AmountDenom, msg.Amount); err != nil { + return &types.MsgSendSellOrderResponse{}, err + } + + // Save the voucher received on the other chain, to have the ability to resolve it into the original denom + k.SaveVoucherDenom(ctx, msg.Port, msg.ChannelID, msg.AmountDenom) + + var packet types.SellOrderPacketData + packet.Seller = msg.Creator + packet.AmountDenom = msg.AmountDenom + packet.Amount = msg.Amount + packet.PriceDenom = msg.PriceDenom + packet.Price = msg.Price + + // Transmit the packet + err = k.TransmitSellOrderPacket(ctx, packet, msg.Port, msg.ChannelID, clienttypes.ZeroHeight(), msg.TimeoutTimestamp) + if err != nil { + return nil, err + } + + return &types.MsgSendSellOrderResponse{}, nil +} +``` + +## On Receiving a Sell Order + +When a "sell order" packet is received on the target chain, you want the module to: + +* Update the sell order book +* Distribute sold token to the buyer +* Send the sell order to chain A after the fill attempt + +```go +// 
x/dex/keeper/sell_order.go + +package keeper + +// ... + +func (k Keeper) OnRecvSellOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.SellOrderPacketData) (packetAck types.SellOrderPacketAck, err error) { + if err := data.ValidateBasic(); err != nil { + return packetAck, err + } + + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.AmountDenom, data.PriceDenom) + book, found := k.GetBuyOrderBook(ctx, pairIndex) + if !found { + return packetAck, errors.New("the pair doesn't exist") + } + + // Fill sell order + remaining, liquidated, gain, _ := book.FillSellOrder(types.Order{ + Amount: data.Amount, + Price: data.Price, + }) + + // Return remaining amount and gains + packetAck.RemainingAmount = remaining.Amount + packetAck.Gain = gain + + // Before distributing sales, we resolve the denom + // First we check if the denom received comes from this chain originally + finalAmountDenom, saved := k.OriginalDenom(ctx, packet.DestinationPort, packet.DestinationChannel, data.AmountDenom) + if !saved { + // If it was not from this chain we use voucher as denom + finalAmountDenom = VoucherDenom(packet.SourcePort, packet.SourceChannel, data.AmountDenom) + } + + // Dispatch liquidated buy orders + for _, liquidation := range liquidated { + liquidation := liquidation + addr, err := sdk.AccAddressFromBech32(liquidation.Creator) + if err != nil { + return packetAck, err + } + + if err := k.SafeMint(ctx, packet.DestinationPort, packet.DestinationChannel, addr, finalAmountDenom, liquidation.Amount); err != nil { + return packetAck, err + } + } + + // Save the new order book + k.SetBuyOrderBook(ctx, book) + + return packetAck, nil +} +``` + +### Implement the FillSellOrder Function + +The `FillSellOrder` function tries to fill the buy order with the order book and returns all the side effects: + +```go +// x/dex/types/buy_order_book.go + +package types + +// ... 
+ +func (b *BuyOrderBook) FillSellOrder(order Order) ( + remainingSellOrder Order, + liquidated []Order, + gain int32, + filled bool, +) { + var liquidatedList []Order + totalGain := int32(0) + remainingSellOrder = order + + // Liquidate as long as there is match + for { + var match bool + var liquidation Order + remainingSellOrder, liquidation, gain, match, filled = b.LiquidateFromSellOrder( + remainingSellOrder, + ) + if !match { + break + } + + // Update gains + totalGain += gain + + // Update liquidated + liquidatedList = append(liquidatedList, liquidation) + + if filled { + break + } + } + + return remainingSellOrder, liquidatedList, totalGain, filled +} +``` + +### Implement The LiquidateFromSellOrder Function + +The `LiquidateFromSellOrder` function liquidates the first sell order of the book from the buy order. If no match is +found, return false for match: + +```go +// x/dex/types/buy_order_book.go + +package types + +// ... + +func (b *BuyOrderBook) LiquidateFromSellOrder(order Order) ( + remainingSellOrder Order, + liquidatedBuyOrder Order, + gain int32, + match bool, + filled bool, +) { + remainingSellOrder = order + + // No match if no order + orderCount := len(b.Book.Orders) + if orderCount == 0 { + return order, liquidatedBuyOrder, gain, false, false + } + + // Check if match + highestBid := b.Book.Orders[orderCount-1] + if order.Price > highestBid.Price { + return order, liquidatedBuyOrder, gain, false, false + } + + liquidatedBuyOrder = *highestBid + + // Check if sell order can be entirely filled + if highestBid.Amount >= order.Amount { + remainingSellOrder.Amount = 0 + liquidatedBuyOrder.Amount = order.Amount + gain = order.Amount * highestBid.Price + + // Remove the highest bid if it has been entirely liquidated + highestBid.Amount -= order.Amount + if highestBid.Amount == 0 { + b.Book.Orders = b.Book.Orders[:orderCount-1] + } else { + b.Book.Orders[orderCount-1] = highestBid + } + + return remainingSellOrder, liquidatedBuyOrder, gain, true, 
true + } + + // Not entirely filled + gain = highestBid.Amount * highestBid.Price + b.Book.Orders = b.Book.Orders[:orderCount-1] + remainingSellOrder.Amount -= highestBid.Amount + + return remainingSellOrder, liquidatedBuyOrder, gain, true, false +} +``` + +### Implement the OnAcknowledgement Function for Sell Order Packets + +After an IBC packet is processed on the target chain, an acknowledgement is returned to the source chain and processed +by the `OnAcknowledgementSellOrderPacket` function. + +The dex module on the source chain: + +* Stores the remaining sell order in the sell order book. +* Distributes sold tokens to the buyers. +* Distributes the price of the amount sold to the seller. +* On error, mints the burned tokens. + +```go +// x/dex/keeper/sell_order.go + +package keeper + +// ... + +func (k Keeper) OnAcknowledgementSellOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.SellOrderPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + // In case of error we mint back the native token + receiver, err := sdk.AccAddressFromBech32(data.Seller) + if err != nil { + return err + } + + if err := k.SafeMint(ctx, packet.SourcePort, packet.SourceChannel, receiver, data.AmountDenom, data.Amount); err != nil { + return err + } + + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.SellOrderPacketAck + if err := types.ModuleCdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + // Get the sell order book + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.AmountDenom, data.PriceDenom) + book, found := k.GetSellOrderBook(ctx, pairIndex) + if !found { + panic("sell order book must exist") + } + + // Append the 
remaining amount of the order + if packetAck.RemainingAmount > 0 { + _, err := book.AppendOrder(data.Seller, packetAck.RemainingAmount, data.Price) + if err != nil { + return err + } + + // Save the new order book + k.SetSellOrderBook(ctx, book) + } + + // Mint the gains + if packetAck.Gain > 0 { + receiver, err := sdk.AccAddressFromBech32(data.Seller) + if err != nil { + return err + } + + finalPriceDenom, saved := k.OriginalDenom(ctx, packet.SourcePort, packet.SourceChannel, data.PriceDenom) + if !saved { + // If it was not from this chain we use voucher as denom + finalPriceDenom = VoucherDenom(packet.DestinationPort, packet.DestinationChannel, data.PriceDenom) + } + + if err := k.SafeMint(ctx, packet.SourcePort, packet.SourceChannel, receiver, finalPriceDenom, packetAck.Gain); err != nil { + return err + } + } + + return nil + default: + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("invalid acknowledgment format") + } +} +``` + +```go +// x/dex/types/sell_order_book.go + +package types + +// ... + +func (s *SellOrderBook) AppendOrder(creator string, amount int32, price int32) (int32, error) { + return s.Book.appendOrder(creator, amount, price, Decreasing) +} +``` + +### Add the OnTimeout of a Sell Order Packet Function + +If a timeout occurs, mint back the native token: + +```go +// x/dex/keeper/sell_order.go + +package keeper + +// ... + +func (k Keeper) OnTimeoutSellOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.SellOrderPacketData) error { + // In case of error we mint back the native token + receiver, err := sdk.AccAddressFromBech32(data.Seller) + if err != nil { + return err + } + + if err := k.SafeMint(ctx, packet.SourcePort, packet.SourceChannel, receiver, data.AmountDenom, data.Amount); err != nil { + return err + } + + return nil +} +``` + +## Summary + +Great, you have completed the sell order logic. 
+
+It is a good time to make another git commit to save the state of your work:
+
+```bash
+git add .
+git commit -m "Add Sell Orders"
+```
diff --git a/docs/versioned_docs/version-v0.26/02-guide/07-interchange/07-creating-buy-orders.md b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/07-creating-buy-orders.md
new file mode 100644
index 0000000..e1ed165
--- /dev/null
+++ b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/07-creating-buy-orders.md
@@ -0,0 +1,440 @@
+---
+sidebar_position: 7
+description: Implement the buy order logic.
+---
+
+# Creating Buy Orders
+
+In this chapter, you implement the creation of buy orders. The logic is very similar to the sell order logic you
+implemented in the previous chapter.
+
+## Modify the Proto Definition
+
+Add the buyer to the proto file definition:
+
+```protobuf
+// proto/interchange/dex/packet.proto
+
+message BuyOrderPacketData {
+  // ...
+  string buyer = 5;
+}
+```
+
+Now, use Ignite CLI to build the proto files for the `send-buy-order` command. You used this command in previous
+chapters.
+ +```bash +ignite generate proto-go --yes +``` + +## IBC Message Handling in SendBuyOrder + +* Check if the pair exists on the order book +* If the token is an IBC token, burn the tokens +* If the token is a native token, lock the tokens +* Save the voucher received on the target chain to later resolve a denom + +```go +// x/dex/keeper/msg_server_buy_order.go + +package keeper + +import ( + "context" + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "interchange/x/dex/types" +) + +func (k msgServer) SendBuyOrder(goCtx context.Context, msg *types.MsgSendBuyOrder) (*types.MsgSendBuyOrderResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Cannot send a order if the pair doesn't exist + pairIndex := types.OrderBookIndex(msg.Port, msg.ChannelID, msg.AmountDenom, msg.PriceDenom) + _, found := k.GetBuyOrderBook(ctx, pairIndex) + if !found { + return &types.MsgSendBuyOrderResponse{}, errors.New("the pair doesn't exist") + } + + // Lock the token to send + sender, err := sdk.AccAddressFromBech32(msg.Creator) + if err != nil { + return &types.MsgSendBuyOrderResponse{}, err + } + + // Use SafeBurn to ensure no new native tokens are minted + if err := k.SafeBurn(ctx, msg.Port, msg.ChannelID, sender, msg.PriceDenom, msg.Amount*msg.Price); err != nil { + return &types.MsgSendBuyOrderResponse{}, err + } + + // Save the voucher received on the other chain, to have the ability to resolve it into the original denom + k.SaveVoucherDenom(ctx, msg.Port, msg.ChannelID, msg.PriceDenom) + + // Construct the packet + var packet types.BuyOrderPacketData + + packet.Buyer = msg.Creator + packet.AmountDenom = msg.AmountDenom + packet.Amount = msg.Amount + packet.PriceDenom = msg.PriceDenom + packet.Price = msg.Price + + // Transmit the packet + err = k.TransmitBuyOrderPacket( + ctx, + packet, + msg.Port, + msg.ChannelID, + clienttypes.ZeroHeight(), + msg.TimeoutTimestamp, + ) + if err != nil { + return nil, err + } + + // Transmit an IBC packet... 
+	return &types.MsgSendBuyOrderResponse{}, nil
+}
+```
+
+## On Receiving a Buy Order
+
+* Update the sell order book
+* Distribute sold token to the buyer
+* Send the buy order to chain A after the fill attempt
+
+```go
+// x/dex/keeper/buy_order.go
+
+package keeper
+
+// ...
+
+func (k Keeper) OnRecvBuyOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.BuyOrderPacketData) (packetAck types.BuyOrderPacketAck, err error) {
+	// validate packet data upon receiving
+	if err := data.ValidateBasic(); err != nil {
+		return packetAck, err
+	}
+
+	// Check if the sell order book exists
+	pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.AmountDenom, data.PriceDenom)
+	book, found := k.GetSellOrderBook(ctx, pairIndex)
+	if !found {
+		return packetAck, errors.New("the pair doesn't exist")
+	}
+
+	// Fill buy order
+	remaining, liquidated, purchase, _ := book.FillBuyOrder(types.Order{
+		Amount: data.Amount,
+		Price:  data.Price,
+	})
+
+	// Return remaining amount and gains
+	packetAck.RemainingAmount = remaining.Amount
+	packetAck.Purchase = purchase
+
+	// Before distributing gains, we resolve the denom
+	// First we check if the denom received comes from this chain originally
+	finalPriceDenom, saved := k.OriginalDenom(ctx, packet.DestinationPort, packet.DestinationChannel, data.PriceDenom)
+	if !saved {
+		// If it was not from this chain we use voucher as denom
+		finalPriceDenom = VoucherDenom(packet.SourcePort, packet.SourceChannel, data.PriceDenom)
+	}
+
+	// Dispatch liquidated buy order
+	for _, liquidation := range liquidated {
+		liquidation := liquidation
+		addr, err := sdk.AccAddressFromBech32(liquidation.Creator)
+		if err != nil {
+			return packetAck, err
+		}
+
+		if err := k.SafeMint(
+			ctx,
+			packet.DestinationPort,
+			packet.DestinationChannel,
+			addr,
+			finalPriceDenom,
+			liquidation.Amount*liquidation.Price,
+		); err != nil {
+			return packetAck, err
+		}
+	}
+
+	// Save the new order book
+	k.SetSellOrderBook(ctx, 
book)
+
+	return packetAck, nil
+}
+```
+
+### Implement a FillBuyOrder Function
+
+The `FillBuyOrder` function tries to fill the buy order with the order book and returns all the side effects:
+
+```go
+// x/dex/types/sell_order_book.go
+
+package types
+
+// ...
+
+func (s *SellOrderBook) FillBuyOrder(order Order) (
+	remainingBuyOrder Order,
+	liquidated []Order,
+	purchase int32,
+	filled bool,
+) {
+	var liquidatedList []Order
+	totalPurchase := int32(0)
+	remainingBuyOrder = order
+
+	// Liquidate as long as there is match
+	for {
+		var match bool
+		var liquidation Order
+		remainingBuyOrder, liquidation, purchase, match, filled = s.LiquidateFromBuyOrder(
+			remainingBuyOrder,
+		)
+		if !match {
+			break
+		}
+
+		// Update gains
+		totalPurchase += purchase
+
+		// Update liquidated
+		liquidatedList = append(liquidatedList, liquidation)
+
+		if filled {
+			break
+		}
+	}
+
+	return remainingBuyOrder, liquidatedList, totalPurchase, filled
+}
+```
+
+### Implement a LiquidateFromBuyOrder Function
+
+The `LiquidateFromBuyOrder` function liquidates the first sell order of the book from the buy order. If no match is
+found, return false for match:
+
+```go
+// x/dex/types/sell_order_book.go
+
+package types
+
+// ... 
+
+func (s *SellOrderBook) LiquidateFromBuyOrder(order Order) (
+	remainingBuyOrder Order,
+	liquidatedSellOrder Order,
+	purchase int32,
+	match bool,
+	filled bool,
+) {
+	remainingBuyOrder = order
+
+	// No match if no order
+	orderCount := len(s.Book.Orders)
+	if orderCount == 0 {
+		return order, liquidatedSellOrder, purchase, false, false
+	}
+
+	// Check if match
+	lowestAsk := s.Book.Orders[orderCount-1]
+	if order.Price < lowestAsk.Price {
+		return order, liquidatedSellOrder, purchase, false, false
+	}
+
+	liquidatedSellOrder = *lowestAsk
+
+	// Check if buy order can be entirely filled
+	if lowestAsk.Amount >= order.Amount {
+		remainingBuyOrder.Amount = 0
+		liquidatedSellOrder.Amount = order.Amount
+		purchase = order.Amount
+
+		// Remove lowest ask if it has been entirely liquidated
+		lowestAsk.Amount -= order.Amount
+		if lowestAsk.Amount == 0 {
+			s.Book.Orders = s.Book.Orders[:orderCount-1]
+		} else {
+			s.Book.Orders[orderCount-1] = lowestAsk
+		}
+
+		return remainingBuyOrder, liquidatedSellOrder, purchase, true, true
+	}
+
+	// Not entirely filled
+	purchase = lowestAsk.Amount
+	s.Book.Orders = s.Book.Orders[:orderCount-1]
+	remainingBuyOrder.Amount -= lowestAsk.Amount
+
+	return remainingBuyOrder, liquidatedSellOrder, purchase, true, false
+}
+```
+
+## Receiving a Buy Order Acknowledgment
+
+After a buy order acknowledgment is received, chain `Mars`:
+
+* Stores the remaining buy order in the buy order book.
+* Distributes sold `marscoin` to the buyers.
+* Distributes to the buyer the purchased amount.
+* On error, mints back the burned tokens.
+
+```go
+// x/dex/keeper/buy_order.go
+
+package keeper
+
+// ... 
+ +func (k Keeper) OnAcknowledgementBuyOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.BuyOrderPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + // In case of error we mint back the native token + receiver, err := sdk.AccAddressFromBech32(data.Buyer) + if err != nil { + return err + } + + if err := k.SafeMint( + ctx, + packet.SourcePort, + packet.SourceChannel, + receiver, + data.PriceDenom, + data.Amount*data.Price, + ); err != nil { + return err + } + + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.BuyOrderPacketAck + + if err := types.ModuleCdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + // Get the sell order book + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.AmountDenom, data.PriceDenom) + book, found := k.GetBuyOrderBook(ctx, pairIndex) + if !found { + panic("buy order book must exist") + } + + // Append the remaining amount of the order + if packetAck.RemainingAmount > 0 { + _, err := book.AppendOrder( + data.Buyer, + packetAck.RemainingAmount, + data.Price, + ) + if err != nil { + return err + } + + // Save the new order book + k.SetBuyOrderBook(ctx, book) + } + + // Mint the purchase + if packetAck.Purchase > 0 { + receiver, err := sdk.AccAddressFromBech32(data.Buyer) + if err != nil { + return err + } + + finalAmountDenom, saved := k.OriginalDenom(ctx, packet.SourcePort, packet.SourceChannel, data.AmountDenom) + if !saved { + // If it was not from this chain we use voucher as denom + finalAmountDenom = VoucherDenom(packet.DestinationPort, packet.DestinationChannel, data.AmountDenom) + } + + if err := k.SafeMint( + ctx, + packet.SourcePort, + packet.SourceChannel, + receiver, + 
finalAmountDenom, + packetAck.Purchase, + ); err != nil { + return err + } + } + + return nil + default: + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("invalid acknowledgment format") + } +} +``` + +`AppendOrder` appends an order in the buy order book. +Add the following function to the `x/dex/types/buy_order_book.go` file in the `types` directory. + +```go +// x/dex/types/buy_order_book.go + +package types + +// ... + +func (b *BuyOrderBook) AppendOrder(creator string, amount int32, price int32) (int32, error) { + return b.Book.appendOrder(creator, amount, price, Increasing) +} +``` + +## OnTimeout of a Buy Order Packet + +If a timeout occurs, mint back the native token: + +```go +// x/dex/keeper/buy_order.go + +package keeper + +// ... + +func (k Keeper) OnTimeoutBuyOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.BuyOrderPacketData) error { + // In case of error we mint back the native token + receiver, err := sdk.AccAddressFromBech32(data.Buyer) + if err != nil { + return err + } + + if err := k.SafeMint( + ctx, + packet.SourcePort, + packet.SourceChannel, + receiver, + data.PriceDenom, + data.Amount*data.Price, + ); err != nil { + return err + } + + return nil +} +``` + +## Summary + +Congratulations, you implemented the buy order logic. + +Again, it's a good time to save your current state to your local GitHub repository: + +```bash +git add . +git commit -m "Add Buy Orders" +``` diff --git a/docs/versioned_docs/version-v0.26/02-guide/07-interchange/08-cancelling-orders.md b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/08-cancelling-orders.md new file mode 100644 index 0000000..f6c44ee --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/08-cancelling-orders.md @@ -0,0 +1,200 @@ +--- +sidebar_position: 8 +description: Enable cancelling of buy and sell orders. +--- + +# Cancelling Orders + +You have implemented order books, buy and sell orders. 
In this chapter, you enable cancelling of buy and sell orders. + +## Cancel a Sell Order + +To cancel a sell order, you have to get the ID of the specific sell order. Then you can use the function +`RemoveOrderFromID` to remove the specific order from the order book and update the keeper accordingly. + +Move to the keeper directory and edit the `x/dex/keeper/msg_server_cancel_sell_order.go` file: + +```go +// x/dex/keeper/msg_server_cancel_sell_order.go + +package keeper + +import ( + "context" + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "interchange/x/dex/types" +) + +func (k msgServer) CancelSellOrder(goCtx context.Context, msg *types.MsgCancelSellOrder) (*types.MsgCancelSellOrderResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Retrieve the book + pairIndex := types.OrderBookIndex(msg.Port, msg.Channel, msg.AmountDenom, msg.PriceDenom) + s, found := k.GetSellOrderBook(ctx, pairIndex) + if !found { + return &types.MsgCancelSellOrderResponse{}, errors.New("the pair doesn't exist") + } + + // Check order creator + order, err := s.Book.GetOrderFromID(msg.OrderID) + if err != nil { + return &types.MsgCancelSellOrderResponse{}, err + } + + if order.Creator != msg.Creator { + return &types.MsgCancelSellOrderResponse{}, errors.New("canceller must be creator") + } + + // Remove order + if err := s.Book.RemoveOrderFromID(msg.OrderID); err != nil { + return &types.MsgCancelSellOrderResponse{}, err + } + + k.SetSellOrderBook(ctx, s) + + // Refund seller with remaining amount + seller, err := sdk.AccAddressFromBech32(order.Creator) + if err != nil { + return &types.MsgCancelSellOrderResponse{}, err + } + + if err := k.SafeMint(ctx, msg.Port, msg.Channel, seller, msg.AmountDenom, order.Amount); err != nil { + return &types.MsgCancelSellOrderResponse{}, err + } + + return &types.MsgCancelSellOrderResponse{}, nil +} +``` + +### Implement the GetOrderFromID Function + +The `GetOrderFromID` function gets an order from the book from its ID. 
+ +Add this function to the `x/dex/types/order_book.go` function in the `types` directory: + +```go +// x/dex/types/order_book.go + +func (book OrderBook) GetOrderFromID(id int32) (Order, error) { + for _, order := range book.Orders { + if order.Id == id { + return *order, nil + } + } + + return Order{}, ErrOrderNotFound +} +``` + +### Implement the RemoveOrderFromID Function + +The `RemoveOrderFromID` function removes an order from the book and keeps it ordered: + +```go +// x/dex/types/order_book.go + +package types + +// ... + +func (book *OrderBook) RemoveOrderFromID(id int32) error { + for i, order := range book.Orders { + if order.Id == id { + book.Orders = append(book.Orders[:i], book.Orders[i+1:]...) + return nil + } + } + + return ErrOrderNotFound +} +``` + +## Cancel a Buy Order + +To cancel a buy order, you have to get the ID of the specific buy order. Then you can use the function +`RemoveOrderFromID` to remove the specific order from the order book and update the keeper accordingly: + +```go +// x/dex/keeper/msg_server_cancel_buy_order.go + +package keeper + +import ( + "context" + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "interchange/x/dex/types" +) + +func (k msgServer) CancelBuyOrder(goCtx context.Context, msg *types.MsgCancelBuyOrder) (*types.MsgCancelBuyOrderResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Retrieve the book + pairIndex := types.OrderBookIndex(msg.Port, msg.Channel, msg.AmountDenom, msg.PriceDenom) + b, found := k.GetBuyOrderBook(ctx, pairIndex) + if !found { + return &types.MsgCancelBuyOrderResponse{}, errors.New("the pair doesn't exist") + } + + // Check order creator + order, err := b.Book.GetOrderFromID(msg.OrderID) + if err != nil { + return &types.MsgCancelBuyOrderResponse{}, err + } + + if order.Creator != msg.Creator { + return &types.MsgCancelBuyOrderResponse{}, errors.New("canceller must be creator") + } + + // Remove order + if err := b.Book.RemoveOrderFromID(msg.OrderID); err != nil { + 
return &types.MsgCancelBuyOrderResponse{}, err + } + + k.SetBuyOrderBook(ctx, b) + + // Refund buyer with remaining price amount + buyer, err := sdk.AccAddressFromBech32(order.Creator) + if err != nil { + return &types.MsgCancelBuyOrderResponse{}, err + } + + if err := k.SafeMint( + ctx, + msg.Port, + msg.Channel, + buyer, + msg.PriceDenom, + order.Amount*order.Price, + ); err != nil { + return &types.MsgCancelBuyOrderResponse{}, err + } + + return &types.MsgCancelBuyOrderResponse{}, nil +} +``` + +## Summary + +You have completed implementing the functions that are required for the `dex` module. In this chapter, you have +implemented the design for cancelling specific buy or sell orders. + +To test if your Ignite CLI blockchain builds correctly, use the `chain build` command: + +```bash +ignite chain build +``` + +Again, it is a good time (a great time!) to add your state to the local GitHub repository: + +```bash +git add . +git commit -m "Add Cancelling Orders" +``` + +Finally, it's now time to write test files. diff --git a/docs/versioned_docs/version-v0.26/02-guide/07-interchange/09-tests.md b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/09-tests.md new file mode 100644 index 0000000..8d3d933 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/09-tests.md @@ -0,0 +1,729 @@ +--- +sidebar_position: 9 +description: Add test files. +--- + +# Write Test Files + +To test your application, add the test files to your code. + +After you add the test files, change into the `interchange` directory with your terminal, then run: + +```bash +go test -timeout 30s ./x/dex/types +``` + +## Order Book Tests + +Create a new `x/dex/types/order_book_test.go` file in the `types` directory. 
+ +Add the following testsuite: + +```go +// x/dex/types/order_book_test.go + +package types_test + +import ( + "math/rand" + "testing" + + "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/stretchr/testify/require" + + "interchange/x/dex/types" +) + +func GenString(n int) string { + alpha := []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ") + + buf := make([]rune, n) + for i := range buf { + buf[i] = alpha[rand.Intn(len(alpha))] + } + + return string(buf) +} + +func GenAddress() string { + pk := ed25519.GenPrivKey().PubKey() + addr := pk.Address() + return sdk.AccAddress(addr).String() +} + +func GenAmount() int32 { + return int32(rand.Intn(int(types.MaxAmount)) + 1) +} + +func GenPrice() int32 { + return int32(rand.Intn(int(types.MaxPrice)) + 1) +} + +func GenPair() (string, string) { + return GenString(10), GenString(10) +} + +func GenOrder() (string, int32, int32) { + return GenLocalAccount(), GenAmount(), GenPrice() +} + +func GenLocalAccount() string { + return GenAddress() +} + +func MockAccount(str string) string { + return str +} + +func OrderListToOrderBook(list []types.Order) types.OrderBook { + listCopy := make([]*types.Order, len(list)) + for i, order := range list { + order := order + listCopy[i] = &order + } + + return types.OrderBook{ + IdCount: 0, + Orders: listCopy, + } +} + +func TestRemoveOrderFromID(t *testing.T) { + inputList := []types.Order{ + {Id: 3, Creator: MockAccount("3"), Amount: 2, Price: 10}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + + book := OrderListToOrderBook(inputList) + expectedList := []types.Order{ + {Id: 3, Creator: MockAccount("3"), Amount: 2, Price: 10}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + expectedBook := 
OrderListToOrderBook(expectedList) + err := book.RemoveOrderFromID(2) + require.NoError(t, err) + require.Equal(t, expectedBook, book) + + book = OrderListToOrderBook(inputList) + expectedList = []types.Order{ + {Id: 3, Creator: MockAccount("3"), Amount: 2, Price: 10}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + } + expectedBook = OrderListToOrderBook(expectedList) + err = book.RemoveOrderFromID(0) + require.NoError(t, err) + require.Equal(t, expectedBook, book) + + book = OrderListToOrderBook(inputList) + expectedList = []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + expectedBook = OrderListToOrderBook(expectedList) + err = book.RemoveOrderFromID(3) + require.NoError(t, err) + require.Equal(t, expectedBook, book) + + book = OrderListToOrderBook(inputList) + err = book.RemoveOrderFromID(4) + require.ErrorIs(t, err, types.ErrOrderNotFound) +} +``` + +## Buy Order Tests + +Create a new `x/dex/types/buy_order_book_test.go` file in the `types` directory to add the tests for the Buy Order Book: + +```go +// x/dex/types/buy_order_book_test.go + +package types_test + +import ( + "sort" + "testing" + + "github.com/stretchr/testify/require" + + "interchange/x/dex/types" +) + +func OrderListToBuyOrderBook(list []types.Order) types.BuyOrderBook { + listCopy := make([]*types.Order, len(list)) + for i, order := range list { + order := order + listCopy[i] = &order + } + + book := types.BuyOrderBook{ + AmountDenom: "foo", + PriceDenom: "bar", + Book: &types.OrderBook{ + IdCount: 0, + Orders: listCopy, + }, + } + return book +} + +func TestAppendOrder(t *testing.T) { + buyBook := types.NewBuyOrderBook(GenPair()) + + // Prevent zero amount + seller, amount, price := GenOrder() + _, err := buyBook.AppendOrder(seller, 0, price) + require.ErrorIs(t, 
err, types.ErrZeroAmount) + + // Prevent big amount + _, err = buyBook.AppendOrder(seller, types.MaxAmount+1, price) + require.ErrorIs(t, err, types.ErrMaxAmount) + + // Prevent zero price + _, err = buyBook.AppendOrder(seller, amount, 0) + require.ErrorIs(t, err, types.ErrZeroPrice) + + // Prevent big price + _, err = buyBook.AppendOrder(seller, amount, types.MaxPrice+1) + require.ErrorIs(t, err, types.ErrMaxPrice) + + // Can append buy orders + for i := 0; i < 20; i++ { + // Append a new order + creator, amount, price := GenOrder() + newOrder := types.Order{ + Id: buyBook.Book.IdCount, + Creator: creator, + Amount: amount, + Price: price, + } + orderID, err := buyBook.AppendOrder(creator, amount, price) + + // Checks + require.NoError(t, err) + require.Contains(t, buyBook.Book.Orders, &newOrder) + require.Equal(t, newOrder.Id, orderID) + } + + require.Len(t, buyBook.Book.Orders, 20) + require.True(t, sort.SliceIsSorted(buyBook.Book.Orders, func(i, j int) bool { + return buyBook.Book.Orders[i].Price < buyBook.Book.Orders[j].Price + })) +} + +type liquidateSellRes struct { + Book []types.Order + Remaining types.Order + Liquidated types.Order + Gain int32 + Match bool + Filled bool +} + +func simulateLiquidateFromSellOrder( + t *testing.T, + inputList []types.Order, + inputOrder types.Order, + expected liquidateSellRes, +) { + book := OrderListToBuyOrderBook(inputList) + expectedBook := OrderListToBuyOrderBook(expected.Book) + + require.True(t, sort.SliceIsSorted(book.Book.Orders, func(i, j int) bool { + return book.Book.Orders[i].Price < book.Book.Orders[j].Price + })) + require.True(t, sort.SliceIsSorted(expectedBook.Book.Orders, func(i, j int) bool { + return expectedBook.Book.Orders[i].Price < expectedBook.Book.Orders[j].Price + })) + + remaining, liquidated, gain, match, filled := book.LiquidateFromSellOrder(inputOrder) + + require.Equal(t, expectedBook, book) + require.Equal(t, expected.Remaining, remaining) + require.Equal(t, expected.Liquidated, liquidated) 
+ require.Equal(t, expected.Gain, gain) + require.Equal(t, expected.Match, match) + require.Equal(t, expected.Filled, filled) +} + +func TestLiquidateFromSellOrder(t *testing.T) { + // No match for empty book + inputOrder := types.Order{Id: 10, Creator: MockAccount("1"), Amount: 100, Price: 30} + book := OrderListToBuyOrderBook([]types.Order{}) + _, _, _, match, _ := book.LiquidateFromSellOrder(inputOrder) + require.False(t, match) + + // Buy book + inputBook := []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + + // Test no match if highest bid too low (25 < 30) + book = OrderListToBuyOrderBook(inputBook) + _, _, _, match, _ = book.LiquidateFromSellOrder(inputOrder) + require.False(t, match) + + // Entirely filled (30 < 50) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 22} + expected := liquidateSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 20, Price: 25}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 22}, + Liquidated: types.Order{Id: 0, Creator: MockAccount("0"), Amount: 30, Price: 25}, + Gain: int32(30 * 25), + Match: true, + Filled: true, + } + simulateLiquidateFromSellOrder(t, inputBook, inputOrder, expected) + + // Entirely filled and liquidated ( 50 = 50) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 50, Price: 15} + expected = liquidateSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 15}, + Liquidated: types.Order{Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + 
Gain: int32(50 * 25), + Match: true, + Filled: true, + } + simulateLiquidateFromSellOrder(t, inputBook, inputOrder, expected) + + // Not filled and entirely liquidated (60 > 50) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 10} + expected = liquidateSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 10, Price: 10}, + Liquidated: types.Order{Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + Gain: int32(50 * 25), + Match: true, + Filled: false, + } + simulateLiquidateFromSellOrder(t, inputBook, inputOrder, expected) +} + +type fillSellRes struct { + Book []types.Order + Remaining types.Order + Liquidated []types.Order + Gain int32 + Filled bool +} + +func simulateFillSellOrder( + t *testing.T, + inputList []types.Order, + inputOrder types.Order, + expected fillSellRes, +) { + book := OrderListToBuyOrderBook(inputList) + expectedBook := OrderListToBuyOrderBook(expected.Book) + + require.True(t, sort.SliceIsSorted(book.Book.Orders, func(i, j int) bool { + return book.Book.Orders[i].Price < book.Book.Orders[j].Price + })) + require.True(t, sort.SliceIsSorted(expectedBook.Book.Orders, func(i, j int) bool { + return expectedBook.Book.Orders[i].Price < expectedBook.Book.Orders[j].Price + })) + + remaining, liquidated, gain, filled := book.FillSellOrder(inputOrder) + + require.Equal(t, expectedBook, book) + require.Equal(t, expected.Remaining, remaining) + require.Equal(t, expected.Liquidated, liquidated) + require.Equal(t, expected.Gain, gain) + require.Equal(t, expected.Filled, filled) +} + +func TestFillSellOrder(t *testing.T) { + var inputBook []types.Order + + // Empty book + inputOrder := types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 30} + expected := fillSellRes{ + Book: []types.Order{}, + Remaining: inputOrder, + Liquidated: 
[]types.Order(nil), + Gain: int32(0), + Filled: false, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) + + // No match + inputBook = []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + expected = fillSellRes{ + Book: inputBook, + Remaining: inputOrder, + Liquidated: []types.Order(nil), + Gain: int32(0), + Filled: false, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) + + // First order liquidated, not filled + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 22} + expected = fillSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 10, Price: 22}, + Liquidated: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + }, + Gain: int32(50 * 25), + Filled: false, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) + + // Filled with two order + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 18} + expected = fillSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 190, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 18}, + Liquidated: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 10, Price: 20}, + }, + Gain: int32(50*25 + 10*20), + Filled: true, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) + + // Not filled, buy order book liquidated + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 300, Price: 10} + expected = fillSellRes{ + Book: []types.Order{}, + Remaining: types.Order{Id: 10, 
Creator: MockAccount("1"), Amount: 20, Price: 10}, + Liquidated: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + }, + Gain: int32(50*25 + 200*20 + 30*15), + Filled: false, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) +} +``` + +## Sell Order Tests + +Create a new testsuite for Sell Orders in a new file `x/dex/types/sell_order_book_test.go`: + +```go +// x/dex/types/sell_order_book_test.go + +package types_test + +import ( + "sort" + "testing" + + "github.com/stretchr/testify/require" + + "interchange/x/dex/types" +) + +func OrderListToSellOrderBook(list []types.Order) types.SellOrderBook { + listCopy := make([]*types.Order, len(list)) + for i, order := range list { + order := order + listCopy[i] = &order + } + + book := types.SellOrderBook{ + AmountDenom: "foo", + PriceDenom: "bar", + Book: &types.OrderBook{ + IdCount: 0, + Orders: listCopy, + }, + } + return book +} + +func TestSellOrderBook_AppendOrder(t *testing.T) { + sellBook := types.NewSellOrderBook(GenPair()) + + // Prevent zero amount + seller, amount, price := GenOrder() + _, err := sellBook.AppendOrder(seller, 0, price) + require.ErrorIs(t, err, types.ErrZeroAmount) + + // Prevent big amount + _, err = sellBook.AppendOrder(seller, types.MaxAmount+1, price) + require.ErrorIs(t, err, types.ErrMaxAmount) + + // Prevent zero price + _, err = sellBook.AppendOrder(seller, amount, 0) + require.ErrorIs(t, err, types.ErrZeroPrice) + + // Prevent big price + _, err = sellBook.AppendOrder(seller, amount, types.MaxPrice+1) + require.ErrorIs(t, err, types.ErrMaxPrice) + + // Can append sell orders + for i := 0; i < 20; i++ { + // Append a new order + creator, amount, price := GenOrder() + newOrder := types.Order{ + Id: sellBook.Book.IdCount, + Creator: creator, + Amount: amount, + Price: price, + } + orderID, err := sellBook.AppendOrder(creator, 
amount, price) + + // Checks + require.NoError(t, err) + require.Contains(t, sellBook.Book.Orders, &newOrder) + require.Equal(t, newOrder.Id, orderID) + } + require.Len(t, sellBook.Book.Orders, 20) + require.True(t, sort.SliceIsSorted(sellBook.Book.Orders, func(i, j int) bool { + return sellBook.Book.Orders[i].Price > sellBook.Book.Orders[j].Price + })) +} + +type liquidateBuyRes struct { + Book []types.Order + Remaining types.Order + Liquidated types.Order + Purchase int32 + Match bool + Filled bool +} + +func simulateLiquidateFromBuyOrder( + t *testing.T, + inputList []types.Order, + inputOrder types.Order, + expected liquidateBuyRes, +) { + book := OrderListToSellOrderBook(inputList) + expectedBook := OrderListToSellOrderBook(expected.Book) + require.True(t, sort.SliceIsSorted(book.Book.Orders, func(i, j int) bool { + return book.Book.Orders[i].Price > book.Book.Orders[j].Price + })) + require.True(t, sort.SliceIsSorted(expectedBook.Book.Orders, func(i, j int) bool { + return expectedBook.Book.Orders[i].Price > expectedBook.Book.Orders[j].Price + })) + + remaining, liquidated, purchase, match, filled := book.LiquidateFromBuyOrder(inputOrder) + + require.Equal(t, expectedBook, book) + require.Equal(t, expected.Remaining, remaining) + require.Equal(t, expected.Liquidated, liquidated) + require.Equal(t, expected.Purchase, purchase) + require.Equal(t, expected.Match, match) + require.Equal(t, expected.Filled, filled) +} + +func TestLiquidateFromBuyOrder(t *testing.T) { + // No match for empty book + inputOrder := types.Order{Id: 10, Creator: MockAccount("1"), Amount: 100, Price: 10} + book := OrderListToSellOrderBook([]types.Order{}) + _, _, _, match, _ := book.LiquidateFromBuyOrder(inputOrder) + require.False(t, match) + + // Sell book + inputBook := []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + } + + // Test no 
match if lowest ask too high (25 < 30) + book = OrderListToSellOrderBook(inputBook) + _, _, _, match, _ = book.LiquidateFromBuyOrder(inputOrder) + require.False(t, match) + + // Entirely filled (30 > 15) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 20, Price: 30} + expected := liquidateBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 2, Creator: MockAccount("2"), Amount: 10, Price: 15}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 30}, + Liquidated: types.Order{Id: 2, Creator: MockAccount("2"), Amount: 20, Price: 15}, + Purchase: int32(20), + Match: true, + Filled: true, + } + simulateLiquidateFromBuyOrder(t, inputBook, inputOrder, expected) + + // Entirely filled (30 = 30) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 30} + expected = liquidateBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 30}, + Liquidated: types.Order{Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + Purchase: int32(30), + Match: true, + Filled: true, + } + simulateLiquidateFromBuyOrder(t, inputBook, inputOrder, expected) + + // Not filled and entirely liquidated (60 > 30) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 30} + expected = liquidateBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 30}, + Liquidated: types.Order{Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + Purchase: int32(30), + Match: true, + Filled: false, + } + simulateLiquidateFromBuyOrder(t, 
inputBook, inputOrder, expected) +} + +type fillBuyRes struct { + Book []types.Order + Remaining types.Order + Liquidated []types.Order + Purchase int32 + Filled bool +} + +func simulateFillBuyOrder( + t *testing.T, + inputList []types.Order, + inputOrder types.Order, + expected fillBuyRes, +) { + book := OrderListToSellOrderBook(inputList) + expectedBook := OrderListToSellOrderBook(expected.Book) + + require.True(t, sort.SliceIsSorted(book.Book.Orders, func(i, j int) bool { + return book.Book.Orders[i].Price > book.Book.Orders[j].Price + })) + require.True(t, sort.SliceIsSorted(expectedBook.Book.Orders, func(i, j int) bool { + return expectedBook.Book.Orders[i].Price > expectedBook.Book.Orders[j].Price + })) + + remaining, liquidated, purchase, filled := book.FillBuyOrder(inputOrder) + + require.Equal(t, expectedBook, book) + require.Equal(t, expected.Remaining, remaining) + require.Equal(t, expected.Liquidated, liquidated) + require.Equal(t, expected.Purchase, purchase) + require.Equal(t, expected.Filled, filled) +} + +func TestFillBuyOrder(t *testing.T) { + var inputBook []types.Order + + // Empty book + inputOrder := types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 10} + expected := fillBuyRes{ + Book: []types.Order{}, + Remaining: inputOrder, + Liquidated: []types.Order(nil), + Purchase: int32(0), + Filled: false, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) + + // No match + inputBook = []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + } + expected = fillBuyRes{ + Book: inputBook, + Remaining: inputOrder, + Liquidated: []types.Order(nil), + Purchase: int32(0), + Filled: false, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) + + // First order liquidated, not filled + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 18} + expected = 
fillBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 18}, + Liquidated: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + }, + Purchase: int32(30), + Filled: false, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) + + // Filled with two order + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 22} + expected = fillBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 170, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 22}, + Liquidated: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 30, Price: 20}, + }, + Purchase: int32(30 + 30), + Filled: true, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) + + // Not filled, sell order book liquidated + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 300, Price: 30} + expected = fillBuyRes{ + Book: []types.Order{}, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 20, Price: 30}, + Liquidated: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + }, + Purchase: int32(30 + 200 + 50), + Filled: false, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) +} +``` + +## Successful Test Output + +When the tests are successful, your output is: + +``` +ok interchange/x/dex/types 0.550s +``` diff --git a/docs/versioned_docs/version-v0.26/02-guide/07-interchange/_category_.json b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/_category_.json new file mode 100644 
index 0000000..f427e86 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/07-interchange/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Advanced Module: Interchange", + "position": 8, + "link": null + } \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/08-debug.md b/docs/versioned_docs/version-v0.26/02-guide/08-debug.md new file mode 100644 index 0000000..081b44f --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/08-debug.md @@ -0,0 +1,209 @@ +--- +description: Debugging your Cosmos SDK blockchain +--- + +# Debugging a chain + +Ignite chain debug command can help you find issues during development. It uses +[Delve](https://github.com/go-delve/delve) debugger which enables you to +interact with your blockchain app by controlling the execution of the process, +evaluating variables, and providing information of thread / goroutine state, CPU +register state and more. + +## Debug Command + +The debug command requires that the blockchain app binary is build with +debugging support by removing optimizations and inlining. A debug binary is +built by default by the `ignite chain serve` command or can optionally be +created using the `--debug` flag when running `ignite chain init` or `ignite +chain build` sub-commands. + +To start a debugging session in the terminal run: + +``` +ignite chain debug +``` + +The command runs your blockchain app in the background, attaches to it and +launches a terminal debugger shell: + +``` +Type 'help' for list of commands. +(dlv) +``` + +At this point the blockchain app blocks execution, so you can set one or more +breakpoints before continuing execution. 
+ +Use the +[break](https://github.com/go-delve/delve/blob/master/Documentation/cli/README.md#break) +(alias `b`) command to set any number of breakpoints using, for example the +`<filename>:<line>` notation: + +``` +(dlv) break x/hello/client/cli/query_say_hello.go:14 +``` + +This command adds a breakpoint to the `x/hello/client/cli/query_say_hello.go` +file at line 14. + +Once all breakpoints are set resume blockchain execution using the +[continue](https://github.com/go-delve/delve/blob/master/Documentation/cli/README.md#continue) +(alias `c`) command: + +``` +(dlv) continue +``` + +The debugger will launch the shell and stop blockchain execution again when a +breakpoint is triggered. + +Within the debugger shell use the `quit` (alias `q`) or `exit` commands to stop +the blockchain app and exit the debugger. + +## Debug Server + +A debug server can optionally be started in cases where the default terminal +client is not desirable. When the server starts it first runs the blockchain +app, attaches to it and finally waits for a client connection. The default +server address is *tcp://127.0.0.1:30500* and it accepts both JSON-RPC or DAP +client connections. + +To start a debug server use the following flag: + +``` +ignite chain debug --server +``` + +To start a debug server with a custom address use the following flags: + +``` +ignite chain debug --server --server-address 127.0.0.1:30500 +``` + +The debug server stops automatically when the client connection is closed. + +## Debugging Clients + +### Gdlv: Multiplatform Delve UI + +[Gdlv](https://github.com/aarzilli/gdlv) is a graphical frontend to Delve for +Linux, Windows and macOS. + +Using it as debugging client is straightforward as it doesn't require any +configuration. 
Once the debug server is running and listening for client +requests connect to it by running: + +``` +gdlv connect 127.0.0.1:30500 +``` + +Setting breakpoints and continuing execution is done in the same way as Delve, +by using the `break` and `continue` commands. + +### Visual Studio Code + +Using [Visual Studio Code](https://code.visualstudio.com/) as debugging client +requires an initial configuration to allow it to connect to the debug server. + +Make sure that the [Go](https://code.visualstudio.com/docs/languages/go) +extension is installed. + +VS Code debugging is configured using the `launch.json` file which is usually +located inside the `.vscode` folder in your workspace. + +You can use the following launch configuration to set up VS Code as debugging +client: + +```json title=launch.json +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Connect to Debug Server", + "type": "go", + "request": "attach", + "mode": "remote", + "remotePath": "${workspaceFolder}", + "port": 30500, + "host": "127.0.0.1" + } + ] +} +``` + +Alternatively it's possible to create a custom `launch.json` file from the "Run +and Debug" panel. When prompted choose the Go debugger option labeled "Go: +Connect to Server" and enter the debug host address and then the port number. + +## Example: Debugging a Blockchain App + +In this short example we will be using Ignite CLI to create a new blockchain and +a query to be able to trigger a debugging breakpoint when the query is called. 
+ +Create a new blockchain: + +``` +ignite scaffold chain hello +``` + +Scaffold a new query in the `hello` directory: + +``` +ignite scaffold query say-hello name --response name +``` + +The next step initializes the blockchain's data directory and compiles a debug +binary: + +``` +ignite chain init --debug +``` + +Once the initialization finishes launch the debugger shell: + +``` +ignite chain debug +``` + +Within the debugger shell create a breakpoint that will be triggered when the +`SayHello` function is called and then continue execution: + +``` +(dlv) break x/hello/keeper/query_say_hello.go:12 +(dlv) continue +``` + +From a different terminal use the `hellod` binary to call the query: + +``` +hellod query hello say-hello bob +``` + +A debugger shell will be launched when the breakpoint is triggered: + +``` + 7: "google.golang.org/grpc/codes" + 8: "google.golang.org/grpc/status" + 9: "hello/x/hello/types" + 10: ) + 11: +=> 12: func (k Keeper) SayHello(goCtx context.Context, req *types.QuerySayHelloRequest) (*types.QuerySayHelloResponse, error) { + 13: if req == nil { + 14: return nil, status.Error(codes.InvalidArgument, "invalid request") + 15: } + 16: + 17: ctx := sdk.UnwrapSDKContext(goCtx) +``` + +From then on you can use Delve commands like `next` (alias `n`) or `print` +(alias `p`) to control execution and print values. For example, to print the +*name* argument value use the `print` command followed by "req.Name": + +``` +(dlv) print req.Name +"bob" +``` + +Finally, use `quit` (alias `q`) to stop the blockchain app and finish the +debugging session. diff --git a/docs/versioned_docs/version-v0.26/02-guide/09-docker.md b/docs/versioned_docs/version-v0.26/02-guide/09-docker.md new file mode 100644 index 0000000..753e47b --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/09-docker.md @@ -0,0 +1,142 @@ +--- +description: Run Ignite CLI using a Docker container. 
+--- + +# Running inside a Docker container + +You can run Ignite CLI inside a Docker container without installing the Ignite +CLI binary directly on your machine. + +Running Ignite CLI in Docker can be useful for various reasons; isolating your +test environment, running Ignite CLI on an unsupported operating system, or +experimenting with a different version of Ignite CLI without installing it. + +Docker containers are like virtual machines because they provide an isolated +environment to programs that run inside them. In this case, you can run Ignite +CLI in an isolated environment. + +Experimentation and file system impact is limited to the Docker instance. The +host machine is not impacted by changes to the container. + +## Prerequisites + +Docker must be installed. See [Get Started with +Docker](https://www.docker.com/get-started). + +## Ignite CLI Commands in Docker + +After you scaffold and start a chain in your Docker container, all Ignite CLI +commands are available. Just type the commands after `docker run -ti +ignitehq/cli`. For example: + +```bash +docker run -ti ignitehq/cli -h +docker run -ti ignitehq/cli scaffold chain planet +docker run -ti ignitehq/cli chain serve +``` + +## Scaffolding a chain + +When Docker is installed, you can build a blockchain with a single command. + +Ignite CLI, and the chains you serve with Ignite CLI, persist some files. When +using the CLI binary directly, those files are located in `$HOME/.ignite` and +`$HOME/.cache`, but in the context of Docker it's better to use a directory +different from `$HOME`, so we use `$HOME/sdh`. This folder should be created +manually prior to the docker commands below, or else Docker creates it with the +root user.
+ +```bash +mkdir $HOME/sdh +``` + +To scaffold a blockchain `planet` in the `/apps` directory in the container, run +this command in a terminal window: + +```bash +docker run -ti -v $HOME/sdh:/home/tendermint -v $PWD:/apps ignitehq/cli:0.25.2 scaffold chain planet +``` + +Be patient, this command takes a minute or two to run because it does everything +for you: + +- Creates a container that runs from the `ignitehq/cli:0.25.2` image. +- Executes the Ignite CLI binary inside the image. +- `-v $HOME/sdh:/home/tendermint` maps the `$HOME/sdh` directory in your local + computer (the host machine) to the home directory `/home/tendermint` inside + the container. +- `-v $PWD:/apps` maps the current directory in the terminal window on the host + machine to the `/apps` directory in the container. You can optionally specify + an absolute path instead of `$PWD`. + + Using `-w` and `-v` together provides file persistence on the host machine. + The application source code on the Docker container is mirrored to the file + system of the host machine. + + **Note:** The directory name for the `-w` and `-v` flags can be a name other + than `/apps`, but the same directory must be specified for both flags. If you + omit `-w` and `-v`, the changes are made in the container only and are lost + when that container is shut down. + +## Starting a blockchain + +To start the blockchain node in the Docker container you just created, run this +command: + +```bash +docker run -ti -v $HOME/sdh:/home/tendermint -v $PWD:/apps -p 1317:1317 -p 26657:26657 ignitehq/cli:0.25.2 chain serve -p planet +``` + +This command does the following: + +- `-v $HOME/sdh:/home/tendermint` maps the `$HOME/sdh` directory in your local + computer (the host machine) to the home directory `/home/tendermint` inside + the container. +- `-v $PWD:/apps` persists the scaffolded app in the container to the host + machine at the current working directory.
+- `serve -p planet` specifies to use the `planet` directory that contains the + source code of the blockchain. +- `-p 1317:1317` maps the API server port (cosmos-sdk) to the host machine to + forward port 1317 listening inside the container to port 1317 on the host + machine. +- `-p 26657:26657` maps RPC server port 26657 (tendermint) on the host machine + to port 26657 in Docker. +- After the blockchain is started, open `http://localhost:26657` to see the + Tendermint API. +- The `-v` flag specifies for the container to access the application's source + code from the host machine, so it can build and run it. + +## Versioning + +You can specify which version of Ignite CLI to install and run in your Docker +container. + +### Latest version + +- By default, `ignite/cli` resolves to `ignite/cli:latest`. +- The `latest` image tag is always the latest stable [Ignite CLI + release](https://github.com/ignite/cli/releases). + +For example, if latest release is +[v0.25.2](https://github.com/ignite/cli/releases/tag/v0.25.2), the `latest` tag +points to the `0.25.2` tag. + +### Specific version + +You can specify to use a specific version of Ignite CLI. All available tags are +in the [ignite/cli +image](https://hub.docker.com/r/ignitehq/cli/tags?page=1&ordering=last_updated) on +Docker Hub. + +For example: + +- Use `ignitehq/cli:0.25.2` (without the `v` prefix) to use version `0.25.2`. +- Use `ignitehq/cli` to use the latest version. +- Use `ignitehq/cli:main` to use the `main` branch, so you can experiment with + the upcoming version. + +To get the latest image, run `docker pull`. + +```bash +docker pull ignitehq/cli:main +``` diff --git a/docs/versioned_docs/version-v0.26/02-guide/10-simapp.md b/docs/versioned_docs/version-v0.26/02-guide/10-simapp.md new file mode 100644 index 0000000..2285744 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/10-simapp.md @@ -0,0 +1,164 @@ +--- +sidebar_position: 10 +description: Test different scenarios for your chain. 
+--- + +# Chain simulation + +The Ignite CLI chain simulator can help you run your chain based on +randomized inputs so you can perform fuzz testing and benchmark tests for your +chain, simulating the messages, blocks, and accounts. You can scaffold a +template to perform simulation testing in each module along with boilerplate +simulation methods for each scaffolded message. + +## Module simulation + +Every new module that is scaffolded with Ignite CLI implements the Cosmos SDK +[Module +Simulation](https://docs.cosmos.network/main/building-modules/simulator). + +- Each new message creates a file with the simulation methods required for the + tests. +- Scaffolding a `CRUD` type like a `list` or `map` creates a simulation file + with `create`, `update`, and `delete` simulation methods in the + `x/<module>/simulation` folder and registers these methods in + `x/<module>/module_simulation.go`. +- Scaffolding a single message creates an empty simulation method to be + implemented by the user. + +We recommend that you maintain the simulation methods for each new modification +to the message keeper methods. + +Every simulation is weighted because the sender of the operation is assigned +randomly. The weight defines how much the simulation calls the message. + +For better randomizations, you can define a random seed. The simulation with the +same random seed is deterministic with the same output. + +## Scaffold a simulation + +To create a new chain: + +``` +ignite scaffold chain mars +``` + +Review the empty `x/mars/simulation` folder and the +`x/mars/module_simulation.go` file to see that a simulation is not registered. + +Now, scaffold a new message: + +``` +ignite scaffold list user address balance:uint state +``` + +A new file `x/mars/simulation/user.go` is created and is registered with the +weight in the `x/mars/module_simulation.go` file. + +Be sure to define the proper simulation weight with a minimum weight of 0 and a +maximum weight of 100.
+ +For this example, change the `defaultWeightMsgDeleteUser` to 30 and the +`defaultWeightMsgUpdateUser` to 50. + +Run the `BenchmarkSimulation` method in `app/simulation_test.go` to run +simulation tests for all modules: + +``` +ignite chain simulate +``` + +You can also define flags that are provided by the simulation. Flags are defined +by the method `simapp.GetSimulatorFlags()`: + +``` +ignite chain simulate -v --numBlocks 200 --blockSize 50 --seed 33 +``` + +Wait for the entire simulation to finish and check the result of the messages. + +The default `go test` command works to run the simulation: + +``` +go test -v -benchmem -run=^$ -bench ^BenchmarkSimulation -cpuprofile cpu.out ./app -Commit=true +``` + +### Skip message + +Use logic to avoid sending a message without returning an error. Return only +`simtypes.NoOpMsg(...)` into the simulation message handler. + +## Params + +Scaffolding a module with params automatically adds the module in the +`module_simulation.go` file: + +``` +ignite s module earth --params channel:string,minLaunch:uint,maxLaunch:int +``` + +After the parameters are scaffolded, change the +`x/<module>/module_simulation.go` file to set the random parameters into the +`RandomizedParams` method. The simulation will change the params randomly +each time the function is called. + +## Invariants + +Simulating a chain can help you prevent [chain invariants +errors](https://docs.cosmos.network/main/building-modules/invariants). An +invariant is a function called by the chain to check if something broke, +invalidating the chain data. To create a new invariant and check the chain +integrity, you must create a method to validate the invariants and register all +invariants.
+ + +For example, in `x/earth/keeper/invariants.go`: + +```go title="x/earth/keeper/invariants.go" +package keeper + +import ( + "fmt" + + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/tendermint/spn/x/launch/types" +) + +const zeroLaunchTimestampRoute = "zero-launch-timestamp" + +// RegisterInvariants registers all module invariants +func RegisterInvariants(ir sdk.InvariantRegistry, k Keeper) { + ir.RegisterRoute(types.ModuleName, zeroLaunchTimestampRoute, + ZeroLaunchTimestampInvariant(k)) +} + +// ZeroLaunchTimestampInvariant invariant that checks if the +// `LaunchTimestamp is zero +func ZeroLaunchTimestampInvariant(k Keeper) sdk.Invariant { + return func(ctx sdk.Context) (string, bool) { + all := k.GetAllChain(ctx) + for _, chain := range all { + if chain.LaunchTimestamp == 0 { + return sdk.FormatInvariant( + types.ModuleName, zeroLaunchTimestampRoute, + "LaunchTimestamp is not set while LaunchTriggered is set", + ), true + } + } + return "", false + } +} +``` + +Now, register the keeper invariants into the `x/earth/module.go` file: + +```go +package earth + +// ... + +// RegisterInvariants registers the capability module's invariants. 
+func (am AppModule) RegisterInvariants(ir sdk.InvariantRegistry) { + keeper.RegisterInvariants(ir, am.keeper) +} +``` diff --git a/docs/versioned_docs/version-v0.26/02-guide/_category_.json b/docs/versioned_docs/version-v0.26/02-guide/_category_.json new file mode 100644 index 0000000..3c599cc --- /dev/null +++ b/docs/versioned_docs/version-v0.26/02-guide/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Develop a chain", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/02-guide/images/api.png b/docs/versioned_docs/version-v0.26/02-guide/images/api.png new file mode 100644 index 0000000..081df8f Binary files /dev/null and b/docs/versioned_docs/version-v0.26/02-guide/images/api.png differ diff --git a/docs/versioned_docs/version-v0.26/02-guide/images/packet_sendpost.png b/docs/versioned_docs/version-v0.26/02-guide/images/packet_sendpost.png new file mode 100644 index 0000000..0bb080c Binary files /dev/null and b/docs/versioned_docs/version-v0.26/02-guide/images/packet_sendpost.png differ diff --git a/docs/versioned_docs/version-v0.26/03-clients/01-go-client.md b/docs/versioned_docs/version-v0.26/03-clients/01-go-client.md new file mode 100644 index 0000000..e5ab27c --- /dev/null +++ b/docs/versioned_docs/version-v0.26/03-clients/01-go-client.md @@ -0,0 +1,300 @@ +--- +description: Blockchain client in Go +title: Go client +--- + +# A client in the Go programming language + +In this tutorial, we will show you how to create a standalone Go program that +serves as a client for a blockchain. We will use the Ignite CLI to set up a +standard blockchain. To communicate with the blockchain, we will utilize the +`cosmosclient` package, which provides an easy-to-use interface for interacting +with the blockchain. You will learn how to use the `cosmosclient` package to +send transactions and query the blockchain. 
By the end of this tutorial, you +will have a good understanding of how to build a client for a blockchain using +Go and the `cosmosclient` package. + +## Create a blockchain + +To create a blockchain using the Ignite CLI, use the following command: + +``` +ignite scaffold chain blog +``` + +This will create a new Cosmos SDK blockchain called "blog". + +Once the blockchain has been created, you can generate code for a "blog" model +that will enable you to perform create, read, update, and delete (CRUD) +operations on blog posts. To do this, you can use the following command: + +``` +cd blog +ignite scaffold list post title body +``` + +This will generate the necessary code for the "blog" model, including functions +for creating, reading, updating, and deleting blog posts. With this code in +place, you can now use your blockchain to perform CRUD operations on blog posts. +You can use the generated code to create new blog posts, retrieve existing ones, +update their content, and delete them as needed. This will give you a fully +functional Cosmos SDK blockchain with the ability to manage blog posts. + +Start your blockchain node with the following command: + +``` +ignite chain serve +``` + +## Creating a blockchain client + +Create a new directory called `blogclient` on the same level as `blog` +directory. As the name suggests, `blogclient` will contain a standalone Go +program that acts as a client to your `blog` blockchain. + +```bash +mkdir blogclient +``` + +This command will create a new directory called `blogclient` in your current +location. If you type `ls` in your terminal window, you should see both the +`blog` and `blogclient` directories listed. + +To initialize a new Go package inside the `blogclient` directory, you can use +the following command: + +``` +cd blogclient +go mod init blogclient +``` + +This will create a `go.mod` file in the `blogclient` directory, which contains +information about the package and the Go version being used. 
+ +To import dependencies for your package, you can add the following code to the +`go.mod` file: + +```text title="blogclient/go.mod" +module blogclient + +go 1.19 + +require ( + blog v0.0.0-00010101000000-000000000000 + github.com/ignite/cli v0.25.2 +) + +replace blog => ../blog +replace github.com/gogo/protobuf => github.com/regen-network/protobuf v1.3.3-alpha.regen.1 +``` + +Your package will import two dependencies: + +* `blog`, which contains `types` of messages and a query client +* `ignite` for the `cosmosclient` package + +The `replace` directive uses the package from the local `blog` directory and is +specified as a relative path to the `blogclient` directory. + +Cosmos SDK uses a custom version of the `protobuf` package, so use the `replace` +directive to specify the correct dependency. + +Finally, install dependencies for your `blogclient`: + +```bash +go mod tidy +``` + +### Main logic of the client in `main.go` + +Create a `main.go` file inside the `blogclient` directory and add the following +code: + +```go title="blogclient/main.go" +package main + +import ( + "context" + "fmt" + "log" + + // Importing the general purpose Cosmos blockchain client + "github.com/ignite/cli/ignite/pkg/cosmosclient" + + // Importing the types package of your blog blockchain + "blog/x/blog/types" +) + +func main() { + ctx := context.Background() + addressPrefix := "cosmos" + + // Create a Cosmos client instance + client, err := cosmosclient.New(ctx, cosmosclient.WithAddressPrefix(addressPrefix)) + if err != nil { + log.Fatal(err) + } + + // Account `alice` was initialized during `ignite chain serve` + accountName := "alice" + + // Get account from the keyring + account, err := client.Account(accountName) + if err != nil { + log.Fatal(err) + } + + addr, err := account.Address(addressPrefix) + if err != nil { + log.Fatal(err) + } + + // Define a message to create a post + msg := &types.MsgCreatePost{ + Creator: addr, + Title: "Hello!", + Body: "This is the first post", + } 
+ + // Broadcast a transaction from account `alice` with the message + // to create a post store response in txResp + txResp, err := client.BroadcastTx(ctx, account, msg) + if err != nil { + log.Fatal(err) + } + + // Print response from broadcasting a transaction + fmt.Print("MsgCreatePost:\n\n") + fmt.Println(txResp) + + // Instantiate a query client for your `blog` blockchain + queryClient := types.NewQueryClient(client.Context()) + + // Query the blockchain using the client's `PostAll` method + // to get all posts store all posts in queryResp + queryResp, err := queryClient.PostAll(ctx, &types.QueryAllPostRequest{}) + if err != nil { + log.Fatal(err) + } + + // Print response from querying all the posts + fmt.Print("\n\nAll posts:\n\n") + fmt.Println(queryResp) +} +``` + +The code above creates a standalone Go program that acts as a client to the +`blog` blockchain. It begins by importing the required packages, including the +general purpose Cosmos blockchain client and the `types` package of the `blog` +blockchain. + +In the `main` function, the code creates a Cosmos client instance and sets the +address prefix to "cosmos". It then retrieves an account named `"alice"` from +the keyring and gets the address of the account using the address prefix. + +Next, the code defines a message to create a blog post with the title "Hello!" +and body "This is the first post". It then broadcasts a transaction from the +account "alice" with the message to create the post, and stores the response in +the variable `txResp`. + +The code then instantiates a query client for the blog blockchain and uses it to +query the blockchain to retrieve all the posts. It stores the response in the +variable `queryResp` and prints it to the console. + +Finally, the code prints the response from broadcasting the transaction to the +console. This allows the user to see the results of creating and querying a blog +post on the `blog` blockchain using the client. 
+ +To find out more about the `cosmosclient` package, you can refer to the Go +package documentation for +[`cosmosclient`](https://pkg.go.dev/github.com/ignite/cli/ignite/pkg/cosmosclient). +This documentation provides information on how to use the `Client` type with +`Options` and `KeyringBackend`. + +## Run the blockchain and the client + +Make sure your blog blockchain is still running with `ignite chain serve`. + +Run the blockchain client: + +```bash +go run main.go +``` + +If the command is successful, the results of running the command will be printed +to the terminal. The output may include some warnings, which can be ignored. + +```yml +MsgCreatePost: + +code: 0 +codespace: "" +data: 12220A202F626C6F672E626C6F672E4D7367437265617465506F7374526573706F6E7365 +events: +- attributes: + - index: true + key: ZmVl + value: null + - index: true + key: ZmVlX3BheWVy + value: Y29zbW9zMWR6ZW13NzZ3enQ3cDBnajd3MzQyN2E0eHg3MjRkejAzd3hnOGhk + type: tx +- attributes: + - index: true + key: YWNjX3NlcQ== + value: Y29zbW9zMWR6ZW13NzZ3enQ3cDBnajd3MzQyN2E0eHg3MjRkejAzd3hnOGhkLzE= + type: tx +- attributes: + - index: true + key: c2lnbmF0dXJl + value: UWZncUJCUFQvaWxWVzJwNUJNTngzcDlvRzVpSXp0elhXdE9yMHcwVE00OEtlSkRqR0FEdU9VNjJiY1ZRNVkxTHdEbXNuYUlsTmc3VE9uMnJ2ZWRHSlE9PQ== + type: tx +- attributes: + - index: true + key: YWN0aW9u + value: L2Jsb2cuYmxvZy5Nc2dDcmVhdGVQb3N0 + type: message +gas_used: "52085" +gas_wanted: "300000" +height: "20" +info: "" +logs: +- events: + - attributes: + - key: action + value: /blog.blog.MsgCreatePost + type: message + log: "" + msg_index: 0 +raw_log: '[{"msg_index":0,"events":[{"type":"message","attributes":[{"key":"action","value":"/blog.blog.MsgCreatePost"}]}]}]' +timestamp: "" +tx: null +txhash: 4F53B75C18254F96EF159821DDD665E965DBB576A5AC2B94CE863EB62E33156A + +All posts: + +Post:<title:"Hello!" 
body:"This is the first post" creator:"cosmos1dzemw76wzt7p0gj7w3427a4xx724dz03wxg8hd" > pagination:<total:1 > +``` + +As you can see the client has successfully broadcasted a transaction and queried +the chain for blog posts. + +Please note, that some values in the output on your terminal (like transaction +hash and block height) might be different from the output above. + +You can confirm the new post with using the `blogd q blog list-post` command: + +```yaml +Post: +- body: This is the first post + creator: cosmos1dzemw76wzt7p0gj7w3427a4xx724dz03wxg8hd + id: "0" + title: Hello! +pagination: + next_key: null + total: "0" +``` + +Great job! You have successfully completed the process of creating a Go client +for your Cosmos SDK blockchain, submitting a transaction, and querying the +chain. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/03-clients/02-typescript.md b/docs/versioned_docs/version-v0.26/03-clients/02-typescript.md new file mode 100644 index 0000000..9d41a23 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/03-clients/02-typescript.md @@ -0,0 +1,430 @@ +--- +description: Information about the generated TypeScript client code. +--- + +# TypeScript frontend + +Ignite offers powerful functionality for generating client-side code for your +blockchain. Think of this as a one-click client SDK generation tailored +specifically for your blockchain. + +See `ignite generate ts-client --help` learn more on how to use TypeScript code generation. + +## Starting a node + +Create a new blockchain with `ignite scaffold chain`. You can use an existing +blockchain project if you have one, instead. 
+ +``` +ignite scaffold chain example +``` + +For testing purposes add a new account to `config.yml` with a mnemonic: + +```yml title="config.yml" +accounts: + - name: frank + coins: ["1000token", "100000000stake"] + mnemonic: play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint +``` + +Run a command to generate TypeScript clients for both standard and custom Cosmos +SDK modules: + +``` +ignite generate ts-client --clear-cache +``` + +Run a command to start your blockchain node: + +``` +ignite chain serve -r +``` + +## Setting up a TypeScript frontend client + +The best way to get started building with the TypeScript client is by using +[Vite](https://vitejs.dev). Vite provides boilerplate code for +vanilla TS projects as well as React, Vue, Lit, Svelte and Preact frameworks. +You can find additional information at the [Vite Getting Started +guide](https://vitejs.dev/guide). + +You will also need to [polyfill](https://developer.mozilla.org/en-US/docs/Glossary/Polyfill) the client's dependencies. The following is an +example of setting up a vanilla TS project with the necessary polyfills: + +```bash +npm create vite@latest my-frontend-app -- --template vanilla-ts +cd my-frontend-app +npm install --save-dev @esbuild-plugins/node-globals-polyfill @rollup/plugin-node-resolve +``` + +You must then create the necessary `vite.config.ts` file. 
+
+```typescript title="my-frontend-app/vite.config.ts"
+import { nodeResolve } from "@rollup/plugin-node-resolve";
+import { NodeGlobalsPolyfillPlugin } from "@esbuild-plugins/node-globals-polyfill";
+import { defineConfig } from "vite";
+
+export default defineConfig({
+  plugins: [nodeResolve()],
+
+  optimizeDeps: {
+    esbuildOptions: {
+      define: {
+        global: "globalThis",
+      },
+      plugins: [
+        NodeGlobalsPolyfillPlugin({
+          buffer: true,
+        }),
+      ],
+    },
+  },
+});
+```
+
+You are then ready to use the generated client code inside this project directly
+or by publishing the client and installing it like any other `npm` package.
+
+After the chain starts, you will see Frank's address is
+`cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7`. We'll be using Frank's account
+for querying data and broadcasting transactions in the next section.
+
+## Querying
+
+The code generated in `ts-client` comes with a `package.json` file ready to
+publish which you can modify to suit your needs. To use `ts-client` install the
+required dependencies:
+
+```
+cd ts-client
+npm install
+```
+
+The client is based on a modular architecture where you can configure a client
+class to support the modules you need and instantiate it.
+
+By default, the generated client exports a client class that includes all the
+Cosmos SDK, custom and 3rd party modules in use in your project.
+
+To instantiate the client you need to provide environment information (endpoints
+and chain prefix). For querying that's all you need:
+
+```typescript title="my-frontend-app/src/main.ts"
+import { Client } from "../../ts-client";
+
+const client = new Client(
+  {
+    apiURL: "http://localhost:1317",
+    rpcURL: "http://localhost:26657",
+    prefix: "cosmos",
+  }
+);
+```
+
+The example above uses `ts-client` from a local directory. If you have published
+your `ts-client` on `npm` replace `../../ts-client` with a package name.
+
+The resulting client instance contains namespaces for each module, each with a
+`query` and `tx` namespace containing the module's relevant querying and
+transacting methods with full type and auto-completion support.
+
+To query for a balance of an address:
+
+```typescript
+const balances = await client.CosmosBankV1Beta1.query.queryAllBalances(
+  'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7'
+);
+```
+
+## Broadcasting a transaction
+
+Add signing capabilities to the client by creating a wallet from a mnemonic
+(we're using Frank's mnemonic added to `config.yml` earlier) and passing it
+as an optional argument to `Client()`. The wallet implements the CosmJS
+`OfflineSigner` interface.
+
+```typescript title="my-frontend-app/src/main.ts"
+import { Client } from "../../ts-client";
+// highlight-start
+import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing";
+
+const mnemonic =
+  "play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint";
+const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic);
+// highlight-end
+
+const client = new Client(
+  {
+    apiURL: "http://localhost:1317",
+    rpcURL: "http://localhost:26657",
+    prefix: "cosmos",
+  },
+  // highlight-next-line
+  wallet
+);
+```
+
+Broadcasting a transaction:
+
+```typescript title="my-frontend-app/src/main.ts"
+const tx_result = await client.CosmosBankV1Beta1.tx.sendMsgSend({
+  value: {
+    amount: [
+      {
+        amount: '200',
+        denom: 'token',
+      },
+    ],
+    fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7',
+    toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc',
+  },
+  fee: {
+    amount: [{ amount: '0', denom: 'stake' }],
+    gas: '200000',
+  },
+  memo: '',
+})
+```
+
+## Broadcasting a transaction with a custom message
+
+If your chain already has custom messages defined, you can use those. If not,
+we'll be using Ignite's scaffolded code as an example. Create a post with CRUD
+messages:
+
+```
+ignite scaffold list post title body
+```
+
+After adding messages to your chain you may need to re-generate the TypeScript
+client:
+
+```
+ignite generate ts-client --clear-cache
+```
+
+Broadcast a transaction containing the custom `MsgCreatePost`:
+
+```typescript title="my-frontend-app/src/main.ts"
+import { Client } from "../../ts-client";
+import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing";
+
+const mnemonic =
+  "play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint";
+const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic);
+
+const client = new Client(
+  {
+    apiURL: "http://localhost:1317",
+    rpcURL: "http://localhost:26657",
+    prefix: "cosmos",
+  },
+  wallet
+);
+// highlight-start
+const tx_result = await client.ExampleExample.tx.sendMsgCreatePost({
+  value: {
+    title: 'foo',
+    body: 'bar',
+    creator: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7',
+  },
+  fee: {
+    amount: [{ amount: '0', denom: 'stake' }],
+    gas: '200000',
+  },
+  memo: '',
+})
+// highlight-end
+```
+
+## Lightweight client
+
+If you prefer, you can construct a lighter client using only the modules you are
+interested in by importing the generic client class and expanding it with the
+modules you need:
+
+```typescript title="my-frontend-app/src/main.ts"
+// highlight-start
+import { IgniteClient } from '../../ts-client/client'
+import { Module as CosmosBankV1Beta1 } from '../../ts-client/cosmos.bank.v1beta1'
+import { Module as CosmosStakingV1Beta1 } from '../../ts-client/cosmos.staking.v1beta1'
+// highlight-end
+import { DirectSecp256k1HdWallet } from '@cosmjs/proto-signing'
+
+const mnemonic =
+  'play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint'
+const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic)
+// highlight-next-line
+const Client = IgniteClient.plugin([CosmosBankV1Beta1, CosmosStakingV1Beta1])
+
+const client = new Client(
+  {
+    apiURL: 'http://localhost:1317',
+    rpcURL: 'http://localhost:26657',
+    prefix: 'cosmos',
+  },
+  wallet,
+)
+```
+
+## Broadcasting a multi-message transaction
+
+You can also construct TX messages separately and send them in a single TX using
+a global signing client like so:
+
+```typescript title="my-frontend-app/src/main.ts"
+const msg1 = await client.CosmosBankV1Beta1.tx.msgSend({
+  value: {
+    amount: [
+      {
+        amount: '200',
+        denom: 'token',
+      },
+    ],
+    fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7',
+    toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc',
+  },
+})
+
+const msg2 = await client.CosmosBankV1Beta1.tx.msgSend({
+  value: {
+    amount: [
+      {
+        amount: '200',
+        denom: 'token',
+      },
+    ],
+    fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7',
+    toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc',
+  },
+})
+
+const tx_result = await client.signAndBroadcast(
+  [msg1, msg2],
+  {
+    amount: [{ amount: '0', denom: 'stake' }],
+    gas: '200000',
+  },
+  '',
+)
+```
+
+Finally, for additional ease-of-use, apart from the modular client mentioned
+above, each generated module is usable on its own in a stripped-down way by
+exposing a separate txClient and queryClient.
+ +```typescript title="my-frontend-app/src/main.ts" +import { txClient } from '../../ts-client/cosmos.bank.v1beta1' +import { DirectSecp256k1HdWallet } from '@cosmjs/proto-signing' + +const mnemonic = + 'play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint' +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic) + +const client = txClient({ + signer: wallet, + prefix: 'cosmos', + addr: 'http://localhost:26657', +}) + +const tx_result = await client.sendMsgSend({ + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc', + }, + fee: { + amount: [{ amount: '0', denom: 'stake' }], + gas: '200000', + }, + memo: '', +}) +``` + +## Usage with Keplr + +Normally, Keplr provides a wallet object implementing the `OfflineSigner` +interface, so you can simply replace the `wallet` argument in client +instantiation with `window.keplr.getOfflineSigner(chainId)`. However, Keplr +requires information about your chain, like chain ID, denoms, fees, etc. +[`experimentalSuggestChain()`](https://docs.keplr.app/api/guide/suggest-chain) is +a method Keplr provides to pass this information to the Keplr extension. + +The generated client makes this easier by offering a `useKeplr()` method that +automatically discovers the chain information and sets it up for you. 
Thus, you +can instantiate the client without a wallet and then call `useKeplr()` to enable +transacting via Keplr like so: + +```typescript title="my-frontend-app/src/main.ts" +import { Client } from '../../ts-client'; + +const client = new Client({ + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos" + } +); +await client.useKeplr(); +``` + +`useKeplr()` optionally accepts an object argument that contains one or more of +the same keys as the `ChainInfo` type argument of `experimentalSuggestChain()` +allowing you to override the auto-discovered values. + +For example, the default chain name and token precision (which are not recorded +on-chain) are set to `<chainId> Network` and `0` while the ticker for the denom +is set to the denom name in uppercase. If you want to override these, you can do +something like: + +```typescript title="my-frontend-app/src/main.ts" +import { Client } from '../../ts-client'; + +const client = new Client({ + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos" + } +); +await client.useKeplr({ + chainName: 'My Great Chain', + stakeCurrency: { + coinDenom: 'TOKEN', + coinMinimalDenom: 'utoken', + coinDecimals: '6', + }, +}) +``` + +## Wallet switching + +The client also allows you to switch out the wallet for a different one on an +already instantiated client like so: + +```typescript +import { Client } from '../../ts-client'; +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; + +const mnemonic = + 'play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint' +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic); + +const client = new Client({ + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos" + } +); +await client.useKeplr(); + +// broadcast transactions using the Keplr wallet + +client.useSigner(wallet); + 
+// broadcast transactions using the CosmJS wallet +``` diff --git a/docs/versioned_docs/version-v0.26/03-clients/03-vue.md b/docs/versioned_docs/version-v0.26/03-clients/03-vue.md new file mode 100644 index 0000000..a0987a9 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/03-clients/03-vue.md @@ -0,0 +1,174 @@ +# Vue frontend + +Welcome to this tutorial on using Ignite to develop a web application for your +blockchain with Vue 3. Ignite is a tool that simplifies the process of building +a blockchain application by providing a set of templates and generators that can +be used to get up and running quickly. + +One of the features of Ignite is its support for [Vue 3](https://vuejs.org/), a +popular JavaScript framework for building user interfaces. In this tutorial, you +will learn how to use Ignite to create a new blockchain and scaffold a Vue +frontend template. This will give you a basic foundation for your web +application and make it easier to get started building out the rest of your +application. + +Once you have your blockchain and Vue template set up, the next step is to +generate an API client. This will allow you to easily interact with your +blockchain from your web application, enabling you to retrieve data and make +transactions. By the end of this tutorial, you will have a fully functional web +application that is connected to your own blockchain. + +Prerequisites: + +* [Node.js](https://nodejs.org/en/) +* [Keplr](https://www.keplr.app/) Chrome extension + +## Create a blockchain and a Vue app + +Create a new blockchain project: + +``` +ignite scaffold chain example +``` + +To create a Vue frontend template, go to the `example` directory and run the +following command: + +``` +ignite scaffold vue +``` + +This will create a new Vue project in the `vue` directory. This project can be +used with any blockchain, but it depends on an API client to interact with the +blockchain. 
To generate an API client, run the following command in the +`example` directory: + +``` +ignite generate composables +``` + +This command generates two directories: + +* `ts-client`: a framework-agnostic TypeScript client that can be used to + interact with your blockchain. You can learn more about how to use this client + in the [TypeScript client tutorial](/clients/typescript). +* `vue/src/composables`: a collection of Vue 3 + [composables](https://vuejs.org/guide/reusability/composables.html) that wrap + the TypeScript client and make it easier to interact with your blockchain from + your Vue application. + +## Set up Keplr and an account + +Open your browser with the Keplr wallet extension installed. Follow [the +instructions](https://keplr.crunch.help/en/getting-started/creating-a-new-keplr-account) +to create a new account or use an existing one. Make sure to save the mnemonic +phrase as you will need it in the next step. + +Do not use a mnemonic phrase that is associated with an account that holds +assets you care about. If you do, you risk losing those assets. It's a good +practice to create a new account for development purposes. + +Add the account you're using in Keplr to your blockchain's `config.yml` file: + +```yml +accounts: + - name: alice + coins: [20000token, 200000000stake] + - name: bob + coins: [10000token, 100000000stake] + # highlight-start + - name: frank + coins: [10000token, 100000000stake] + mnemonic: struggle since inmate safe logic kite tag web win stay security wonder + # highlight-end +``` + +Replace the `struggle since...` mnemonic with the one you saved in the previous +step. + +Adding an account with a mnemonic to the config file will tell Ignite CLI to add +the account to the blockchain when you start it. This is useful for development +purposes, but you should not do this in production. 
+ +## Start a blockchain and a Vue app + +In the `example` directory run the following command to start your blockchain: + +``` +ignite chain serve +``` + +To start your Vue application, go to the `vue` directory and run the following +command in a separate terminal window: + +``` +npm install && npm run dev +``` + +It is recommended to run `npm install` before starting your app with `npm run +dev` to ensure that all dependencies are installed (including the ones that the +API client has, see `vue/postinstall.js`). + +Open your browser and navigate to +[http://localhost:5173/](http://localhost:5173/). + +![Web app](/img/web-1.png) + +Press "Connect wallet", enter your password into Keplr and press "Approve" to +add your blockchain to Keplr. + +<img src="/img/web-4.png" width="300"/> + +Make sure to select the account you're using for development purposes and the +"Example Network" in Keplr's blockchain dropdown. You should see a list of +assets in your Vue app. + +![Web app](/img/web-5.png) + +Congratulations! You have successfully created a client-side Vue application and +connected it to your blockchain. You can modify the source code of your Vue +application to build out the rest of your project. + +## Setting the address prefix + +It is necessary to set the correct address prefix in order for the Vue app to +properly interact with a Cosmos chain. The address prefix is used to identify +the chain that the app is connected to, and must match the prefix used by the +chain. + +By default, Ignite creates a chain with the `cosmos` prefix. If you have +created your chain with `ignite scaffold chain ... --address-prefix foo` or +manually changed the prefix in the source code of the chain, you need to set the +prefix in the Vue app. + +There are two ways to set the address prefix in a Vue app. + +### Using an environment variable + +You can set the `VITE_ADDRESS_PREFIX` environment variable to the correct +address prefix for your chain. 
This will override the default prefix used by the +app. + +To set the `VITE_ADDRESS_PREFIX` environment variable, you can use the following +command: + +```bash +export VITE_ADDRESS_PREFIX=your-prefix +``` + +Replace `your-prefix` with the actual address prefix for your chain. + +### Setting address prefix in the code + +Alternatively, you can manually set the correct address prefix by replacing the +fallback value of the `prefix` variable in the file `./vue/src/env.ts`. + +To do this, open the file `./vue/src/env.ts` and find the following line: + +```ts title="./vue/src/env.ts" +const prefix = process.env.VITE_ADDRESS_PREFIX || 'your-prefix'; +``` + +Replace `your-prefix` with the actual address prefix for your chain. + +Save the file and restart the Vue app to apply the changes. diff --git a/docs/versioned_docs/version-v0.26/03-clients/04-react.md b/docs/versioned_docs/version-v0.26/03-clients/04-react.md new file mode 100644 index 0000000..9d688d9 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/03-clients/04-react.md @@ -0,0 +1,130 @@ +# React frontend + +Welcome to this tutorial on using Ignite to develop a web application for your +blockchain with React. Ignite is a tool that simplifies the process of building +a blockchain application by providing a set of templates and generators that can +be used to get up and running quickly. + +One of the features of Ignite is its support for [React](https://reactjs.org/), a +popular JavaScript framework for building user interfaces. In this tutorial, you +will learn how to use Ignite to create a new blockchain and scaffold a React +frontend template. This will give you a basic foundation for your web +application and make it easier to get started building out the rest of your +application. + +Once you have your blockchain and React template set up, the next step is to +generate an API client. 
This will allow you to easily interact with your +blockchain from your web application, enabling you to retrieve data and make +transactions. By the end of this tutorial, you will have a fully functional web +application that is connected to your own blockchain. + +Prerequisites: + +* [Node.js](https://nodejs.org/en/) +* [Keplr](https://www.keplr.app/) Chrome extension + +## Create a blockchain and a React app + +Create a new blockchain project: + +``` +ignite scaffold chain example +``` + +To create a React frontend template, go to the `example` directory and run the +following command: + +``` +ignite scaffold react +``` + +This will create a new React project in the `react` directory. This project can be +used with any blockchain, but it depends on an API client to interact with the +blockchain. To generate an API client, run the following command in the +`example` directory: + +``` +ignite generate hooks +``` + +This command generates two directories: + +* `ts-client`: a framework-agnostic TypeScript client that can be used to + interact with your blockchain. You can learn more about how to use this client + in the [TypeScript client tutorial](/clients/typescript). +* `react/src/hooks`: a collection of + [React Hooks](https://reactjs.org/docs/hooks-intro.html) that wrap + the TypeScript client and make it easier to interact with your blockchain from + your React application. + +## Set up Keplr and an account + +Open your browser with the Keplr wallet extension installed. Follow [the +instructions](https://keplr.crunch.help/en/getting-started/creating-a-new-keplr-account) +to create a new account or use an existing one. Make sure to save the mnemonic +phrase as you will need it in the next step. + +Do not use a mnemonic phrase that is associated with an account that holds +assets you care about. If you do, you risk losing those assets. It's a good +practice to create a new account for development purposes. 
+ +Add the account you're using in Keplr to your blockchain's `config.yml` file: + +```yml +accounts: + - name: alice + coins: [20000token, 200000000stake] + - name: bob + coins: [10000token, 100000000stake] + # highlight-start + - name: frank + coins: [10000token, 100000000stake] + mnemonic: struggle since inmate safe logic kite tag web win stay security wonder + # highlight-end +``` + +Replace the `struggle since...` mnemonic with the one you saved in the previous +step. + +Adding an account with a mnemonic to the config file will tell Ignite CLI to add +the account to the blockchain when you start it. This is useful for development +purposes, but you should not do this in production. + +## Start a blockchain and a React app + +In the `example` directory run the following command to start your blockchain: + +``` +ignite chain serve +``` + +To start your React application, go to the `react` directory and run the following +command in a separate terminal window: + +``` +npm install && npm run dev +``` + +It is recommended to run `npm install` before starting your app with `npm run +dev` to ensure that all dependencies are installed (including the ones that the +API client has, see `react/postinstall.js`). + +Open your browser and navigate to +[http://localhost:5173/](http://localhost:5173/). + +![Web app](/img/web-1.png) + +Press "Connect wallet", enter your password into Keplr and press "Approve" to +add your blockchain to Keplr. + +<img src="/img/web-4.png" width="300"/> + +Make sure to select the account you're using for development purposes and the +"Example Network" in Keplr's blockchain dropdown. You should see a list of +assets in your React app. + +![Web app](/img/web-5.png) + +Congratulations! You have successfully created a client-side React application and +connected it to your blockchain. You can modify the source code of your React +application to build out the rest of your project. 
\ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/03-clients/_category_.json b/docs/versioned_docs/version-v0.26/03-clients/_category_.json new file mode 100644 index 0000000..036cfbe --- /dev/null +++ b/docs/versioned_docs/version-v0.26/03-clients/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Develop a client app", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/04-network/01-chain.md b/docs/versioned_docs/version-v0.26/04-network/01-chain.md new file mode 100644 index 0000000..9cec1e9 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/04-network/01-chain.md @@ -0,0 +1,237 @@ +--- +sidebar_position: 1 +description: Ignite Chain. +--- + +# Ignite Chain + +## Introduction + +_Ignite is a blockchain to help launch Cosmos SDK-based blockchains._ + +Using Cosmos SDK and Ignite CLI, developers can quickly create a crypto application that is decentralized, economical for usage, and scalable. The Cosmos SDK framework allows developers to create sovereign application-specific blockchains that become part of the wider [Cosmos ecosystem](https://cosmos.network/ecosystem/apps). Blockchains created with Cosmos SDK use a Proof-of-Stake (PoS) consensus protocol that requires validators to secure the chain. + +Even though tools like Ignite CLI simplify the development of a Cosmos SDK blockchain, launching a new chain is a highly complex process. One of the major challenges of developing and launching your own sovereign blockchain is ensuring the security of the underlying consensus. Since Cosmos SDK chains are based on the PoS consensus, each blockchain requires initial coin allocations and validators before they can be launched, which presents developers with significant challenges, such as determining their chain's tokenomics or coordinating a robust validator set. 
+ +The initial coin allocations and validators are described in a JSON-formatted genesis file that is shared among all initial nodes in the network. This genesis file defines the initial state of the application. Based on PoS, secure chains require the initial allocation of coins to be well distributed so that no single validator holds more than 1/3 of all tokens and receives a disproportionate amount of voting power. + +Along with ensuring the security of the underlying consensus, another highly difficult task in launching a new blockchain is attracting a diverse set of validators for the genesis file. Many promising projects fail to capture the attention of a sufficient number of trustworthy validators to secure their chains due to a lack of resources or experience. + +The Ignite Chain has, therefore, been conceived to facilitate the launch of Cosmos SDK blockchains by helping developers to navigate the complexities of launching a blockchain and coordinate the genesis of a new chain. Using the decentralized nature of blockchain, Ignite's coordination features help blockchain builders connect with validators and investors, speeding up the time to market of their projects and chances of success. + +Commands to interact with Ignite Chain are integrated into Ignite CLI and allow launching chains from it. Integration with Ignite Chain allows the CLI to support the developer in the entire lifecycle of realizing a Cosmos project, from the development and experimentation of the blockchain to the launch of its mainnet. + +## What is Ignite Chain + +Ignite Chain is a secure platform that simplifies the launch of Cosmos SDK-based chains, lending vital resources and support at the coordination, preparation, and launch stages. Ignite provides the tools that blockchain projects need to overcome the complexities of launching their chain, from validator coordination and token issuance to fundraising and community building. 
+ +Ignite facilitates the launch of new chains with an overall launch process during three phases: + +- Coordination +- Preparation +- Launch + +To reduce friction at each phase, Ignite provides an immutable and universal database for validator coordination. + +In the future, Ignite will also offer: + +- Token issuance: Ignite allows the issuance of tokens (called vouchers) that represent a share + allocation of a future mainnet network +- A fundraising platform for selling vouchers +- A permissionless framework to reward validator activities on a launched testnet network + +## Validator coordination + +To launch a chain in the Cosmos ecosystem, the validators must start nodes that connect to each other to create the new blockchain network. A node must be started from a file called the genesis file. The genesis file must be identical on all validator nodes before the new chain can be started. + +![genesis](./assets/genesis.png) + +The JSON-formatted genesis file contains information on the initial state of the chain, including coin allocations, the list of validators, various parameters for the chain like the maximum number of validators actively signing blocks, and the specific launch time. Because each validator has the same genesis file, the blockchain network starts automatically when the genesis time is reached. + +![launch](./assets/launch.png) + +### Ignite as a coordination source of truth + +Ignite Chain acts as a source of truth for new chains to coordinate a validator set and for validators to generate the genesis for a chain launch. The blockchain doesn’t directly store the final genesis file in its own ledger but rather stores information that allows generating the genesis file in a deterministic manner. + +The information stored on Ignite that supports deterministic generation of the genesis file for a specific chain launch is referred to as the _launch information_. 
When creating a new chain on Ignite, the coordinator provides the initial launch information. Then, through on-chain coordination, this launch information is updated by interacting with the blockchain by sending messages. When the chain is ready to be launched, the genesis file is generated by calling a genesis generation algorithm that uses the launch information. + +**GenesisGenerate(LaunchInformation) => genesis.json** + +The genesis generation algorithm is officially and formally specified. The official implementation of the genesis generation algorithm is developed in Go using Ignite CLI. However, any project is free to develop its own implementation of the algorithm as long as it complies with the specification of the algorithm. + +The genesis generation algorithm is not part of the on-chain protocol. In order to successfully launch a new chain, all validators must use the algorithm to generate their genesis using the launch information. The algorithm deterministically generates the genesis from the launch information that is stored on the Ignite chain. + +If any element of the launch information is censored, for example, removing an account balance, the launched chain reputation is negatively impacted and implies that the majority of validators agree on not using: + +- The tamper-proof launch information +- The official genesis generation algorithm + +Outside of the genesis generation, the genesis generation algorithm specification gives guidance on how to set up your network configuration. For example, the launch information can contain the addresses of the persistent peers of the blockchain network. + +![generation](./assets/generation.png) + +## Launch information + +Launch information can be created or updated in three different ways: + +1. Defined during chain creation but updatable by the coordinator after creation +2. Determined through coordination +3. 
Determined through specific on-chain logic not related to coordination + +### 1 - Launch information determined during chain creation: + +- `GenesisChainID`: The identifier for the network +- `SourceURL`: The URL of the git repository of the source code for building the blockchain + node binary +- `SourceHash`: The specific hash that identifies the release of the source code +- `InitialGenesis`: A multiformat structure that specifies the initial genesis for the chain + launch before running the genesis generation algorithm + +### 2 - Launch information determined through coordination: + +- `GenesisAccounts`: A list of genesis accounts for the chain, comprised of addresses with associated balances +- `VestingAccounts`: A list of genesis accounts with vesting options +- `GenesisValidators`: A list of the initial validators at chain launch +- `ParamChanges`: A list of module param changes in the genesis state + +### 3 - Launch information determined through on-chain logic: + +- `GenesisTime`: The timestamp for the network start, also referred to as LaunchTime + +### Initial genesis + +The launch information contains the initial genesis structure. This structure provides the information for generating the initial genesis before running the genesis generation algorithm and finalizing the genesis file. 
+ +The initial genesis structure can be: + +- `DefaultGenesis`: the default genesis file is generated by the chain binary init command +- `GenesisURL`: the initial genesis for a chain launch is an existing genesis file that is + fetched from a URL and then modified with the required algorithm - this initial genesis type should be used when the initial genesis state is extensive, + containing a lot of accounts for token distribution, containing records for an + airdrop +- `GenesisConfig`: the initial genesis for a chain launch is generated from an Ignite CLI + config that contains genesis accounts and module parameters - this initial genesis type should be used when the coordinator doesn’t have extensive state for the initial genesis but some module parameters must be customized. For example, the staking bond denom for the staking token + +## Coordination process + +The coordination process starts immediately after the chain is created and ends when the coordinator triggers the launch of the chain. + +The launch information is updated during the coordination process. + +During the coordination process, any entity can send requests to the network. A request is an object whose content specifies updates to the launch information. + +The chain coordinator approves or rejects the requests: + +- If a request is approved, the content is applied to the launch information +- If the request is rejected, no change is made to the launch information + +The request creator can also directly reject or cancel the request. + +Each chain contains a request pool that contains all requests. Each request has a status: + +- _PENDING_: Waiting for the approval of the coordinator +- _APPROVED_: Approved by the coordinator, its content has been applied to the launch + information +- _REJECTED_: Rejected by the coordinator or the request creator + +Approving or rejecting a request is irreversible. 
The only possible status transitions are: + +- _PENDING_ to _APPROVED_ +- _PENDING_ to _REJECTED_ + +To revert the effect on launch information from a request, a user must send the eventual opposite request (example: AddAccount → RemoveAccount). + +Since the coordinator is the sole approver for requests, each request created by the coordinator is immediately set to APPROVED and its content is applied to the launch information. + +![requests](./assets/requests.png) + +## Available requests + +Six types of requests can be sent to the Ignite chain: + +- `AddGenesisAccount` +- `AddVestingAccount` +- `AddGenesisValidator` +- `RemoveAccount` +- `RemoveValidator` +- `ChangeParam` + +**`AddGenesisAccount`** requests a new account for the chain genesis with a coin balance. This request content is composed of two fields: + +- Account address, must be unique in launch information +- Account balance + +The request automatically fails to be applied if a genesis account or a vesting account with an identical address is already specified in the launch information. + +**`AddVestingAccount`** requests a new account for the chain genesis with a coin balance and vesting options. This request content is composed of two fields: + +- Address of the account +- Vesting options of the account + +The currently supported vesting option is delayed vesting where the total balance of the account is specified and a number of tokens of the total balance of the account are vested only after an end time is reached. + +The request automatically fails to be applied if a genesis account or a vesting account with an identical address is already specified in the launch information. + +**`AddGenesisValidator`** requests a new genesis validator for the chain. A genesis validator in a Cosmos SDK blockchain represents an account with an existing balance in the genesis that self-delegates part of its balance during genesis initialization to become a bonded validator when the network starts. 
In most cases, the validator must first request an account with `AddGenesisAccount` before requesting to be a validator, unless they already have an account with a balance in the initial genesis of the chain. + +Self-delegation during genesis initialization is performed with a [Cosmos SDK module named genutils](https://pkg.go.dev/github.com/cosmos/cosmos-sdk/x/genutil). In the genesis, the _genutils_ module contains objects called gentx that represent transactions that were executed before the network launch. To be a validator when the network starts, a future validator must provide a gentx that contains the transaction for the self-delegation from their account. + +The request content is composed of five fields: + +- The gentx for the validator self-delegation +- The address of the validator +- The consensus public key of the validator node +- The self-delegation +- The peer information for the validator node + +The request automatically fails to be applied if a validator with the same address already exists in the launch information. + +**`RemoveAccount`** requests the removal of a genesis or vesting account from the launch information. The request content contains the address of the account to be removed. The request automatically fails to be applied if no genesis or vesting account with the specified address exists in the launch information. + +**`RemoveValidator`** requests the removal of a genesis validator from the launch information. The request content contains the address of the validator to be removed. The request automatically fails to be applied if no validator account with the specified address exists in the launch information. + +**`ChangeParam`** requests the modification of a module parameter in the genesis. Modules in a Cosmos SDK blockchain can have parameters that will configure the logic of the blockchain. The parameters can be changed through governance once the blockchain network is live. 
During the launch process, the initial parameters of the chain are set in the genesis. + +This request content is composed of three fields: + +- The name of the module +- The name of the parameter +- The value of the parameter represented as generic data + +### Request validity + +Some checks are verified on-chain when applying a request. For example, a genesis account can’t be added twice. However, some other validity properties can’t be checked on-chain. For example, because a gentx is represented through a generic byte array in the blockchain, an on-chain check is not possible to verify that the gentx is correctly signed or that the provided consensus public key that is stored on-chain corresponds to the consensus public key in the gentx. This gentx verification is the responsibility of the client interacting with the blockchain to ensure the requests have a valid format and allow for the start of the chain. Some validity checks are specified in the genesis generation algorithm. + +## Launch process + +The overall launch process of a chain through Ignite is composed of three phases: + +- Coordination phase +- Preparation phase +- Launch phase + +After the coordinator creates the chain on Ignite and provides the initial launch information, the launch process enters the coordination phase where users can send requests for the chain genesis. After the coordinator deems the chain as ready to be launched, they trigger the launch of the chain. During this operation, the coordinator provides the launch time, or genesis, time for the chain. + +Once the launch is triggered and before the launch time is reached, the chain launch process enters the preparation phase. During the preparation phase, requests can no longer be sent and the launch information of the chain is finalized. The validators run the genesis generation algorithm to get the final genesis of the chain and prepare their node. 
The remaining time must provide enough time for the validators to prepare their nodes. This launch time is set by the coordinator, although a specific range for the remaining time is imposed. + +Once the launch time is reached, the chain network is started and the chain launch process enters the launch phase. At this point, since the chain is live, no further action is required from the coordinator. However, under some circumstances, the chain might have failed to start. For example, a chain does not start if every validator in the genesis does not start their node. + +The coordinator has the ability to revert the chain launch. Reverting the chain launch sets the launch process back to the coordination phase where requests can be sent again to allow addressing the issue related to the launch failure. Reverting the launch has an effect only on Ignite. If the new chain is effectively launched, reverting the launch on Ignite has no effect on the chain liveness. Reverting the launch of the chain can be performed only by the coordinator after the launch time plus a delay called the revert delay. + +![process](./assets/process.png) + +## Genesis generation + +To ensure determinism, genesis generation rules must be rigorously specified depending on the launch information of the chain. 
+ +The general steps for the genesis generation are: + +- Building the blockchain node binary from source +- Generating the initial genesis +- Setting the chain ID +- Setting the genesis time +- Adding genesis accounts +- Adding genesis accounts with vesting options +- Adding gentxs for genesis validators +- Changing module params from param changes diff --git a/docs/versioned_docs/version-v0.26/04-network/02-introduction.md b/docs/versioned_docs/version-v0.26/04-network/02-introduction.md new file mode 100644 index 0000000..dc711fe --- /dev/null +++ b/docs/versioned_docs/version-v0.26/04-network/02-introduction.md @@ -0,0 +1,75 @@ +--- +sidebar_position: 2 +description: Introduction to Ignite Network commands. +--- + +# Ignite Network commands + +The `ignite network` commands allow you to coordinate the launch of sovereign Cosmos blockchains by interacting with the +Ignite Chain. + +To launch a Cosmos blockchain you need someone to be a coordinator and others to be validators. These are just roles, +anyone can be a coordinator or a validator. + +- A coordinator publishes information about a chain to be launched on the Ignite blockchain, approves validator requests + and coordinates the launch. +- Validators send requests to join a chain and start their nodes when a blockchain is ready for launch. + +## Launching a chain on Ignite + +Launching a chain with the CLI can be as simple as a few short commands using the `ignite network` command +namespace. + +> **NOTE:** `ignite n` can also be used as a shortcut for `ignite network`. + +To publish the information about your chain as a coordinator, run the following command (the URL should point to a +repository with a Cosmos SDK chain): + +``` +ignite network chain publish github.com/ignite/example +``` + +This command will return the launch identifier you will be using in the following +commands. Let's say this identifier is 42. +Next, ask validators to initialize their nodes and request to join the network. 
+For a testnet you can use the default values suggested by the +CLI. + +``` +ignite network chain init 42 +ignite network chain join 42 --amount 95000000stake +``` + +As a coordinator, list all validator requests: + +``` +ignite network request list 42 +``` + +Approve validator requests: + +``` +ignite network request approve 42 1,2 +``` + +Once you've approved all validators you need in the validator set, announce that +the chain is ready for launch: + +``` +ignite network chain launch 42 +``` + +Validators can now prepare their nodes for launch: + +``` +ignite network chain prepare 42 +``` + +The output of this command will show a command that a validator would use to +launch their node, for example `exampled --home ~/.example`. After enough +validators launch their nodes, a blockchain will be live. + +--- + +The next two sections provide more information on the process of coordinating a chain launch from a coordinator and +participating in a chain launch as a validator. diff --git a/docs/versioned_docs/version-v0.26/04-network/03-coordinator.md b/docs/versioned_docs/version-v0.26/04-network/03-coordinator.md new file mode 100644 index 0000000..76d5997 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/04-network/03-coordinator.md @@ -0,0 +1,146 @@ +--- +sidebar_position: 3 +description: Ignite Network commands for coordinators. +--- + +# Coordinator Guide + +Coordinators organize and launch new chains on Ignite Chain. + +--- + +## Publish a chain + +The first step in the process of a chain launch is for the coordinator to publish the intention of launching a chain. +The `publish` command publishes the intention of launching a chain on Ignite from a project git repository. 
+ +```shell +ignite n chain publish https://github.com/ignite/example +``` + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Chain's binary built +✔ Blockchain initialized +✔ Genesis initialized +✔ Network published +⋆ Launch ID: 3 +``` + +`LaunchID` identifies the published blockchain on Ignite blockchain. + +### Specify an initial genesis + +During coordination, new genesis accounts and genesis validators are added into the chain genesis. +The initial genesis where these accounts are added is by default the default genesis generated by the chain binary. + +The coordinator can specify a custom initial genesis for the chain launch with the `--genesis-url` flag. This custom initial +genesis can contain additional default genesis accounts and custom params for the chain modules. + +A URL must be provided for the `--genesis-url` flag. This can either directly point to a JSON genesis file or a tarball +containing a genesis file. + +```shell +ignite n chain publish https://github.com/ignite/example --genesis-url https://raw.githubusercontent.com/ignite/example/master/genesis/gen.json +``` + +## Approve validator requests + +When coordinating for a chain launch, validators send requests. These represent requests to be part of the genesis as a +validator for the chain. + +The coordinator can list these requests: + +``` +ignite n request list 3 +``` + +> **NOTE:** here "3" is specifying the `LaunchID`. 
+ +**Output** + +``` +Id Status Type Content +1 APPROVED Add Genesis Account spn1daefnhnupn85e8vv0yc5epmnkcr5epkqncn2le, 100000000stake +2 APPROVED Add Genesis Validator e3d3ca59d8214206839985712282967aaeddfb01@84.118.211.157:26656, spn1daefnhnupn85e8vv0yc5epmnkcr5epkqncn2le, 95000000stake +3 PENDING Add Genesis Account spn1daefnhnupn85e8vv0yc5epmnkcr5epkqncn2le, 95000000stake +4 PENDING Add Genesis Validator b10f3857133907a14dca5541a14df9e8e3389875@84.118.211.157:26656, spn1daefnhnupn85e8vv0yc5epmnkcr5epkqncn2le, 95000000stake +``` + +The coordinator can either approve or reject these requests. + +To approve the requests: + +``` +ignite n request approve 3 3,4 +``` + +> **NOTE:** when selecting a list of requests, both syntaxes can be used: `1,2,3,4` and `1-3,4`. + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Requests format verified +✔ Blockchain initialized +✔ Genesis initialized +✔ Genesis built +✔ The network can be started +✔ Request(s) #3, #4 verified +✔ Request(s) #3, #4 approved +``` + +Ignite CLI automatically verifies that the requests can be applied for the genesis, the approved requests don't generate +an invalid genesis. + +To reject the requests: + +``` +ignite n request reject 3 3,4 +``` + +**Output** + +``` +✔ Request(s) #3, #4 rejected +``` + +--- + +## Initiate the launch of a chain + +When enough validators are approved for the genesis and the coordinator deems the chain ready to be launched, the +coordinator can initiate the launch of the chain. + +This action will finalize the genesis of chain, meaning that no new requests can be approved for the chain. + +This action also sets the launch time (or genesis time) for the chain, the time when the blockchain network will go +live. + +``` +ignite n chain launch 3 +``` + +**Output** + +``` +✔ Chain 3 will be launched on 2022-10-01 09:00:00.000000 +0200 CEST +``` + +This example output shows the launch time of the chain on the network. 
+ +### Set a custom launch time + +By default, the launch time will be set to the earliest date possible. In practice, the validators should have time to +prepare their node for the network launch. If a validator fails to be online, they can get jailed for inactivity in the +validator set. + +The coordinator can specify a custom time with the `--launch-time` flag. + +``` +ignite n chain launch 3 --launch-time 2022-01-01T00:00:00Z +``` diff --git a/docs/versioned_docs/version-v0.26/04-network/04-validator.md b/docs/versioned_docs/version-v0.26/04-network/04-validator.md new file mode 100644 index 0000000..51a55e7 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/04-network/04-validator.md @@ -0,0 +1,161 @@ +--- +sidebar_position: 4 +description: Ignite Network commands for validators. +--- + +# Validator Guide + +Validators join as genesis validators for chain launches on Ignite Chain. + +--- + +## List all published chains + +Validators can list and explore published chains to be launched on Ignite. + +``` +ignite n chain list +``` + +**Output** + +``` +Launch Id Chain Id Source Phase + +3 example-1 https://github.com/ignite/example coordinating +2 spn-10 https://github.com/tendermint/spn launched +1 example-20 https://github.com/tendermint/spn launching +``` + +- `Launch ID` is the unique identifier of the chain on Ignite. This is the ID used to interact with the chain launch. +- `Chain ID` represents the identifier of the chain network once it is launched. It should be a unique identifier in + practice but doesn't need to be unique on Ignite. +- `Source` is the repository URL of the project. +- `Phase` is the current phase of the chain launch. 
A chain can have 3 different phases: + - `coordinating`: means the chain is open to receive requests from validators + - `launching`: means the chain no longer receives requests but it hasn't been launched yet + - `launched`: means the chain network has been launched + +--- + +## Request network participation + +When the chain is in the coordination phase, validators can request to be a genesis validator for the chain. +Ignite CLI supports an automatic workflow that can setup a node for the validator and a workflow for advanced users with +a specific setup for their node. + +### Simple Flow + +`ignite` can handle validator setup automatically. Initialize the node and generate a gentx file with default values: + +``` +ignite n chain init 3 +``` + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Blockchain initialized +✔ Genesis initialized +? Staking amount 95000000stake +? Commission rate 0.10 +? Commission max rate 0.20 +? Commission max change rate 0.01 +⋆ Gentx generated: /Users/lucas/spn/3/config/gentx/gentx.json +``` + +Now, create and broadcast a request to join a chain as a validator: + +``` +ignite n chain join 3 --amount 100000000stake +``` + +The join command accepts a `--amount` flag with a comma-separated list of tokens. If the flag is provided, the +command will broadcast a request to add the validator’s address as an account to the genesis with the specific amount. + +**Output** + +``` +? Peer's address 192.168.0.1:26656 +✔ Source code fetched +✔ Blockchain set up +✔ Account added to the network by the coordinator! +✔ Validator added to the network by the coordinator! +``` + +--- + +### Advanced Flow + +Using a more advanced setup (e.g. 
custom `gentx`), validators must provide an additional flag to their command +to point to the custom file: + +``` +ignite n chain join 3 --amount 100000000stake --gentx ~/chain/config/gentx/gentx.json +``` + +--- + +## Launch the network + +### Simple Flow + +Generate the final genesis and config of the node: + +``` +ignite n chain prepare 3 +``` + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Chain's binary built +✔ Genesis initialized +✔ Genesis built +✔ Chain is prepared for launch +``` + +Next, start the node: + +``` +exampled start --home ~/spn/3 +``` + +--- + +### Advanced Flow + +Fetch the final genesis for the chain: + +``` +ignite n chain show genesis 3 +``` + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Blockchain initialized +✔ Genesis initialized +✔ Genesis built +⋆ Genesis generated: ./genesis.json +``` + +Next, fetch the persistent peer list: + +``` +ignite n chain show peers 3 +``` + +**Output** + +``` +⋆ Peer list generated: ./peers.txt +``` + +The fetched genesis file and peer list can be used for a manual node setup. diff --git a/docs/versioned_docs/version-v0.26/04-network/05-coordination.md b/docs/versioned_docs/version-v0.26/04-network/05-coordination.md new file mode 100644 index 0000000..eb26b71 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/04-network/05-coordination.md @@ -0,0 +1,72 @@ +--- +sidebar_position: 5 +description: Other commands for coordination. +--- + +# Other commands for coordination + +Ignite CLI offers various other commands to coordinate chain launches that can be used by coordinators, validators, or other participants. + +The requests follow the same logic as the request for validator participation; they must be approved by the chain coordinator to be effective in the genesis. + +--- + +## Request a genesis account + +Any participant can request a genesis account with an associated balance for the chain. 
+The participant must provide an address with a comma-separated list of token balances. + +Any prefix can be used for the Bech32 address, it is automatically converted into `spn` on the Ignite Chain. + +``` +ignite n request add-account 3 spn1pe5h2gelhu8aukmrnj0clmec56aspxzuxcy99y 1000stake +``` + +**Output** + +``` +Source code fetched +Blockchain set up +⋆ Request 10 to add account to the network has been submitted! +``` +--- + +## Request to remove a genesis account + +Any participant can request to remove a genesis account from the chain genesis. +It might be the case if, for example, a user suggests an account balance that is so high it could harm the network. +The participant must provide the address of the account. + +Any prefix can be used for the Bech32 address, it is automatically converted into `spn` on the Ignite Chain. + +``` +ignite n request remove-account 3 spn1pe5h2gelhu8aukmrnj0clmec56aspxzuxcy99y +``` + +**Output** + +``` +Request 11 to remove account from the network has been submitted! +``` +--- + +## Request to remove a genesis validator + +Any participant can request to remove a genesis validator (gentx) from the chain genesis. +It might be the case if, for example, a chain failed to launch because of some validators, and they must be removed from genesis. +The participant must provide the address of the validator account (same format as genesis account). + +Any prefix can be used for the Bech32 address, it is automatically converted into `spn` on the Ignite Chain. + +The request removes only the gentx from the genesis but not the associated account balance. + +``` +ignite n request remove-validator 429 spn1pe5h2gelhu8aukmrnj0clmec56aspxzuxcy99y +``` + +**Output** + +``` +Request 12 to remove validator from the network has been submitted! 
+``` +--- \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/04-network/_category_.json b/docs/versioned_docs/version-v0.26/04-network/_category_.json new file mode 100644 index 0000000..c45c6eb --- /dev/null +++ b/docs/versioned_docs/version-v0.26/04-network/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Launch a chain", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/04-network/assets/generation.png b/docs/versioned_docs/version-v0.26/04-network/assets/generation.png new file mode 100644 index 0000000..c10cac2 Binary files /dev/null and b/docs/versioned_docs/version-v0.26/04-network/assets/generation.png differ diff --git a/docs/versioned_docs/version-v0.26/04-network/assets/genesis.png b/docs/versioned_docs/version-v0.26/04-network/assets/genesis.png new file mode 100644 index 0000000..b640db4 Binary files /dev/null and b/docs/versioned_docs/version-v0.26/04-network/assets/genesis.png differ diff --git a/docs/versioned_docs/version-v0.26/04-network/assets/launch.png b/docs/versioned_docs/version-v0.26/04-network/assets/launch.png new file mode 100644 index 0000000..4ea50b2 Binary files /dev/null and b/docs/versioned_docs/version-v0.26/04-network/assets/launch.png differ diff --git a/docs/versioned_docs/version-v0.26/04-network/assets/process.png b/docs/versioned_docs/version-v0.26/04-network/assets/process.png new file mode 100644 index 0000000..a5c21d2 Binary files /dev/null and b/docs/versioned_docs/version-v0.26/04-network/assets/process.png differ diff --git a/docs/versioned_docs/version-v0.26/04-network/assets/requests.png b/docs/versioned_docs/version-v0.26/04-network/assets/requests.png new file mode 100644 index 0000000..d097e68 Binary files /dev/null and b/docs/versioned_docs/version-v0.26/04-network/assets/requests.png differ diff --git a/docs/versioned_docs/version-v0.26/05-contributing/01-docs.md b/docs/versioned_docs/version-v0.26/05-contributing/01-docs.md new file mode 100644 
index 0000000..4c05ed2 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/05-contributing/01-docs.md @@ -0,0 +1,105 @@ +--- +sidebar_position: 1 +slug: /contributing +--- + +# Improving documentation + +Thank you for visiting our repository and considering making contributions. We +appreciate your interest in helping us to create and maintain awesome tutorials +and documentation. + +## Using this repo + +Review existing [Ignite CLI issues](https://github.com/ignite/cli/issues) to see +if your question has already been asked and answered. + +- To provide feedback, file an issue and provide generous details to help us + understand how we can make it better. +- To provide a fix, make a direct contribution. If you're not a member or + maintainer, fork the repo and then submit a pull request (PR) from your forked + repo to the `main` branch. +- Start by creating a draft pull request. Create your draft PR early, even if + your work is just beginning or incomplete. Your draft PR indicates to the + community that you're working on something and provides a space for + conversations early in the development process. Merging is blocked for `Draft` + PRs, so they provide a safe place to experiment and invite comments. + +## Reviewing technical content PRs + +Some of the best content contributions come during the PR review cycles. Follow +best practices for technical content PR reviews just like you do for code +reviews. + +- For in-line suggestions, use the [GitHub suggesting + feature](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/commenting-on-a-pull-request) + . +- The PR owner can merge in your suggested commits one at a time or in batch + (preferred). +- When you are providing a more granular extensive review that results in more + than 20 in-line suggestions, go ahead and check out the branch and make the + changes yourself. 
+ +## Writing and contributing + +We welcome contributions to the docs and tutorials. + +Our technical content follows the [Google developer documentation style +guide](https://developers.google.com/style). Highlights to help you get started: + +- [Highlights](https://developers.google.com/style/highlights) +- [Word list](https://developers.google.com/style/word-list) +- [Style and tone](https://developers.google.com/style/tone) +- [Writing for a global + audience](https://developers.google.com/style/translation) +- [Cross-references](https://developers.google.com/style/cross-references) +- [Present tense](https://developers.google.com/style/tense) + +The Google guidelines include more material than is listed here and are used as +a guide that enables easy decision-making about proposed content changes. + +Other useful resources: + +- [Google Technical Writing Courses](https://developers.google.com/tech-writing) +- [GitHub Guides Mastering + Markdown](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax) + +## Where can I find the tutorials and docs? + +Technical content includes knowledge base articles and interactive tutorials. + +- The Ignite CLI Developer Tutorials content is in the `docs/guide` folder. +- The Knowledge Base content is in the `docs/kb` folder. +- Upgrade information is in the `docs/migration` folder. + +Note: The CLI docs are auto-generated and do not support doc updates. + +Locations and folders for other content can vary. Explore the self-describing +folders for the content that you are interested in. Some articles and tutorials +reside in a single Markdown file while sub-folders might be present for other +tutorials. + +As always, work-in-progress content might be happening in other locations and +repos. + +## Who works on the tutorials? + +The Ignite product team developers are focused on building Ignite CLI and +improving the developer experience. 
The Ignite Ecosystem Development team owns +the technical content and tutorials and manages developer onboarding. + +Meet the [people behind Ignite CLI and our +contributors](https://github.com/ignite/cli/graphs/contributors). + +## Viewing docs builds + +Use a preview to see what your changes will look like in production before the +updated pages are published. + +- While a PR is in draft mode, you can rely on using the preview feature in + Markdown. +- After the PR moves from **Draft** to **Ready for review**, the CI status + checks generate a deployment preview. This preview stays up to date as you + continue to work and commit new changes to the same branch. A `Docs Deploy + Preview / build_and_deploy (pull_request)` preview on a GitHub actions URL is + unique for that PR. diff --git a/docs/versioned_docs/version-v0.26/05-contributing/_category_.json b/docs/versioned_docs/version-v0.26/05-contributing/_category_.json new file mode 100644 index 0000000..094b1f3 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/05-contributing/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Contribute to Ignite", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/06-migration/_category_.json b/docs/versioned_docs/version-v0.26/06-migration/_category_.json new file mode 100644 index 0000000..9460d57 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/06-migration/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Migration", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/06-migration/readme.md b/docs/versioned_docs/version-v0.26/06-migration/readme.md new file mode 100644 index 0000000..978e90e --- /dev/null +++ b/docs/versioned_docs/version-v0.26/06-migration/readme.md @@ -0,0 +1,14 @@ +--- +sidebar_position: 0 +--- + +# Migration Guides + +Welcome to the section on upgrading to a newer version of Ignite CLI! 
If you're +looking to update to the latest version, you'll want to start by checking the +documentation to see if there are any special considerations or instructions you +need to follow. + +If there is no documentation for the latest version of Ignite CLI, it's +generally safe to assume that there were no breaking changes, and you can +proceed with using the latest version with your project. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/06-migration/v0.18.md b/docs/versioned_docs/version-v0.26/06-migration/v0.18.md new file mode 100644 index 0000000..c516986 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/06-migration/v0.18.md @@ -0,0 +1,458 @@ +--- +sidebar_position: 999 +title: v0.18.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.18, changes are required to use Ignite CLI v0.18. +--- + +# Upgrading a Blockchain to use Ignite CLI v0.18 + +Ignite CLI v0.18 comes with Cosmos SDK v0.44. This version of Cosmos SDK introduced changes that are not compatible with +chains that were scaffolded with Ignite CLI versions lower than v0.18. + +**Important:** After upgrading from Ignite CLI v0.17.3 to Ignite CLI v0.18, you must update the default blockchain +template to use blockchains that were scaffolded with earlier versions. + +These instructions are written for a blockchain that was scaffolded with the following command: + +``` +ignite scaffold chain github.com/username/mars +``` + +If you used a different module path, replace `username` and `mars` with the correct values for your blockchain. + +## Blockchain + +For each file listed, make the required changes to the source code of the blockchain template. 
+ +### go.mod + +``` +module github.com/username/mars + +go 1.16 + +require ( + github.com/cosmos/cosmos-sdk v0.44.0 + github.com/cosmos/ibc-go v1.2.0 + github.com/gogo/protobuf v1.3.3 + github.com/google/go-cmp v0.5.6 // indirect + github.com/gorilla/mux v1.8.0 + github.com/grpc-ecosystem/grpc-gateway v1.16.0 + github.com/spf13/cast v1.3.1 + github.com/spf13/cobra v1.1.3 + github.com/stretchr/testify v1.7.0 + github.com/tendermint/spm v0.1.6 + github.com/tendermint/tendermint v0.34.13 + github.com/tendermint/tm-db v0.6.4 + google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83 + google.golang.org/grpc v1.40.0 +) + +replace ( + github.com/99designs/keyring => github.com/cosmos/keyring v1.1.7-0.20210622111912-ef00f8ac3d76 + github.com/gogo/protobuf => github.com/regen-network/protobuf v1.3.3-alpha.regen.1 + google.golang.org/grpc => google.golang.org/grpc v1.33.2 +) +``` + +### app/app.go + +```go +package app + +import ( + //... + // Add the following packages: + "github.com/cosmos/cosmos-sdk/x/feegrant" + feegrantkeeper "github.com/cosmos/cosmos-sdk/x/feegrant/keeper" + feegrantmodule "github.com/cosmos/cosmos-sdk/x/feegrant/module" + + "github.com/cosmos/ibc-go/modules/apps/transfer" + ibctransferkeeper "github.com/cosmos/ibc-go/modules/apps/transfer/keeper" + ibctransfertypes "github.com/cosmos/ibc-go/modules/apps/transfer/types" + ibc "github.com/cosmos/ibc-go/modules/core" + ibcclient "github.com/cosmos/ibc-go/modules/core/02-client" + ibcporttypes "github.com/cosmos/ibc-go/modules/core/05-port/types" + ibchost "github.com/cosmos/ibc-go/modules/core/24-host" + ibckeeper "github.com/cosmos/ibc-go/modules/core/keeper" + // Remove the following packages: + // transfer "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer" + // ibctransferkeeper "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer/keeper" + // ibctransfertypes "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer/types" + // ibc "github.com/cosmos/cosmos-sdk/x/ibc/core" + // 
ibcclient "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client" + // porttypes "github.com/cosmos/cosmos-sdk/x/ibc/core/05-port/types" + // ibchost "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" + // ibckeeper "github.com/cosmos/cosmos-sdk/x/ibc/core/keeper" +) + +var ( + //... + ModuleBasics = module.NewBasicManager( + //... + slashing.AppModuleBasic{}, + // Add feegrantmodule.AppModuleBasic{}, + feegrantmodule.AppModuleBasic{}, // <-- + ibc.AppModuleBasic{}, + //... + ) + //... +) + +type App struct { + //... + // Replace codec.Marshaler with codec.Codec + appCodec codec.Codec // <-- + // Add FeeGrantKeeper + FeeGrantKeeper feegrantkeeper.Keeper // <-- +} + +func New( /*...*/ ) { + //bApp.SetAppVersion(version.Version) + bApp.SetVersion(version.Version) // <-- + + keys := sdk.NewKVStoreKeys( + //... + upgradetypes.StoreKey, + // Add feegrant.StoreKey + feegrant.StoreKey, // <-- + evidencetypes.StoreKey, + //... + ) + + app.FeeGrantKeeper = feegrantkeeper.NewKeeper(appCodec, keys[feegrant.StoreKey], app.AccountKeeper) // <-- + // Add app.BaseApp as the last argument to upgradekeeper.NewKeeper + app.UpgradeKeeper = upgradekeeper.NewKeeper(skipUpgradeHeights, keys[upgradetypes.StoreKey], appCodec, homePath, app.BaseApp) + + app.IBCKeeper = ibckeeper.NewKeeper( + // Add app.UpgradeKeeper + appCodec, keys[ibchost.StoreKey], app.GetSubspace(ibchost.ModuleName), app.StakingKeeper, app.UpgradeKeeper, scopedIBCKeeper, + ) + + govRouter.AddRoute(govtypes.RouterKey, govtypes.ProposalHandler). + //... + // Replace NewClientUpdateProposalHandler with NewClientProposalHandler + AddRoute(ibchost.RouterKey, ibcclient.NewClientProposalHandler(app.IBCKeeper.ClientKeeper)) + + // Replace porttypes with ibcporttypes + ibcRouter := ibcporttypes.NewRouter() + + app.mm.SetOrderBeginBlockers( + upgradetypes.ModuleName, + // Add capabilitytypes.ModuleName, + capabilitytypes.ModuleName, + minttypes.ModuleName, + //... 
+ // Add feegrant.ModuleName, + feegrant.ModuleName, + ) + + // Add app.appCodec as an argument to module.NewConfigurator: + app.mm.RegisterServices(module.NewConfigurator(app.appCodec, app.MsgServiceRouter(), app.GRPCQueryRouter())) + + // Replace: + // app.SetAnteHandler( + // ante.NewAnteHandler( + // app.AccountKeeper, app.BankKeeper, ante.DefaultSigVerificationGasConsumer, + // encodingConfig.TxConfig.SignModeHandler(), + // ), + // ) + + // With the following: + anteHandler, err := ante.NewAnteHandler( + ante.HandlerOptions{ + AccountKeeper: app.AccountKeeper, + BankKeeper: app.BankKeeper, + SignModeHandler: encodingConfig.TxConfig.SignModeHandler(), + FeegrantKeeper: app.FeeGrantKeeper, + SigGasConsumer: ante.DefaultSigVerificationGasConsumer, + }, + ) + if err != nil { + panic(err) + } + app.SetAnteHandler(anteHandler) + + // Remove the following: + // ctx := app.BaseApp.NewUncachedContext(true, tmproto.Header{}) + // app.CapabilityKeeper.InitializeAndSeal(ctx) +} + +func (app *App) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci.ResponseInitChain { + var genesisState GenesisState + if err := tmjson.Unmarshal(req.AppStateBytes, &genesisState); err != nil { + panic(err) + } + // Add the following: + app.UpgradeKeeper.SetModuleVersionMap(ctx, app.mm.GetVersionMap()) + return app.mm.InitGenesis(ctx, app.appCodec, genesisState) +} + +// Replace Marshaler with Codec +func (app *App) AppCodec() codec.Codec { + return app.appCodec +} + +// Replace BinaryMarshaler with BinaryCodec +func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey sdk.StoreKey) paramskeeper.Keeper { + //... +} +``` + +### app/genesis.go + +```go +// Replace codec.JSONMarshaler with codec.JSONCodec +func NewDefaultGenesisState(cdc codec.JSONCodec) GenesisState { + // ... 
+} +``` + +### testutil/keeper/mars.go + +Add the following code: + +```go +package keeper + +import ( + "testing" + + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/store" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/stretchr/testify/require" + "github.com/tendermint/tendermint/libs/log" + tmproto "github.com/tendermint/tendermint/proto/tendermint/types" + tmdb "github.com/tendermint/tm-db" + "github.com/username/mars/x/mars/keeper" + "github.com/username/mars/x/mars/types" +) + +func MarsKeeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + + db := tmdb.NewMemDB() + stateStore := store.NewCommitMultiStore(db) + stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + require.NoError(t, stateStore.LoadLatestVersion()) + + registry := codectypes.NewInterfaceRegistry() + k := keeper.NewKeeper( + codec.NewProtoCodec(registry), + storeKey, + memStoreKey, + ) + + ctx := sdk.NewContext(stateStore, tmproto.Header{}, false, log.NewNopLogger()) + return k, ctx +} +``` + +If `mars` is an IBC-enabled module, add the following code, instead: + +```go +package keeper + +import ( + "testing" + + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/store" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + capabilitykeeper "github.com/cosmos/cosmos-sdk/x/capability/keeper" + typesparams "github.com/cosmos/cosmos-sdk/x/params/types" + ibckeeper "github.com/cosmos/ibc-go/modules/core/keeper" + "github.com/stretchr/testify/require" + "github.com/tendermint/tendermint/libs/log" + tmproto 
"github.com/tendermint/tendermint/proto/tendermint/types" + tmdb "github.com/tendermint/tm-db" + "github.com/username/test/x/mars/keeper" + "github.com/username/test/x/mars/types" +) + +func MarsKeeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + logger := log.NewNopLogger() + + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + + db := tmdb.NewMemDB() + stateStore := store.NewCommitMultiStore(db) + stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + require.NoError(t, stateStore.LoadLatestVersion()) + + registry := codectypes.NewInterfaceRegistry() + appCodec := codec.NewProtoCodec(registry) + capabilityKeeper := capabilitykeeper.NewKeeper(appCodec, storeKey, memStoreKey) + + amino := codec.NewLegacyAmino() + ss := typesparams.NewSubspace(appCodec, + amino, + storeKey, + memStoreKey, + "MarsSubSpace", + ) + IBCKeeper := ibckeeper.NewKeeper( + appCodec, + storeKey, + ss, + nil, + nil, + capabilityKeeper.ScopeToModule("MarsIBCKeeper"), + ) + + k := keeper.NewKeeper( + codec.NewProtoCodec(registry), + storeKey, + memStoreKey, + IBCKeeper.ChannelKeeper, + &IBCKeeper.PortKeeper, + capabilityKeeper.ScopeToModule("MarsScopedKeeper"), + ) + + ctx := sdk.NewContext(stateStore, tmproto.Header{}, false, logger) + return k, ctx +} +``` + +### testutil/network/network.go + +```go +func DefaultConfig() network.Config { + // ... + return network.Config{ + // ... + // Add sdk.DefaultPowerReduction + AccountTokens: sdk.TokensFromConsensusPower(1000, sdk.DefaultPowerReduction), + StakingTokens: sdk.TokensFromConsensusPower(500, sdk.DefaultPowerReduction), + BondedTokens: sdk.TokensFromConsensusPower(100, sdk.DefaultPowerReduction), + // ... 
+ } +} +``` + +### testutil/sample/sample.go + +Add the following code: + +```go +package sample + +import ( + "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" + sdk "github.com/cosmos/cosmos-sdk/types" +) + +// AccAddress returns a sample account address +func AccAddress() string { + pk := ed25519.GenPrivKey().PubKey() + addr := pk.Address() + return sdk.AccAddress(addr).String() +} +``` + +### BandChain Support + +If your module includes integration with BandChain, added manually or scaffolded with `ignite scaffold band`, upgrade +the `github.com/bandprotocol/bandchain-packet` package to `v0.0.2` in `go.mod`. + +## Module + +### x/mars/keeper/keeper.go + +```go +package keeper + +// ... + +type ( + Keeper struct { + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec + //... + } +) + +func NewKeeper( + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec, + // ... +) *Keeper { + // ... +} +``` + +### x/mars/keeper/msg_server_test.go + +```go +package keeper_test + +import ( + //... 
+ // Add the following: + keepertest "github.com/username/mars/testutil/keeper" + "github.com/username/mars/x/mars/keeper" +) + +func setupMsgServer(t testing.TB) (types.MsgServer, context.Context) { + // Replace + // keeper, ctx := setupKeeper(t) + // return NewMsgServerImpl(*keeper), sdk.WrapSDKContext(ctx) + + // With the following: + k, ctx := keepertest.MarsKeeper(t) + return keeper.NewMsgServerImpl(*k), sdk.WrapSDKContext(ctx) +} +``` + +### x/mars/module.go + +```go +package mars + +type AppModuleBasic struct { + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec +} + +// Replace Marshaler with BinaryCodec +func NewAppModuleBasic(cdc codec.BinaryCodec) AppModuleBasic { + return AppModuleBasic{cdc: cdc} +} + +// Replace JSONMarshaler with JSONCodec +func (AppModuleBasic) DefaultGenesis(cdc codec.JSONCodec) json.RawMessage { + return cdc.MustMarshalJSON(types.DefaultGenesis()) +} + +// Replace JSONMarshaler with JSONCodec +func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncodingConfig, bz json.RawMessage) error { + //... +} + +// Replace codec.Marshaller with codec.Codec +func NewAppModule(cdc codec.Codec, keeper keeper.Keeper) AppModule { + //... +} + +// Replace JSONMarshaler with JSONCodec +func (am AppModule) InitGenesis(ctx sdk.Context, cdc codec.JSONCodec, gs json.RawMessage) []abci.ValidatorUpdate { + //... +} + +// Replace JSONMarshaler with JSONCodec +func (am AppModule) ExportGenesis(ctx sdk.Context, cdc codec.JSONCodec) json.RawMessage { + //... 
+} + +// Add the following +func (AppModule) ConsensusVersion() uint64 { return 2 } +``` diff --git a/docs/versioned_docs/version-v0.26/06-migration/v0.19.2.md b/docs/versioned_docs/version-v0.26/06-migration/v0.19.2.md new file mode 100644 index 0000000..0ebd0ec --- /dev/null +++ b/docs/versioned_docs/version-v0.26/06-migration/v0.19.2.md @@ -0,0 +1,26 @@ +--- +sidebar_position: 998 +title: v0.19.2 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.19.2, changes are required to use Ignite CLI v0.19.2. +--- + +# Upgrading a blockchain to use Ignite CLI v0.19.2 + +Ignite CLI v0.19.2 comes with IBC v2.0.2. + +With Ignite CLI v0.19.2, the contents of the deprecated Ignite CLI Modules `tendermint/spm` repo are moved to the +official Ignite CLI repo which introduces breaking changes. + +To migrate your chain that was scaffolded with Ignite CLI versions lower than v0.19.2: + +1. IBC upgrade: Use + the [IBC migration documents](https://github.com/cosmos/ibc-go/blob/v6.2.0/docs/migrations/v1-to-v2.md) + +2. In your chain's `go.mod` file, remove `tendermint/spm` and add the v0.19.2 version of `tendermint/starport`. 
If your + chain uses these packages, change the import paths as shown: + + - `github.com/tendermint/spm/ibckeeper` moved to `github.com/tendermint/starport/starport/pkg/cosmosibckeeper` + - `github.com/tendermint/spm/cosmoscmd` moved to `github.com/tendermint/starport/starport/pkg/cosmoscmd` + - `github.com/tendermint/spm/openapiconsole` moved to `github.com/tendermint/starport/starport/pkg/openapiconsole` + - `github.com/tendermint/spm/testutil/sample` moved + to `github.com/tendermint/starport/starport/pkg/cosmostestutil/sample` diff --git a/docs/versioned_docs/version-v0.26/06-migration/v0.20.0.md b/docs/versioned_docs/version-v0.26/06-migration/v0.20.0.md new file mode 100644 index 0000000..197dafc --- /dev/null +++ b/docs/versioned_docs/version-v0.26/06-migration/v0.20.0.md @@ -0,0 +1,12 @@ +--- +sidebar_position: 997 +title: v0.20.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.20.0, changes are required to use Ignite CLI v0.20.0. +--- + +# Upgrading a blockchain to use Ignite CLI v0.20.0 + +1. Upgrade your Cosmos SDK version to [v0.45.3](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.45.3). + +2. Update your `SetOrderBeginBlockers` and `SetOrderEndBlockers` in your `app/app.go` to explicitly add entries for all + the modules you use in your chain. diff --git a/docs/versioned_docs/version-v0.26/06-migration/v0.22.0.md b/docs/versioned_docs/version-v0.26/06-migration/v0.22.0.md new file mode 100644 index 0000000..e2d82e6 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/06-migration/v0.22.0.md @@ -0,0 +1,36 @@ +--- +sidebar_position: 996 +title: v0.22.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.22.0, changes are required to use Ignite CLI v0.22.0. +--- + +# Upgrading a blockchain to use Ignite CLI v0.22.0 + +Ignite CLI v0.22.2 changed the GitHub username from "ignite-hq" to "ignite", which means the imports must be fixed to +reflect this change. + +1.
In your `go.mod` file find the require line for Ignite CLI that starts with `github.com/ignite-hq/cli` and is + followed by a version. + It looks something like `github.com/ignite-hq/cli v0.22.0`, and replace it with `github.com/ignite/cli v0.22.2`. + +2. Make a bulk find and replace in the import statements for `github.com/ignite-hq/cli` to be replaced + by `github.com/ignite/cli`. + +3. Finally, run `go mod tidy` and ensure there's no mention of `ignite-hq/cli` in your `go.sum` file. + +This update includes an upgrade to the `ibc-go` packages. Please make the corresponding changes: + +1. Upgrade your IBC version to [v3](https://github.com/cosmos/ibc-go/releases/tag/v3.0.0). + + 1. Search for `github.com/cosmos/ibc-go/v2` in the import statements of your `.go` files and replace `v2` in the end + with `v3` + + 2. Open your `app.go`, + + - Update your transfer keeper by adding another `app.IBCKeeper.ChannelKeeper` as an argument + after `app.IBCKeeper.ChannelKeeper` + + - Define `var transferIBCModule = transfer.NewIBCModule(app.TransferKeeper)` in your `New()` func, and update + your existing IBC router to use it: `ibcRouter.AddRoute(ibctransfertypes.ModuleName, transferIBCModule)` + + 3. Open your `go.mod` and change the IBC line with `github.com/cosmos/ibc-go/v3 v3.0.0` diff --git a/docs/versioned_docs/version-v0.26/06-migration/v0.24.0.md b/docs/versioned_docs/version-v0.26/06-migration/v0.24.0.md new file mode 100644 index 0000000..ccce0e1 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/06-migration/v0.24.0.md @@ -0,0 +1,330 @@ +--- +sidebar_position: 995 +title: v0.24.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.24, changes are required to use Ignite CLI v0.24.0. +--- + +## Cosmos SDK v0.46 upgrade notes + +### Update dependencies + +Cosmos SDK v0.46 is compatible with the latest version of IBC Go v5. If you have a chain that is using an older version, +update the dependencies in your project.
+ +Throughout the code you might see the following dependencies: + +```go +package pkg_name + +import ( + "github.com/cosmos/ibc-go/v3/..." +) +``` + +Where `v3` is the version of IBC Go and `...` are different IBC Go packages. + +To upgrade the version to `v5`, a global find-and-replace should work. Replace `cosmos/ibc-go/v3` (or whichever version +you're using) with `cosmos/ibc-go/v5` only in `*.go` files (to exclude unwanted changes to files like `go.sum`). + +### Module keeper + +Add an import: + +```go +// x/{moduleName}/keeper/keeper.go + +package keeper + +// ... + +import ( + //... + storetypes "github.com/cosmos/cosmos-sdk/store/types" +) +``` + +In the `Keeper` struct replace `sdk.StoreKey` with `storetypes.StoreKey`: + +```go +// x/{moduleName}/keeper/keeper.go + +package keeper + +// ... + +type ( + Keeper struct { + cdc codec.BinaryCodec + storeKey storetypes.StoreKey + memKey storetypes.StoreKey + paramstore paramtypes.Subspace + } +) +``` + +In the argument list of the `NewKeeper` function definition: + +```go +package keeper + +// ... + +// x/{moduleName}/keeper/keeper.go + +func NewKeeper( + //... + memKey storetypes.StoreKey, +) +``` + +Store type aliases have been removed from the Cosmos SDK `types` package and now have to be imported from `store/types`, +instead. + +In the `testutil/keeper/{moduleName}.go` replace `types.StoreKey` with `storetypes.StoreKey` and `types.MemStoreKey` +with `storetypes.MemStoreKey`. + +```go +// testutil/keeper/{moduleName}.go + +package keeper + +// ... + +func {moduleName}Keeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(storetypes.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(storetypes.MemStoreKey) + //... +} +``` + +### Testutil network package + +Add the `require` package for testing and `pruningtypes` and remove `storetypes`: + +```go +// testutil/network/network.go + +package network + +// ...
+ +import ( + pruningtypes "github.com/cosmos/cosmos-sdk/pruning/types" + "github.com/stretchr/testify/require" + // storetypes "github.com/cosmos/cosmos-sdk/store/types" <-- remove this line +) +``` + +In the `DefaultConfig` function replace `storetypes.NewPruningOptionsFromString` +with `pruningtypes.NewPruningOptionsFromString` + +```go +// testutil/network/network.go + +package network + +// ... + +func DefaultConfig() network.Config { + //... + return network.Config{ + AppConstructor: func(val network.Validator) servertypes.Application { + return app.New( + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.AppConfig.Pruning)), + //... + ) + }, + //... + } +} +``` + +The `New` function in the Cosmos SDK `testutil/network` package now +accepts [three arguments](https://github.com/cosmos/cosmos-sdk/blob/v0.46.0/testutil/network/network.go#L206) instead of +two. + +In the `New` function add `t.TempDir()` as the second argument to `network.New()` and test that no error is thrown +with `require.NoError(t, err)`: + +```go +// testutil/network/network.go + +package network + +// ... + +func New(t *testing.T, configs ...network.Config) *network.Network { + //... + net, err := network.New(t, t.TempDir(), cfg) + require.NoError(t, err) + //... +} +``` + +### Testutil keeper package + +In the `{moduleName}Keeper` function make the following replacements: + +- `storetypes.StoreKey` → `types.StoreKey` +- `storetypes.MemStoreKey` → `types.MemStoreKey` +- `sdk.StoreTypeIAVL` → `storetypes.StoreTypeIAVL` +- `sdk.StoreTypeMemory` → `storetypes.StoreTypeMemory` + +```go +// testutil/keeper/{moduleName}.go + +package keeper + +// ... + +func {moduleName}Keeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + //... 
+ stateStore.MountStoreWithDB(storeKey, storetypes.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, storetypes.StoreTypeMemory, nil) + //... +} +``` + +### IBC modules + +If you have IBC-enabled modules (for example, added with `ignite scaffold module ... --ibc` or created manually), make +the following changes to the source code. + +Cosmos SDK expects IBC modules +to [implement the `IBCModule` interface](https://ibc.cosmos.network/main/ibc/apps/ibcmodule/). Create a `IBCModule` +type that embeds the module's keeper and a method that returns a new `IBCModule`. Methods in this file will be defined +on this type. + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +type IBCModule struct { + keeper keeper.Keeper +} + +func NewIBCModule(k keeper.Keeper) IBCModule { + return IBCModule{ + keeper: k, + } +} +``` + +Replace receivers for all methods in this file from `(am AppModule)` to `(im IBCModule)`. Replace all instances of `am.` +with `im.` to fix the errors. + +`OnChanOpenInit` now returns to values: a `string` and an `error`: + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +func (im IBCModule) OnChanOpenInit( /*...*/ ) (string, error) +``` + +Ensure that all return statements (five, in the default template) in `OnChanOpenInit` return two values. For example: + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +func (im IBCModule) OnChanOpenInit( /*...*/ ) (string, error) { + //... + return "", sdkerrors.Wrapf(porttypes.ErrInvalidPort, "invalid port: %s, expected %s", portID, boundPort) + //... +} +``` + +Error acknowledgments returned from Transfer `OnRecvPacket` now include a deterministic ABCI code and error message. +Remove the `.Error()` call: + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +func (im IBCModule) OnRecvPacket( /*...*/ ) { + //... 
+ if err := modulePacketData.Unmarshal(modulePacket.GetData()); err != nil { + // return channeltypes.NewErrorAcknowledgement(sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "cannot unmarshal packet data: %s", err.Error()).Error()) + return channeltypes.NewErrorAcknowledgement(sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "cannot unmarshal packet data: %s", err.Error())) + } + + // ... + + // Dispatch packet + switch packet := modulePacketData.Packet.(type) { + // ... + default: + // errMsg := fmt.Sprintf("unrecognized %s packet type: %T", types.ModuleName, packet) + // return channeltypes.NewErrorAcknowledgement(errMsg) + err := fmt.Errorf("unrecognized %s packet type: %T", types.ModuleName, packet) + return channeltypes.NewErrorAcknowledgement(err) + } +} +``` + +After switching to using both `AppModule` and `IBCModule`, modifying the following line: + +```go +// x/{moduleName}/module.go + +package module_name + +// ... + +var ( + //... + _ porttypes.IBCModule = IBCModule{} // instead of "= AppModule{}" +) +``` + +### Main + +The `Execute` function in Cosmos SDK `server/cmd` package now +accepts [three arguments](https://github.com/cosmos/cosmos-sdk/blob/v0.46.0/server/cmd/execute.go#L20) instead of two. + +```go +// cmd/{{projectName}}d/main.go + +package projectNamed + +// ... + +func main() { + //... + if err := svrcmd.Execute(rootCmd, "", app.DefaultNodeHome); err != nil { + os.Exit(1) + } +} +``` + +### Handler + +Cosmos SDK v0.46 no longer needs a `NewHandler` function that was used to handle messages and call appropriate keeper +methods based on message types. Feel free to remove `x/{moduleName}/handler.go` file. + +Since there is no `NewHandler` now, modify the deprecated `Route` function to return `sdk.Route{}`: + +```go +// x/{moduleName}/module.go + +package module_name + +// ... 
+ +func (am AppModule) Route() sdk.Route { return sdk.Route{} } +``` diff --git a/docs/versioned_docs/version-v0.26/06-migration/v0.25.0.md b/docs/versioned_docs/version-v0.26/06-migration/v0.25.0.md new file mode 100644 index 0000000..66ec75c --- /dev/null +++ b/docs/versioned_docs/version-v0.26/06-migration/v0.25.0.md @@ -0,0 +1,1187 @@ +--- +sidebar_position: 994 +title: v0.25.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.25.0, changes are required to use Ignite CLI v0.25.0. +--- + +## Protobuf directory migration + +`v0.25.0` changes the location of scaffolded `.proto` files. Previously, `.proto` files were located in `./proto/{moduleName}/`, +where `moduleName` is the same as the name of the Cosmos SDK module found in `./x/{moduleName}/`. This new version of `ignite` +modifies the scaffolded protobuf files so that they are now generated in `./proto/{appName}/{moduleName}`. + +The only change needed is to create an `{appName}` folder in the `proto` directory, and then place the +sub-directories within it.
An example below demonstrates this change: + +### Previous Directory Structure + +This example shows a chain that was generated using `ignite` with `v0.24.0` using the following command: + +```bash +ignite s chain github.com/cosmos/planet --no-module +ignite s module mars +``` + +```bash +├── app +├── cmd +├── docs +├── proto +│ ├── mars +├── x +│ ├── mars +├── README.md +├── config.yml +├── go.mod +├── go.sum +└── .gitignore +``` + +### `v0.25.0` Directory Structure + +This example shows a chain that was generated using `ignite` with `v0.25.0` using the following command: + +```bash +ignite s chain github.com/cosmos/planet --no-module +ignite s module mars +``` + +```bash +├── app +├── cmd +├── docs +├── proto +│ ├── planet +│ │ ├── mars +├── x +│ ├── mars +├── README.md +├── config.yml +├── go.mod +├── go.sum +└── .gitignore +``` + +The only difference is the additional directory `planet` which is the name of the application. The name of the app can +be verified by checking the package in the `go.mod` file. In this example, the package is `github.com/cosmos/planet` +where `planet` is the app name. + + --- + +## Removing `cosmoscmd` + +`v0.25.0` removes the `cosmoscmd` package from scaffolded chains. This package provided utility for creating +commands and starting up their application. The `cosmoscmd` package is now deprecated, and it is suggested that chains +implement this functionality in their codebase so they can be more easily upgraded and customized. + +The main functionality of `cosmoscmd` will be moved to the `app` package of your chain. Some imports in these +examples contain the sample string, `{ModulePath}`. Replace this string with the Go module path of your blockchain. +For example, if your blockchain module path is `github.com/planet/mars`, `{ModulePath}/app/params` would be become +`github.com/planet/mars/app/params`. 
+ +#### Migration in `app` package + +To begin, create a new file, `./app/params/encoding.go`, containing the following code: + +```go +package params + +import ( + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/codec/types" +) + +// EncodingConfig specifies the concrete encoding types to use for a given app. +// This is provided for compatibility between protobuf and amino implementations. +type EncodingConfig struct { + InterfaceRegistry types.InterfaceRegistry + Marshaler codec.Codec + TxConfig client.TxConfig + Amino *codec.LegacyAmino +} +``` + +Next, create a new file, `./app/encoding.go`, containing the following code: + +```go +package app + +import ( + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/std" + "github.com/cosmos/cosmos-sdk/x/auth/tx" + + "{ModulePath}/app/params" +) + +// makeEncodingConfig creates an EncodingConfig for an amino based test configuration. 
+func makeEncodingConfig() params.EncodingConfig { + amino := codec.NewLegacyAmino() + interfaceRegistry := types.NewInterfaceRegistry() + marshaler := codec.NewProtoCodec(interfaceRegistry) + txCfg := tx.NewTxConfig(marshaler, tx.DefaultSignModes) + + return params.EncodingConfig{ + InterfaceRegistry: interfaceRegistry, + Marshaler: marshaler, + TxConfig: txCfg, + Amino: amino, + } +} + +// MakeEncodingConfig creates an EncodingConfig for testing +func MakeEncodingConfig() params.EncodingConfig { + encodingConfig := makeEncodingConfig() + std.RegisterLegacyAminoCodec(encodingConfig.Amino) + std.RegisterInterfaces(encodingConfig.InterfaceRegistry) + ModuleBasics.RegisterLegacyAminoCodec(encodingConfig.Amino) + ModuleBasics.RegisterInterfaces(encodingConfig.InterfaceRegistry) + return encodingConfig +} +``` + +Next, modify `./app/simulation_test.go` so that it looks like the following: + +```go +package app_test + +import ( + "os" + "testing" + "time" + + "github.com/cosmos/cosmos-sdk/simapp" + simulationtypes "github.com/cosmos/cosmos-sdk/types/simulation" + "github.com/cosmos/cosmos-sdk/x/simulation" + "github.com/stretchr/testify/require" + abci "github.com/tendermint/tendermint/abci/types" + tmproto "github.com/tendermint/tendermint/proto/tendermint/types" + tmtypes "github.com/tendermint/tendermint/types" + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" + + // highlight-next-line + "{ModulePath}/app" +) + +// remove-start +type SimApp interface { + cosmoscmd.App + GetBaseApp() *baseapp.BaseApp + AppCodec() codec.Codec + SimulationManager() *module.SimulationManager + ModuleAccountAddrs() map[string]bool + Name() string + LegacyAmino() *codec.LegacyAmino + BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) + abci.ResponseBeginBlock + EndBlocker(ctx sdk.Context, req abci.RequestEndBlock) + abci.ResponseEndBlock + InitChainer(ctx sdk.Context, req abci.RequestInitChain) + abci.ResponseInitChain +} + +// remove-end + +// ... 
+ +// BenchmarkSimulation run the chain simulation +// Running using starport command: +// `starport chain simulate -v --numBlocks 200 --blockSize 50` +// Running as go benchmark test: +// `go test -benchmem -run=^$ -bench ^BenchmarkSimulation ./app -NumBlocks=200 -BlockSize 50 -Commit=true -Verbose=true -Enabled=true` +func BenchmarkSimulation(b *testing.B) { + + // ... + + // remove-next-line + encoding := cosmoscmd.MakeEncodingConfig(app.ModuleBasics) + // highlight-next-line + encoding := app.MakeEncodingConfig() + + app := app.New( + logger, + db, + nil, + true, + map[int64]bool{}, + app.DefaultNodeHome, + 0, + encoding, + simapp.EmptyAppOptions{}, + ) + + // remove-start + simApp, ok := app.(SimApp) + require.True(b, ok, "can't use simapp") + // remove-end + + // Run randomized simulations + _, simParams, simErr := simulation.SimulateFromSeed( + b, + os.Stdout, + // highlight-next-line + app.BaseApp, + // highlight-next-line + simapp.AppStateFn(app.AppCodec(), app.SimulationManager()), + simulationtypes.RandomAccounts, + // highlight-next-line + simapp.SimulationOperations(app, app.AppCodec(), config), + // highlight-next-line + app.ModuleAccountAddrs(), + config, + // highlight-next-line + app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + // highlight-next-line + err = simapp.CheckExportSimulation(app, config, simParams) + require.NoError(b, err) + require.NoError(b, simErr) + + // ... +} +``` + +The main changes here are that the `SimApp` interface has been removed and is being replaced with `app`. + +The final modification in the `app` package is in `app/app.go`: + +```go +package app + +import ( + // ... + + // this line is used by starport scaffolding # stargate/app/moduleImport + + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" + + // highlight-start + appparams "{ModulePath}/app/params" + "{ModulePath}/docs" + // highlight-end +) + +// ... 
+ +var ( + // remove-next-line + _ cosmoscmd.App = (*App)(nil) + _ servertypes.Application = (*App)(nil) + _ simapp.App = (*App)(nil) +) + +// ... + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + // highlight-next-line + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), + // highlight-next-line +) *App { + appCodec := encodingConfig.Marshaler + cdc := encodingConfig.Amino + interfaceRegistry := encodingConfig.InterfaceRegistry + + bApp := baseapp.NewBaseApp( + Name, + logger, + db, + encodingConfig.TxConfig.TxDecoder(), + baseAppOptions..., + ) + + // ... + +} + +// ... + +// Name returns the name of the App +func (app *App) Name() string { return app.BaseApp.Name() } + +// remove-start +// GetBaseApp returns the base app of the application +func (app App) GetBaseApp() *baseapp.BaseApp { return app.BaseApp } + +// remove-end + +// BeginBlocker application updates every begin block +func (app *App) BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) abci.ResponseBeginBlock { + return app.mm.BeginBlock(ctx, req) +} + +// ... +``` + +Again, here we are removing the use of `cosmoscmd` and replacing it with `app`. + +#### Migration in `cmd` package + +Some imports in these +examples contain the sample string, `{binaryNamePrefix}d`. Replace this string with the binary name of your blockchain. +For example, if your blockchain module path is `github.com/planet/mars`, `./cmd/{binaryNamePrefix}d/cmd/` would +become `./cmd/marsd/cmd/`. 
+ +First, create the new file `./cmd/{binaryNamePrefix}d/cmd/config.go` with the following code: + +```go +package cmd + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + + "{ModulePath}/app" +) + +func initSDKConfig() { + // Set prefixes + accountPubKeyPrefix := app.AccountAddressPrefix + "pub" + validatorAddressPrefix := app.AccountAddressPrefix + "valoper" + validatorPubKeyPrefix := app.AccountAddressPrefix + "valoperpub" + consNodeAddressPrefix := app.AccountAddressPrefix + "valcons" + consNodePubKeyPrefix := app.AccountAddressPrefix + "valconspub" + + // Set and seal config + config := sdk.GetConfig() + config.SetBech32PrefixForAccount(app.AccountAddressPrefix, accountPubKeyPrefix) + config.SetBech32PrefixForValidator(validatorAddressPrefix, validatorPubKeyPrefix) + config.SetBech32PrefixForConsensusNode(consNodeAddressPrefix, consNodePubKeyPrefix) + config.Seal() +} +``` + +Next, create the new file `./cmd/{binaryNamePrefix}d/cmd/genaccounts.go` with the following code: + +```go +package cmd + +import ( + "bufio" + "encoding/json" + "errors" + "fmt" + + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/crypto/keyring" + "github.com/cosmos/cosmos-sdk/server" + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + authvesting "github.com/cosmos/cosmos-sdk/x/auth/vesting/types" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/cosmos/cosmos-sdk/x/genutil" + genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" + "github.com/spf13/cobra" +) + +const ( + flagVestingStart = "vesting-start-time" + flagVestingEnd = "vesting-end-time" + flagVestingAmt = "vesting-amount" +) + +// AddGenesisAccountCmd returns add-genesis-account cobra Command. 
+func AddGenesisAccountCmd(defaultNodeHome string) *cobra.Command { + cmd := &cobra.Command{ + Use: "add-genesis-account [address_or_key_name] [coin][,[coin]]", + Short: "Add a genesis account to genesis.json", + Long: `Add a genesis account to genesis.json. The provided account must specify +the account address or key name and a list of initial coins. If a key name is given, +the address will be looked up in the local Keybase. The list of initial tokens must +contain valid denominations. Accounts may optionally be supplied with vesting parameters. +`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + clientCtx := client.GetClientContextFromCmd(cmd) + cdc := clientCtx.Codec + + serverCtx := server.GetServerContextFromCmd(cmd) + config := serverCtx.Config + + config.SetRoot(clientCtx.HomeDir) + + coins, err := sdk.ParseCoinsNormalized(args[1]) + if err != nil { + return fmt.Errorf("failed to parse coins: %w", err) + } + + addr, err := sdk.AccAddressFromBech32(args[0]) + if err != nil { + inBuf := bufio.NewReader(cmd.InOrStdin()) + keyringBackend, err := cmd.Flags().GetString(flags.FlagKeyringBackend) + if err != nil { + return err + } + + // attempt to lookup address from Keybase if no address was provided + kb, err := keyring.New(sdk.KeyringServiceName(), keyringBackend, clientCtx.HomeDir, inBuf, cdc) + if err != nil { + return err + } + + info, err := kb.Key(args[0]) + if err != nil { + return fmt.Errorf("failed to get address from Keybase: %w", err) + } + + addr, err = info.GetAddress() + if err != nil { + return fmt.Errorf("failed to get address from Keybase: %w", err) + } + } + + vestingStart, err := cmd.Flags().GetInt64(flagVestingStart) + if err != nil { + return err + } + vestingEnd, err := cmd.Flags().GetInt64(flagVestingEnd) + if err != nil { + return err + } + vestingAmtStr, err := cmd.Flags().GetString(flagVestingAmt) + if err != nil { + return err + } + + vestingAmt, err := sdk.ParseCoinsNormalized(vestingAmtStr) + if 
err != nil { + return fmt.Errorf("failed to parse vesting amount: %w", err) + } + + // create concrete account type based on input parameters + var genAccount authtypes.GenesisAccount + + balances := banktypes.Balance{Address: addr.String(), Coins: coins.Sort()} + baseAccount := authtypes.NewBaseAccount(addr, nil, 0, 0) + + if !vestingAmt.IsZero() { + baseVestingAccount := authvesting.NewBaseVestingAccount(baseAccount, vestingAmt.Sort(), vestingEnd) + + if (balances.Coins.IsZero() && !baseVestingAccount.OriginalVesting.IsZero()) || + baseVestingAccount.OriginalVesting.IsAnyGT(balances.Coins) { + return errors.New("vesting amount cannot be greater than total amount") + } + + switch { + case vestingStart != 0 && vestingEnd != 0: + genAccount = authvesting.NewContinuousVestingAccountRaw(baseVestingAccount, vestingStart) + + case vestingEnd != 0: + genAccount = authvesting.NewDelayedVestingAccountRaw(baseVestingAccount) + + default: + return errors.New("invalid vesting parameters; must supply start and end time or end time") + } + } else { + genAccount = baseAccount + } + + if err := genAccount.Validate(); err != nil { + return fmt.Errorf("failed to validate new genesis account: %w", err) + } + + genFile := config.GenesisFile() + appState, genDoc, err := genutiltypes.GenesisStateFromGenFile(genFile) + if err != nil { + return fmt.Errorf("failed to unmarshal genesis state: %w", err) + } + + authGenState := authtypes.GetGenesisStateFromAppState(cdc, appState) + + accs, err := authtypes.UnpackAccounts(authGenState.Accounts) + if err != nil { + return fmt.Errorf("failed to get accounts from any: %w", err) + } + + if accs.Contains(addr) { + return fmt.Errorf("cannot add account at existing address %s", addr) + } + + // Add the new account to the set of genesis accounts and sanitize the + // accounts afterwards. 
+ accs = append(accs, genAccount) + accs = authtypes.SanitizeGenesisAccounts(accs) + + genAccs, err := authtypes.PackAccounts(accs) + if err != nil { + return fmt.Errorf("failed to convert accounts into any's: %w", err) + } + authGenState.Accounts = genAccs + + authGenStateBz, err := cdc.MarshalJSON(&authGenState) + if err != nil { + return fmt.Errorf("failed to marshal auth genesis state: %w", err) + } + + appState[authtypes.ModuleName] = authGenStateBz + + bankGenState := banktypes.GetGenesisStateFromAppState(cdc, appState) + bankGenState.Balances = append(bankGenState.Balances, balances) + bankGenState.Balances = banktypes.SanitizeGenesisBalances(bankGenState.Balances) + + bankGenStateBz, err := cdc.MarshalJSON(bankGenState) + if err != nil { + return fmt.Errorf("failed to marshal bank genesis state: %w", err) + } + + appState[banktypes.ModuleName] = bankGenStateBz + + appStateJSON, err := json.Marshal(appState) + if err != nil { + return fmt.Errorf("failed to marshal application genesis state: %w", err) + } + + genDoc.AppState = appStateJSON + return genutil.ExportGenesisFile(genDoc, genFile) + }, + } + + cmd.Flags().String(flags.FlagKeyringBackend, flags.DefaultKeyringBackend, "Select keyring's backend (os|file|kwallet|pass|test)") + cmd.Flags().String(flags.FlagHome, defaultNodeHome, "The application home directory") + cmd.Flags().String(flagVestingAmt, "", "amount of coins for vesting accounts") + cmd.Flags().Int64(flagVestingStart, 0, "schedule start time (unix epoch) for vesting accounts") + cmd.Flags().Int64(flagVestingEnd, 0, "schedule end time (unix epoch) for vesting accounts") + flags.AddQueryFlagsToCmd(cmd) + + return cmd +} +``` + +This command allows one to generate new accounts: `appd add-genesis-account`. 
+ +Next, create the new file `./cmd/{binaryNamePrefix}d/cmd/root.go` with the following code: + +```go +package cmd + +import ( + "errors" + "io" + "os" + "path/filepath" + "strings" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/config" + "github.com/cosmos/cosmos-sdk/client/debug" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/client/keys" + "github.com/cosmos/cosmos-sdk/client/rpc" + "github.com/cosmos/cosmos-sdk/server" + serverconfig "github.com/cosmos/cosmos-sdk/server/config" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + "github.com/cosmos/cosmos-sdk/snapshots" + snapshottypes "github.com/cosmos/cosmos-sdk/snapshots/types" + "github.com/cosmos/cosmos-sdk/store" + sdk "github.com/cosmos/cosmos-sdk/types" + authcmd "github.com/cosmos/cosmos-sdk/x/auth/client/cli" + "github.com/cosmos/cosmos-sdk/x/auth/types" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/cosmos/cosmos-sdk/x/crisis" + genutilcli "github.com/cosmos/cosmos-sdk/x/genutil/client/cli" + "github.com/ignite/cli/ignite/services/network" + "github.com/spf13/cast" + "github.com/spf13/cobra" + "github.com/spf13/pflag" + tmcfg "github.com/tendermint/tendermint/config" + tmcli "github.com/tendermint/tendermint/libs/cli" + "github.com/tendermint/tendermint/libs/log" + dbm "github.com/tendermint/tm-db" + // this line is used by starport scaffolding # root/moduleImport + + "{ModulePath}/app" + appparams "{ModulePath}/app/params" +) + +// NewRootCmd creates a new root command for a Cosmos SDK application +func NewRootCmd() (*cobra.Command, appparams.EncodingConfig) { + encodingConfig := app.MakeEncodingConfig() + initClientCtx := client.Context{}. + WithCodec(encodingConfig.Marshaler). + WithInterfaceRegistry(encodingConfig.InterfaceRegistry). + WithTxConfig(encodingConfig.TxConfig). + WithLegacyAmino(encodingConfig.Amino). + WithInput(os.Stdin). 
+ WithAccountRetriever(types.AccountRetriever{}). + WithHomeDir(app.DefaultNodeHome). + WithViper("") + + rootCmd := &cobra.Command{ + Use: app.Name + "d", + Short: "Stargate CosmosHub App", + PersistentPreRunE: func(cmd *cobra.Command, _ []string) error { + // set the default command outputs + cmd.SetOut(cmd.OutOrStdout()) + cmd.SetErr(cmd.ErrOrStderr()) + initClientCtx, err := client.ReadPersistentCommandFlags(initClientCtx, cmd.Flags()) + if err != nil { + return err + } + initClientCtx, err = config.ReadFromClientConfig(initClientCtx) + if err != nil { + return err + } + + if err := client.SetCmdClientContextHandler(initClientCtx, cmd); err != nil { + return err + } + + customAppTemplate, customAppConfig := initAppConfig() + customTMConfig := initTendermintConfig() + return server.InterceptConfigsPreRunHandler( + cmd, customAppTemplate, customAppConfig, customTMConfig, + ) + }, + } + + initRootCmd(rootCmd, encodingConfig) + overwriteFlagDefaults(rootCmd, map[string]string{ + flags.FlagChainID: strings.ReplaceAll(app.Name, "-", ""), + flags.FlagKeyringBackend: "test", + }) + + return rootCmd, encodingConfig +} + +// initTendermintConfig helps to override default Tendermint Config values. +// return tmcfg.DefaultConfig if no custom configuration is required for the application. 
+func initTendermintConfig() *tmcfg.Config { + cfg := tmcfg.DefaultConfig() + return cfg +} + +func initRootCmd( + rootCmd *cobra.Command, + encodingConfig appparams.EncodingConfig, +) { + // Set config + initSDKConfig() + + rootCmd.AddCommand( + genutilcli.InitCmd(app.ModuleBasics, app.DefaultNodeHome), + genutilcli.CollectGenTxsCmd(banktypes.GenesisBalancesIterator{}, app.DefaultNodeHome), + genutilcli.MigrateGenesisCmd(), + genutilcli.GenTxCmd( + app.ModuleBasics, + encodingConfig.TxConfig, + banktypes.GenesisBalancesIterator{}, + app.DefaultNodeHome, + ), + genutilcli.ValidateGenesisCmd(app.ModuleBasics), + AddGenesisAccountCmd(app.DefaultNodeHome), + tmcli.NewCompletionCmd(rootCmd, true), + debug.Cmd(), + config.Cmd(), + // this line is used by starport scaffolding # root/commands + ) + + a := appCreator{ + encodingConfig, + } + + // add server commands + server.AddCommands( + rootCmd, + app.DefaultNodeHome, + a.newApp, + a.appExport, + addModuleInitFlags, + ) + + // add keybase, auxiliary RPC, query, and tx child commands + rootCmd.AddCommand( + rpc.StatusCommand(), + queryCommand(), + txCommand(), + keys.Commands(app.DefaultNodeHome), + ) +} + +// queryCommand returns the sub-command to send queries to the app +func queryCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "query", + Aliases: []string{"q"}, + Short: "Querying subcommands", + DisableFlagParsing: true, + SuggestionsMinimumDistance: 2, + RunE: client.ValidateCmd, + } + + cmd.AddCommand( + authcmd.GetAccountCmd(), + rpc.ValidatorCommand(), + rpc.BlockCommand(), + authcmd.QueryTxsByEventsCmd(), + authcmd.QueryTxCmd(), + ) + + app.ModuleBasics.AddQueryCommands(cmd) + cmd.PersistentFlags().String(flags.FlagChainID, "", "The network chain ID") + + return cmd +} + +// txCommand returns the sub-command to send transactions to the app +func txCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "tx", + Short: "Transactions subcommands", + DisableFlagParsing: true, + 
SuggestionsMinimumDistance: 2, + RunE: client.ValidateCmd, + } + + cmd.AddCommand( + authcmd.GetSignCommand(), + authcmd.GetSignBatchCommand(), + authcmd.GetMultiSignCommand(), + authcmd.GetValidateSignaturesCommand(), + flags.LineBreak, + authcmd.GetBroadcastCommand(), + authcmd.GetEncodeCommand(), + authcmd.GetDecodeCommand(), + ) + + app.ModuleBasics.AddTxCommands(cmd) + cmd.PersistentFlags().String(flags.FlagChainID, "", "The network chain ID") + + return cmd +} + +func addModuleInitFlags(startCmd *cobra.Command) { + crisis.AddModuleInitFlags(startCmd) + // this line is used by starport scaffolding # root/arguments +} + +func overwriteFlagDefaults(c *cobra.Command, defaults map[string]string) { + set := func(s *pflag.FlagSet, key, val string) { + if f := s.Lookup(key); f != nil { + f.DefValue = val + f.Value.Set(val) + } + } + for key, val := range defaults { + set(c.Flags(), key, val) + set(c.PersistentFlags(), key, val) + } + for _, c := range c.Commands() { + overwriteFlagDefaults(c, defaults) + } +} + +type appCreator struct { + encodingConfig appparams.EncodingConfig +} + +// newApp creates a new Cosmos SDK app +func (a appCreator) newApp( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + appOpts servertypes.AppOptions, +) servertypes.Application { + var cache sdk.MultiStorePersistentCache + + if cast.ToBool(appOpts.Get(server.FlagInterBlockCache)) { + cache = store.NewCommitKVStoreCacheManager() + } + + skipUpgradeHeights := make(map[int64]bool) + for _, h := range cast.ToIntSlice(appOpts.Get(server.FlagUnsafeSkipUpgrades)) { + skipUpgradeHeights[int64(h)] = true + } + + pruningOpts, err := server.GetPruningOptionsFromFlags(appOpts) + if err != nil { + panic(err) + } + + snapshotDir := filepath.Join(cast.ToString(appOpts.Get(flags.FlagHome)), "data", "snapshots") + snapshotDB, err := dbm.NewDB("metadata", dbm.GoLevelDBBackend, snapshotDir) + if err != nil { + panic(err) + } + snapshotStore, err := snapshots.NewStore(snapshotDB, snapshotDir) + if 
err != nil { + panic(err) + } + + snapshotOptions := snapshottypes.NewSnapshotOptions( + cast.ToUint64(appOpts.Get(server.FlagStateSyncSnapshotInterval)), + cast.ToUint32(appOpts.Get(server.FlagStateSyncSnapshotKeepRecent)), + ) + + return app.New( + logger, + db, + traceStore, + true, + skipUpgradeHeights, + cast.ToString(appOpts.Get(flags.FlagHome)), + cast.ToUint(appOpts.Get(server.FlagInvCheckPeriod)), + a.encodingConfig, + appOpts, + baseapp.SetPruning(pruningOpts), + baseapp.SetMinGasPrices(cast.ToString(appOpts.Get(server.FlagMinGasPrices))), + baseapp.SetMinRetainBlocks(cast.ToUint64(appOpts.Get(server.FlagMinRetainBlocks))), + baseapp.SetHaltHeight(cast.ToUint64(appOpts.Get(server.FlagHaltHeight))), + baseapp.SetHaltTime(cast.ToUint64(appOpts.Get(server.FlagHaltTime))), + baseapp.SetInterBlockCache(cache), + baseapp.SetTrace(cast.ToBool(appOpts.Get(server.FlagTrace))), + baseapp.SetIndexEvents(cast.ToStringSlice(appOpts.Get(server.FlagIndexEvents))), + baseapp.SetSnapshot(snapshotStore, snapshotOptions), + ) +} + +// appExport creates a new simapp (optionally at a given height) +func (a appCreator) appExport( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + height int64, + forZeroHeight bool, + jailAllowedAddrs []string, + appOpts servertypes.AppOptions, +) (servertypes.ExportedApp, error) { + homePath, ok := appOpts.Get(flags.FlagHome).(string) + if !ok || homePath == "" { + return servertypes.ExportedApp{}, errors.New("application home not set") + } + + app := app.New( + logger, + db, + traceStore, + height == -1, // -1: no height provided + map[int64]bool{}, + homePath, + uint(1), + a.encodingConfig, + appOpts, + ) + + if height != -1 { + if err := app.LoadHeight(height); err != nil { + return servertypes.ExportedApp{}, err + } + } + + return app.ExportAppStateAndValidators(forZeroHeight, jailAllowedAddrs) +} + +// initAppConfig helps to override default appConfig template and configs. 
+// return "", nil if no custom configuration is required for the application. +func initAppConfig() (string, interface{}) { + // The following code snippet is just for reference. + + // WASMConfig defines configuration for the wasm module. + type WASMConfig struct { + // This is the maximum sdk gas (wasm and storage) that we allow for any x/wasm "smart" queries + QueryGasLimit uint64 `mapstructure:"query_gas_limit"` + + // Address defines the gRPC-web server to listen on + LruSize uint64 `mapstructure:"lru_size"` + } + + type CustomAppConfig struct { + serverconfig.Config + + WASM WASMConfig `mapstructure:"wasm"` + } + + // Optionally allow the chain developer to overwrite the SDK's default + // server config. + srvCfg := serverconfig.DefaultConfig() + // The SDK's default minimum gas price is set to "" (empty value) inside + // app.toml. If left empty by validators, the node will halt on startup. + // However, the chain developer can set a default app.toml value for their + // validators here. + // + // In summary: + // - if you leave srvCfg.MinGasPrices = "", all validators MUST tweak their + // own app.toml config, + // - if you set srvCfg.MinGasPrices non-empty, validators CAN tweak their + // own app.toml to override, or use this default value. + // + // In simapp, we set the min gas prices to 0. 
+ srvCfg.MinGasPrices = "0stake" + + customAppConfig := CustomAppConfig{ + Config: *srvCfg, + WASM: WASMConfig{ + LruSize: 1, + QueryGasLimit: 300000, + }, + } + + customAppTemplate := serverconfig.DefaultConfigTemplate + ` +[wasm] +# This is the maximum sdk gas (wasm and storage) that we allow for any x/wasm "smart" queries +query_gas_limit = 300000 +# This is the number of wasm vm instances we keep cached in memory for speed-up +# Warning: this is currently unstable and may lead to crashes, best to keep for 0 unless testing locally +lru_size = 0` + + return customAppTemplate, customAppConfig +} +``` + +Finally, modify `./cmd/{binaryNamePrefix}d/main.go` to include the new changes: + +```go +package main + +import ( + "os" + + "github.com/cosmos/cosmos-sdk/server" + svrcmd "github.com/cosmos/cosmos-sdk/server/cmd" + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" + + "{ModulePath}/app" + "{ModulePath}/cmd/{BinaryNamePrefix}d/cmd" +) + +func main() { + // highlight-start + rootCmd, _ := cmd.NewRootCmd() + if err := svrcmd.Execute(rootCmd, "", app.DefaultNodeHome); err != nil { + switch e := err.(type) { + case server.ErrorCode: + os.Exit(e.Code) + + default: + os.Exit(1) + } + } + // highlight-end +} +``` + +#### Migration in `testutil` package + +Modify `./testutil/network/network.go` to include the new changes: + + +```go +package network + +import ( + "fmt" + "testing" + "time" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/crypto/hd" + "github.com/cosmos/cosmos-sdk/crypto/keyring" + pruningtypes "github.com/cosmos/cosmos-sdk/pruning/types" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + "github.com/cosmos/cosmos-sdk/simapp" + "github.com/cosmos/cosmos-sdk/testutil/network" + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + "github.com/stretchr/testify/require" + tmrand "github.com/tendermint/tendermint/libs/rand" + tmdb "github.com/tendermint/tm-db" + + 
// highlight-next-line + "{ModulePath}/app" + + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" +) + +// ... + +// DefaultConfig will initialize config for the network with custom application, +// genesis and single validator. All other parameters are inherited from cosmos-sdk/testutil/network.DefaultConfig +func DefaultConfig() network.Config { + // highlight-next-line + encoding := app.MakeEncodingConfig() + // remove-next-line + encoding := cosmoscmd.MakeEncodingConfig(app.ModuleBasics) + return network.Config{ + Codec: encoding.Marshaler, + TxConfig: encoding.TxConfig, + LegacyAmino: encoding.Amino, + InterfaceRegistry: encoding.InterfaceRegistry, + AccountRetriever: authtypes.AccountRetriever{}, + AppConstructor: func(val network.Validator) servertypes.Application { + return app.New( + val.Ctx.Logger, tmdb.NewMemDB(), nil, true, map[int64]bool{}, val.Ctx.Config.RootDir, 0, + encoding, + simapp.EmptyAppOptions{}, + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.AppConfig.Pruning)), + baseapp.SetMinGasPrices(val.AppConfig.MinGasPrices), + ) + }, + GenesisState: app.ModuleBasics.DefaultGenesis(encoding.Marshaler), + TimeoutCommit: 2 * time.Second, + ChainID: "chain-" + tmrand.NewRand().Str(6), + NumValidators: 1, + BondDenom: sdk.DefaultBondDenom, + MinGasPrices: fmt.Sprintf("0.000006%s", sdk.DefaultBondDenom), + AccountTokens: sdk.TokensFromConsensusPower(1000, sdk.DefaultPowerReduction), + StakingTokens: sdk.TokensFromConsensusPower(500, sdk.DefaultPowerReduction), + BondedTokens: sdk.TokensFromConsensusPower(100, sdk.DefaultPowerReduction), + PruningStrategy: pruningtypes.PruningOptionNothing, + CleanupDir: true, + SigningAlgo: string(hd.Secp256k1Type), + KeyringOptions: []keyring.Option{}, + } +} +``` + + --- + +## Fix ICA controller keeper wiring + +Related issue: https://github.com/ignite/cli/issues/2867 + +Apply the following changes to `app/app.go` file : + +```go +package app + +import ( + + // highlight-start + 
icacontrollerkeeper "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts/controller/keeper" + icacontrollertypes "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts/controller/types" + // highlight-end + // ... +) + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + + // ... + + keys := sdk.NewKVStoreKeys( + authtypes.StoreKey, authz.ModuleName, banktypes.StoreKey, + stakingtypes.StoreKey, + minttypes.StoreKey, distrtypes.StoreKey, slashingtypes.StoreKey, + govtypes.StoreKey, + paramstypes.StoreKey, ibchost.StoreKey, upgradetypes.StoreKey, + feegrant.StoreKey, evidencetypes.StoreKey, + ibctransfertypes.StoreKey, icahosttypes.StoreKey, + capabilitytypes.StoreKey, group.StoreKey, + // highlight-next-line + icacontrollertypes.StoreKey, + yourchainmoduletypes.StoreKey, + // this line is used by starport scaffolding # stargate/app/storeKey + ) + + // ... + + // remove-next-line + icaModule := ica.NewAppModule(nil, &app.ICAHostKeeper) + // highlight-start + icaControllerKeeper := icacontrollerkeeper.NewKeeper( + appCodec, keys[icacontrollertypes.StoreKey], + app.GetSubspace(icacontrollertypes.SubModuleName), + app.IBCKeeper.ChannelKeeper, // may be replaced with middleware such as ics29 fee + app.IBCKeeper.ChannelKeeper, &app.IBCKeeper.PortKeeper, + scopedICAControllerKeeper, app.MsgServiceRouter(), + ) + icaModule := ica.NewAppModule(&icaControllerKeeper, &app.ICAHostKeeper) + // highlight-end + icaHostIBCModule := icahost.NewIBCModule(app.ICAHostKeeper) + + // ... +} + +// ... 
+ +// initParamsKeeper init params keeper and its subspaces +func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey storetypes.StoreKey) paramskeeper.Keeper { + paramsKeeper := paramskeeper.NewKeeper(appCodec, legacyAmino, key, tkey) + + paramsKeeper.Subspace(authtypes.ModuleName) + paramsKeeper.Subspace(banktypes.ModuleName) + paramsKeeper.Subspace(stakingtypes.ModuleName) + paramsKeeper.Subspace(minttypes.ModuleName) + paramsKeeper.Subspace(distrtypes.ModuleName) + paramsKeeper.Subspace(slashingtypes.ModuleName) + paramsKeeper.Subspace(govtypes.ModuleName).WithKeyTable(govv1.ParamKeyTable()) + paramsKeeper.Subspace(crisistypes.ModuleName) + paramsKeeper.Subspace(ibctransfertypes.ModuleName) + paramsKeeper.Subspace(ibchost.ModuleName) + // highlight-next-line + paramsKeeper.Subspace(icacontrollertypes.SubModuleName) + paramsKeeper.Subspace(icahosttypes.SubModuleName) + paramsKeeper.Subspace(mychainmoduletypes.ModuleName) + // this line is used by starport scaffolding # stargate/app/paramSubspace + + return paramsKeeper +} +``` + + --- + +## Fix capability keeper not sealed + +Related issue: https://github.com/ignite/cli/issues/1921 + +Apply the following change to the `app/app.go` file: + +```go +package app + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + + // ... + + // this line is used by starport scaffolding # stargate/app/keeperDefinition + + // highlight-start + // Sealing prevents other modules from creating scoped sub-keepers + app.CapabilityKeeper.Seal() + // highlight-end + + // Create static IBC router, add transfer route, then set and seal it + + // ... 
+ +} +``` diff --git a/docs/versioned_docs/version-v0.26/06-migration/v0.25.1.md b/docs/versioned_docs/version-v0.26/06-migration/v0.25.1.md new file mode 100644 index 0000000..f3d1cc2 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/06-migration/v0.25.1.md @@ -0,0 +1,67 @@ +--- +sidebar_position: 993 +title: v0.25.1 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.25.1, changes are required to use Ignite CLI v0.25.1. +--- + +## Dragonberry fix + +`v0.25.1` contains the Dragonberry fix; update your `go.mod` as follows: + +```sh +require ( + // remove-next-line + github.com/ignite/cli v0.24.0 + // highlight-next-line + github.com/ignite/cli v0.25.1 +) + +// highlight-next-line +replace github.com/confio/ics23/go => github.com/cosmos/cosmos-sdk/ics23/go v0.8.0 +``` + +Then run: + +``` +$ go mod tidy +``` + +As a result, you should see `cosmos-sdk` and `ibc-go` upgraded as well. + +Finally, apply the following change to `app/app.go`: + +```go +package app + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + + // ... + + app.ICAHostKeeper = icahostkeeper.NewKeeper( + appCodec, keys[icahosttypes.StoreKey], + app.GetSubspace(icahosttypes.SubModuleName), + app.IBCKeeper.ChannelKeeper, + // highlight-next-line + app.IBCKeeper.ChannelKeeper, + &app.IBCKeeper.PortKeeper, + app.AccountKeeper, + scopedICAHostKeeper, + app.MsgServiceRouter(), + ) + + // ... 
+ + +} +``` diff --git a/docs/versioned_docs/version-v0.26/06-migration/v0.26.0.md b/docs/versioned_docs/version-v0.26/06-migration/v0.26.0.md new file mode 100644 index 0000000..9dd3afd --- /dev/null +++ b/docs/versioned_docs/version-v0.26/06-migration/v0.26.0.md @@ -0,0 +1,263 @@ +--- +sidebar_position: 992 +title: v0.26.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.26.0, changes are required to use Ignite CLI v0.26.0. +--- + +Ignite CLI `v0.26.0` is fully compatible with chains that are compatible with `v0.25.1`. Please follow the existing +migration guides if your chain is not upgraded to `v0.25.1` support. + +## Go Version + +Chains that are newly scaffolded with Ignite CLI `v0.26.0` now require `go 1.19` in their `go.mod` files. It is +recommended that chains scaffolded with an older version of Ignite CLI also bump their required `go` version and update +their tooling to the latest version. + +## ibc-go v6 + +Chains that are newly scaffolded with Ignite CLI `v0.26.0` now use `ibc-go/v6` for ibc functionality. It is not +necessary, but recommended to upgrade to the newest version of `ibc-go`. Most migrations can be done by following the +`ibc-go` [migration guide](https://github.com/cosmos/ibc-go/blob/v6.2.0/docs/migrations/v5-to-v6.md), but there are some +specific changes that will need to be followed for Ignite scaffolded chains. + +### Removing `cosmosibckeeper` + +Ignite CLI `v0.26.0` has deprecated [pkg/cosmosibckeeper](https://github.com/ignite/cli/tree/v0.26.0/ignite/pkg/cosmosibckeeper). +This package contained interfaces for ibc-related keepers. Newly scaffolded chains now include the interface files in their +`./x/{moduleName}/types` directory in a new `expected_ibc_keeper.go` file. 
To migrate, create the following file for +each module: + +```go title="x/{moduleName}/types/expected_ibc_keeper.go" +package types + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + clienttypes "github.com/cosmos/ibc-go/v6/modules/core/02-client/types" + channeltypes "github.com/cosmos/ibc-go/v6/modules/core/04-channel/types" +) + +// ChannelKeeper defines the expected IBC channel keeper. +type ChannelKeeper interface { + GetChannel(ctx sdk.Context, portID, channelID string) (channeltypes.Channel, bool) + GetNextSequenceSend(ctx sdk.Context, portID, channelID string) (uint64, bool) + SendPacket( + ctx sdk.Context, + channelCap *capabilitytypes.Capability, + sourcePort string, + sourceChannel string, + timeoutHeight clienttypes.Height, + timeoutTimestamp uint64, + data []byte, + ) (uint64, error) + ChanCloseInit(ctx sdk.Context, portID, channelID string, chanCap *capabilitytypes.Capability) error +} + +// PortKeeper defines the expected IBC port keeper. +type PortKeeper interface { + BindPort(ctx sdk.Context, portID string) *capabilitytypes.Capability +} + +// ScopedKeeper defines the expected IBC scoped keeper. 
+type ScopedKeeper interface { + GetCapability(ctx sdk.Context, name string) (*capabilitytypes.Capability, bool) + AuthenticateCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) bool + ClaimCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) error +} +``` + +Next, make the following updates to each `x/{moduleName}/keeper/keeper.go` file for each ibc-enabled +module in your project: + +```go title="x/{moduleName}/keeper/keeper.go" +package keeper + +import ( + "fmt" + + // remove-start + "blogibc/x/testibc/types" + "github.com/cosmos/cosmos-sdk/codec" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" + "github.com/ignite/cli/ignite/pkg/cosmosibckeeper" + "github.com/tendermint/tendermint/libs/log" + // remove-end + // highlight-start + "github.com/cosmos/cosmos-sdk/codec" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" + channeltypes "github.com/cosmos/ibc-go/v6/modules/core/04-channel/types" + host "github.com/cosmos/ibc-go/v6/modules/core/24-host" + "github.com/cosmos/ibc-go/v6/modules/core/exported" + "github.com/tendermint/tendermint/libs/log" + + "{appName}/x/{moduleName}/types" + // highlight-end +) + +type ( + Keeper struct { + // remove-next-line + *cosmosibckeeper.Keeper + cdc codec.BinaryCodec + storeKey storetypes.StoreKey + memKey storetypes.StoreKey + paramstore paramtypes.Subspace + + // highlight-start + channelKeeper types.ChannelKeeper + portKeeper types.PortKeeper + scopedKeeper exported.ScopedKeeper + // highlight-end + } +) + +func NewKeeper( + cdc codec.BinaryCodec, + storeKey, + memKey storetypes.StoreKey, + ps paramtypes.Subspace, + // highlight-start + 
channelKeeper types.ChannelKeeper, + portKeeper types.PortKeeper, + scopedKeeper types.ScopedKeeper, + // highlight-end +) *Keeper { + // set KeyTable if it has not already been set + if !ps.HasKeyTable() { + ps = ps.WithKeyTable(types.ParamKeyTable()) + } + + return &Keeper{ + // remove-start + Keeper: cosmosibckeeper.NewKeeper( + types.PortKey, + storeKey, + channelKeeper, + portKeeper, + scopedKeeper, + ), + // remove-end + cdc: cdc, + storeKey: storeKey, + memKey: memKey, + paramstore: ps, + // highlight-start + channelKeeper: channelKeeper, + portKeeper: portKeeper, + scopedKeeper: scopedKeeper, + // highlight-end + } +} + +// highlight-start +// ---------------------------------------------------------------------------- +// IBC Keeper Logic +// ---------------------------------------------------------------------------- + +// ChanCloseInit defines a wrapper function for the channel Keeper's function. +func (k Keeper) ChanCloseInit(ctx sdk.Context, portID, channelID string) error { + capName := host.ChannelCapabilityPath(portID, channelID) + chanCap, ok := k.scopedKeeper.GetCapability(ctx, capName) + if !ok { + return sdkerrors.Wrapf(channeltypes.ErrChannelCapabilityNotFound, "could not retrieve channel capability at: %s", capName) + } + return k.channelKeeper.ChanCloseInit(ctx, portID, channelID, chanCap) +} + +// IsBound checks if the IBC app module is already bound to the desired port +func (k Keeper) IsBound(ctx sdk.Context, portID string) bool { + _, ok := k.scopedKeeper.GetCapability(ctx, host.PortPath(portID)) + return ok +} + +// BindPort defines a wrapper function for the port Keeper's function in +// order to expose it to module's InitGenesis function +func (k Keeper) BindPort(ctx sdk.Context, portID string) error { + cap := k.portKeeper.BindPort(ctx, portID) + return k.ClaimCapability(ctx, cap, host.PortPath(portID)) +} + +// GetPort returns the portID for the IBC app module. 
Used in ExportGenesis +func (k Keeper) GetPort(ctx sdk.Context) string { + store := ctx.KVStore(k.storeKey) + return string(store.Get(types.PortKey)) +} + +// SetPort sets the portID for the IBC app module. Used in InitGenesis +func (k Keeper) SetPort(ctx sdk.Context, portID string) { + store := ctx.KVStore(k.storeKey) + store.Set(types.PortKey, []byte(portID)) +} + +// AuthenticateCapability wraps the scopedKeeper's AuthenticateCapability function +func (k Keeper) AuthenticateCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) bool { + return k.scopedKeeper.AuthenticateCapability(ctx, cap, name) +} + +// ClaimCapability allows the IBC app module to claim a capability that core IBC +// passes to it +func (k Keeper) ClaimCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) error { + return k.scopedKeeper.ClaimCapability(ctx, cap, name) +} + +// highlight-end + +func (k Keeper) Logger(ctx sdk.Context) log.Logger { + return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) +} +``` + +### Remaining migration + +After all uses of `cosmosibckeeper` have been removed, you can follow any remaining steps in the `ibc-go` [migration guide](https://github.com/cosmos/ibc-go/blob/v6.2.0/docs/migrations/v5-to-v6.md). + +## Scaffolded Release Workflow + +The develop branch of the CLI has been deprecated. To continue using the release workflow that uses the CLI to +automatically build and release your chain's binaries, replace develop with main in the following lines: + +```yaml title=".github/workflows/release.yml" +...
+ +jobs: + might_release: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Prepare Release Variables + id: vars + // highlight-next-line + uses: ignite/cli/actions/release/vars@main + - name: Issue Release Assets + // highlight-next-line + uses: ignite/cli/actions/cli@main + if: ${{ steps.vars.outputs.should_release == 'true' }} + with: + args: chain build --release --release.prefix ${{ steps.vars.outputs.tarball_prefix }} -t linux:amd64 -t darwin:amd64 -t darwin:arm64 + - name: Delete the "latest" Release + uses: dev-drprasad/delete-tag-and-release@v0.2.0 + if: ${{ steps.vars.outputs.is_release_type_latest == 'true' }} + with: + tag_name: ${{ steps.vars.outputs.tag_name }} + delete_release: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Publish the Release + uses: softprops/action-gh-release@v1 + if: ${{ steps.vars.outputs.should_release == 'true' }} + with: + tag_name: ${{ steps.vars.outputs.tag_name }} + files: release/* + prerelease: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} +``` diff --git a/docs/versioned_docs/version-v0.26/07-packages/_category_.json b/docs/versioned_docs/version-v0.26/07-packages/_category_.json new file mode 100644 index 0000000..6dbb883 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/07-packages/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Packages", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/07-packages/cosmostxcollector.md b/docs/versioned_docs/version-v0.26/07-packages/cosmostxcollector.md new file mode 100644 index 0000000..df0fccd --- /dev/null +++ b/docs/versioned_docs/version-v0.26/07-packages/cosmostxcollector.md @@ -0,0 +1,200 @@ +--- +sidebar_position: 0 +title: cosmostxcollector +slug: /packages/cosmostxcollector +--- + +# cosmostxcollector + +The package implements support for collecting transactions and events from Cosmos blockchains +into a data backend and it also adds 
support for querying the collected data. + +## Transaction and event data collecting + +Transactions and events can be collected using the `cosmostxcollector.Collector` type. This +type uses a `cosmosclient.Client` instance to fetch the data from each block and a data backend +adapter to save the data. + +### Data backend adapters + +Data backend adapters are used to query and save the collected data into different types of data +backends and must implement the `cosmostxcollector.adapter.Adapter` interface. + +An adapter for PostgreSQL is already implemented in `cosmostxcollector.adapter.postgres.Adapter`. +This is the one used in the examples. + +### Example: Data collection + +The data collection example assumes that there is a PostgreSQL database running in the local +environment containing an empty database named "cosmos". + +The required database tables will be created automatically by the collector the first time it is run. + +When the application is run it will fetch all the transactions and events starting from one of the +recent blocks until the current block height and populate the database: + +```go +package main + +import ( + "context" + "log" + + "github.com/ignite/cli/ignite/pkg/clictx" + "github.com/ignite/cli/ignite/pkg/cosmosclient" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/adapter/postgres" +) + +const ( + // Name of a local PostgreSQL database + dbName = "cosmos" + + // Cosmos RPC address + rpcAddr = "https://rpc.cosmos.network:443" +) + +func collect(ctx context.Context, db postgres.Adapter) error { + // Make sure that the data backend schema is up to date + if err := db.Init(ctx); err != nil { + return err + } + + // Init the Cosmos client + client, err := cosmosclient.New(ctx, cosmosclient.WithNodeAddress(rpcAddr)) + if err != nil { + return err + } + + // Get the latest block height + latestHeight, err := client.LatestBlockHeight(ctx) + if err != nil { + return err + } + + // 
Collect transactions and events starting from a block height. + // The collector stops at the latest height available at the time of the call. + collector := cosmostxcollector.New(db, client) + if err := collector.Collect(ctx, latestHeight-50); err != nil { + return err + } + + return nil +} + +func main() { + ctx := clictx.From(context.Background()) + + // Init an adapter for a local PostgreSQL database running with the default values + params := map[string]string{"sslmode": "disable"} + db, err := postgres.NewAdapter(dbName, postgres.WithParams(params)) + if err != nil { + log.Fatal(err) + } + + if err := collect(ctx, db); err != nil { + log.Fatal(err) + } +} +``` + +## Queries + +Collected data can be queried through the data backend adapters using event queries or +cursor-based queries. + +Queries support sorting, paging and filtering by using different options during creation. +The cursor-based ones also support the selection of specific fields or properties and also +passing arguments in cases where the query is a function. + +By default no sorting, filtering nor paging is applied to the queries. + +### Event queries + +The event queries return events and their attributes as `[]cosmostxcollector.query.Event`. + +### Example: Query events + +The example reads transfer events from Cosmos' bank module and paginates the results. 
+ +```go +import ( + "context" + + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/adapter/postgres" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/query" +) + +func queryBankTransferEvents(ctx context.Context, db postgres.Adapter) ([]query.Event, error) { + // Create an event query that returns events of type "transfer" + qry := query.NewEventQuery( + query.WithFilters( + // Filter transfer events from Cosmos' bank module + postgres.FilterByEventType(banktypes.EventTypeTransfer), + ), + query.WithPageSize(10), + query.AtPage(1), + ) + + // Execute the query + return db.QueryEvents(ctx, qry) +} +``` + +### Cursor-based queries + +This type of queries is meant to be used in contexts where the Event queries are not +useful. + +Cursor-based queries can query a single "entity" which can be a table, view or function +in relational databases or a collection or function in non relational data backends. + +The result of these types of queries is a cursor that implements the `cosmostxcollector.query.Cursor` +interface. 
+ +### Example: Query events using cursors + +```go +import ( + "context" + + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/adapter/postgres" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/query" +) + +func queryBankTransferEventIDs(ctx context.Context, db postgres.Adapter) (ids []int64, err error) { + // Create a query that returns the IDs for events of type "transfer" + qry := query.New( + "event", + query.Fields("id"), + query.WithFilters( + // Filter transfer events from Cosmos' bank module + postgres.NewFilter("type", banktypes.EventTypeTransfer), + ), + query.WithPageSize(10), + query.AtPage(1), + query.SortByFields(query.SortOrderAsc, "id"), + ) + + // Execute the query + cr, err := db.Query(ctx, qry) + if err != nil { + return nil, err + } + + // Read the results + for cr.Next() { + var eventID int64 + + if err := cr.Scan(&eventID); err != nil { + return nil, err + } + + ids = append(ids, eventID) + } + + return ids, nil +} +``` diff --git a/docs/versioned_docs/version-v0.26/08-references/01-cli.md b/docs/versioned_docs/version-v0.26/08-references/01-cli.md new file mode 100644 index 0000000..01ca8fb --- /dev/null +++ b/docs/versioned_docs/version-v0.26/08-references/01-cli.md @@ -0,0 +1,4145 @@ +--- +description: Ignite CLI docs. +--- + +# CLI commands + +Documentation for Ignite CLI. +## ignite + +Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + +**Synopsis** + +Ignite CLI is a tool for creating sovereign blockchains built with Cosmos SDK, the world’s +most popular modular blockchain framework. Ignite CLI offers everything you need to scaffold, +test, build, and launch your blockchain. 
+ +To get started, create a blockchain: + + ignite scaffold chain example + + +**Options** + +``` + -h, --help help for ignite +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node +* [ignite completion](#ignite-completion) - Generate the autocompletion script for the specified shell +* [ignite docs](#ignite-docs) - Show Ignite CLI docs +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite node](#ignite-node) - Make requests to a live blockchain node +* [ignite plugin](#ignite-plugin) - Handle plugins +* [ignite relayer](#ignite-relayer) - Connect blockchains with an IBC relayer +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more +* [ignite tools](#ignite-tools) - Tools for advanced users +* [ignite version](#ignite-version) - Print the current build information + + +## ignite account + +Create, delete, and show Ignite accounts + +**Synopsis** + +Commands for managing Ignite accounts. An Ignite account is a private/public +keypair stored in a keyring. Currently Ignite accounts are used when interacting +with Ignite relayer commands and when using "ignite network" commands. + +Note: Ignite account commands are not for managing your chain's keys and accounts. Use +your chain's binary to manage accounts from "config.yml". For example, if your +blockchain is called "mychain", use "mychaind keys" to manage keys for the +chain.
+ + +**Options** + +``` + -h, --help help for account + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite account create](#ignite-account-create) - Create a new account +* [ignite account delete](#ignite-account-delete) - Delete an account by name +* [ignite account export](#ignite-account-export) - Export an account as a private key +* [ignite account import](#ignite-account-import) - Import an account by using a mnemonic or a private key +* [ignite account list](#ignite-account-list) - Show a list of all accounts +* [ignite account show](#ignite-account-show) - Show detailed information about a particular account + + +## ignite account create + +Create a new account + +``` +ignite account create [name] [flags] +``` + +**Options** + +``` + -h, --help help for create +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account delete + +Delete an account by name + +``` +ignite account delete [name] [flags] +``` + +**Options** + +``` + -h, --help help for delete +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account export + +Export an account as a private key + +``` +ignite account export [name] 
[flags] +``` + +**Options** + +``` + -h, --help help for export + --non-interactive do not enter into interactive mode + --passphrase string passphrase to encrypt the exported key + --path string path to export private key. default: ./key_[name] +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account import + +Import an account by using a mnemonic or a private key + +``` +ignite account import [name] [flags] +``` + +**Options** + +``` + -h, --help help for import + --non-interactive do not enter into interactive mode + --passphrase string passphrase to decrypt the imported key (ignored when secret is a mnemonic) + --secret string Your mnemonic or path to your private key (use interactive mode instead to securely pass your mnemonic) +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account list + +Show a list of all accounts + +``` +ignite account list [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + -h, --help help for list +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account show + +Show detailed 
information about a particular account + +``` +ignite account show [name] [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite chain + +Build, init and start a blockchain node + +**Synopsis** + +Commands in this namespace let you build, initialize, and start your +blockchain node locally for development purposes. + +To run these commands you should be inside the project's directory so that +Ignite can find the source code. To ensure that you are, run "ls", you should +see the following files in the output: "go.mod", "x", "proto", "app", etc. + +By default the "build" command will identify the "main" package of the project, +install dependencies if necessary, set build flags, compile the project into a +binary and install the binary. The "build" command is useful if you just want +the compiled binary, for example, to initialize and start the chain manually. It +can also be used to release your chain's binaries automatically as part of +continuous integration workflow. + +The "init" command will build the chain's binary and use it to initialize a +local validator node. By default the validator node will be initialized in your +$HOME directory in a hidden directory that matches the name of your project. +This directory is called a data directory and contains a chain's genesis file +and a validator key. This command is useful if you want to quickly build and +initialize the data directory and use the chain's binary to manually start the +blockchain. The "init" command is meant only for development purposes, not +production.
+ +The "serve" command builds, initializes, and starts your blockchain locally with +a single validator node for development purposes. "serve" also watches the +source code directory for file changes and intelligently +re-builds/initializes/starts the chain, essentially providing "code-reloading". +The "serve" command is meant only for development purposes, not production. + +To distinguish between production and development consider the following. + +In production, blockchains often run the same software on many validator nodes +that are run by different people and entities. To launch a blockchain in +production, the validator entities coordinate the launch process to start their +nodes simultaneously. + +During development, a blockchain can be started locally on a single validator +node. This convenient process lets you restart a chain quickly and iterate +faster. Starting a chain on a single node in development is similar to starting +a traditional web application on a local server. + +The "faucet" command lets you send tokens to an address from the "faucet" +account defined in "config.yml". Alternatively, you can use the chain's binary +to send token from any other account that exists on chain. + +The "simulate" command helps you start a simulation testing process for your +chain. 
+ + +**Options** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -h, --help help for chain + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite chain build](#ignite-chain-build) - Build a node binary +* [ignite chain debug](#ignite-chain-debug) - Launch a debugger for a blockchain app +* [ignite chain faucet](#ignite-chain-faucet) - Send coins to an account +* [ignite chain init](#ignite-chain-init) - Initialize your chain +* [ignite chain serve](#ignite-chain-serve) - Start a blockchain node in development +* [ignite chain simulate](#ignite-chain-simulate) - Run simulation testing for the blockchain + + +## ignite chain build + +Build a node binary + +**Synopsis** + + +The build command compiles the source code of the project into a binary and +installs the binary in the $(go env GOPATH)/bin directory. + +You can customize the output directory for the binary using a flag: + + ignite chain build --output dist + +To compile the binary Ignite first compiles protocol buffer (proto) files into +Go source code. Proto files contain required type and services definitions. If +you're using another program to compile proto files, you can use a flag to tell +Ignite to skip the proto compilation step: + + ignite chain build --skip-proto + +Afterwards, Ignite installs dependencies specified in the go.mod file. By default +Ignite doesn't check that dependencies of the main module stored in the module +cache have not been modified since they were downloaded. To enforce dependency +checking (essentially, running "go mod verify") use a flag: + + ignite chain build --check-dependencies + +Next, Ignite identifies the "main" package of the project. By default the "main" +package is located in "cmd/{app}d" directory, where "{app}" is the name of the +scaffolded project and "d" stands for daemon.
If your project contains more +than one "main" package, specify the path to the one that Ignite should compile +in config.yml: + + build: + main: custom/path/to/main + +By default the binary name will match the top-level module name (specified in +go.mod) with a suffix "d". This can be customized in config.yml: + + build: + binary: mychaind + +You can also specify custom linker flags: + + build: + ldflags: + - "-X main.Version=development" + - "-X main.Date=01/05/2022T19:54" + +To build binaries for a release, use the --release flag. The binaries for one or +more specified release targets are built in a "release/" directory in the +project's source directory. Specify the release targets with GOOS:GOARCH build +tags. If the optional --release.targets is not specified, a binary is created +for your current environment. + + ignite chain build --release -t linux:amd64 -t darwin:amd64 -t darwin:arm64 + + +``` +ignite chain build [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --debug build a debug binary + -h, --help help for build + -o, --output string binary output path + -p, --path string path of the app (default ".") + --release build for a release + --release.prefix string tarball prefix for each release target. Available only with --release flag + -t, --release.targets strings release targets. 
Available only with --release flag + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain debug + +Launch a debugger for a blockchain app + +**Synopsis** + +The debug command starts a debug server and launches a debugger. + +Ignite uses the Delve debugger by default. Delve enables you to interact with +your program by controlling the execution of the process, evaluating variables, +and providing information of thread / goroutine state, CPU register state and +more. + +A debug server can optionally be started in cases where default terminal client +is not desirable. When the server starts it first runs the blockchain app, +attaches to it and finally waits for a client connection. It accepts both +JSON-RPC or DAP client connections. + +To start a debug server use the following flag: + + ignite chain debug --server + +To start a debug server with a custom address use the following flags: + + ignite chain debug --server --server-address 127.0.0.1:30500 + +The debug server stops automatically when the client connection is closed. 
+ + +``` +ignite chain debug [flags] +``` + +**Options** + +``` + -h, --help help for debug + -p, --path string path of the app (default ".") + --server start a debug server + --server-address string debug server address (default "127.0.0.1:30500") +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain faucet + +Send coins to an account + +``` +ignite chain faucet [address] [coin<,...>] [flags] +``` + +**Options** + +``` + -h, --help help for faucet + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain init + +Initialize your chain + +**Synopsis** + +The init command compiles and installs the binary (like "ignite chain build") +and uses that binary to initialize the blockchain's data directory for one +validator. To learn how the build process works, refer to "ignite chain build +--help". + +By default, the data directory will be initialized in $HOME/.mychain, where +"mychain" is the name of the project. To set a custom data directory use the +--home flag or set the value in config.yml: + + validators: + - name: alice + bonded: '100000000stake' + home: "~/.customdir" + +The data directory contains three files in the "config" directory: app.toml, +config.toml, client.toml. These files let you customize the behavior of your +blockchain node and the client executable. 
When a chain is re-initialized the +data directory can be reset. To make some values in these files persistent, set +them in config.yml: + + validators: + - name: alice + bonded: '100000000stake' + app: + minimum-gas-prices: "0.025stake" + config: + consensus: + timeout_commit: "5s" + timeout_propose: "5s" + client: + output: "json" + +The configuration above changes the minimum gas price of the validator (by +default the gas price is set to 0 to allow "free" transactions), sets the block +time to 5s, and changes the output format to JSON. To see what kind of values +this configuration accepts see the generated TOML files in the data directory. + +As part of the initialization process Ignite creates on-chain accounts with +token balances. By default, config.yml has two accounts in the top-level +"accounts" property. You can add more accounts and change their token balances. +Refer to config.yml guide to see which values you can set. + +One of these accounts is a validator account and the amount of self-delegated +tokens can be set in the top-level "validator" property. + +One of the most important components of an initialized chain is the genesis +file, the 0th block of the chain. The genesis file is stored in the data +directory "config" subdirectory and contains the initial state of the chain, +including consensus and module parameters. You can customize the values of the +genesis in config.yml: + + genesis: + app_state: + staking: + params: + bond_denom: "foo" + +The example above changes the staking token to "foo". If you change the staking +denom, make sure the validator account has the right tokens. + +The init command is meant to be used ONLY FOR DEVELOPMENT PURPOSES. Under the +hood it runs commands like "appd init", "appd add-genesis-account", "appd +gentx", and "appd collect-gentx". For production, you may want to run these +commands manually to ensure a production-level node initialization. 
+ + +``` +ignite chain init [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --debug build a debug binary + -h, --help help for init + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + --skip-proto skip file generation from proto +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain serve + +Start a blockchain node in development + +**Synopsis** + +The serve command compiles and installs the binary (like "ignite chain build"), +uses that binary to initialize the blockchain's data directory for one validator +(like "ignite chain init"), and starts the node locally for development purposes +with automatic code reloading. + +Automatic code reloading means Ignite starts watching the project directory. +Whenever a file change is detected, Ignite automatically rebuilds, reinitializes +and restarts the node. + +Whenever possible Ignite will try to keep the current state of the chain by +exporting and importing the genesis file. + +To force Ignite to start from a clean slate even if a genesis file exists, use +the following flag: + + ignite chain serve --reset-once + +To force Ignite to reset the state every time the source code is modified, use +the following flag: + + ignite chain serve --force-reset + +With Ignite it's possible to start more than one blockchain from the same source +code using different config files. This is handy if you're building +inter-blockchain functionality and, for example, want to try sending packets +from one blockchain to another. 
To start a node using a specific config file: + + ignite chain serve --config mars.yml + +The serve command is meant to be used ONLY FOR DEVELOPMENT PURPOSES. Under the +hood, it runs "appd start", where "appd" is the name of your chain's binary. For +production, you may want to run "appd start" manually. + + +``` +ignite chain serve [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + -f, --force-reset force reset of the app state on start and every source change + --generate-clients generate code for the configured clients on reset or source code change + -h, --help help for serve + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + --quit-on-fail quit program if the app fails to start + -r, --reset-once reset the app state once on init + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain simulate + +Run simulation testing for the blockchain + +**Synopsis** + +Run simulation testing for the blockchain. 
It sends many randomized-input messages of each module to a simulated node and checks if invariants break + +``` +ignite chain simulate [flags] +``` + +**Options** + +``` + --blockSize int operations per block (default 30) + --exportParamsHeight int height to which export the randomly generated params + --exportParamsPath string custom file path to save the exported params JSON + --exportStatePath string custom file path to save the exported app state JSON + --exportStatsPath string custom file path to save the exported simulation statistics JSON + --genesis string custom simulation genesis file; cannot be used with params file + --genesisTime int override genesis UNIX time instead of using a random UNIX time + -h, --help help for simulate + --initialBlockHeight int initial block to start the simulation (default 1) + --lean lean simulation log output + --numBlocks int number of new blocks to simulate from the initial block height (default 200) + --params string custom simulation params file which overrides any random params; cannot be used with genesis + --period uint run slow invariants only once every period assertions + --printAllInvariants print all invariants if a broken invariant is found + --seed int simulation random seed (default 42) + --simulateEveryOperation run slow invariants every operation + -v, --verbose verbose log output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite completion + +Generate the autocompletion script for the specified shell + +**Synopsis** + +Generate the autocompletion script for ignite for the specified shell. +See each sub-command's help for details on how to use the generated script. 
+ + +**Options** + +``` + -h, --help help for completion +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite completion bash](#ignite-completion-bash) - Generate the autocompletion script for bash +* [ignite completion fish](#ignite-completion-fish) - Generate the autocompletion script for fish +* [ignite completion powershell](#ignite-completion-powershell) - Generate the autocompletion script for powershell +* [ignite completion zsh](#ignite-completion-zsh) - Generate the autocompletion script for zsh + + +## ignite completion bash + +Generate the autocompletion script for bash + +**Synopsis** + +Generate the autocompletion script for the bash shell. + +This script depends on the 'bash-completion' package. +If it is not installed already, you can install it via your OS's package manager. + +To load completions in your current shell session: + + source <(ignite completion bash) + +To load completions for every new session, execute once: + +**#### Linux:** + + ignite completion bash > /etc/bash_completion.d/ignite + +**#### macOS:** + + ignite completion bash > $(brew --prefix)/etc/bash_completion.d/ignite + +You will need to start a new shell for this setup to take effect. + + +``` +ignite completion bash +``` + +**Options** + +``` + -h, --help help for bash + --no-descriptions disable completion descriptions +``` + +**SEE ALSO** + +* [ignite completion](#ignite-completion) - Generate the autocompletion script for the specified shell + + +## ignite completion fish + +Generate the autocompletion script for fish + +**Synopsis** + +Generate the autocompletion script for the fish shell. + +To load completions in your current shell session: + + ignite completion fish | source + +To load completions for every new session, execute once: + + ignite completion fish > ~/.config/fish/completions/ignite.fish + +You will need to start a new shell for this setup to take effect. 
+ + +``` +ignite completion fish [flags] +``` + +**Options** + +``` + -h, --help help for fish + --no-descriptions disable completion descriptions +``` + +**SEE ALSO** + +* [ignite completion](#ignite-completion) - Generate the autocompletion script for the specified shell + + +## ignite completion powershell + +Generate the autocompletion script for powershell + +**Synopsis** + +Generate the autocompletion script for powershell. + +To load completions in your current shell session: + + ignite completion powershell | Out-String | Invoke-Expression + +To load completions for every new session, add the output of the above command +to your powershell profile. + + +``` +ignite completion powershell [flags] +``` + +**Options** + +``` + -h, --help help for powershell + --no-descriptions disable completion descriptions +``` + +**SEE ALSO** + +* [ignite completion](#ignite-completion) - Generate the autocompletion script for the specified shell + + +## ignite completion zsh + +Generate the autocompletion script for zsh + +**Synopsis** + +Generate the autocompletion script for the zsh shell. + +If shell completion is not already enabled in your environment you will need +to enable it. You can execute the following once: + + echo "autoload -U compinit; compinit" >> ~/.zshrc + +To load completions in your current shell session: + + source <(ignite completion zsh); compdef _ignite ignite + +To load completions for every new session, execute once: + +**#### Linux:** + + ignite completion zsh > "${fpath[1]}/_ignite" + +**#### macOS:** + + ignite completion zsh > $(brew --prefix)/share/zsh/site-functions/_ignite + +You will need to start a new shell for this setup to take effect. 
+ + +``` +ignite completion zsh [flags] +``` + +**Options** + +``` + -h, --help help for zsh + --no-descriptions disable completion descriptions +``` + +**SEE ALSO** + +* [ignite completion](#ignite-completion) - Generate the autocompletion script for the specified shell + + +## ignite docs + +Show Ignite CLI docs + +``` +ignite docs [flags] +``` + +**Options** + +``` + -h, --help help for docs +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + + +## ignite generate + +Generate clients, API docs from source code + +**Synopsis** + +Generate clients, API docs from source code. + +Such as compiling protocol buffer files into Go or implement particular +functionality, for example, generating an OpenAPI spec. + +Produced source code can be regenerated by running a command again and is not +meant to be edited by hand. + + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for generate + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite generate composables](#ignite-generate-composables) - TypeScript frontend client and Vue 3 composables +* [ignite generate hooks](#ignite-generate-hooks) - TypeScript frontend client and React hooks +* [ignite generate openapi](#ignite-generate-openapi) - OpenAPI spec for your chain +* [ignite generate proto-go](#ignite-generate-proto-go) - Compile protocol buffer files to Go source code required by Cosmos SDK +* [ignite generate ts-client](#ignite-generate-ts-client) - TypeScript frontend client +* [ignite generate vuex](#ignite-generate-vuex) - *DEPRECATED* TypeScript frontend client and Vuex stores + + +## ignite generate composables + +TypeScript frontend client and Vue 3 composables + +``` +ignite generate composables [flags] +``` + +**Options** + +``` + -h, --help help 
for composables + -o, --output string Vue 3 composables output path + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate hooks + +TypeScript frontend client and React hooks + +``` +ignite generate hooks [flags] +``` + +**Options** + +``` + -h, --help help for hooks + -o, --output string React hooks output path + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate openapi + +OpenAPI spec for your chain + +``` +ignite generate openapi [flags] +``` + +**Options** + +``` + -h, --help help for openapi + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate proto-go + +Compile protocol buffer files to Go source code required by Cosmos SDK + +``` +ignite generate proto-go [flags] +``` + +**Options** + +``` + -h, --help help for proto-go + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate ts-client + +TypeScript frontend 
client + +**Synopsis** + +Generate a framework agnostic TypeScript client for your blockchain project. + +By default the TypeScript client is generated in the "ts-client/" directory. You +can customize the output directory in config.yml: + + client: + typescript: + path: new-path + +Output can also be customized by using a flag: + + ignite generate ts-client --output new-path + +TypeScript client code can be automatically regenerated on reset or source code +changes when the blockchain is started with a flag: + + ignite chain serve --generate-clients + + +``` +ignite generate ts-client [flags] +``` + +**Options** + +``` + -h, --help help for ts-client + -o, --output string TypeScript client output path + --use-cache use build cache to speed-up generation + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate vuex + +*DEPRECATED* TypeScript frontend client and Vuex stores + +``` +ignite generate vuex [flags] +``` + +**Options** + +``` + -h, --help help for vuex + -o, --output string Vuex store output path + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite network + +Launch a blockchain in production + +**Synopsis** + + +Ignite Network commands allow to coordinate the launch of sovereign Cosmos blockchains. + +To launch a Cosmos blockchain you need someone to be a coordinator and others to +be validators. These are just roles, anyone can be a coordinator or a validator. 
+A coordinator publishes information about a chain to be launched on the Ignite +blockchain, approves validator requests and coordinates the launch. Validators +send requests to join a chain and start their nodes when a blockchain is ready +for launch. + +To publish the information about your chain as a coordinator run the following +command (the URL should point to a repository with a Cosmos SDK chain): + + ignite network chain publish github.com/ignite/example + +This command will return a launch identifier you will be using in the following +commands. Let's say this identifier is 42. + +Next, ask validators to initialize their nodes and request to join the network +as validators. For a testnet you can use the default values suggested by the +CLI. + + ignite network chain init 42 + + ignite network chain join 42 --amount 95000000stake + +As a coordinator list all validator requests: + + ignite network request list 42 + +Approve validator requests: + + ignite network request approve 42 1,2 + +Once you've approved all validators you need in the validator set, announce that +the chain is ready for launch: + + ignite network chain launch 42 + +Validators can now prepare their nodes for launch: + + ignite network chain prepare 42 + +The output of this command will show a command that a validator would use to +launch their node, for example “exampled --home ~/.example”. After enough +validators launch their nodes, a blockchain will be live. 


**Options**

```
  -h, --help                          help for network
      --local                         Use local SPN network
      --nightly                       Use nightly SPN network
      --spn-faucet-address string     SPN faucet address (default "https://faucet.devnet.ignite.com:443")
      --spn-node-address string       SPN node address (default "https://rpc.devnet.ignite.com:443")
```

**SEE ALSO**

* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain
* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch
* [ignite network coordinator](#ignite-network-coordinator) - Show and update a coordinator profile
* [ignite network profile](#ignite-network-profile) - Show the address profile info
* [ignite network project](#ignite-network-project) - Handle projects
* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests
* [ignite network reward](#ignite-network-reward) - Manage network rewards
* [ignite network tool](#ignite-network-tool) - Commands to run subsidiary tools
* [ignite network validator](#ignite-network-validator) - Show and update a validator profile
* [ignite network version](#ignite-network-version) - Version of the plugin


## ignite network chain

Publish a chain, join as a validator and prepare node for launch

**Synopsis**

The "chain" namespace features the most commonly used commands for launching
blockchains with Ignite.

As a coordinator you "publish" your blockchain to Ignite. When enough validators
are approved for the genesis and no changes are expected to be made to the
genesis, a coordinator announces that the chain is ready for launch with the
"launch" command. In the case of an unsuccessful launch, the coordinator can revert it
using the "revert-launch" command.

As a validator, you "init" your node and apply to become a validator for a
blockchain with the "join" command. 
After the launch of the chain is announced, +validators can generate the finalized genesis and download the list of peers with the +"prepare" command. + +The "install" command can be used to download, compile the source code and +install the chain's binary locally. The binary can be used, for example, to +initialize a validator node or to interact with the chain after it has been +launched. + +All chains published to Ignite can be listed by using the "list" command. + + +**Options** + +``` + -h, --help help for chain +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network chain init](#ignite-network-chain-init) - Initialize a chain from a published chain ID +* [ignite network chain install](#ignite-network-chain-install) - Install chain binary for a launch +* [ignite network chain join](#ignite-network-chain-join) - Request to join a network as a validator +* [ignite network chain launch](#ignite-network-chain-launch) - Trigger the launch of a chain +* [ignite network chain list](#ignite-network-chain-list) - List published chains +* [ignite network chain prepare](#ignite-network-chain-prepare) - Prepare the chain for launch +* [ignite network chain publish](#ignite-network-chain-publish) - Publish a new chain to start a new network +* [ignite network chain revert-launch](#ignite-network-chain-revert-launch) - Revert launch of a network as a coordinator +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain init + +Initialize a chain from a published chain ID + +**Synopsis** + +Ignite network chain init is a command used by validators to 
initialize a +validator node for a blockchain from the information stored on the Ignite chain. + + ignite network chain init 42 + +This command fetches the information about a chain with launch ID 42. The source +code of the chain is cloned in a temporary directory, and the node's binary is +compiled from the source. The binary is then used to initialize the node. By +default, Ignite uses "~/spn/[launch-id]/" as the home directory for the blockchain. + +An important part of initializing a validator node is creation of the gentx (a +transaction that adds a validator at the genesis of the chain). + +The "init" command will prompt for values like self-delegation and commission. +These values will be used in the validator's gentx. You can use flags to provide +the values in non-interactive mode. + +Use the "--home" flag to choose a different path for the home directory of the +blockchain: + + ignite network chain init 42 --home ~/mychain + +The end result of the "init" command is a validator home directory with a +genesis validator transaction (gentx) file. + +``` +ignite network chain init [launch-id] [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for init + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --validator-account string account for the chain validator (default "default") + --validator-details string details about the validator + --validator-gas-price string validator gas price + --validator-identity string validator identity signature (ex. 
UPort or Keybase) + --validator-moniker string custom validator moniker + --validator-security-contact string validator security contact email + --validator-self-delegation string validator minimum self delegation + --validator-website string associate a website with the validator + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain install + +Install chain binary for a launch + +``` +ignite network chain install [launch-id] [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for install +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain join + +Request to join a network as a validator + +**Synopsis** + +The "join" command is used by validators to send a request to join a blockchain. +The required argument is a launch ID of a blockchain. 
The "join" command expects
that the validator has already set up a home directory for the blockchain and has
a gentx either by running "ignite network chain init" or initializing the data
directory manually with the chain's binary.

By default the "join" command just sends the request to join as a validator.
However, often a validator also needs to request a genesis account with a token
balance to afford self-delegation.

The following command will send a request to join the blockchain with launch ID 42
as a validator and request to be added as an account with a token balance of
95000000 STAKE.

  ignite network chain join 42 --amount 95000000stake

A request to join as a validator contains a gentx file. Ignite looks for gentx
in a home directory used by "ignite network chain init" by default. To use a
different directory, use the "--home" flag or pass a gentx file directly with
the "--gentx" flag.

To join a chain as a validator, you must provide the IP address of your node so
that other validators can connect to it. The join command will ask you for the
IP address and will attempt to automatically detect and fill in the value. If
you want to manually specify the IP address, you can use the "--peer-address"
flag:

  ignite network chain join 42 --peer-address 0.0.0.0

Since "join" broadcasts a transaction to the Ignite blockchain, you will need an
account on the Ignite blockchain. During the testnet phase, however, Ignite
automatically requests tokens from a faucet. 
+ + +``` +ignite network chain join [launch-id] [flags] +``` + +**Options** + +``` + --amount string amount of coins for account request (ignored if coordinator has fixed the account balances or if --no-acount flag is set) + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --from string account name to use for sending transactions to SPN (default "default") + --gentx string path to a gentx json file + -h, --help help for join + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --no-account prevent sending a request for a genesis account + --peer-address string peer's address + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain launch + +Trigger the launch of a chain + +**Synopsis** + +The launch command communicates to the world that the chain is ready to be +launched. + +Only the coordinator of the chain can execute the launch command. + + ignite network chain launch 42 + +After the launch command is executed no changes to the genesis are accepted. For +example, validators will no longer be able to successfully execute the "ignite +network chain join" command to apply as a validator. + +The launch command sets the date and time after which the chain will start. By +default, the current time is set. 
To give validators more time to prepare for +the launch, set the time with the "--launch-time" flag: + + ignite network chain launch 42 --launch-time 2023-01-01T00:00:00Z + +After the launch command is executed, validators can generate the finalized +genesis and prepare their nodes for the launch. For example, validators can run +"ignite network chain prepare" to generate the genesis and populate the peer +list. + +If you want to change the launch time or open up the genesis file for changes +you can use "ignite network chain revert-launch" to make it possible, for +example, to accept new validators and add accounts. + + +``` +ignite network chain launch [launch-id] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for launch + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --launch-time string timestamp the chain is effectively launched (example "2022-01-01T00:00:00Z") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain list + +List published chains + +``` +ignite network chain list [flags] +``` + +**Options** + +``` + --advanced show advanced information about the chains + -h, --help help for list + --limit uint limit of results per page (default 100) + --page uint page for chain list result (default 1) +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN 
network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain prepare + +Prepare the chain for launch + +**Synopsis** + +The prepare command prepares a validator node for the chain launch by generating +the final genesis and adding IP addresses of peers to the validator's +configuration file. + + ignite network chain prepare 42 + +By default, Ignite uses "$HOME/spn/LAUNCH_ID" as the data directory. If you used +a different data directory when initializing the node, use the "--home" flag and +set the correct path to the data directory. + +Ignite generates the genesis file in "config/genesis.json" and adds peer IPs by +modifying "config/config.toml". + +The prepare command should be executed after the coordinator has triggered the +chain launch and finalized the genesis with "ignite network chain launch". You +can force Ignite to run the prepare command without checking if the launch has +been triggered with the "--force" flag (this is not recommended). + +After the prepare command is executed the node is ready to be started. 
+ + +``` +ignite network chain prepare [launch-id] [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + -f, --force force the prepare command to run even if the chain is not launched + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for prepare + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain publish + +Publish a new chain to start a new network + +**Synopsis** + +To begin the process of launching a blockchain with Ignite, a coordinator needs +to publish the information about a blockchain. The only required bit of +information is the URL of the source code of the blockchain. + +The following command publishes the information about an example blockchain: + + ignite network chain publish github.com/ignite/example + +This command fetches the source code of the blockchain, compiles the binary, +verifies that a blockchain can be started with the binary, and publishes the +information about the blockchain to Ignite. Currently, only public repositories +are supported. The command returns an integer number that acts as an identifier +of the chain on Ignite. 
+ +By publishing a blockchain on Ignite you become the "coordinator" of this +blockchain. A coordinator is an account that has the authority to approve and +reject validator requests, set parameters of the blockchain and trigger the +launch of the chain. + +The default Git branch is used when publishing a chain. If you want to use a +specific branch, tag or a commit hash, use "--branch", "--tag", or "--hash" +flags respectively. + +The repository name is used as the default chain ID. Ignite does not ensure that +chain IDs are unique, but they have to have a valid format: [string]-[integer]. +To set a custom chain ID use the "--chain-id" flag. + + ignite network chain publish github.com/ignite/example --chain-id foo-1 + +Once the chain is published users can request accounts with coin balances to be +added to the chain's genesis. By default, users are free to request any number +of tokens. If you want all users requesting tokens to get the same amount, use +the "--account-balance" flag with a list of coins. 
+ + ignite network chain publish github.com/ignite/example --account-balance 2000foocoin + + +``` +ignite network chain publish [source-url] [flags] +``` + +**Options** + +``` + --account-balance string balance for each approved genesis account for the chain + --amount string amount of coins for account request + --branch string Git branch to use for the repo + --chain-id string chain ID to use for this network + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + --genesis-config string name of an Ignite config file in the repo for custom Genesis + --genesis-url string URL to a custom Genesis + --hash string Git hash to use for the repo + -h, --help help for publish + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --mainnet initialize a mainnet project + --metadata string add chain metadata + --no-check skip verifying chain's integrity + --project uint project ID to use for this network + --reward.coins string reward coins + --reward.height int last reward height + --shares string add shares for the project + --tag string Git tag to use for the repo + --total-supply string add a total of the mainnet of a project + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and 
prepare node for launch + + +## ignite network chain revert-launch + +Revert launch of a network as a coordinator + +**Synopsis** + +The revert launch command reverts the previously scheduled launch of a chain. + +Only the coordinator of the chain can execute the launch command. + + ignite network chain revert-launch 42 + +After the revert launch command is executed, changes to the genesis of the chain +are allowed again. For example, validators will be able to request to join the +chain. Revert launch also resets the launch time. + + +``` +ignite network chain revert-launch [launch-id] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for revert-launch + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain show + +Show details of a chain + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch +* [ignite network chain show 
accounts](#ignite-network-chain-show-accounts) - Show all vesting and genesis accounts of the chain +* [ignite network chain show genesis](#ignite-network-chain-show-genesis) - Show the chain genesis file +* [ignite network chain show info](#ignite-network-chain-show-info) - Show info details of the chain +* [ignite network chain show peers](#ignite-network-chain-show-peers) - Show peers list of the chain +* [ignite network chain show validators](#ignite-network-chain-show-validators) - Show all validators of the chain + + +## ignite network chain show accounts + +Show all vesting and genesis accounts of the chain + +``` +ignite network chain show accounts [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for accounts + --prefix string account address prefix (default "spn") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show genesis + +Show the chain genesis file + +``` +ignite network chain show genesis [launch-id] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for genesis + --out string path to output Genesis file (default "./genesis.json") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show info + 
+Show info details of the chain + +``` +ignite network chain show info [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for info +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show peers + +Show peers list of the chain + +``` +ignite network chain show peers [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for peers + --out string path to output peers list (default "./peers.txt") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show validators + +Show all validators of the chain + +``` +ignite network chain show validators [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for validators + --prefix string account address prefix (default "spn") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network coordinator + +Show and update a coordinator 
profile + +**Options** + +``` + -h, --help help for coordinator +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network coordinator set](#ignite-network-coordinator-set) - Set an information in a coordinator profile +* [ignite network coordinator show](#ignite-network-coordinator-show) - Show a coordinator profile + + +## ignite network coordinator set + +Set an information in a coordinator profile + +**Synopsis** + +Coordinators on Ignite can set a profile containing a description for the coordinator. +The coordinator set command allows to set information for the coordinator. +The following information can be set: +- details: general information about the coordinator. +- identity: a piece of information to verify the identity of the coordinator with a system like Keybase or Veramo. +- website: website of the coordinator. 
+ + +``` +ignite network coordinator set details|identity|website [value] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for set + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network coordinator](#ignite-network-coordinator) - Show and update a coordinator profile + + +## ignite network coordinator show + +Show a coordinator profile + +``` +ignite network coordinator show [address] [flags] +``` + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network coordinator](#ignite-network-coordinator) - Show and update a coordinator profile + + +## ignite network profile + +Show the address profile info + +``` +ignite network profile [project-id] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for profile + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN 
network
+      --nightly                     Use nightly SPN network
+      --spn-faucet-address string   SPN faucet address (default "https://faucet.devnet.ignite.com:443")
+      --spn-node-address string     SPN node address (default "https://rpc.devnet.ignite.com:443")
+```
+
+**SEE ALSO**
+
+* [ignite network](#ignite-network) - Launch a blockchain in production
+
+
+## ignite network project
+
+Handle projects
+
+**Options**
+
+```
+  -h, --help   help for project
+```
+
+**Options inherited from parent commands**
+
+```
+      --local                       Use local SPN network
+      --nightly                     Use nightly SPN network
+      --spn-faucet-address string   SPN faucet address (default "https://faucet.devnet.ignite.com:443")
+      --spn-node-address string     SPN node address (default "https://rpc.devnet.ignite.com:443")
+```
+
+**SEE ALSO**
+
+* [ignite network](#ignite-network) - Launch a blockchain in production
+* [ignite network project account](#ignite-network-project-account) - Handle project accounts
+* [ignite network project create](#ignite-network-project-create) - Create a project
+* [ignite network project list](#ignite-network-project-list) - List published projects
+* [ignite network project show](#ignite-network-project-show) - Show published project
+* [ignite network project update](#ignite-network-project-update) - Update details of the project
+
+
+## ignite network project account
+
+Handle project accounts
+
+**Options**
+
+```
+  -h, --help   help for account
+```
+
+**Options inherited from parent commands**
+
+```
+      --local                       Use local SPN network
+      --nightly                     Use nightly SPN network
+      --spn-faucet-address string   SPN faucet address (default "https://faucet.devnet.ignite.com:443")
+      --spn-node-address string     SPN node address (default "https://rpc.devnet.ignite.com:443")
+```
+
+**SEE ALSO**
+
+* [ignite network project](#ignite-network-project) - Handle projects
+* [ignite network project account list](#ignite-network-project-account-list) - Show all mainnet and mainnet vesting of the project
+
+
+## ignite network 
project account list + +Show all mainnet and mainnet vesting of the project + +``` +ignite network project account list [project-id] [flags] +``` + +**Options** + +``` + -h, --help help for list +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network project account](#ignite-network-project-account) - Handle project accounts + + +## ignite network project create + +Create a project + +``` +ignite network project create [name] [total-supply] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for create + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --metadata string Add a metadata to the chain +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network project](#ignite-network-project) - Handle projects + + +## ignite network project list + +List published projects + +``` +ignite network project list [flags] +``` + +**Options** + +``` + -h, --help help for list +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string 
SPN node address (default "https://rpc.devnet.ignite.com:443")
+```
+
+**SEE ALSO**
+
+* [ignite network project](#ignite-network-project) - Handle projects
+
+
+## ignite network project show
+
+Show published project
+
+```
+ignite network project show [project-id] [flags]
+```
+
+**Options**
+
+```
+  -h, --help   help for show
+```
+
+**Options inherited from parent commands**
+
+```
+      --local                       Use local SPN network
+      --nightly                     Use nightly SPN network
+      --spn-faucet-address string   SPN faucet address (default "https://faucet.devnet.ignite.com:443")
+      --spn-node-address string     SPN node address (default "https://rpc.devnet.ignite.com:443")
+```
+
+**SEE ALSO**
+
+* [ignite network project](#ignite-network-project) - Handle projects
+
+
+## ignite network project update
+
+Update details of the project
+
+```
+ignite network project update [project-id] [flags]
+```
+
+**Options**
+
+```
+      --from string              account name to use for sending transactions to SPN (default "default")
+  -h, --help                     help for update
+      --keyring-backend string   keyring backend to store your account keys (default "test")
+      --keyring-dir string       accounts keyring directory (default "/home/runner/.ignite/accounts")
+      --metadata string          update the project metadata
+      --name string              update the project name
+      --total-supply string      update the total of the mainnet of a project
+```
+
+**Options inherited from parent commands**
+
+```
+      --local                       Use local SPN network
+      --nightly                     Use nightly SPN network
+      --spn-faucet-address string   SPN faucet address (default "https://faucet.devnet.ignite.com:443")
+      --spn-node-address string     SPN node address (default "https://rpc.devnet.ignite.com:443")
+```
+
+**SEE ALSO**
+
+* [ignite network project](#ignite-network-project) - Handle projects
+
+
+## ignite network request
+
+Create, show, reject and approve requests
+
+**Synopsis**
+
+The "request" namespace contains commands for creating, showing, approving, and
+rejecting requests. 
+
+A request is a mechanism in Ignite that allows changes to be made to the genesis
+file like adding accounts with token balances and validators. Anyone can submit
+a request, but only the coordinator of a chain can approve or reject a request.
+
+Each request has a status:
+
+* Pending: waiting for the approval of the coordinator
+* Approved: approved by the coordinator, its content has been applied to the
+  launch information
+* Rejected: rejected by the coordinator or the request creator
+
+
+**Options**
+
+```
+  -h, --help   help for request
+```
+
+**Options inherited from parent commands**
+
+```
+      --local                       Use local SPN network
+      --nightly                     Use nightly SPN network
+      --spn-faucet-address string   SPN faucet address (default "https://faucet.devnet.ignite.com:443")
+      --spn-node-address string     SPN node address (default "https://rpc.devnet.ignite.com:443")
+```
+
+**SEE ALSO**
+
+* [ignite network](#ignite-network) - Launch a blockchain in production
+* [ignite network request add-account](#ignite-network-request-add-account) - Send request to add account
+* [ignite network request approve](#ignite-network-request-approve) - Approve requests
+* [ignite network request change-param](#ignite-network-request-change-param) - Send request to change a module param
+* [ignite network request list](#ignite-network-request-list) - List all requests for a chain
+* [ignite network request reject](#ignite-network-request-reject) - Reject requests
+* [ignite network request remove-account](#ignite-network-request-remove-account) - Send request to remove a genesis account
+* [ignite network request remove-validator](#ignite-network-request-remove-validator) - Send request to remove a validator
+* [ignite network request show](#ignite-network-request-show) - Show detailed information about a request
+* [ignite network request verify](#ignite-network-request-verify) - Verify the request and simulate the chain genesis from them
+
+
+## ignite network request add-account
+
+Send request 
to add account + +**Synopsis** + +The "add account" command creates a new request to add an account with a given +address and a specified coin balance to the genesis of the chain. + +The request automatically fails to be applied if a genesis account or a vesting +account with an identical address is already specified in the launch +information. + +If a coordinator has specified that all genesis accounts on a chain should have +the same balance (useful for testnets, for example), the "add account" expects +only an address as an argument. Attempt to provide a token balance will result +in an error. + + +``` +ignite network request add-account [launch-id] [address] [coins] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for add-account + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request approve + +Approve requests + +**Synopsis** + +The "approve" command is used by a chain's coordinator to approve requests. +Multiple requests can be approved using a comma-separated list and/or using a +dash syntax. + + ignite network request approve 42 1,2,3-6,7,8 + +The command above approves requests with IDs from 1 to 8 included on a chain +with a launch ID 42. 
+ +When requests are approved Ignite applies the requested changes and simulates +initializing and launching the chain locally. If the chain starts successfully, +requests are considered to be "verified" and are approved. If one or more +requested changes stop the chain from launching locally, the verification +process fails and the approval of all requests is canceled. To skip the +verification process use the "--no-verification" flag. + +Note that Ignite will try to approve requests in the same order as request IDs +are submitted to the "approve" command. + +``` +ignite network request approve [launch-id] [number<,...>] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for approve + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --no-verification approve the requests without verifying them +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request change-param + +Send request to change a module param + +``` +ignite network request change-param [launch-id] [module-name] [param-name] [value (json, string, number)] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for change-param + --home string home directory 
used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request list + +List all requests for a chain + +``` +ignite network request list [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for list + --prefix string account address prefix (default "spn") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request reject + +Reject requests + +**Synopsis** + +The "reject" command is used by a chain's coordinator to reject requests. + + ignite network request reject 42 1,2,3-6,7,8 + +The syntax of the "reject" command is similar to that of the "approve" command. 
+ + +``` +ignite network request reject [launch-id] [number<,...>] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for reject + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request remove-account + +Send request to remove a genesis account + +``` +ignite network request remove-account [launch-id] [address] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for remove-account + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request remove-validator + +Send request to remove a 
validator + +``` +ignite network request remove-validator [launch-id] [address] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for remove-validator + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request show + +Show detailed information about a request + +``` +ignite network request show [launch-id] [request-id] [flags] +``` + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request verify + +Verify the request and simulate the chain genesis from them + +**Synopsis** + +The "verify" command applies selected requests to the genesis of a chain locally +to verify that approving these requests will result in a valid genesis that +allows a chain to launch without issues. This command does not approve requests, +only checks them. 
+ + +``` +ignite network request verify [launch-id] [number<,...>] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for verify + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network reward + +Manage network rewards + +**Options** + +``` + -h, --help help for reward +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network reward release](#ignite-network-reward-release) - Connect the monitoring modules of launched chains with SPN +* [ignite network reward set](#ignite-network-reward-set) - set a network chain reward + + +## ignite network reward release + +Connect the monitoring modules of launched chains with SPN + +``` +ignite network reward release [launch-id] [chain-rpc] [flags] +``` + +**Options** + +``` + --create-client-only only create the network client id + --from string account name to use for sending transactions to SPN (default 
"default") + -h, --help help for release + --keyring-backend string keyring backend to store your account keys (default "test") + --spn-gaslimit int gas limit used for transactions on SPN (default 400000) + --spn-gasprice string gas price used for transactions on SPN (default "0.0000025uspn") + --testnet-account string testnet chain account (default "default") + --testnet-faucet string faucet address of the testnet chain + --testnet-gaslimit int gas limit used for transactions on testnet chain (default 400000) + --testnet-gasprice string gas price used for transactions on testnet chain (default "0.0000025stake") + --testnet-prefix string address prefix of the testnet chain (default "cosmos") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network reward](#ignite-network-reward) - Manage network rewards + + +## ignite network reward set + +set a network chain reward + +``` +ignite network reward set [launch-id] [last-reward-height] [coins] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for set + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network 
reward](#ignite-network-reward) - Manage network rewards + + +## ignite network tool + +Commands to run subsidiary tools + +**Options** + +``` + -h, --help help for tool +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network tool proxy-tunnel](#ignite-network-tool-proxy-tunnel) - Setup a proxy tunnel via HTTP + + +## ignite network tool proxy-tunnel + +Setup a proxy tunnel via HTTP + +**Synopsis** + +Starts an HTTP proxy server and HTTP proxy clients for each node that +needs HTTP tunneling. + +HTTP tunneling is activated **ONLY** if SPN_CONFIG_FILE has "tunneled_peers" +field inside with a list of tunneled peers/nodes. + +If you're using SPN as coordinator and do not want to allow HTTP tunneling +feature at all, you can prevent "spn.yml" file to being generated by not +approving validator requests that has HTTP tunneling enabled instead of plain +TCP connections. 
+ +``` +ignite network tool proxy-tunnel SPN_CONFIG_FILE [flags] +``` + +**Options** + +``` +  -h, --help   help for proxy-tunnel +``` + +**Options inherited from parent commands** + +``` +      --local                       Use local SPN network +      --nightly                     Use nightly SPN network +      --spn-faucet-address string   SPN faucet address (default "https://faucet.devnet.ignite.com:443") +      --spn-node-address string     SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network tool](#ignite-network-tool) - Commands to run subsidiary tools + + +## ignite network validator + +Show and update a validator profile + +**Options** + +``` +  -h, --help   help for validator +``` + +**Options inherited from parent commands** + +``` +      --local                       Use local SPN network +      --nightly                     Use nightly SPN network +      --spn-faucet-address string   SPN faucet address (default "https://faucet.devnet.ignite.com:443") +      --spn-node-address string     SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network validator set](#ignite-network-validator-set) - Set an information in a validator profile +* [ignite network validator show](#ignite-network-validator-show) - Show a validator profile + + +## ignite network validator set + +Set an information in a validator profile + +**Synopsis** + +Validators on Ignite can set a profile containing a description for the validator. +The validator set command allows you to set information for the validator. +The following information can be set: +- details: general information about the validator. +- identity: piece of information to verify identity of the validator with a system like Keybase or Veramo. +- website: website of the validator. +- security: security contact for the validator. 
+ + +``` +ignite network validator set details|identity|website|security [value] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for set + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network validator](#ignite-network-validator) - Show and update a validator profile + + +## ignite network validator show + +Show a validator profile + +``` +ignite network validator show [address] [flags] +``` + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network validator](#ignite-network-validator) - Show and update a validator profile + + +## ignite network version + +Version of the plugin + +**Synopsis** + +The version of the plugin to use to interact with a chain might be specified by the coordinator. 
+ + +``` +ignite network version [flags] +``` + +**Options** + +``` + -h, --help help for version +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production + + +## ignite node + +Make requests to a live blockchain node + +**Options** + +``` + -h, --help help for node + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite node query](#ignite-node-query) - Querying subcommands +* [ignite node tx](#ignite-node-tx) - Transactions subcommands + + +## ignite node query + +Querying subcommands + +**Options** + +``` + -h, --help help for query +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node](#ignite-node) - Make requests to a live blockchain node +* [ignite node query bank](#ignite-node-query-bank) - Querying commands for the bank module +* [ignite node query tx](#ignite-node-query-tx) - Query for transaction by hash + + +## ignite node query bank + +Querying commands for the bank module + +**Options** + +``` + -h, --help help for bank +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node query](#ignite-node-query) - Querying subcommands +* [ignite node query bank 
balances](#ignite-node-query-bank-balances) - Query for account balances by account name or address + + +## ignite node query bank balances + +Query for account balances by account name or address + +``` +ignite node query bank balances [from_account_or_address] [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + --count-total count total number of records in all balances to query for + -h, --help help for balances + --home string directory where the blockchain node is initialized + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --limit uint pagination limit of all balances to query for (default 100) + --offset uint pagination offset of all balances to query for + --page uint pagination page of all balances to query for. This sets offset to a multiple of limit (default 1) + --page-key string pagination page-key of all balances to query for + --reverse results are sorted in descending order +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node query bank](#ignite-node-query-bank) - Querying commands for the bank module + + +## ignite node query tx + +Query for transaction by hash + +``` +ignite node query tx [hash] [flags] +``` + +**Options** + +``` + -h, --help help for tx +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node query](#ignite-node-query) - Querying subcommands + + +## ignite node tx + +Transactions subcommands + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + --fees string fees to pay along with transaction; eg: 
10uatom + --gas string gas limit to set per-transaction; set to "auto" to calculate sufficient gas automatically (default "auto") + --gas-prices string gas prices in decimal format to determine the transaction fee (e.g. 0.1uatom) + --generate-only build an unsigned transaction and write it to STDOUT + -h, --help help for tx + --home string directory where the blockchain node is initialized + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node](#ignite-node) - Make requests to a live blockchain node +* [ignite node tx bank](#ignite-node-tx-bank) - Bank transaction subcommands + + +## ignite node tx bank + +Bank transaction subcommands + +**Options** + +``` + -h, --help help for bank +``` + +**Options inherited from parent commands** + +``` + --address-prefix string account address prefix (default "cosmos") + --fees string fees to pay along with transaction; eg: 10uatom + --gas string gas limit to set per-transaction; set to "auto" to calculate sufficient gas automatically (default "auto") + --gas-prices string gas prices in decimal format to determine the transaction fee (e.g. 
0.1uatom) + --generate-only build an unsigned transaction and write it to STDOUT + --home string directory where the blockchain node is initialized + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node tx](#ignite-node-tx) - Transactions subcommands +* [ignite node tx bank send](#ignite-node-tx-bank-send) - Send funds from one account to another. + + +## ignite node tx bank send + +Send funds from one account to another. + +``` +ignite node tx bank send [from_account_or_address] [to_account_or_address] [amount] [flags] +``` + +**Options** + +``` + -h, --help help for send +``` + +**Options inherited from parent commands** + +``` + --address-prefix string account address prefix (default "cosmos") + --fees string fees to pay along with transaction; eg: 10uatom + --gas string gas limit to set per-transaction; set to "auto" to calculate sufficient gas automatically (default "auto") + --gas-prices string gas prices in decimal format to determine the transaction fee (e.g. 
0.1uatom) + --generate-only build an unsigned transaction and write it to STDOUT + --home string directory where the blockchain node is initialized + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node tx bank](#ignite-node-tx-bank) - Bank transaction subcommands + + +## ignite plugin + +Handle plugins + +**Options** + +``` + -h, --help help for plugin +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite plugin add](#ignite-plugin-add) - Adds a plugin declaration to a plugin configuration +* [ignite plugin describe](#ignite-plugin-describe) - Output information about the a registered plugin +* [ignite plugin list](#ignite-plugin-list) - List declared plugins and status +* [ignite plugin remove](#ignite-plugin-remove) - Removes a plugin declaration from a chain's plugin configuration +* [ignite plugin scaffold](#ignite-plugin-scaffold) - Scaffold a new plugin +* [ignite plugin update](#ignite-plugin-update) - Update plugins + + +## ignite plugin add + +Adds a plugin declaration to a plugin configuration + +**Synopsis** + +Adds a plugin declaration to a plugin configuration. +Respects key value pairs declared after the plugin path to be added to the +generated configuration definition. +Example: + ignite plugin add github.com/org/my-plugin/ foo=bar baz=qux + +``` +ignite plugin add [path] [key=value]... 
[flags] +``` + +**Options** + +``` +  -g, --global   use global plugins configuration ($HOME/.ignite/plugins/plugins.yml) +  -h, --help     help for add +``` + +**SEE ALSO** + +* [ignite plugin](#ignite-plugin) - Handle plugins + + +## ignite plugin describe + +Output information about a registered plugin + +**Synopsis** + +Output information about a registered plugin's commands and hooks. + +``` +ignite plugin describe [path] [flags] +``` + +**Options** + +``` +  -h, --help   help for describe +``` + +**SEE ALSO** + +* [ignite plugin](#ignite-plugin) - Handle plugins + + +## ignite plugin list + +List declared plugins and status + +**Synopsis** + +Prints status and information of declared plugins + +``` +ignite plugin list [flags] +``` + +**Options** + +``` +  -h, --help   help for list +``` + +**SEE ALSO** + +* [ignite plugin](#ignite-plugin) - Handle plugins + + +## ignite plugin remove + +Removes a plugin declaration from a chain's plugin configuration + +``` +ignite plugin remove [path] [flags] +``` + +**Options** + +``` +  -g, --global   use global plugins configuration ($HOME/.ignite/plugins/plugins.yml) +  -h, --help     help for remove +``` + +**SEE ALSO** + +* [ignite plugin](#ignite-plugin) - Handle plugins + + +## ignite plugin scaffold + +Scaffold a new plugin + +**Synopsis** + +Scaffolds a new plugin in the current directory with the given repository path configured. A git repository will be created with the given module name, unless the current directory is already a git repository. + +``` +ignite plugin scaffold [github.com/org/repo] [flags] +``` + +**Options** + +``` +  -h, --help   help for scaffold +``` + +**SEE ALSO** + +* [ignite plugin](#ignite-plugin) - Handle plugins + + +## ignite plugin update + +Update plugins + +**Synopsis** + +Updates a plugin specified by path. 
If no path is specified all declared plugins are updated + +``` +ignite plugin update [path] [flags] +``` + +**Options** + +``` + -h, --help help for update +``` + +**SEE ALSO** + +* [ignite plugin](#ignite-plugin) - Handle plugins + + +## ignite relayer + +Connect blockchains with an IBC relayer + +**Options** + +``` + -h, --help help for relayer +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite relayer configure](#ignite-relayer-configure) - Configure source and target chains for relaying +* [ignite relayer connect](#ignite-relayer-connect) - Link chains associated with paths and start relaying tx packets in between + + +## ignite relayer configure + +Configure source and target chains for relaying + +``` +ignite relayer configure [flags] +``` + +**Options** + +``` + -a, --advanced advanced configuration options for custom IBC modules + -h, --help help for configure + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --ordered set the channel as ordered + -r, --reset reset the relayer config + --source-account string source Account + --source-client-id string use a custom client id for source + --source-faucet string faucet address of the source chain + --source-gaslimit int gas limit used for transactions on source chain + --source-gasprice string gas price used for transactions on source chain + --source-port string IBC port ID on the source chain + --source-prefix string address prefix of the source chain + --source-rpc string RPC address of the source chain + --source-version string module version on the source chain + --target-account string target Account + --target-client-id string use a custom client id for target + --target-faucet string faucet address of the target chain + --target-gaslimit int gas limit used for transactions on 
target chain + --target-gasprice string gas price used for transactions on target chain + --target-port string IBC port ID on the target chain + --target-prefix string address prefix of the target chain + --target-rpc string RPC address of the target chain + --target-version string module version on the target chain +``` + +**SEE ALSO** + +* [ignite relayer](#ignite-relayer) - Connect blockchains with an IBC relayer + + +## ignite relayer connect + +Link chains associated with paths and start relaying tx packets in between + +``` +ignite relayer connect [<path>,...] [flags] +``` + +**Options** + +``` + -h, --help help for connect + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite relayer](#ignite-relayer) - Connect blockchains with an IBC relayer + + +## ignite scaffold + +Create a new blockchain, module, message, query, and more + +**Synopsis** + +Scaffolding is a quick way to generate code for major pieces of your +application. + +For details on each scaffolding target (chain, module, message, etc.) run the +corresponding command with a "--help" flag, for example, "ignite scaffold chain +--help". + +The Ignite team strongly recommends committing the code to a version control +system before running scaffolding commands. This will make it easier to see the +changes to the source code as well as undo the command if you've decided to roll +back the changes. + +This blockchain you create with the chain scaffolding command uses the modular +Cosmos SDK framework and imports many standard modules for functionality like +proof of stake, token transfer, inter-blockchain connectivity, governance, and +more. Custom functionality is implemented in modules located by convention in +the "x/" directory. By default, your blockchain comes with an empty custom +module. 
Use the module scaffolding command to create an additional module. + +An empty custom module doesn't do much, it's basically a container for logic +that is responsible for processing transactions and changing the application +state. Cosmos SDK blockchains work by processing user-submitted signed +transactions, which contain one or more messages. A message contains data that +describes a state transition. A module can be responsible for handling any +number of messages. + +A message scaffolding command will generate the code for handling a new type of +Cosmos SDK message. Message fields describe the state transition that the +message is intended to produce if processed without errors. + +Scaffolding messages is useful to create individual "actions" that your module +can perform. Sometimes, however, you want your blockchain to have the +functionality to create, read, update and delete (CRUD) instances of a +particular type. Depending on how you want to store the data there are three +commands that scaffold CRUD functionality for a type: list, map, and single. +These commands create four messages (one for each CRUD action), and the logic to +add, delete, and fetch the data from the store. If you want to scaffold only the +logic, for example, you've decided to scaffold messages separately, you can do +that as well with the "--no-message" flag. + +Reading data from a blockchain happens with a help of queries. Similar to how +you can scaffold messages to write data, you can scaffold queries to read the +data back from your blockchain application. + +You can also scaffold a type, which just produces a new protocol buffer file +with a proto message description. Note that proto messages produce (and +correspond with) Go types whereas Cosmos SDK messages correspond to proto "rpc" +in the "Msg" service. + +If you're building an application with custom IBC logic, you might need to +scaffold IBC packets. An IBC packet represents the data sent from one blockchain +to another. 
You can only scaffold IBC packets in IBC-enabled modules scaffolded +with an "--ibc" flag. Note that the default module is not IBC-enabled. + + +**Options** + +``` + -h, --help help for scaffold +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite scaffold chain](#ignite-scaffold-chain) - New Cosmos SDK blockchain +* [ignite scaffold list](#ignite-scaffold-list) - CRUD for data stored as an array +* [ignite scaffold map](#ignite-scaffold-map) - CRUD for data stored as key-value pairs +* [ignite scaffold message](#ignite-scaffold-message) - Message to perform state transition on the blockchain +* [ignite scaffold module](#ignite-scaffold-module) - Custom Cosmos SDK module +* [ignite scaffold packet](#ignite-scaffold-packet) - Message for sending an IBC packet +* [ignite scaffold query](#ignite-scaffold-query) - Query for fetching data from a blockchain +* [ignite scaffold react](#ignite-scaffold-react) - React web app template +* [ignite scaffold single](#ignite-scaffold-single) - CRUD for data stored in a single location +* [ignite scaffold type](#ignite-scaffold-type) - Type definition +* [ignite scaffold vue](#ignite-scaffold-vue) - Vue 3 web app template + + +## ignite scaffold chain + +New Cosmos SDK blockchain + +**Synopsis** + +Create a new application-specific Cosmos SDK blockchain. + +For example, the following command will create a blockchain called "hello" in +the "hello/" directory: + + ignite scaffold chain hello + +A project name can be a simple name or a URL. The name will be used as the Go +module path for the project. Examples of project names: + + ignite scaffold chain foo + ignite scaffold chain foo/bar + ignite scaffold chain example.org/foo + ignite scaffold chain github.com/username/foo + +A new directory with source code files will be created in the current directory. +To use a different path use the "--path" flag. 
+ +Most of the logic of your blockchain is written in custom modules. Each module +effectively encapsulates an independent piece of functionality. Following the +Cosmos SDK convention, custom modules are stored inside the "x/" directory. By +default, Ignite creates a module with a name that matches the name of the +project. To create a blockchain without a default module use the "--no-module" +flag. Additional modules can be added after a project is created with "ignite +scaffold module" command. + +Account addresses on Cosmos SDK-based blockchains have string prefixes. For +example, the Cosmos Hub blockchain uses the default "cosmos" prefix, so that +addresses look like this: "cosmos12fjzdtqfrrve7zyg9sv8j25azw2ua6tvu07ypf". To +use a custom address prefix use the "--address-prefix" flag. For example: + + ignite scaffold chain foo --address-prefix bar + +By default when compiling a blockchain's source code Ignite creates a cache to +speed up the build process. To clear the cache when building a blockchain use +the "--clear-cache" flag. It is very unlikely you will ever need to use this +flag. + +The blockchain is using the Cosmos SDK modular blockchain framework. Learn more +about Cosmos SDK on https://docs.cosmos.network + + +``` +ignite scaffold chain [name] [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + --clear-cache clear the build cache (advanced) + -h, --help help for chain + --no-module create a project without a default module + -p, --path string create a project in a specific path (default ".") +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold list + +CRUD for data stored as an array + +**Synopsis** + +The "list" scaffolding command is used to generate files that implement the +logic for storing and interacting with data stored as a list in the blockchain +state. 
+ +The command accepts a NAME argument that will be used as the name of a new type +of data. It also accepts a list of FIELDs that describe the type. + +The interaction with the data follows the create, read, update, and delete +(CRUD) pattern. For each type three Cosmos SDK messages are defined for writing +data to the blockchain: MsgCreate{Name}, MsgUpdate{Name}, MsgDelete{Name}. For +reading data two queries are defined: {Name} and {Name}All. The type, messages, +and queries are defined in the "proto/" directory as protocol buffer messages. +Messages and queries are mounted in the "Msg" and "Query" services respectively. + +When messages are handled, the appropriate keeper methods are called. By +convention, the methods are defined in +"x/{moduleName}/keeper/msg_server_{name}.go". Helpful methods for getting, +setting, removing, and appending are defined in the same "keeper" package in +"{name}.go". + +The "list" command essentially allows you to define a new type of data and +provides the logic to create, read, update, and delete instances of the type. +For example, let's review a command that generates the code to handle a list of +posts and each post has "title" and "body" fields: + +  ignite scaffold list post title body + +This provides you with a "Post" type, MsgCreatePost, MsgUpdatePost, +MsgDeletePost and two queries: Post and PostAll. The compiled CLI, let's say the +binary is "blogd" and the module is "blog", has commands to query the chain (see +"blogd q blog") and broadcast transactions with the messages above (see "blogd +tx blog"). + +The code generated with the list command is meant to be edited and tailored to +your application needs. Consider the code to be a "skeleton" for the actual +business logic you will implement next. + +By default, all fields are assumed to be strings. If you want a field of a +different type, you can specify it after a colon ":". 
The following types are +supported: string, bool, int, uint, coin, array.string, array.int, array.uint, +array.coin. An example of using field types: + + ignite scaffold list pool amount:coin tags:array.string height:int + +Supported types: + +| Type | Alias | Index | Code Type | Description | +|--------------|---------|-------|-----------|---------------------------------| +| string | - | yes | string | Text type | +| array.string | strings | no | []string | List of text type | +| bool | - | yes | bool | Boolean type | +| int | - | yes | int32 | Integer type | +| array.int | ints | no | []int32 | List of integers types | +| uint | - | yes | uint64 | Unsigned integer type | +| array.uint | uints | no | []uint64 | List of unsigned integers types | +| coin | - | no | sdk.Coin | Cosmos SDK coin type | +| array.coin | coins | no | sdk.Coins | List of Cosmos SDK coin types | + +"Index" indicates whether the type can be used as an index in +"ignite scaffold map". + +Ignite also supports custom types: + + ignite scaffold list product-details name desc + ignite scaffold list product price:coin details:ProductDetails + +In the example above the "ProductDetails" type was defined first, and then used +as a custom type for the "details" field. Ignite doesn't support arrays of +custom types yet. + +Your chain will accept custom types in JSON-notation: + + exampled tx example create-product 100coin '{"name": "x", "desc": "y"}' --from alice + +By default the code will be scaffolded in the module that matches your project's +name. If you have several modules in your project, you might want to specify a +different module: + + ignite scaffold list post title body --module blog + +By default, each message comes with a "creator" field that represents the +address of the transaction signer. You can customize the name of this field with +a flag: + + ignite scaffold list post title body --signer author + +It's possible to scaffold just the getter/setter logic without the CRUD +messages. 
This is useful when you want the methods to handle a type, but would +like to scaffold messages manually. Use a flag to skip message scaffolding: + +  ignite scaffold list post title body --no-message + +The "creator" field is not generated if a list is scaffolded with the +"--no-message" flag. + + +``` +ignite scaffold list NAME [field]... [flags] +``` + +**Options** + +``` +      --clear-cache     clear the build cache (advanced) +  -h, --help            help for list +      --module string   specify which module to generate code in +      --no-message      skip generating message handling logic +      --no-simulation   skip simulation logic +  -p, --path string     path of the app (default ".") +      --signer string   label for the message signer (default: creator) +  -y, --yes             answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold map + +CRUD for data stored as key-value pairs + +**Synopsis** + +The "map" scaffolding command is used to generate files that implement the logic +for storing and interacting with data stored as key-value pairs (or a +dictionary) in the blockchain state. + +The "map" command is very similar to "ignite scaffold list" with the main +difference in how values are indexed. With "list" values are indexed by an +incrementing integer, whereas "map" values are indexed by a user-provided value +(or multiple values). + +Let's use the same blog post example: + +  ignite scaffold map post title body + +This command scaffolds a "Post" type and CRUD functionality to create, read, +update, and delete posts. 
However, when creating a new post with your chain's +binary (or by submitting a transaction through the chain's API) you will be +required to provide an "index": + + blogd tx blog create-post [index] [title] [body] + blogd tx blog create-post hello "My first post" "This is the body" + +This command will create a post and store it in the blockchain's state under the +"hello" index. You will be able to fetch back the value of the post by querying +for the "hello" key. + + blogd q blog show-post hello + +To customize the index, use the "--index" flag. Multiple indices can be +provided, which simplifies querying values. For example: + + ignite scaffold map product price desc --index category,guid + +With this command, you would get a "Product" value indexed by both a category +and a GUID (globally unique ID). This will let you programmatically fetch +product values that have the same category but are using different GUIDs. + +Since the behavior of "list" and "map" scaffolding is very similar, you can use +the "--no-message", "--module", "--signer" flags as well as the colon syntax for +custom types. + + +``` +ignite scaffold map NAME [field]... 
[flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for map + --index strings fields that index the value (default [index]) + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold message + +Message to perform state transition on the blockchain + +**Synopsis** + +Message scaffolding is useful for quickly adding functionality to your +blockchain to handle specific Cosmos SDK messages. + +Messages are objects whose end goal is to trigger state transitions on the +blockchain. A message is a container for fields of data that affect how the +blockchain's state will change. You can think of messages as "actions" that a +user can perform. + +For example, the bank module has a "Send" message for token transfers between +accounts. The send message has three fields: from address (sender), to address +(recipient), and a token amount. When this message is successfully processed, +the token amount will be deducted from the sender's account and added to the +recipient's account. + +Ignite's message scaffolding lets you create new types of messages and add them +to your chain. For example: + + ignite scaffold message add-pool amount:coins denom active:bool --module dex + +The command above will create a new message MsgAddPool with three fields: amount +(in tokens), denom (a string), and active (a boolean). The message will be added +to the "dex" module. + +By default, the message is defined as a proto message in the +"proto/{app}/{module}/tx.proto" and registered in the "Msg" service. 
A CLI command to +create and broadcast a transaction with MsgAddPool is created in the module's +"cli" package. Additionally, Ignite scaffolds a message constructor and the code +to satisfy the sdk.Msg interface and register the message in the module. + +Most importantly in the "keeper" package Ignite scaffolds an "AddPool" function. +Inside this function, you can implement message handling logic. + +When successfully processed a message can return data. Use the —response flag to +specify response fields and their types. For example + + ignite scaffold message create-post title body --response id:int,title + +The command above will scaffold MsgCreatePost which returns both an ID (an +integer) and a title (a string). + +Message scaffolding follows the rules as "ignite scaffold list/map/single" and +supports fields with standard and custom types. See "ignite scaffold list —help" +for details. + + +``` +ignite scaffold message [name] [field1] [field2] ... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -d, --desc string description of the command + -h, --help help for message + --module string module to add the message into. Default: app's main module + --no-simulation disable CRUD simulation scaffolding + -p, --path string path of the app (default ".") + -r, --response strings response fields + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold module + +Custom Cosmos SDK module + +**Synopsis** + +Scaffold a new Cosmos SDK module. + +Cosmos SDK is a modular framework and each independent piece of functionality is +implemented in a separate module. By default your blockchain imports a set of +standard Cosmos SDK modules. 
To implement custom functionality of your +blockchain, scaffold a module and implement the logic of your application. + +This command does the following: + +* Creates a directory with module's protocol buffer files in "proto/" +* Creates a directory with module's boilerplate Go code in "x/" +* Imports the newly created module by modifying "app/app.go" +* Creates a file in "testutil/keeper/" that contains logic to create a keeper + for testing purposes + +This command will proceed with module scaffolding even if "app/app.go" doesn't +have the required default placeholders. If the placeholders are missing, you +will need to modify "app/app.go" manually to import the module. If you want the +command to fail if it can't import the module, use the "--require-registration" +flag. + +To scaffold an IBC-enabled module use the "--ibc" flag. An IBC-enabled module is +like a regular module with the addition of IBC-specific logic and placeholders +to scaffold IBC packets with "ignite scaffold packet". + +A module can depend on one or more other modules and import their keeper +methods. To scaffold a module with a dependency use the "--dep" flag + +For example, your new custom module "foo" might have functionality that requires +sending tokens between accounts. The method for sending tokens is a defined in +the "bank"'s module keeper. You can scaffold a "foo" module with the dependency +on "bank" with the following command: + + ignite scaffold module foo --dep bank + +You can then define which methods you want to import from the "bank" keeper in +"expected_keepers.go". + +You can also scaffold a module with a list of dependencies that can include both +standard and custom modules (provided they exist): + + ignite scaffold module bar --dep foo,mint,account,FeeGrant + +Note: the "--dep" flag doesn't install third-party modules into your +application, it just generates extra code that specifies which existing modules +your new custom module depends on. 
+ +A Cosmos SDK module can have parameters (or "params"). Params are values that +can be set at the genesis of the blockchain and can be modified while the +blockchain is running. An example of a param is "Inflation rate change" of the +"mint" module. A module can be scaffolded with params using the "--params" flag +that accepts a list of param names. By default params are of type "string", but +you can specify a type for each param. For example: + + ignite scaffold module foo --params baz:uint,bar:bool + +Refer to Cosmos SDK documentation to learn more about modules, dependencies and +params. + + +``` +ignite scaffold module [name] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --dep strings add a dependency on another module + -h, --help help for module + --ibc add IBC functionality + --ordering string channel ordering of the IBC module [none|ordered|unordered] (default "none") + --params strings add module parameters + -p, --path string path of the app (default ".") + --require-registration fail if module can't be registered + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold packet + +Message for sending an IBC packet + +**Synopsis** + +Scaffold an IBC packet in a specific IBC-enabled Cosmos SDK module + +``` +ignite scaffold packet [packetName] [field1] [field2] ... --module [moduleName] [flags] +``` + +**Options** + +``` + --ack strings custom acknowledgment type (field1,field2,...) 
+ --clear-cache clear the build cache (advanced) + -h, --help help for packet + --module string IBC Module to add the packet into + --no-message disable send message scaffolding + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold query + +Query for fetching data from a blockchain + +``` +ignite scaffold query [name] [request_field1] [request_field2] ... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -d, --desc string description of the CLI to broadcast a tx with the message + -h, --help help for query + --module string module to add the query into. Default: app's main module + --paginated define if the request can be paginated + -p, --path string path of the app (default ".") + -r, --response strings response fields + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold react + +React web app template + +``` +ignite scaffold react [flags] +``` + +**Options** + +``` + -h, --help help for react + -p, --path string path to scaffold content of the React app (default "./react") + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold single + +CRUD for data stored in a single location + +``` +ignite scaffold single NAME [field]... 
[flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for single + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold type + +Type definition + +``` +ignite scaffold type NAME [field]... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for type + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold vue + +Vue 3 web app template + +``` +ignite scaffold vue [flags] +``` + +**Options** + +``` + -h, --help help for vue + -p, --path string path to scaffold content of the Vue.js app (default "./vue") + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite tools + +Tools for advanced users + +**Options** + +``` + -h, --help help for tools +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite tools ibc-relayer](#ignite-tools-ibc-relayer) - TypeScript implementation of an IBC relayer +* [ignite tools 
ibc-setup](#ignite-tools-ibc-setup) - Collection of commands to quickly setup a relayer +* [ignite tools protoc](#ignite-tools-protoc) - Execute the protoc command + + +## ignite tools ibc-relayer + +TypeScript implementation of an IBC relayer + +``` +ignite tools ibc-relayer [--] [...] [flags] +``` + +**Examples** + +``` +ignite tools ibc-relayer -- -h +``` + +**Options** + +``` + -h, --help help for ibc-relayer +``` + +**SEE ALSO** + +* [ignite tools](#ignite-tools) - Tools for advanced users + + +## ignite tools ibc-setup + +Collection of commands to quickly setup a relayer + +``` +ignite tools ibc-setup [--] [...] [flags] +``` + +**Examples** + +``` +ignite tools ibc-setup -- -h +ignite tools ibc-setup -- init --src relayer_test_1 --dest relayer_test_2 +``` + +**Options** + +``` + -h, --help help for ibc-setup +``` + +**SEE ALSO** + +* [ignite tools](#ignite-tools) - Tools for advanced users + + +## ignite tools protoc + +Execute the protoc command + +**Synopsis** + +The protoc command. You don't need to setup the global protoc include folder with -I, it's automatically handled + +``` +ignite tools protoc [--] [...] 
[flags] +``` + +**Examples** + +``` +ignite tools protoc -- --version +``` + +**Options** + +``` + -h, --help help for protoc +``` + +**SEE ALSO** + +* [ignite tools](#ignite-tools) - Tools for advanced users + + +## ignite version + +Print the current build information + +``` +ignite version [flags] +``` + +**Options** + +``` + -h, --help help for version +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + diff --git a/docs/versioned_docs/version-v0.26/08-references/02-config.md b/docs/versioned_docs/version-v0.26/08-references/02-config.md new file mode 100644 index 0000000..5cc484c --- /dev/null +++ b/docs/versioned_docs/version-v0.26/08-references/02-config.md @@ -0,0 +1,263 @@ +--- +sidebar_position: 3 +description: Primary configuration file to describe the development environment for your blockchain. +title: Configuration file +--- + +# Configuration file reference + +The `config.yml` file generated in your blockchain folder uses key-value pairs +to describe the development environment for your blockchain. + +Only a default set of parameters is provided. If more nuanced configuration is +required, you can add these parameters to the `config.yml` file. + +## Accounts + +A list of user accounts created during genesis of the blockchain. + +```yml +accounts: + - name: alice + coins: ['20000token', '200000000stake'] + - name: bob + coins: ['10000token', '100000000stake'] +``` + +Ignite uses information from `accounts` when initializing the chain with `ignite +chain init` and `ignite chain start`. In the example above Ignite will add two +accounts to the `genesis.json` file of the chain. + +`name` is a local name of a key pair associated with an account. Once the chain +is initialized and started, you will be able to use `name` when signing +transactions. 
With the configuration above, you'd be able to sign transactions +both with Alice's and Bob's accounts like so `exampled tx bank send ... --from +alice`. + +`coins` is a list of token balances for the account. If a token denomination is +in this list, it will exist in the genesis balance and will be a valid token. +When initialized with the config file above, a chain will only have two accounts +at genesis (Alice and Bob) and two native tokens (with denominations `token` and +`stake`). + +By default, every time a chain is re-initialized, Ignite will create a new key +pair for each account. So even though the account name can remain the same +(`bob`), every chain reinitialize it will have a different mnemonic and address. + +If you want an account to have a specific address, provide the `address` field +with a valid bech32 address. The prefix (by default, `cosmos`) should match the +one expected by your chain. When an account is provided with an `address` a key +pair will not be generated, because it's impossible to derive a key from an +address. An account with a given address will be added to the genesis file (with +an associated token balance), but because there is no key pair, you will not be +able to broadcast transactions from that address. This is useful when you have +generated a key pair outside of Ignite (for example, using your chain's CLI or +in an extension wallet) and want to have a token balance associated with the +address of this key pair. + +```yml +accounts: + - name: bob + coins: ['20000token', '200000000stake'] + address: cosmos1s39200s6v4c96ml2xzuh389yxpd0guk2mzn3mz +``` + +If you want an account to be initialized from a specific mnemonic, provide the +`mnemonic` field with a valid mnemonic. A private key, a public key and an +address will be derived from a mnemonic. 
+ +```yml +accounts: + - name: bob + coins: ['20000token', '200000000stake'] + mnemonic: cargo ramp supreme review change various throw air figure humble soft steel slam pole betray inhale already dentist enough away office apple sample glue +``` + +You cannot have both `address` and `mnemonic` defined for a single account. + +Some accounts are used as validator accounts (see `validators` section). +Validator accounts cannot have an `address` field, because Ignite needs to be +able to derive a private key (either from a random mnemonic or from a specific +one provided in the `mnemonic` field). Validator accounts should have enough +tokens of the staking denomination for self-delegation. + +By default, the `alice` account is used as a validator account, its key is +derived from a mnemonic generated randomly at genesis, the staking denomination +is `stake`, and this account has enough `stake` for self-delegation. + +If your chain is using its own +[cointype](https://github.com/satoshilabs/slips/blob/master/slip-0044.md), you +can use the `cointype` field to provide the integer value + +```yml +accounts: + - name: bob + coins: ['20000token', '200000000stake'] + cointype: 7777777 +``` + +## Validators + +Commands like `ignite chain init` and `ignite chain serve` initialize and launch +a validator node for development purposes. + +```yml +validators: + - name: alice + bonded: '100000000stake' +``` + +`name` refers to key name in the `accounts` list. + +`bonded` is the self-delegation amount of a validator. The `bonded` amount +should not be lower than `1000000` nor higher than the account's +balance in the `account` list. + +Validators store their node configuration files in the data directory. By +default, Ignite uses the name of the project as the name of the data directory, +for example, `$HOME/.example/`. To use a different path for the data directory +you can customize the `home` property. + +Configuration in the data directory is reset frequently by Ignite. 
To persist +some changes to configuration files you can use `app`, `config` and `client` +properties that correspond to `$HOME/.example/config/app.toml`, +`$HOME/.example/config/config.toml` and `$HOME/.example/config/client.toml`. + +```yml +validators: + - name: alice + bonded: '100000000stake' + home: "~/.mychain" + app: + pruning: "nothing" + config: + moniker: "mychain" + client: + output: "json" +``` + +To see which properties are available for `config.toml`, `app.toml` and +`client.toml`, initialize a chain with `ignite chain init` and open the file you +want to know more about. + +Currently, Ignite starts only one validator node, so the first item in the +`validators` list is used (the rest is ignored). Support for multiple validators +is in progress. + +## Build + +The `build` property lets you customize how Ignite builds your chain's binary. + +By default, Ignite builds the `main` package from `cmd/PROJECT_NAME/main.go`. If +you more than one `main` package in your project, or you have renamed the +directory, use the `main` property to provide the path to the `main` Go package: + +```yml +build: + main: cmd/hello/cmd +``` + +Ignite compiles your project into a binary and uses the project's name with a +`d` suffix as name for the binary. To customize the binary name use the `binary` +property: + +```yml +build: + binary: "helloworldd" +``` + +To customize the linker flags used in the build process: + +```yml +build: + ldflags: [ "-X main.Version=development", "-X main.Date=01/05/2022T19:54" ] +``` + +By default, custom protocol buffer (proto) files are located in the `proto` +directory. If your project keeps proto files in a different directory, you +should tell Ignite about this: + +```yml +build: + proto: + path: "myproto" +``` + +Ignite comes with required third-party proto out of the box. Ignite also looks +into `third_party/proto` and `proto_vendor` directories for extra proto files. 
+If your project keeps third-party proto files in a different directory, you +should tell Ignite about this: + +```yml +build: + proto: + third_party_paths: ["my_third_party/proto"] +``` + +## Faucet + +The faucet service sends tokens to addresses. + +```yml +faucet: + name: bob + coins: ["5token", "100000stake"] +``` + +`name` refers to a key name in the `accounts` list. This is a required property. + +`coins` is the amount of tokens that will be sent to a user by the faucet. This +is a required property. + +`coins_max` is a maximum amount of tokens that can be sent to a single address. +To reset the token limit use the `rate_limit_window` property (in seconds). + +The default the faucet works on port `4500`. To use a different port number use +the `port` property. + +```yml +faucet: + name: faucet + coins: [ "100token", "5foo" ] + coins_max: [ "2000token", "1000foo" ] + port: 4500 + rate_limit_window: 3600 +``` + +## Genesis + +Genesis file is the initial block in the blockchain. It is required to launch a +blockchain, because it contains important information like token balances, and +modules' state. Genesis is stored in `$DATA_DIR/config/genesis.json`. + +Since the genesis file is reinitialized frequently during development, you can +set persistent options in the `genesis` property: + +```yml +genesis: + app_state: + staking: + params: + bond_denom: "denom" +``` + +To know which properties a genesis file supports, initialize a chain and look up +the genesis file in the data directory. + +## Client code generation + +Ignite can generate client-side code for interacting with your chain with the +`ignite generate` set of commands. Use the following properties to customize the +paths where the client-side code is generated. 
+ +```yml +client: + openapi: + path: "docs/static/openapi.yml" + typescript: + path: "ts-client" + composables: + path: "vue/src/composables" + hooks: + path: "react/src/hooks" +``` diff --git a/docs/versioned_docs/version-v0.26/08-references/_category_.json b/docs/versioned_docs/version-v0.26/08-references/_category_.json new file mode 100644 index 0000000..3bcc076 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/08-references/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "References", + "link": null, + "collapsed": false +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.26/plugins/01-using-plugins.md b/docs/versioned_docs/version-v0.26/plugins/01-using-plugins.md new file mode 100644 index 0000000..ed3b261 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/plugins/01-using-plugins.md @@ -0,0 +1,43 @@ +--- +description: Using and Developing plugins +--- + +# Using Plugins + +Ignite plugins offer a way to extend the functionality of the Ignite CLI. There +are two core concepts within plugins : `Commands` and `Hooks`. Where `Commands` +extend the cli's functionality, and `Hooks` extend existing command +functionality. + +Plugins are registered in an Ignite scaffolded Blockchain project through the +`plugins.yml`, or globally through `$HOME/.ignite/plugins/plugins.yml`. + +To use a plugin within your project, execute the following command inside the +project directory: + +```sh +ignite plugin add github.com/project/cli-plugin +``` + +The plugin will be available only when running `ignite` inside the project +directory. + +To use a plugin globally on the other hand, execute the following command: + +```sh +ignite plugin add -g github.com/project/cli-plugin +``` + +The command will compile the plugin and make it immediately available to the +`ignite` command lists. + +## Listing installed plugins + +When in an ignite scaffolded blockchain you can use the command `ignite plugin +list` to list all plugins and there statuses. 
+ +## Updating plugins + +When a plugin in a remote repository releases updates, running `ignite plugin +update <path/to/plugin>` will update a specific plugin declared in your +project's `config.yml`. diff --git a/docs/versioned_docs/version-v0.26/plugins/02-dev-plugins.md b/docs/versioned_docs/version-v0.26/plugins/02-dev-plugins.md new file mode 100644 index 0000000..819025e --- /dev/null +++ b/docs/versioned_docs/version-v0.26/plugins/02-dev-plugins.md @@ -0,0 +1,244 @@ +--- +description: Using and Developing plugins +--- + +# Developing Plugins + +It's easy to create a plugin and use it immediately in your project. First +choose a directory outside your project and run : + +```sh +$ ignite plugin scaffold my-plugin +``` + +This will create a new directory `my-plugin` that contains the plugin's code, +and will output some instructions about how to use your plugin with the +`ignite` command. Indeed, a plugin path can be a local directory, which has +several benefits: + +- you don't need to use a git repository during the development of your plugin. +- the plugin is recompiled each time you run the `ignite` binary in your +project, if the source files are older than the plugin binary. + +Thus, the plugin development workflow is as simple as : + +1. scaffold a plugin with `ignite plugin scaffold my-plugin` +2. add it to your config via `ignite plugin add -g /path/to/my-plugin` +3. update plugin code +4. run `ignite my-plugin` binary to compile and run the plugin. +5. go back to 3. + +Once your plugin is ready, you can publish it to a git repository, and the +community can use it by calling `ignite plugin add github.com/foo/my-plugin`. + +Now let's detail how to update your plugin's code. + +## The plugin interface + +The `ignite` plugin system uses `github.com/hashicorp/go-plugin` under the hood, +which implies to implement a predefined interface: + +```go title=ignite/services/plugin/interface.go +// An ignite plugin must implements the Plugin interface. 
+type Interface interface { + // Manifest declares the plugin's Command(s) and Hook(s). + Manifest() (Manifest, error) + + // Execute will be invoked by ignite when a plugin Command is executed. + // It is global for all commands declared in Manifest, if you have declared + // multiple commands, use cmd.Path to distinguish them. + Execute(cmd ExecutedCommand) error + + // ExecuteHookPre is invoked by ignite when a command specified by the Hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + ExecuteHookPre(hook ExecutedHook) error + + // ExecuteHookPost is invoked by ignite when a command specified by the hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + ExecuteHookPost(hook ExecutedHook) error + + // ExecuteHookCleanUp is invoked by ignite when a command specified by the + // hook path is invoked. Unlike ExecuteHookPost, it is invoked regardless of + // execution status of the command and hooks. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + ExecuteHookCleanUp(hook ExecutedHook) error +} +``` + +The code scaffolded already implements this interface, you just need to update +the methods' body. + + +## Defining plugin's manifest + +Here is the `Manifest` struct : + +```go title=ignite/services/plugin/interface.go +type Manifest struct { + Name string + // Commands contains the commands that will be added to the list of ignite + // commands. Each commands are independent, for nested commands use the + // inner Commands field. + Commands []Command + // Hooks contains the hooks that will be attached to the existing ignite + // commands. + Hooks []Hook + // SharedHost enables sharing a single plugin server across all running instances + // of a plugin. 
Useful if a plugin adds or extends long running commands + // + // Example: if a plugin defines a hook on `ignite chain serve`, a plugin server is instanciated + // when the command is run. Now if you want to interact with that instance from commands + // defined in that plugin, you need to enable `SharedHost`, or else the commands will just + // instantiate separate plugin servers. + // + // When enabled, all plugins of the same `Path` loaded from the same configuration will + // attach it's rpc client to a an existing rpc server. + // + // If a plugin instance has no other running plugin servers, it will create one and it will be the host. + SharedHost bool `yaml:"shared_host"` +} +``` + +In your plugin's code, the `Manifest` method already returns a predefined +`Manifest` struct as an example. Adapt it according to your need. + +If your plugin adds one or more new commands to `ignite`, feeds the `Commands` +field. + +If your plugin adds features to existing commands, feeds the `Hooks` field. + +Of course a plugin can declare `Commands` *and* `Hooks`. + +A plugin may also share a host process by setting `SharedHost` to `true`. +`SharedHost` is desirable if a plugin hooks into, or declares long running commands. +Commands executed from the same plugin context interact with the same plugin server. +Allowing all executing commands to share the same server instance, giving shared execution context. + +## Adding new command + +Plugin commands are custom commands added to the ignite cli by a registered +plugin. Commands can be of any path not defined already by ignite. All plugin +commands will extend of the command root `ignite`. 
+ +For instance, let's say your plugin adds a new `oracle` command to `ignite +scaffold`, the `Manifest()` method will look like : + +```go +func (p) Manifest() (plugin.Manifest, error) { + return plugin.Manifest{ + Name: "oracle", + Commands: []plugin.Command{ + { + Use: "oracle [name]", + Short: "Scaffold an oracle module", + Long: "Long description goes here...", + // Optionnal flags is required + Flags: []plugin.Flag{ + {Name: "source", Type: plugin.FlagTypeString, Usage: "the oracle source"}, + }, + // Attach the command to `scaffold` + PlaceCommandUnder: "ignite scaffold", + }, + }, + }, nil +} +``` + +To update the plugin execution, you have to change the plugin `Execute` command, +for instance : + +```go +func (p) Execute(cmd plugin.ExecutedCommand) error { + if len(cmd.Args) == 0 { + return fmt.Errorf("oracle name missing") + } + var ( + name = cmd.Args[0] + source, _ = cmd.Flags().GetString("source") + ) + // Read chain information + c, err := getChain(cmd) + if err != nil { + return err + } + + //... +} +``` + +Then, run `ignite scaffold oracle` to execute the plugin. + +## Adding hooks + +Plugin `Hooks` allow existing ignite commands to be extended with new +functionality. Hooks are useful when you want to streamline functionality +without needing to run custom scripts after or before a command has been run. +this can streamline processes that where once error prone or forgotten all +together. + +The following are hooks defined which will run on a registered `ignite` commands + +| Name | Description | +| -------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- | +| Pre | Runs before a commands main functionality is invoked in the `PreRun` scope | +| Post | Runs after a commands main functionality is invoked in the `PostRun` scope | +| Clean Up | Runs after a commands main functionality is invoked. 
if the command returns an error it will run before the error is returned to guarantee execution. | + +*Note*: If a hook causes an error in the pre step the command will not run +resulting in `post` and `clean up` not executing. + +The following is an example of a `hook` definition. + +```go +func (p) Manifest() (plugin.Manifest, error) { + return plugin.Manifest{ + Name: "oracle", + Hooks: []plugin.Hook{ + { + Name: "my-hook", + PlaceHookOn: "ignite chain build", + }, + }, + }, nil +} + +func (p) ExecuteHookPre(hook plugin.ExecutedHook) error { + switch hook.Name { + case "my-hook": + fmt.Println("I'm executed before ignite chain build") + default: + return fmt.Errorf("hook not defined") + } + return nil +} + +func (p) ExecuteHookPost(hook plugin.ExecutedHook) error { + switch hook.Name { + case "my-hook": + fmt.Println("I'm executed after ignite chain build (if no error)") + default: + return fmt.Errorf("hook not defined") + } + return nil +} + +func (p) ExecuteHookCleanUp(hook plugin.ExecutedHook) error { + switch hook.Name { + case "my-hook": + fmt.Println("I'm executed after ignite chain build (regardless errors)") + default: + return fmt.Errorf("hook not defined") + } + return nil +} +``` + +Above we can see a similar definition to `Command` where a hook has a `Name` and +a `PlaceHookOn`. You'll notice that the `Execute*` methods map directly to each +life cycle of the hook. All hooks defined within the plugin will invoke these +methods. 
diff --git a/docs/versioned_docs/version-v0.26/plugins/_category_.json b/docs/versioned_docs/version-v0.26/plugins/_category_.json new file mode 100644 index 0000000..6596f50 --- /dev/null +++ b/docs/versioned_docs/version-v0.26/plugins/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Plugins", + "position": 7, + "link": null +} diff --git a/docs/versioned_docs/version-v0.27/01-welcome/01-index.md b/docs/versioned_docs/version-v0.27/01-welcome/01-index.md new file mode 100644 index 0000000..7e8da39 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/01-welcome/01-index.md @@ -0,0 +1,63 @@ +--- +slug: /welcome +--- + +import ProjectsTable from '@site/src/components/ProjectsTable'; + +# Introduction to Ignite + +[Ignite CLI](https://github.com/ignite/cli) offers everything you need to build, test, and launch your blockchain with a +decentralized worldwide community. Ignite CLI is built on top of [Cosmos SDK](https://docs.cosmos.network), the world’s +most popular blockchain framework. Ignite CLI accelerates chain development by scaffolding everything you need so you +can focus on business logic. + +## What is Ignite CLI? + +Ignite CLI is an easy-to-use CLI tool for creating and maintaining sovereign application-specific blockchains. +Blockchains created with Ignite CLI use Cosmos SDK and Tendermint. Ignite CLI and the Cosmos SDK modules are written in +the Go programming language. The scaffolded blockchain that is created with Ignite CLI includes a command line interface +that lets you manage keys, create validators, and send tokens. 
+ +With just a few commands, you can use Ignite CLI to: + +- Create a modular blockchain written in Go +- Scaffold modules, messages, types with CRUD operations, IBC packets, and more +- Start a blockchain node in development with live reloading +- Connect to other blockchains with a built-in IBC relayer +- Use generated TypeScript/Vuex clients to interact with your blockchain +- Use the Vue.js web app template with a set of components and Vuex modules + +## Install Ignite CLI + +To install the `ignite` binary in `/usr/local/bin` run the following command: + +``` +curl https://get.ignite.com/cli | bash +``` + +## Projects using Tendermint and Cosmos SDK + +Many projects already showcase the Tendermint BFT consensus engine and the Cosmos SDK. Explore +the [Cosmos ecosystem](https://cosmos.network/ecosystem/apps) to discover a wide variety of apps, blockchains, wallets, +and explorers that are built in the Cosmos ecosystem. + +## Projects building with Ignite CLI + +<ProjectsTable data={[ + { name: "Stride Labs", logo: "img/logo/stride.svg"}, + { name: "KYVE Network", logo: "img/logo/kyve.svg"}, + { name: "Umee", logo: "img/logo/umee.svg"}, + { name: "MediBloc Core", logo: "img/logo/medibloc.svg"}, + { name: "Cudos", logo: "img/logo/cudos.svg"}, + { name: "Firma Chain", logo: "img/logo/firmachain.svg"}, + { name: "BitCanna", logo: "img/logo/bitcanna.svg"}, + { name: "Source Protocol", logo: "img/logo/source.svg"}, + { name: "Sonr", logo: "img/logo/sonr.svg"}, + { name: "Neutron", logo: "img/logo/neutron.svg"}, + { name: "OKP4 Blockchain", logo: "img/logo/okp4.svg"}, + { name: "Dymension Hub", logo: "img/logo/dymension.svg"}, + { name: "Electra Blockchain", logo: "img/logo/electra.svg"}, + { name: "OLLO Station", logo: "img/logo/ollostation.svg"}, + { name: "Mun", logo: "img/logo/mun.svg"}, + { name: "Aura Network", logo: "img/logo/aura.svg"}, +]}/> \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/01-welcome/02-install.md 
b/docs/versioned_docs/version-v0.27/01-welcome/02-install.md new file mode 100644 index 0000000..437ab33 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/01-welcome/02-install.md @@ -0,0 +1,114 @@ +--- +sidebar_position: 1 +description: Steps to install Ignite CLI on your local computer. +--- + +# Install Ignite CLI + +You can run [Ignite CLI](https://github.com/ignite/cli) in a web-based Gitpod IDE or you can install Ignite CLI on your +local computer. + +## Prerequisites + +Be sure you have met the prerequisites before you install and use Ignite CLI. + +### Operating systems + +Ignite CLI is supported for the following operating systems: + +- GNU/Linux +- macOS +- Windows Subsystem for Linux (WSL) + +### Go + +Ignite CLI is written in the Go programming language. To use Ignite CLI on a local system: + +- Install [Go](https://golang.org/doc/install) (**version 1.19** or higher) +- Ensure the Go environment variables are [set properly](https://golang.org/doc/gopath_code#GOPATH) on your system + +## Verify your Ignite CLI version + +To verify the version of Ignite CLI you have installed, run the following command: + +```bash +ignite version +``` + +## Installing Ignite CLI + +To install the latest version of the `ignite` binary use the following command. + +```bash +curl https://get.ignite.com/cli! | bash +``` + +This command invokes `curl` to download the installation script and pipes the output to `bash` to perform the +installation. The `ignite` binary is installed in `/usr/local/bin`. + +To learn more or customize the installation process, see the [installer docs](https://github.com/ignite/installer) on +GitHub. + +### Write permission + +Ignite CLI installation requires write permission to the `/usr/local/bin/` directory. 
If the installation fails because +you do not have write permission to `/usr/local/bin/`, run the following command: + +```bash +curl https://get.ignite.com/cli | bash +``` + +Then run this command to move the `ignite` executable to `/usr/local/bin/`: + +```bash +sudo mv ignite /usr/local/bin/ +``` + +On some machines, a permissions error occurs: + +```bash +mv: rename ./ignite to /usr/local/bin/ignite: Permission denied +============ +Error: mv failed +``` + +In this case, use sudo before `curl` and before `bash`: + +```bash +sudo curl https://get.ignite.com/cli | sudo bash +``` + +## Upgrading your Ignite CLI installation + +Before you install a new version of Ignite CLI, remove all existing Ignite CLI installations. + +To remove the current Ignite CLI installation: + +1. On your terminal window, press `Ctrl+C` to stop the chain that you started with `ignite chain serve`. +2. Remove the Ignite CLI binary with `rm $(which ignite)`. + Depending on your user permissions, run the command with or without `sudo`. +3. Repeat this step until all `ignite` installations are removed from your system. + +After all existing Ignite CLI installations are removed, follow the [Installing Ignite CLI](#installing-ignite-cli) +instructions. + +For details on version features and changes, see +the [changelog.md](https://github.com/ignite/cli/blob/main/changelog.md) +in the repo. + +## Build from source + +To experiment with the source code, you can build from source: + +```bash +git clone https://github.com/ignite/cli --depth=1 +cd cli && make install +``` + +## Summary + +- Verify the prerequisites. +- To set up a local development environment, install Ignite CLI locally on your computer. +- Install Ignite CLI by fetching the binary using cURL or by building from source. +- The latest version is installed by default. You can install previous versions of the precompiled `ignite` binary. +- Stop the chain and remove existing versions before installing a new version. 
diff --git a/docs/versioned_docs/version-v0.27/01-welcome/_category_.json b/docs/versioned_docs/version-v0.27/01-welcome/_category_.json new file mode 100644 index 0000000..ac625fc --- /dev/null +++ b/docs/versioned_docs/version-v0.27/01-welcome/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Welcome", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/00-introduction.md b/docs/versioned_docs/version-v0.27/02-guide/00-introduction.md new file mode 100644 index 0000000..31dd77b --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/00-introduction.md @@ -0,0 +1,36 @@ +--- +sidebar_position: 0 +title: Introduction +slug: /guide +--- + +# Introduction + +Developer tutorials provide step-by-step instructions to help you build blockchain developer skills. + +By following these developer tutorials you will learn how to: + +* Install Ignite CLI on your local machine +* Create a new blockchain and start a node locally for development +* Make your blockchain say "Hello, World!" 
  * Read data from the store and return it as the result of a query
+ +By the end of this tutorial, you will have a basic understanding of how to use +Ignite CLI to create a new blockchain, and you will have a high-level +understanding of the directory structure and files that make up a blockchain. +This knowledge will be useful as you continue to explore the world of blockchain +development. + +## Creating a new blockchain + +To create a new blockchain project with Ignite, you will need to run the +following command: + +``` +ignite scaffold chain example +``` + +The [`ignite scaffold chain`](/references/cli#ignite-scaffold-chain) command +will create a new blockchain in a new directory `example`. + +The new blockchain is built using the Cosmos SDK framework and imports several +standard modules to provide a range of functionality. These modules include +`staking`, which enables a delegated Proof-of-Stake consensus mechanism, `bank` +for facilitating fungible token transfers between accounts, and `gov` for +on-chain governance. In addition to these modules, the blockchain also imports +other modules from the Cosmos SDK framework. + +The `example` directory contains the generated files and directories that make +up the structure of a Cosmos SDK blockchain. This directory includes files for +the chain's configuration, application logic, and tests, among others. It +provides a starting point for developers to quickly set up a new Cosmos SDK +blockchain and build their desired functionality on top of it. + +By default, Ignite creates a new empty custom module with the same name as the +blockchain being created (in this case, `example`) in the `x/` directory. This +module doesn't have any functionality by itself, but can serve as a starting +point for building out the features of your application. If you don't want to +create this module, you can use the `--no-module` flag to skip it. 
+ +## Directory structure + +In order to understand what the Ignite CLI has generated for your project, you +can inspect the contents of the `example/` directory. + +The `app/` directory contains the files that connect the different parts of the +blockchain together. The most important file in this directory is `app.go`, +which includes the type definition of the blockchain and functions for creating +and initializing it. This file is responsible for wiring together the various +components of the blockchain and defining how they will interact with each +other. + +The `cmd/` directory contains the main package responsible for the command-line +interface (CLI) of the compiled binary. This package defines the commands that +can be run from the CLI and how they should be executed. It is an important part +of the blockchain project as it provides a way for developers and users to +interact with the blockchain and perform various tasks, such as querying the +blockchain state or sending transactions. + +The `docs/` directory is used for storing project documentation. By default, +this directory includes an OpenAPI specification file, which is a +machine-readable format for defining the API of a software project. The OpenAPI +specification can be used to automatically generate human-readable documentation +for the project, as well as provide a way for other tools and services to +interact with the API. The `docs/` directory can be used to store any additional +documentation that is relevant to the project. + +The `proto/` directory contains protocol buffer files, which are used to +describe the data structure of the blockchain. Protocol buffers are a language- +and platform-neutral mechanism for serializing structured data, and are often +used in the development of distributed systems, such as blockchain networks. 
The +protocol buffer files in the `proto/` directory define the data structures and +messages that are used by the blockchain, and are used to generate code for +various programming languages that can be used to interact with the blockchain. +In the context of the Cosmos SDK, protocol buffer files are used to define the +specific types of data that can be sent and received by the blockchain, as well +as the specific RPC endpoints that can be used to access the blockchain's +functionality. + +The `testutil/` directory contains helper functions that are used for testing. +These functions provide a convenient way to perform common tasks that are needed +when writing tests for the blockchain, such as creating test accounts, +generating transactions, and checking the state of the blockchain. By using the +helper functions in the `testutil/` directory, developers can write tests more +quickly and efficiently, and can ensure that their tests are comprehensive and +effective. + +The `x/` directory contains custom Cosmos SDK modules that have been added to +the blockchain. Standard Cosmos SDK modules are pre-built components that +provide common functionality for Cosmos SDK-based blockchains, such as support +for staking and governance. Custom modules, on the other hand, are modules that +have been developed specifically for the blockchain project and provide +project-specific functionality. + +The `config.yml` file is a configuration file that can be used to customize the +blockchain during development. This file includes settings that control various +aspects of the blockchain, such as the network's ID, account balances, and the +node parameters. + +The `.github` directory contains a GitHub Actions workflow that can be used to +automatically build and release a blockchain binary. GitHub Actions is a tool +that allows developers to automate their software development workflows, +including building, testing, and deploying their projects. 
The workflow in the +`.github` directory is used to automate the process of building the blockchain +binary and releasing it, which can save time and effort for developers. + +The `readme.md` file is a readme file that provides an overview of the +blockchain project. This file typically includes information such as the +project's name and purpose, as well as instructions on how to build and run the +blockchain. By reading the `readme.md` file, developers and users can quickly +understand the purpose and capabilities of the blockchain project and get +started using it. + +## Starting a blockchain node + +To start a blockchain node in development, you can run the following command: + +``` +ignite chain serve +``` + +The [`ignite chain serve`](/references/cli#ignite-scaffold-chain) command is used to start +a blockchain node in development mode. It first compiles and installs the binary +using the `ignite chain build` command, then initializes the blockchain's data +directory for a single validator using the `ignite chain init` command. After +that, it starts the node locally and enables automatic code reloading so that +changes to the code can be reflected in the running blockchain without having to +restart the node. This allows for faster development and testing of the +blockchain. + +Congratulations! 🥳 You have successfully created a brand-new Cosmos blockchain +using the Ignite CLI. This blockchain uses the delegated proof of stake (DPoS) +consensus algorithm, and comes with a set of standard modules for token +transfers, governance, and inflation. Now that you have a basic understanding of +your Cosmos blockchain, it's time to start building custom functionality. In the +following tutorials, you will learn how to build custom modules and add new +features to your blockchain, allowing you to create a unique and powerful +decentralized application. 
diff --git a/docs/versioned_docs/version-v0.27/02-guide/03-hello/00-express.md b/docs/versioned_docs/version-v0.27/02-guide/03-hello/00-express.md new file mode 100644 index 0000000..49ac984 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/03-hello/00-express.md @@ -0,0 +1,141 @@ +--- +description: Step-by-step guidance to build your first blockchain and your first Cosmos SDK module. +title: Express tutorial +--- + +# "Hello, World!" in 5 minutes + +In this tutorial, you will create a simple blockchain with a custom query that +responds with `"Hello, %s!"`, where `%s` is a name provided in the query. To do +this, you will use the Ignite CLI to generate most of the code, and then modify +the query to return the desired response. After completing the tutorial, you +will have a better understanding of how to create custom queries in a +blockchain. + +First, create a new `hello` blockchain with Ignite CLI: + +``` +ignite scaffold chain hello +``` + +Let's add a query to the blockchain we just created. + +In the Cosmos SDK, a query is a request for information from the blockchain. +Queries are used to retrieve data from the blockchain, such as the current state +of the ledger or the details of a specific transaction. The Cosmos SDK provides +a number of built-in query methods that can be used to retrieve data from the +blockchain, and developers can also create custom queries to access specific +data or perform complex operations. Queries are processed by the blockchain's +nodes and the results are returned to the querying client. + +## Create a query with Ignite + +To add a query, run the following command inside the `hello` directory: + +``` +ignite scaffold query say-hello name --response name +``` + +The `ignite scaffold query` command is a tool used to quickly create new +queries. When you run this command, it makes changes to your source code to add +the new query and make it available in your API. 
This command accepts a query +name (`"say-hello"`) and a list of request fields (in our case only `name`). The +optional `--response` flag specifies the return values of the query. + +This command made the following changes to the source code. + +The `proto/hello/hello/query.proto` file was modified to define the request and +response for a query, as well as to add the `SayHello` query in the `Query` +service. + +The `x/hello/client/cli/query_say_hello.go` file was created and added to the +project. This file contains a CLI command `CmdSayHello` that allows users to +submit a "say hello" query to the blockchain. This command allows users to +interact with the blockchain in a more user-friendly way, allowing them to +easily submit queries and receive responses from the blockchain. + +The `x/hello/client/cli/query.go` was modified to add the `CmdSayHello` command +to the CLI of the blockchain. + +The `x/hello/keeper/query_say_hello.go` file was created with a keeper method +called `SayHello`. This method is responsible for handling the "say hello" +query, which can be called by a client using the command-line interface (CLI) or +an API. When the "say hello" query is executed, the `SayHello` method is called +to perform the necessary actions and return a response to the client. The +`SayHello` method may retrieve data from the application's database, process the +data, and return a result to the client in a specific format, such as a string +of text or a data structure. + +To change the source code so that the query returns the `"Hello, %s!"` string, +modify the return statement in `query_say_hello.go` to return +`fmt.Sprintf("hello %s", req.Name)`. 
Now that you have added a query to your blockchain and modified it to return the
value you want, you can start your blockchain with Ignite:
However, it is also important for developers to understand how the code
generated by Ignite works under the hood. One way to do this is to implement the
same functionality manually, without using Ignite. For example, in this tutorial
Ignite was used to generate query functionality; now you could try implementing
the same functionality manually to see how it works and gain a deeper
understanding of the code.
+ +``` +ignite scaffold chain hello +``` + +## `SayHello` RPC + +In Cosmos SDK blockchains, queries are defined as remote procedure calls (RPCs) +in a `Query` service in protocol buffer files. To add a new query, you can add +the following code to the `query.proto` file of your module: + +```protobuf title="proto/hello/hello/query.proto" +service Query { + // highlight-start + rpc SayHello(QuerySayHelloRequest) returns (QuerySayHelloResponse) { + option (google.api.http).get = "/hello/hello/say_hello/{name}"; + } + // highlight-end +} +``` + +The RPC accepts a request argument of type `QuerySayHelloRequest` and returns a +value of type `QuerySayHelloResponse`. To define these types, you can add the +following code to the `query.proto` file: + +```protobuf title="proto/hello/hello/query.proto" +message QuerySayHelloRequest { + string name = 1; +} + +message QuerySayHelloResponse { + string name = 1; +} +``` + +To use the types defined in `query.proto`, you must transpile the protocol +buffer files into Go source code. This can be done by running `ignite chain +serve`, which will build and initialize the blockchain and automatically +generate the Go source code from the protocol buffer files. Alternatively, you +can run `ignite generate proto-go` to only generate the Go source code from the +protocol buffer files, without building and initializing the blockchain. + +## `SayHello` keeper method + +After defining the query, request, and response types in the `query.proto` file, +you will need to implement the logic for the query in your code. This typically +involves writing a function that processes the request and returns the +appropriate response. 
Create a new file `query_say_hello.go` with the following +contents: + +```go title="x/hello/keeper/query_say_hello.go" +package keeper + +import ( + "context" + "fmt" + + sdk "github.com/cosmos/cosmos-sdk/types" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "hello/x/hello/types" +) + +func (k Keeper) SayHello(goCtx context.Context, req *types.QuerySayHelloRequest) (*types.QuerySayHelloResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + ctx := sdk.UnwrapSDKContext(goCtx) + // TODO: Process the query + _ = ctx + return &types.QuerySayHelloResponse{Name: fmt.Sprintf("hello %s", req.Name)}, nil +} +``` + +This code defines a `SayHello` function that accepts a request of type +`QuerySayHelloRequest` and returns a value of type `QuerySayHelloResponse`. The +function first checks if the request is valid, and then processes the query by +returning the response message with the provided name as the value for the `%s` +placeholder. You can add additional logic to the function as needed, such as +retrieving data from the blockchain or performing complex operations, to handle +the query and return the appropriate response. + +## `CmdSayHello` command + +After implementing the query logic, you will need to make the query available to +clients so that they can call it and receive the response. This typically +involves adding the query to the blockchain's application programming interface +(API) and providing a command-line interface (CLI) command that allows users to +easily submit the query and receive the response. + +To provide a CLI command for the query, you can create the `query_say_hello.go` +file and implement a `CmdSayHello` command that calls the `SayHello` function +and prints the response to the console. 
+ +```go title="x/hello/client/cli/query_say_hello.go" +package cli + +import ( + "strconv" + + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/spf13/cobra" + + "hello/x/hello/types" +) + +var _ = strconv.Itoa(0) + +func CmdSayHello() *cobra.Command { + cmd := &cobra.Command{ + Use: "say-hello [name]", + Short: "Query say-hello", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) (err error) { + reqName := args[0] + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } + queryClient := types.NewQueryClient(clientCtx) + params := &types.QuerySayHelloRequest{ + Name: reqName, + } + res, err := queryClient.SayHello(cmd.Context(), params) + if err != nil { + return err + } + return clientCtx.PrintProto(res) + }, + } + flags.AddQueryFlagsToCmd(cmd) + return cmd +} +``` + +The code defines a `CmdSayHello` command. The command is defined using the +`cobra` library, which is a popular framework for building command-line +applications in Go. The command accepts a `name` as an argument and uses it to +create a `QuerySayHelloRequest` struct that is passed to the `SayHello` function +from the `types.QueryClient`. The `SayHello` function is used to send the +`say-hello` query to the blockchain, and the response is stored in the `res` +variable. + +The `QuerySayHelloRequest` struct is defined in the `query.proto` file, which is +a Protocol Buffer file that defines the request and response types for the +query. The `QuerySayHelloRequest` struct includes a `Name` field of type +`string`, which is used to provide the name to be included in the response +message. + +After the query has been sent and the response has been received, the code uses +the `clientCtx.PrintProto` function to print the response to the console. 
The +`clientCtx` variable is obtained using the `client.GetClientQueryContext` +function, which provides access to the client context, including the client's +configuration and connection information. The `PrintProto` function is used to +print the response using the Protocol Buffer format, which allows for efficient +serialization and deserialization of the data. + +The `flags.AddQueryFlagsToCmd` function is used to add query-related flags to +the command. This allows users to specify additional options when calling the +command, such as the node URL and other query parameters. These flags are used +to configure the query and provide the necessary information to the `SayHello` +function, allowing it to connect to the blockchain and send the query. + +To make the `CmdSayHello` command available to users, you will need to add it to +the chain's binary. This is typically done by modifying the +`x/hello/client/cli/query.go` file and adding the +`cmd.AddCommand(CmdSayHello())` statement. This adds the `CmdSayHello` command +to the list of available commands, allowing users to call it from the +command-line interface (CLI). + +```go title="x/hello/client/cli/query.go" +func GetQueryCmd(queryRoute string) *cobra.Command { + cmd := &cobra.Command{ + Use: types.ModuleName, + Short: fmt.Sprintf("Querying commands for the %s module", types.ModuleName), + DisableFlagParsing: true, + SuggestionsMinimumDistance: 2, + RunE: client.ValidateCmd, + } + cmd.AddCommand(CmdQueryParams()) + // highlight-next-line + cmd.AddCommand(CmdSayHello()) + return cmd +} +``` + +Once you have provided a CLI command, users will be able to call the `say-hello` +query and receive the appropriate response. 
+ +Save all the changes you made to the source code of your project and run the +following command to start a blockchain node: + +``` +ignite chain serve +``` + +Use the following command to submit the query and receive the response: + +``` +hellod q hello say-hello bob +``` + +This command will send a "say-hello" query to the blockchain with the name "bob" +and print the response of "Hello, bob!" to the console. You can modify the query +and response as needed to suit your specific requirements and provide the +desired functionality. + +Congratulations on completing the "Hello, World!" tutorial! In this tutorial, +you learned how to define a new query in a protocol buffer file, implement the +logic for the query in your code, and make the query available to clients +through the blockchain's API and CLI. By following the steps outlined in the +tutorial, you were able to create a functional query that can be used to +retrieve data from your blockchain or perform other operations as needed. + +Now that you have completed the tutorial, you can continue to build on your +knowledge of the Cosmos SDK and explore the many features and capabilities it +offers. You may want to try implementing more complex queries or experiment with +other features of the SDK to see what you can create. 
\ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/03-hello/_category_.json b/docs/versioned_docs/version-v0.27/02-guide/03-hello/_category_.json new file mode 100644 index 0000000..ab71abd --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/03-hello/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Hello, World!", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/04-blog/00-express.md b/docs/versioned_docs/version-v0.27/02-guide/04-blog/00-express.md new file mode 100644 index 0000000..0bb8bac --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/04-blog/00-express.md @@ -0,0 +1,309 @@ +--- +description: Learn module basics by writing and reading blog posts to your chain. +title: Express tutorial +--- + +# "Build a blog" in 5 minutes + +In this tutorial, we will create a blockchain with a module that allows us to +write and read data from the blockchain. This module will implement the ability +to create and read blog posts, similar to a blogging application. The end user +will be able to submit new blog posts and view a list of existing posts on the +blockchain. This tutorial will guide you through the process of creating and +using this module to interact with the blockchain. + +The goal of this tutorial is to provide step-by-step instructions for creating a +feedback loop that allows you to submit data to the blockchain and read that +data back from the blockchain. By the end of this tutorial, you will have +implemented a complete feedback loop and will be able to use it to interact with +the blockchain. + +First, create a new `blog` blockchain with Ignite CLI: + +``` +ignite scaffold chain blog +``` + +In order to create a blog application that uses a blockchain, we need to define +the requirements for our application. We want the application to store objects +of type `Post` on the blockchain. These objects should have two properties: a +`title` and a `body`. 
+ +In addition to storing posts on the blockchain, we also want to provide users +with the ability to perform CRUD (create, read, update, and delete) operations +on these posts. This will allow users to create new posts, read existing posts, +update the contents of existing posts, and delete posts that are no longer +needed. + +One of the features of the Ignite CLI is the ability to generate code that +implements basic CRUD functionality. This is accomplished through the use of +scaffolding commands, which can be used to quickly generate the necessary code +for creating, reading, updating, and deleting data in your application. + +The Ignite CLI is capable of generating code for data that is stored in +different types of data structures. This includes lists, which are collections +of data indexed by an incrementing integer, maps, which are collections indexed +by a custom key, and singles, which are single instances of data. By using these +different data structures, you can customize your application to fit your +specific needs. For example, if you are building a blog application, you may +want to use a list to store all posts, with each post indexed by an integer. +Alternatively, you could use a map to index each post by its unique title, or a +single to store a single post. The choice of data structure will depend on the +specific requirements of your application. + +In addition to the data structure you choose, the Ignite CLI also requires you +to provide the name of the type of data that it will generate code for, as well +as fields that describe the type of data. For example, if you are creating a +blog application, you may want to create a type called "Post" that has fields +for the "title" and "body" of the post. The Ignite CLI will use this information +to generate the necessary code for creating, reading, updating, and deleting +data of this type in your application. 
+

Switch to the `blog` directory and run the `ignite scaffold list` command:

```
cd blog
ignite scaffold list post title body
```

Now that you have used the Ignite CLI to generate code for your application,
let's review what it has created. The Ignite CLI will have generated code for
the data structure and data type that you specified, as well as code for the
basic CRUD operations that are needed to manipulate this data. This code will
provide a solid foundation for your application, and you can customize it
further to fit your specific needs. By reviewing the code generated by the
Ignite CLI, you can ensure that it meets your requirements and get a better
understanding of how to build your application using this tool.

The Ignite CLI has generated several files and modifications in the
`proto/blog/blog` directory. These include:

* `post.proto`: This is a protocol buffer file that defines the `Post` type,
  with fields for the `title`, `body`, `id`, and `creator`.
* `tx.proto`: This file has been modified to include three RPCs (remote
  procedure calls): `CreatePost`, `UpdatePost`, and `DeletePost`. Each of these
  RPCs corresponds to a Cosmos SDK message that can be used to perform the
  corresponding CRUD operation on a post.
* `query.proto`: This file has been modified to include two queries: `Post` and
  `PostAll`. The `Post` query can be used to retrieve a single post by its ID,
  while the `PostAll` query can be used to retrieve a paginated list of posts.
* `genesis.proto`: This file has been modified to include posts in the genesis
  state of the module, which defines the initial state of the blockchain when it
  is first started.

The Ignite CLI has also generated several new files in the `x/blog/keeper`
directory that implement the CRUD-specific logic for your application. These
include:

* `msg_server_post.go`: This file implements keeper methods for the
  `CreatePost`, `UpdatePost`, and `DeletePost` messages. 
These methods are + called when a corresponding message is processed by the module, and they + handle the specific logic for each of the CRUD operations. +* `query_post.go`: This file implements the `Post` and `PostAll` queries, which + are used to retrieve individual posts by ID or a paginated list of posts, + respectively. +* `post.go`: This file implements the underlying functions that the keeper + methods depend on. These functions include appending (adding) posts to the + store, getting individual posts, getting the post count, and other operations + that are needed to manage the posts in the application. + +Overall, these files provide the necessary implementation for the CRUD +functionality of your blog application. They handle the specific logic for each +of the CRUD operations, as well as the underlying functions that these +operations depend on. + +Files were created and modified in the `x/blog/types` directory. + +* `messages_post.go`: This new file contains Cosmos SDK message constructors and + associated methods such as `Route()`, `Type()`, `GetSigners()`, + `GetSignBytes()`, and `ValidateBasic()`. +* `keys.go`: This file was modified to include key prefixes for storing blog + posts. By using key prefixes, we can ensure that the data for our blog posts + is kept separate from other types of data in the database, and that it can be + easily accessed when needed. +* `genesis.go`: This file was modified to define the initial (genesis) state of + the blog module, as well as the `Validate()` function for validating this + initial state. This is an important step in setting up our blockchain, as it + defines the initial data and ensures that it is valid according to the rules + of our application. +* `codec.go`: This file was modified to register our message types with the + encoder, allowing them to be properly serialized and deserialized when + transmitted over the network. 
+

Additionally, `*.pb.go` files were generated from `*.proto` files, and they
contain type definitions for messages, RPCs, and queries used by our
application. These files are automatically generated from the `*.proto` files
using the Protocol Buffers (protobuf) tool, which allows us to define the
structure of our data in a language-agnostic way.

The Ignite CLI has added functionality to the `x/blog/client/cli` directory by
creating and modifying several files:

* `tx_post.go`: This file was created to implement CLI commands for broadcasting
  transactions containing messages for the blog module. These commands allow
  users to easily send messages to the blockchain using the Ignite CLI.
* `query_post.go`: This file was created to implement CLI commands for querying
  the blog module. These commands allow users to retrieve information from the
  blockchain, such as a list of blog posts.
* `tx.go`: This file was modified to add the CLI commands for broadcasting
  transactions to the chain's binary.
* `query.go`: This file was also modified to add the CLI commands for querying
  the chain to the chain's binary.

As you can see, the `ignite scaffold list` command has generated and modified a
number of source code files. These files define the types of messages, logic
that gets executed when a message is processed, and the wiring that connects
everything together. This includes the logic for creating, updating, and
deleting blog posts, as well as the queries needed to retrieve this information.

To see the generated code in action, we will need to start the blockchain. We
can do this by using the `ignite chain serve` command, which will build,
initialize, and start the blockchain for us:

```
ignite chain serve
```

Once the blockchain is running, we can use the binary to interact with it and
see how the code handles creating, updating, and deleting blog posts. We can
also see how it processes and responds to queries. 
This will give us a better +understanding of how our application works and allow us to test its +functionality. + +While `ignite chain serve` is running in one terminal window, open another +terminal and use the chain's binary to create a new blog post on the blockchain: + +``` +blogd tx blog create-post 'Hello, World!' 'This is a blog post' --from alice +``` + +When using the `--from` flag to specify the account that will be used to sign a +transaction, it's important to ensure that the specified account is available +for use. In a development environment, you can see a list of available accounts +in the output of the `ignite chain serve` command, or in the `config.yml` file. + +It's also worth noting that the `--from` flag is required when broadcasting +transactions. This flag specifies the account that will be used to sign the +transaction, which is a crucial step in the transaction process. Without a valid +signature, the transaction will not be accepted by the blockchain. Therefore, +it's important to ensure that the account specified with the `--from` flag is +available. + +After the transaction has been broadcasted successfully, you can query the +blockchain for the list of blog posts. To do this, you can use the `blogd q blog +list-post` command, which will return a paginated list of all the blog posts +that have been added to the blockchain. + +``` +blogd q blog list-post + +Post: +- body: This is a blog post + creator: cosmos1xz770h6g55rrj8vc9ll9krv6mr964tzhqmsu2v + id: "0" + title: Hello, World! +pagination: + next_key: null + total: "0" +``` + +By querying the blockchain, you can verify that your transaction was processed +successfully and that the blog post has been added to the chain. Additionally, +you can use other query commands to retrieve information about other data on the +blockchain, such as accounts, balances, and governance proposals. + +Let's modify the blog post that we just created by changing the `body` content. 
+To do this, we can use the `blogd tx blog update-post` command, which allows us +to update an existing blog post on the blockchain. When running this command, we +will need to specify the ID of the blog post that we want to modify, as well as +the new body content that we want to use. After running this command, the +transaction will be broadcasted to the blockchain and the blog post will be +updated with the new body content. + +``` +blogd tx blog update-post 0 'Hello, World!' 'This is a blog post from Alice' --from alice +``` + +Now that we have updated the blog post with new content, let's query the +blockchain again to see the changes. To do this, we can use the `blogd q blog +list-post` command, which will return a list of all the blog posts on the +blockchain. By running this command again, we can see the updated blog post in +the list, and we can verify that the changes we made have been successfully +applied to the blockchain. + + +``` +blogd q blog list-post + +Post: +- body: This is a blog post from Alice + creator: cosmos1xz770h6g55rrj8vc9ll9krv6mr964tzhqmsu2v + id: "0" + title: Hello, World! +pagination: + next_key: null + total: "0" +``` + +Let's try to delete one of the blog posts using Bob's account. However, since +the blog post was created using Alice's account, we can expect the blockchain to +check whether the user is authorized to delete the post. In this case, since Bob +is not the author of the post, his transaction should be rejected by the +blockchain. + +To delete a blog post, we can use the `blogd tx blog delete-post` command, which +allows us to delete an existing blog post on the blockchain. When running this +command, we will need to specify the ID of the blog post that we want to delete, +as well as the account that we want to use for signing the transaction. In this +case, we will use Bob's account to sign the transaction. + +After running this command, the transaction will be broadcasted to the +blockchain. 
However, since Bob is not the author of the post, the blockchain +should reject his transaction and the blog post will not be deleted. This is an +example of how the blockchain can enforce rules and permissions, and it shows +that only authorized users are able to make changes to the blockchain. + +``` +blogd tx blog delete-post 0 --from bob + +raw_log: 'failed to execute message; message index: 0: incorrect owner: unauthorized' +``` + +Now, let's try to delete the blog post again, but this time using Alice's +account. Since Alice is the author of the blog post, she should be authorized to +delete it. + +``` +blogd tx blog delete-post 0 --from alice +``` + +To check whether the blog post has been successfully deleted by Alice, we can +query the blockchain for a list of posts again. + +``` +blogd q blog list-post + +Post: [] +pagination: + next_key: null + total: "0" +``` + +Congratulations on successfully completing the tutorial on building a blog with +Ignite CLI! By following the instructions, you have learned how to create a new +blockchain, generate code for a "post" type with CRUD functionality, start a +local blockchain, and test out the functionality of your blog. + +Now that you have a working example of a simple application, you can experiment +with the code generated by Ignite and see how changes affect the behavior of the +application. This is a valuable skill to have, as it will allow you to customize +your application to fit your specific needs and improve the functionality of +your application. You can try making changes to the data structure or data type, +or add additional fields or functionality to the code. + +In the following tutorials, we will take a closer look at the code that Ignite +generates in order to better understand how to build blockchains. By writing +some of the code ourselves, we can gain a deeper understanding of how Ignite +works and how it can be used to create applications on a blockchain. 
This will +help us learn more about the capabilities of Ignite CLI and how it can be used +to build robust and powerful applications. Keep an eye out for these tutorials +and get ready to dive deeper into the world of blockchains with Ignite! \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/04-blog/01-intro.md b/docs/versioned_docs/version-v0.27/02-guide/04-blog/01-intro.md new file mode 100644 index 0000000..16cbc38 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/04-blog/01-intro.md @@ -0,0 +1,17 @@ +--- +title: In-depth tutorial +--- + +# In-depth blog tutorial + +In this tutorial, you will learn how to create a blog application as a Cosmos +SDK blockchain using the Ignite CLI by building it from scratch. This means that +you will be responsible for setting up the necessary types, messages, and +queries and writing the logic to create, read, update, and delete blog posts on +the blockchain. + +The functionality of the application you will be building will be identical to +what is generated by the Ignite CLI command `ignite scaffold list post title +body`, but you will be doing it manually in order to gain a deeper understanding +of the process. Through this tutorial, you will learn how to build a blog +application on a Cosmos SDK blockchain using the Ignite CLI in a hands-on way. diff --git a/docs/versioned_docs/version-v0.27/02-guide/04-blog/02-scaffolding.md b/docs/versioned_docs/version-v0.27/02-guide/04-blog/02-scaffolding.md new file mode 100644 index 0000000..f597a47 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/04-blog/02-scaffolding.md @@ -0,0 +1,124 @@ +# Creating the structure + +Create a new blockchain with the following command: + +``` +ignite scaffold chain blog +``` + +This will create a new directory called `blog/` containing the necessary files +and directories for your [blockchain +application](https://docs.cosmos.network/main/basics/app-anatomy). 
Next,
navigate to the newly created directory by running:

```
cd blog
```

Since your app will be storing and operating with blog posts, you will need to
create a `Post` type to represent these posts. You can do this using the
following Ignite CLI command:

```
ignite scaffold type post title body creator id:uint
```

This will create a `Post` type with four fields: `title`, `body`, `creator`, all
of type `string`, and `id` of type `uint`.

It is a good practice to commit your changes to a version control system like
Git after using Ignite's code scaffolding commands. This will allow you to
differentiate between changes made automatically by Ignite and changes made
manually by developers, and also allow you to roll back changes if necessary.
You can commit your changes to Git with the following commands:

```
git add .
git commit -am "ignite scaffold type post title body creator id:uint"
```

### Creating messages

Next, you will be implementing CRUD (create, read, update, and delete)
operations for your blog posts. Since create, update, and delete operations
change the state of the application, they are considered write operations. In
Cosmos SDK blockchains, state is changed by broadcasting
[transactions](https://docs.cosmos.network/main/basics/tx-lifecycle) that
contain messages that trigger state transitions. To create the logic for
broadcasting and handling transactions with a "create post" message, you can use
the following Ignite CLI command:

```
ignite scaffold message create-post title body --response id:uint
```

This will create a "create post" message with two fields: `title` and `body`,
both of which are of type `string`. Posts will be stored in the key-value store
in a list-like data structure, where they are indexed by an incrementing integer
ID. When a new post is created, it will be assigned an ID integer. The
`--response` flag is used to return `id` of type `uint` as a response to the
"create post" message. 
+

To update a specific blog post in your application, you will need to create a
message called "update post" that accepts three arguments: `title`, `body`, and
`id`. The `id` argument of type `uint` is necessary to specify which blog post
you want to update. You can create this message using the Ignite CLI command:

```
ignite scaffold message update-post title body id:uint
```

To delete a specific blog post in your application, you will need to create a
message called "delete post" that accepts only the `id` of the post to be
deleted. You can create this message using the Ignite CLI command:

```
ignite scaffold message delete-post id:uint
```

### Creating queries

[Queries](https://docs.cosmos.network/main/basics/query-lifecycle) allow users
to retrieve information from the blockchain state. In your application, you will
have two queries: "show post" and "list post". The "show post" query will allow
users to retrieve a specific post by its ID, while the "list post" query will
return a paginated list of all stored posts.

To create the "show post" query, you can use the following Ignite CLI command:

```
ignite scaffold query show-post id:uint --response post:Post
```

This query will accept `id` of type `uint` as an argument, and will return a
`post` of type `Post` as a response.

To create the "list post" query, you can use the following Ignite CLI command:

```
ignite scaffold query list-post --response post:Post --paginated
```

This query will return a `post` of type `Post` in a paginated output. The
`--paginated` flag indicates that the query should return its results in a
paginated format, allowing users to retrieve a specific page of results at a
time.

## Summary

Congratulations on completing the initial setup of your blockchain application! 
+You have successfully created a "post" data type and generated the necessary +code for handling three types of messages (create, update, and delete) and two +types of queries (list and show posts). + +However, at this point, the messages you have created will not trigger any state +transitions, and the queries you have created will not return any results. This +is because Ignite only generates the boilerplate code for these features, and it +is up to you to implement the necessary logic to make them functional. + +In the next chapters of the tutorial, you will learn how to implement the +message handling and query logic to complete your blockchain application. This +will involve writing code to process the messages and queries you have created +and use them to modify or retrieve data from the blockchain's state. By the end +of this process, you will have a fully functional blog application on a Cosmos +SDK blockchain. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/04-blog/03-create.md b/docs/versioned_docs/version-v0.27/02-guide/04-blog/03-create.md new file mode 100644 index 0000000..f83372b --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/04-blog/03-create.md @@ -0,0 +1,319 @@ +# Creating posts + +In this chapter, we will be focusing on the process of handling a "create post" +message. This involves the use of a special type of function known as a keeper +method. [Keeper](https://docs.cosmos.network/main/building-modules/keeper) +methods are responsible for interacting with the blockchain and modifying its +state based on the instructions provided in a message. + +When a "create post" message is received, the corresponding keeper method will +be called and passed the message as an argument. The keeper method can then use +the various getter and setter functions provided by the store object to retrieve +and modify the current state of the blockchain. 
This allows the keeper method to +effectively process the "create post" message and make the necessary updates to +the blockchain. + +In order to keep the code for accessing and modifying the store object clean and +separate from the logic implemented in the keeper methods, we will create a new +file called `post.go`. This file will contain functions that are specifically +designed to handle operations related to creating and managing posts within the +blockchain. + +## Appending posts to the store + +```go title="x/blog/keeper/post.go" +package keeper + +import ( + "encoding/binary" + + "blog/x/blog/types" + + "github.com/cosmos/cosmos-sdk/store/prefix" + sdk "github.com/cosmos/cosmos-sdk/types" +) + +func (k Keeper) AppendPost(ctx sdk.Context, post types.Post) uint64 { + count := k.GetPostCount(ctx) + post.Id = count + store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.PostKey)) + appendedValue := k.cdc.MustMarshal(&post) + store.Set(GetPostIDBytes(post.Id), appendedValue) + k.SetPostCount(ctx, count+1) + return count +} +``` + +This code defines a function called `AppendPost` which belongs to a `Keeper` +type. The `Keeper` type is responsible for interacting with the blockchain and +modifying its state in response to various messages. + +The `AppendPost` function takes in two arguments: a `Context` object and a +`Post` object. The [`Context`](https://docs.cosmos.network/main/core/context) +object is a standard parameter in many functions in the Cosmos SDK and is used +to provide contextual information about the current state of the blockchain, +such as the current block height. The `Post` object represents a post that will +be added to the blockchain. + +The function begins by retrieving the current post count using the +`GetPostCount` method. You will implement this method in the next step as it has +not been implemented yet. This method is called on the `Keeper` object and takes +in a `Context` object as an argument. 
It returns the current number of posts +that have been added to the blockchain. + +Next, the function sets the ID of the new post to be the current post count, so +that each post has a unique identifier. It does this by assigning the value of +count to the `Id` field of the `Post` object. + +The function then creates a new +[store](https://docs.cosmos.network/main/core/store) object using the +`prefix.NewStore` function. The `prefix.NewStore` function takes in two +arguments: the `KVStore` associated with the provided context and a key prefix +for the `Post` objects. The `KVStore` is a key-value store that is used to +persist data on the blockchain, and the key prefix is used to differentiate the +`Post` objects from other types of objects that may be stored in the same +`KVStore`. + +The function then serializes the `Post` object using the `cdc.MustMarshal` +function and stores it in the blockchain using the `Set` method of the store +object. The `cdc.MustMarshal` function is part of the Cosmos SDK's +[encoding/decoding](https://docs.cosmos.network/main/core/encoding) library and +is used to convert the `Post` object into a byte slice that can be stored in the +`KVStore`. The `Set` method is called on the store object and takes in two +arguments: a key and a value. In this case, the key is a byte slice generated by +the `GetPostIDBytes` function and the value is the serialized `Post` object. You +will implement this method in the next step as it has not been implemented yet. + +Finally, the function increments the post count by one and updates the +blockchain state using the `SetPostCount` method. You will implement this method +in the next step as it has not been implemented yet. This method is called on +the Keeper object and takes in a `Context` object and a new post count as +arguments. It updates the current post count in the blockchain to be the new +post count provided. 
+ +The function then returns the ID of the newly created post, which is the current +post count before it was incremented. This allows the caller of the function to +know the ID of the post that was just added to the blockchain. + +To complete the implementation of `AppendPost`, the following tasks need to be +performed: + +* Define `PostKey`, which will be used to store and retrieve posts from the + database. +* Implement `GetPostCount`, which will retrieve the current number of posts + stored in the database. +* Implement `GetPostIDBytes`, which will convert a post ID to a byte array. +* Implement `SetPostCount`, which will update the post count stored in the + database. + +### Post key prefix + +In the file `keys.go`, let's define the `PostKey` prefix as follows: + +```go title="x/blog/types/keys.go" +const ( + PostKey = "Post/value/" +) +``` + +This prefix will be used to uniquely identify a post within the system. It will +be used as the beginning of the key for each post, followed by the ID of the +post to create a unique key for each post. + +### Getting the post count + +In the file `post.go`, let's define the `GetPostCount` function as follows: + +```go title="x/blog/keeper/post.go" +func (k Keeper) GetPostCount(ctx sdk.Context) uint64 { + store := prefix.NewStore(ctx.KVStore(k.storeKey), []byte{}) + byteKey := types.KeyPrefix(types.PostCountKey) + bz := store.Get(byteKey) + if bz == nil { + return 0 + } + return binary.BigEndian.Uint64(bz) +} +``` + +This code defines a function named `GetPostCount` that belongs to the `Keeper` +struct. The function takes in a single argument, a context object `ctx` of type +`sdk.Context`, and returns a value of type `uint64`. + +The function begins by creating a new store using the key-value store in the +context and an empty byte slice as the prefix. It then defines a byte slice +`byteKey` using the `KeyPrefix` function from the `types` package, which takes +in the `PostCountKey`. 
You will define `PostCountKey` in the next step. + +The function then retrieves the value at the key `byteKey` in the store using +the `Get` method and stores it in a variable `bz`. + +Next, the function checks if the value at `byteKey` is `nil` using an if +statement. If it is `nil`, meaning that the key does not exist in the store, the +function returns 0. This indicates that there are no elements or posts +associated with the key. + +If the value at `byteKey` is not nil, the function uses the `binary` package's +`BigEndian` type to parse the bytes in `bz` and returns the resulting `uint64` +value. The `BigEndian` type is used to interpret the bytes in `bz` as a +big-endian encoded unsigned 64-bit integer. The `Uint64` method converts the +bytes to a `uint64` value and returns it. + +`GetPostCount` function is used to retrieve the total number of posts stored in +the key-value store, represented as a `uint64` value. + +In the file `keys.go`, let's define the `PostCountKey` as follows: + +```go title="x/blog/types/keys.go" +const ( + PostCountKey = "Post/count/" +) +``` + +This key will be used to keep track of the ID of the latest post added to the +store. + +### Converting post ID to bytes + +Now, let's implement `GetPostIDBytes`, which will convert a post ID to a byte +array. + +```go title="x/blog/keeper/post.go" +func GetPostIDBytes(id uint64) []byte { + bz := make([]byte, 8) + binary.BigEndian.PutUint64(bz, id) + return bz +} +``` + +`GetPostIDBytes` takes in a value `id` of type `uint64` and returns a value of +type `[]byte`. + +The function starts by creating a new byte slice `bz` with a length of 8 using +the `make` built-in function. It then uses the `binary` package's `BigEndian` +type to encode the value of `id` as a big-endian encoded unsigned integer and +store the result in `bz` using the `PutUint64` method. Finally, the function +returns the resulting byte slice `bz`. 
+ +This function can be used to convert a post ID, represented as a `uint64`, to a +byte slice that can be used as a key in a key-value store. The +`binary.BigEndian.PutUint64` function encodes the `uint64` value of `id` as a +big-endian encoded unsigned integer and stores the resulting bytes in the +`[]byte` slice `bz`. The resulting byte slice can then be used as a key in the +store. + +### Updating the post count + +Implement `SetPostCount` in `post.go`, which will update the post count stored +in the database. + +```go title="x/blog/keeper/post.go" +func (k Keeper) SetPostCount(ctx sdk.Context, count uint64) { + store := prefix.NewStore(ctx.KVStore(k.storeKey), []byte{}) + byteKey := types.KeyPrefix(types.PostCountKey) + bz := make([]byte, 8) + binary.BigEndian.PutUint64(bz, count) + store.Set(byteKey, bz) +} +``` + +This code defines a function `SetPostCount` in the `Keeper` struct. The function +takes in a context `ctx` of type `sdk.Context` and a value `count` of type +`uint64`, and does not return a value. + +The function first creates a new store by calling the `NewStore` function from +the prefix package and passing in the key-value store from the context and an +empty byte slice as the prefix. It stores the resulting store in a variable +named `store`. + +Next, the function defines a byte slice `byteKey` using the `KeyPrefix` function +from the `types` package and passing in the `PostCountKey`. The `KeyPrefix` +function returns a byte slice with the given key as a prefix. + +The function then creates a new byte slice `bz` with a length of 8 using the +`make` built-in function. It then uses the `binary` package's `BigEndian` type +to encode the value of count as a big-endian encoded unsigned integer and store +the result in `bz` using the `PutUint64` method. + +Finally, the function calls the `Set` method on the `store` variable, passing in +`byteKey` and `bz` as arguments. This sets the value at the key `byteKey` in the +store to the value `bz`. 
+ +This function can be used to update the count of posts stored in the database. +It does this by converting the `uint64` value of count to a byte slice using the +`binary.BigEndian.PutUint64` function, and then storing the resulting byte slice +at the key `byteKey` in the store using the `Set` method. + +Now that you have implemented the code for creating blog posts, you can proceed +to implement the keeper method that is invoked when the "create post" message is +processed. + +## Handling the "create post" message + +```go title="x/blog/keeper/msg_server_create_post.go" +package keeper + +import ( + "context" + + "blog/x/blog/types" + + sdk "github.com/cosmos/cosmos-sdk/types" +) + +func (k msgServer) CreatePost(goCtx context.Context, msg *types.MsgCreatePost) (*types.MsgCreatePostResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + var post = types.Post{ + Creator: msg.Creator, + Title: msg.Title, + Body: msg.Body, + } + id := k.AppendPost( + ctx, + post, + ) + return &types.MsgCreatePostResponse{ + Id: id, + }, nil +} +``` + +The `CreatePost` function is a message handler for the `MsgCreatePost` message +type. It is responsible for creating a new post on the blockchain based on the +information provided in the `MsgCreatePost` message. + +The function first retrieves the Cosmos SDK context from the Go context using +the `sdk.UnwrapSDKContext` function. It then creates a new `Post` object using +the `Creator`, `Title`, and `Body` fields from the MsgCreatePost message. + +Next, the function calls the `AppendPost` method on the `msgServer` object +(which is of the Keeper type) and passes in the Cosmos SDK context and the new +`Post` object as arguments. The `AppendPost` method is responsible for adding +the new post to the blockchain and returning the ID of the new post. + +Finally, the function returns a `MsgCreatePostResponse` object that contains the +ID of the new post. It also returns a nil error, indicating that the operation +was successful. 
+ +## Summary + +Great job! You have successfully implemented the logic for writing blog posts to +the blockchain store and the keeper method that will be called when a "create +post" message is processed. + +The `AppendPost` keeper method retrieves the current post count, sets the ID of +the new post to be the current post count, serializes the post object, and +stores it in the blockchain using the `Set` method of the `store` object. The +key for the post in the store is a byte slice generated by the `GetPostIDBytes` +function and the value is the serialized post object. The function then +increments the post count by one and updates the blockchain state using the +`SetPostCount` method. + +The `CreatePost` handler method receives a `MsgCreatePost` message containing +the data for the new post, creates a new `Post` object using this data, and +passes it to the `AppendPost` keeper method to be added to the blockchain. It +then returns a `MsgCreatePostResponse` object containing the ID of the newly +created post. + +By implementing these methods, you have successfully implemented the necessary +logic for handling "create post" messages and adding posts to the blockchain. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/04-blog/04-update.md b/docs/versioned_docs/version-v0.27/02-guide/04-blog/04-update.md new file mode 100644 index 0000000..82048bd --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/04-blog/04-update.md @@ -0,0 +1,127 @@ +# Updating posts + +In this chapter, we will be focusing on the process of handling an "update post" +message. + +To update a post, you need to retrieve the specific post from the store using +the "Get" operation, modify the values, and then write the updated post back to +the store using the "Set" operation. + +Let's first implement a getter and a setter logic. 
+ +## Getting posts + +Implement the `GetPost` keeper method in `post.go`: + +```go title="x/blog/keeper/post.go" +func (k Keeper) GetPost(ctx sdk.Context, id uint64) (val types.Post, found bool) { + store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.PostKey)) + b := store.Get(GetPostIDBytes(id)) + if b == nil { + return val, false + } + k.cdc.MustUnmarshal(b, &val) + return val, true +} +``` + +`GetPost` takes in two arguments: a context `ctx` and an `id` of type `uint64` +representing the ID of the post to be retrieved. It returns a `types.Post` +struct containing the values of the post, and a boolean value indicating whether +the post was found in the database. + +The function first creates a `store` using the `prefix.NewStore` method, passing +in the key-value store from the context and the `types.KeyPrefix` function +applied to the `types.PostKey` constant as arguments. It then attempts to +retrieve the post from the store using the `store.Get` method, passing in the ID +of the post as a byte slice. If the post is not found in the store, it returns +an empty `types.Post` struct and a boolean value of false. + +If the post is found in the store, the function unmarshals the retrieved byte +slice into a `types.Post` struct using the `cdc.MustUnmarshal` method, passing +in a pointer to the val variable as an argument. It then returns the val struct +and a boolean value of true to indicate that the post was found in the database. + +## Setting posts + +Implement the `SetPost` keeper method in `post.go`: + +```go title="x/blog/keeper/post.go" +func (k Keeper) SetPost(ctx sdk.Context, post types.Post) { + store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.PostKey)) + b := k.cdc.MustMarshal(&post) + store.Set(GetPostIDBytes(post.Id), b) +} +``` + +`SetPost` takes in two arguments: a context `ctx` and a `types.Post` struct +containing the updated values for the post. The function does not return +anything. 
+ +The function first creates a store using the `prefix.NewStore` method, passing +in the key-value store from the context and the `types.KeyPrefix` function +applied to the `types.PostKey` constant as arguments. It then marshals the +updated post struct into a byte slice using the `cdc.MustMarshal` method, +passing in a pointer to the post struct as an argument. Finally, it updates the +post in the store using the `store.Set` method, passing in the ID of the post as +a byte slice and the marshaled post struct as arguments. + + +## Update posts + +```go title="x/blog/keeper/msg_server_update_post.go" +package keeper + +import ( + "context" + "fmt" + + "blog/x/blog/types" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + +func (k msgServer) UpdatePost(goCtx context.Context, msg *types.MsgUpdatePost) (*types.MsgUpdatePostResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + var post = types.Post{ + Creator: msg.Creator, + Id: msg.Id, + Title: msg.Title, + Body: msg.Body, + } + val, found := k.GetPost(ctx, msg.Id) + if !found { + return nil, sdkerrors.Wrap(sdkerrors.ErrKeyNotFound, fmt.Sprintf("key %d doesn't exist", msg.Id)) + } + if msg.Creator != val.Creator { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "incorrect owner") + } + k.SetPost(ctx, post) + return &types.MsgUpdatePostResponse{}, nil +} +``` + +`UpdatePost` takes in a context and a message `MsgUpdatePost` as input, and +returns a response `MsgUpdatePostResponse` and an `error`. The function first +retrieves the current values of the post from the database using the provided +`msg.Id`, and checks if the post exists and if the `msg.Creator` is the same as +the current owner of the post. If either of these checks fail, it returns an +error. If both checks pass, it updates the post in the database with the new +values provided in `msg`, and returns a response without an error. + +## Summary + +Well done! 
You have successfully implemented a number of important methods for +managing posts within a store. + +The `GetPost` method allows you to retrieve a specific post from the store based +on its unique identification number, or post ID. This can be useful for +displaying a specific post to a user, or for updating it. + +The `SetPost` method enables you to update an existing post in the store. This +can be useful for correcting mistakes or updating the content of a post as new +information becomes available. + +Finally, you implemented the `UpdatePost` method, which is called whenever the +blockchain processes a message requesting an update to a post. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/04-blog/05-delete.md b/docs/versioned_docs/version-v0.27/02-guide/04-blog/05-delete.md new file mode 100644 index 0000000..86c91a8 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/04-blog/05-delete.md @@ -0,0 +1,74 @@ +# Deleting posts + +In this chapter, we will be focusing on the process of handling a "delete post" +message. + +## Removing posts + +```go title="x/blog/keeper/post.go" +func (k Keeper) RemovePost(ctx sdk.Context, id uint64) { + store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.PostKey)) + store.Delete(GetPostIDBytes(id)) +} +``` + +`RemovePost` function takes in two arguments: a context object `ctx` and an +unsigned integer `id`. The function removes a post from a key-value store by +deleting the key-value pair associated with the given `id`. The key-value store +is accessed using the `store` variable, which is created by using the `prefix` +package to create a new store using the context's key-value store and a prefix +based on the `PostKey` constant. The `Delete` method is then called on the +`store` object, using the `GetPostIDBytes` function to convert the `id` to a +byte slice as the key to delete. 
+ +## Deleting posts + +```go title="x/blog/keeper/msg_server_delete_post.go" +package keeper + +import ( + "context" + "fmt" + + "blog/x/blog/types" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + +func (k msgServer) DeletePost(goCtx context.Context, msg *types.MsgDeletePost) (*types.MsgDeletePostResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + val, found := k.GetPost(ctx, msg.Id) + if !found { + return nil, sdkerrors.Wrap(sdkerrors.ErrKeyNotFound, fmt.Sprintf("key %d doesn't exist", msg.Id)) + } + if msg.Creator != val.Creator { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "incorrect owner") + } + k.RemovePost(ctx, msg.Id) + return &types.MsgDeletePostResponse{}, nil +} +``` + +`DeletePost` takes in two arguments: a context `goCtx` of type `context.Context` +and a pointer to a message of type `*types.MsgDeletePost`. The function returns +a pointer to a message of type `*types.MsgDeletePostResponse` and an `error`. + +Inside the function, the context is unwrapped using the `sdk.UnwrapSDKContext` +function and the value of the post with the ID specified in the message is +retrieved using the `GetPost` function. If the post is not found, an error is +returned using the `sdkerrors.Wrap` function. If the creator of the message does +not match the creator of the post, another error is returned. If both of these +checks pass, the `RemovePost` function is called with the context and the ID of +the post to delete the post. Finally, the function returns a response message +with no data and a `nil` error. + +In short, `DeletePost` handles a request to delete a post, ensuring that the +requester is the creator of the post before deleting it. + +## Summary + +Congratulations on completing the implementation of the `RemovePost` and +`DeletePost` methods in the keeper package! These methods provide functionality +for removing a post from a store and handling a request to delete a post, +respectively. 
\ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/04-blog/06-show.md b/docs/versioned_docs/version-v0.27/02-guide/04-blog/06-show.md new file mode 100644 index 0000000..63ea056 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/04-blog/06-show.md @@ -0,0 +1,81 @@ +# Show a post + +In this chapter, you will implement a feature in your blogging application that +enables users to retrieve individual blog posts by their unique ID. This ID is +assigned to each blog post when it is created and stored on the blockchain. By +adding this querying functionality, users will be able to easily retrieve +specific blog posts by specifying their ID. + +## Show post + +Let's implement the `ShowPost` keeper method that will be called when a user +makes a query to the blockchain application, specifying the ID of the desired +post. + +```go title="x/blog/keeper/query_show_post.go" +package keeper + +import ( + "context" + + "blog/x/blog/types" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func (k Keeper) ShowPost(goCtx context.Context, req *types.QueryShowPostRequest) (*types.QueryShowPostResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + ctx := sdk.UnwrapSDKContext(goCtx) + post, found := k.GetPost(ctx, req.Id) + if !found { + return nil, sdkerrors.ErrKeyNotFound + } + + return &types.QueryShowPostResponse{Post: post}, nil +} +``` + +`ShowPost` is a function for retrieving a single post object from the +blockchain's state. It takes in two arguments: a `context.Context` object called +`goCtx` and a pointer to a `types.QueryShowPostRequest` object called `req`. It +returns a pointer to a `types.QueryShowPostResponse` object and an `error`. + +The function first checks if the `req` argument is `nil`. 
If it is, it returns +an `error` with the code `InvalidArgument` and the message "invalid request" +using the `status.Error` function from the `google.golang.org/grpc/status` +package. + +If the `req` argument is not `nil`, the function unwraps the `sdk.Context` +object from the `context.Context` object using the `sdk.UnwrapSDKContext` +function. It then retrieves a post object with the specified `Id` from the +blockchain's state using the `GetPost` function, and checks if the post was +found by checking the value of the `found` boolean variable. If the post was not +found, it returns an error with the type `sdkerrors.ErrKeyNotFound`. + +If the post was found, the function creates a new `types.QueryShowPostResponse` +object with the retrieved post object as a field, and returns a pointer to this +object and a `nil` error. + +## Modify `QueryShowPostResponse` + +Include the option `[(gogoproto.nullable) = false]` in the `post` field in the +`QueryShowPostResponse` message to generate the field without a pointer. + +```proto title="proto/blog/blog/query.proto" +message QueryShowPostResponse { + // highlight-next-line + Post post = 1 [(gogoproto.nullable) = false]; +} +``` + +Run the command to generate Go files from proto: + +``` +ignite generate proto-go +``` \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/04-blog/07-list.md b/docs/versioned_docs/version-v0.27/02-guide/04-blog/07-list.md new file mode 100644 index 0000000..a41e56d --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/04-blog/07-list.md @@ -0,0 +1,97 @@ +# List posts + +In this chapter, you will develop a feature that enables users to retrieve all +of the blog posts stored on your blockchain application. The feature will allow +users to perform a query and receive a paginated response, which means that the +output will be divided into smaller chunks or "pages" of data. 
This will allow +users to more easily navigate and browse through the list of posts, as they will +be able to view a specific number of posts at a time rather than having to +scroll through a potentially lengthy list all at once. + +## List posts + +Let's implement the `ListPost` keeper method that will be called when a user +makes a query to the blockchain application, requesting a paginated list of all +the posts stored on chain. + +```go title="x/blog/keeper/query_list_post.go" +package keeper + +import ( + "context" + + "blog/x/blog/types" + + "github.com/cosmos/cosmos-sdk/store/prefix" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/query" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func (k Keeper) ListPost(goCtx context.Context, req *types.QueryListPostRequest) (*types.QueryListPostResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + var posts []types.Post + ctx := sdk.UnwrapSDKContext(goCtx) + + store := ctx.KVStore(k.storeKey) + postStore := prefix.NewStore(store, types.KeyPrefix(types.PostKey)) + + pageRes, err := query.Paginate(postStore, req.Pagination, func(key []byte, value []byte) error { + var post types.Post + if err := k.cdc.Unmarshal(value, &post); err != nil { + return err + } + + posts = append(posts, post) + return nil + }) + + if err != nil { + return nil, status.Error(codes.Internal, err.Error()) + } + + return &types.QueryListPostResponse{Post: posts, Pagination: pageRes}, nil +} +``` + +`ListPost` takes in two arguments: a context object and a request object of type +`QueryListPostRequest`. It returns a response object of type +`QueryListPostResponse` and an error. + +The function first checks if the request object is `nil` and returns an error +with a `InvalidArgument` code if it is. It then initializes an empty slice of +`Post` objects and unwraps the context object. 
+ +It retrieves a key-value store from the context using the `storeKey` field of +the keeper struct and creates a new store using a prefix of the `PostKey`. It +then calls the `Paginate` function from the `query` package on the store and the +pagination information in the request object. The function passed as an argument +to Paginate iterates over the key-value pairs in the store and unmarshals the +values into `Post` objects, which are then appended to the `posts` slice. + +If an error occurs during pagination, the function returns an `Internal error` +with the error message. Otherwise, it returns a `QueryListPostResponse` object +with the list of posts and pagination information. + +## Modify `QueryListPostResponse` + +Add a `repeated` keyword to return a list of posts and include the option +`[(gogoproto.nullable) = false]` to generate the field without a pointer. + +```proto title="proto/blog/blog/query.proto" +message QueryListPostResponse { + // highlight-next-line + repeated Post post = 1 [(gogoproto.nullable) = false]; + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} +``` + +Run the command to generate Go files from proto: + +``` +ignite generate proto-go +``` \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/04-blog/08-play.md b/docs/versioned_docs/version-v0.27/02-guide/04-blog/08-play.md new file mode 100644 index 0000000..1d90d82 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/04-blog/08-play.md @@ -0,0 +1,97 @@ +# Play + +## Create a blog post by Alice + +``` +blogd tx blog create-post hello world --from alice +``` + +## Show a blog post + +``` +blogd q blog show-post 0 +``` + +```yml +post: + body: world + creator: cosmos1x33ummgkjdd6h2frlugt3tft7vnc0nxyfxnx9h + id: "0" + title: hello +``` + +## Create a blog post by Bob + +``` +blogd tx blog create-post foo bar --from bob +``` + +## List all blog posts with pagination + +``` +blogd q blog list-post +``` + +```yml +pagination: + next_key: 
null + total: "2" +post: +- body: world + creator: cosmos1x33ummgkjdd6h2frlugt3tft7vnc0nxyfxnx9h + id: "0" + title: hello +- body: bar + creator: cosmos1ysl9ws3fdamrrj4fs9ytzrrzw6ul3veddk7gz3 + id: "1" + title: foo +``` + +## Update a blog post + +``` +blogd tx blog update-post hello cosmos 0 --from alice +``` + +``` +blogd q blog show-post 0 +``` + +```yml +post: + body: cosmos + creator: cosmos1x33ummgkjdd6h2frlugt3tft7vnc0nxyfxnx9h + id: "0" + title: hello +``` + +## Delete a blog post + +``` +blogd tx blog delete-post 0 --from alice +``` + +``` +blogd q blog list-post +``` + +```yml +pagination: + next_key: null + total: "1" +post: +- body: bar + creator: cosmos1ysl9ws3fdamrrj4fs9ytzrrzw6ul3veddk7gz3 + id: "1" + title: foo +``` + +## Delete a blog post unsuccessfully + +``` +blogd tx blog delete-post 1 --from alice +``` + +```yml +raw_log: 'failed to execute message; message index: 0: incorrect owner: unauthorized' +``` \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/04-blog/09-summary.md b/docs/versioned_docs/version-v0.27/02-guide/04-blog/09-summary.md new file mode 100644 index 0000000..1bcd996 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/04-blog/09-summary.md @@ -0,0 +1,22 @@ +# Summary + +Congratulations on completing the Blog tutorial and building your first +functional application-specific blockchain using Ignite and Cosmos SDK! This is +a significant accomplishment, and you should be proud of the hard work and +dedication you put into it. + +One of the great things about using Ignite is that it allows you to quickly +generate most of the code for your app with just a few commands. This not only +saves you time, but also provides a solid structure for you to build upon as you +develop your app further. In this tutorial, you were able to create code for +handling four types of messages and two types of queries, which are important +building blocks for any blockchain application. 
+ +You also tackled the task of implementing business-specific logic for creating, +updating, and deleting blog posts, as well as fetching individual blog posts by +ID and paginated lists of posts. You should now have a good understanding of how +to implement this sort of functionality in a blockchain context. + +Overall, completing this tutorial is a major accomplishment, and you should feel +confident in your ability to continue developing and expanding upon your app. +Keep up the great work, and keep learning and growing as a developer! \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/04-blog/_category_.json b/docs/versioned_docs/version-v0.27/02-guide/04-blog/_category_.json new file mode 100644 index 0000000..21c2246 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/04-blog/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Module basics: Blog", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/05-loan/00-intro.md b/docs/versioned_docs/version-v0.27/02-guide/05-loan/00-intro.md new file mode 100644 index 0000000..d90dcc5 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/05-loan/00-intro.md @@ -0,0 +1,86 @@ +# DeFi Loan + +Decentralized finance (DeFi) is a rapidly growing sector of the blockchain +ecosystem that is transforming the way we think about financial instruments and +services. DeFi offers a wide range of innovative financial products and +services, including lending, borrowing, spot trading, margin trading, and flash +loans, that are accessible to anyone with an internet connection and a digital +wallet. + +One of the key benefits of DeFi is that it allows end users to access financial +instruments and services quickly and easily, without the need for complex +onboarding processes or the submission of personal documents such as passports +or background checks. 
This makes DeFi an attractive alternative to traditional +banking systems, which can be slow, costly, and inconvenient. + +In this tutorial, you will learn how to create a DeFi platform that enables +users to lend and borrow digital assets from each other. The platform you will +build will be powered by a blockchain, which provides a decentralized and +immutable record of all transactions. This ensures that the platform is +transparent, secure, and resistant to fraud. + +A loan is a financial transaction in which one party, the borrower, receives a +certain amount of assets, such as money or digital tokens, and agrees to pay +back the loan amount plus a fee to the lender by a predetermined deadline. To +secure the loan, the borrower provides collateral, which may be seized by the +lender if the borrower fails to pay back the loan as agreed. + +A loan has several properties that define its terms and conditions. + +The `id` is a unique identifier that is used to identify the loan on a +blockchain. + +The `amount` is the amount of assets that are being lent to the borrower. + +The `fee` is the cost that the borrower must pay to the lender for the loan. + +The `collateral` is the asset or assets that the borrower provides to the lender +as security for the loan. + +The `deadline` is the date by which the borrower must pay back the loan. If the +borrower fails to pay back the loan by the deadline, the lender may choose to +liquidate the loan and seize the collateral. + +The `state` of a loan describes the current status of the loan and can take on +several values, such as `requested`, `approved`, `paid`, `cancelled`, or +`liquidated`. A loan is in the `requested` state when the borrower first submits +a request for the loan. If the lender approves the request, the loan moves to +the `approved` state. When the borrower repays the loan, the loan moves to the +`paid` state. If the borrower cancels the loan before it is approved, the loan +moves to the `cancelled` state. 
If the borrower is unable to pay back the loan
+by the deadline, the lender may choose to liquidate the loan and seize the
+collateral. In this case, the loan moves to the `liquidated` state.
+
+In a loan transaction, there are two parties involved: the borrower and the
+lender. The borrower is the party that requests the loan and agrees to pay back
+the loan amount plus a fee to the lender by a predetermined deadline. The lender
+is the party that approves the loan request and provides the borrower with the
+loan amount.
+
+As a borrower, you should be able to perform several actions on the loan
+platform. These actions may include:
+
+* requesting a loan,
+* canceling a loan,
+* repaying a loan.
+
+Requesting a loan allows you to specify the terms and conditions of the loan,
+including the amount, the fee, the collateral, and the deadline for repayment.
+If you cancel a loan, you can withdraw your request for the loan before it is
+approved or funded. Repaying a loan allows you to pay back the loan amount plus
+the fee to the lender in accordance with the loan terms.
+
+As a lender, you should be able to perform two actions on the platform:
+
+* approving a loan,
+* liquidating a loan.
+
+Approving a loan allows you to accept the terms and conditions of the loan and
+send the loan amount to the borrower. Liquidating a loan allows you to seize
+the collateral if the borrower is unable to pay back the loan by the deadline.
+
+By performing these actions, lenders and borrowers can interact with each other
+and facilitate the lending and borrowing of digital assets on the platform. The
+platform enables users to access financial instruments and services that allow
+them to manage their assets and achieve their financial goals in a secure and
+transparent manner.
\ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/05-loan/01-init.md b/docs/versioned_docs/version-v0.27/02-guide/05-loan/01-init.md new file mode 100644 index 0000000..f09531a --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/05-loan/01-init.md @@ -0,0 +1,72 @@ +# Creating a structure of the application + +To create a structure for a blockchain application that enables users to lend +and borrow digital assets from each other, use the Ignite CLI to generate the +necessary code. + +First, create a new blockchain called `loan` by running the following command: + +``` +ignite scaffold chain loan --no-module +``` + +The `--no-module` flag tells Ignite not to create a default module. Instead, you +will create the module yourself in the next step. + +Next, change the directory to `loan/`: + +``` +cd loan +``` + +Create a module with a dependency on the standard Cosmos SDK `bank` module by +running the following command: + +``` +ignite scaffold module loan --dep bank +``` + +Create a `loan` model with a list of properties. + +``` +ignite scaffold list loan amount fee collateral deadline state borrower lender --no-message +``` + +The `--no-message` flag tells Ignite not to generate Cosmos SDK messages for +creating, updating, and deleting loans. Instead, you will generate the code for +custom messages. + + +To generate the code for handling the messages for requesting, approving, +repaying, liquidating, and cancelling loans, run the following commands: + +``` +ignite scaffold message request-loan amount fee collateral deadline +``` + +``` +ignite scaffold message approve-loan id:uint +``` + +``` +ignite scaffold message repay-loan id:uint +``` + +``` +ignite scaffold message liquidate-loan id:uint +``` + +``` +ignite scaffold message cancel-loan id:uint +``` + +Great job! By using a few simple commands with Ignite CLI, you have successfully +set up the foundation for your blockchain application. 
You have created a loan +model and included keeper methods to allow interaction with the store. In +addition, you have also implemented message handlers for five custom messages. + +Now that the basic structure is in place, it's time to move on to the next phase +of development. In the coming sections, you will be focusing on implementing the +business logic within the message handlers you have created. This will involve +writing code to define the specific actions and processes that should be carried +out when each message is received. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/05-loan/02-bank.md b/docs/versioned_docs/version-v0.27/02-guide/05-loan/02-bank.md new file mode 100644 index 0000000..652a408 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/05-loan/02-bank.md @@ -0,0 +1,32 @@ +# Importing methods from the Bank keeper + +In the previous step you have created the `loan` module with `ignite scaffold +module` using `--dep bank`. This command created a new module and added the +`bank` keeper to the `loan` module, which allows you to add and use bank's +keeper methods in loan's keeper methods. + +To see the changes made by `--dep bank`, review the following files: +`x/loan/keeper/keeper.go` and `x/loan/module.go`. + +Ignite takes care of adding the `bank` keeper, but you still need to tell the +`loan` module which `bank` methods you will be using. You will be using three +methods: `SendCoins`, `SendCoinsFromAccountToModule`, and +`SendCoinsFromModuleToAccount`. 
You can do that by adding method signatures to +the `BankKeeper` interface: + +```go title="x/loan/types/expected_keepers.go" +package types + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" +) + +type BankKeeper interface { + SpendableCoins(ctx sdk.Context, addr sdk.AccAddress) sdk.Coins + // highlight-start + SendCoins(ctx sdk.Context, fromAddr sdk.AccAddress, toAddr sdk.AccAddress, amt sdk.Coins) error + SendCoinsFromAccountToModule(ctx sdk.Context, senderAddr sdk.AccAddress, recipientModule string, amt sdk.Coins) error + SendCoinsFromModuleToAccount(ctx sdk.Context, senderModule string, recipientAddr sdk.AccAddress, amt sdk.Coins) error + // highlight-end +} +``` \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/05-loan/03-request.md b/docs/versioned_docs/version-v0.27/02-guide/05-loan/03-request.md new file mode 100644 index 0000000..5640ff8 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/05-loan/03-request.md @@ -0,0 +1,117 @@ +# Request a loan + +Implement `RequestLoan` keeper method that will be called whenever a user +requests a loan. `RequestLoan` creates a new loan with the provided data, sends +the collateral from the borrower's account to a module account, and adds the +loan to the blockchain's store. 
+ +## Keeper method + +```go title="x/loan/keeper/msg_server_request_loan.go" +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "loan/x/loan/types" +) + +func (k msgServer) RequestLoan(goCtx context.Context, msg *types.MsgRequestLoan) (*types.MsgRequestLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + var loan = types.Loan{ + Amount: msg.Amount, + Fee: msg.Fee, + Collateral: msg.Collateral, + Deadline: msg.Deadline, + State: "requested", + Borrower: msg.Creator, + } + borrower, err := sdk.AccAddressFromBech32(msg.Creator) + if err != nil { + panic(err) + } + collateral, err := sdk.ParseCoinsNormalized(loan.Collateral) + if err != nil { + panic(err) + } + sdkError := k.bankKeeper.SendCoinsFromAccountToModule(ctx, borrower, types.ModuleName, collateral) + if sdkError != nil { + return nil, sdkError + } + k.AppendLoan(ctx, loan) + return &types.MsgRequestLoanResponse{}, nil +} +``` + +The function takes in two arguments: a `context.Context` object and a pointer to +a `types.MsgRequestLoan` struct. It returns a pointer to a +`types.MsgRequestLoanResponse` struct and an `error` object. + +The first thing the function does is create a new `types.Loan` struct with the +data from the input `types.MsgRequestLoan` struct. It sets the `State` field of +`the types.Loan` struct to "requested". + +Next, the function gets the borrower's address from the `msg.Creator` field of +the input `types.MsgRequestLoan` struct. It then parses the `loan.Collateral` +field (which is a string) into `sdk.Coins` using the `sdk.ParseCoinsNormalized` +function. + +The function then sends the collateral from the borrower's account to a module +account using the `k.bankKeeper.SendCoinsFromAccountToModule` function. Finally, +it adds the new loan to a keeper using the `k.AppendLoan` function. The function +returns a `types.MsgRequestLoanResponse` struct and a `nil` error if all goes +well. 
+ +## Basic message validation + +When a loan is created, a certain message input validation is required. You want +to throw error messages in case the end user tries impossible inputs. + +```go title="x/loan/types/message_request_loan.go" +package types + +import ( + // highlight-next-line + "strconv" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + +func (msg *MsgRequestLoan) ValidateBasic() error { + _, err := sdk.AccAddressFromBech32(msg.Creator) + if err != nil { + return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + } + // highlight-start + amount, _ := sdk.ParseCoinsNormalized(msg.Amount) + if !amount.IsValid() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "amount is not a valid Coins object") + } + if amount.Empty() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "amount is empty") + } + fee, _ := sdk.ParseCoinsNormalized(msg.Fee) + if !fee.IsValid() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "fee is not a valid Coins object") + } + deadline, err := strconv.ParseInt(msg.Deadline, 10, 64) + if err != nil { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "deadline is not an integer") + } + if deadline <= 0 { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "deadline should be a positive integer") + } + collateral, _ := sdk.ParseCoinsNormalized(msg.Collateral) + if !collateral.IsValid() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "collateral is not a valid Coins object") + } + if collateral.Empty() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "collateral is empty") + } + // highlight-end + return nil +} +``` diff --git a/docs/versioned_docs/version-v0.27/02-guide/05-loan/04-approve.md b/docs/versioned_docs/version-v0.27/02-guide/05-loan/04-approve.md new file mode 100644 index 0000000..b0f0e6c --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/05-loan/04-approve.md @@ -0,0 +1,97 @@ +# Approve a 
loan + +After a loan request has been made, it is possible for another account to +approve the loan and accept the terms proposed by the borrower. This process +involves the transfer of the requested funds from the lender to the borrower. + +To be eligible for approval, a loan must have a status of "requested." This +means that the borrower has made a request for a loan and is waiting for a +lender to agree to the terms and provide the funds. Once a lender has decided to +approve the loan, they can initiate the transfer of the funds to the borrower. + +Upon loan approval, the status of the loan is changed to "approved." This +signifies that the funds have been successfully transferred and that the loan +agreement is now in effect. + +## Keeper method + +```go title="x/loan/keeper/msg_server_approve_loan.go" +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "loan/x/loan/types" +) + +func (k msgServer) ApproveLoan(goCtx context.Context, msg *types.MsgApproveLoan) (*types.MsgApproveLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + loan, found := k.GetLoan(ctx, msg.Id) + if !found { + return nil, sdkerrors.Wrapf(sdkerrors.ErrKeyNotFound, "key %d doesn't exist", msg.Id) + } + if loan.State != "requested" { + return nil, sdkerrors.Wrapf(types.ErrWrongLoanState, "%v", loan.State) + } + lender, _ := sdk.AccAddressFromBech32(msg.Creator) + borrower, _ := sdk.AccAddressFromBech32(loan.Borrower) + amount, err := sdk.ParseCoinsNormalized(loan.Amount) + if err != nil { + return nil, sdkerrors.Wrap(types.ErrWrongLoanState, "Cannot parse coins in loan amount") + } + err = k.bankKeeper.SendCoins(ctx, lender, borrower, amount) + if err != nil { + return nil, err + } + loan.Lender = msg.Creator + loan.State = "approved" + k.SetLoan(ctx, loan) + return &types.MsgApproveLoanResponse{}, nil +} +``` + +`ApproveLoan` takes a context and a message of type `*types.MsgApproveLoan` as 
+input, and returns a pointer to a `types.MsgApproveLoanResponse` and an `error`. + +The function first retrieves a loan object by calling `k.GetLoan(ctx, msg.Id)`, +where `ctx` is a context object, `k` is the `msgServer` object, `GetLoan` is a +method on `k`, and `msg.Id` is a field of the msg object passed as an argument. +If the loan is not found, it returns `nil` and an error wrapped with +`sdkerrors.ErrKeyNotFound`. + +Next, the function checks if the loan's state is `"requested"`. If it is not, it +returns `nil` and an error wrapped with `types.ErrWrongLoanState`. + +If the loan's state is `"requested"`, the function parses the addresses of the +lender and borrower from bech32 strings, and then parses the `amount` of the +loan from a string. If there is an error parsing the coins in the loan amount, +it returns `nil` and an error wrapped with `types.ErrWrongLoanState`. + +Otherwise, the function calls the `SendCoins` method on the `k.bankKeeper` +object, passing it the context, the lender and borrower addresses, and the +amount of the loan. It then updates the lender field of the loan object and sets +its state to `"approved"`. Finally, it stores the updated loan object by calling +`k.SetLoan(ctx, loan)`. + +At the end, the function returns a `types.MsgApproveLoanResponse` object and +`nil` for the error. 
+ +## Register a custom error + +To register the custom error `ErrWrongLoanState` that is used in the +`ApproveLoan` function, modify the "errors.go" file: + +```go title="x/loan/types/errors.go" +package types + +import ( + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + +var ( + ErrWrongLoanState = sdkerrors.Register(ModuleName, 2, "wrong loan state") +) +``` \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/05-loan/05-repay.md b/docs/versioned_docs/version-v0.27/02-guide/05-loan/05-repay.md new file mode 100644 index 0000000..6ad9d4c --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/05-loan/05-repay.md @@ -0,0 +1,97 @@ +# Repay a loan + +The `RepayLoan` method is responsible for handling the repayment of a loan. This +involves transferring the borrowed funds, along with any agreed upon fees, from +the borrower to the lender. In addition, the collateral that was provided as +part of the loan agreement will be released from the escrow account and returned +to the borrower. + +It is important to note that the `RepayLoan` method can only be called under +certain conditions. Firstly, the transaction must be signed by the borrower of +the loan. This ensures that only the borrower has the ability to initiate the +repayment process. Secondly, the loan must be in an approved status. This means +that the loan has received approval and is ready to be repaid. + +To implement the `RepayLoan` method, we must ensure that these conditions are +met before proceeding with the repayment process. Once the necessary checks have +been performed, the method can then handle the transfer of funds and the release +of the collateral from the escrow account. 
+ +## Keeper method + +```go title="x/loan/keeper/msg_server_repay_loan.go" +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "loan/x/loan/types" +) + +func (k msgServer) RepayLoan(goCtx context.Context, msg *types.MsgRepayLoan) (*types.MsgRepayLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + loan, found := k.GetLoan(ctx, msg.Id) + if !found { + return nil, sdkerrors.Wrapf(sdkerrors.ErrKeyNotFound, "key %d doesn't exist", msg.Id) + } + if loan.State != "approved" { + return nil, sdkerrors.Wrapf(types.ErrWrongLoanState, "%v", loan.State) + } + lender, _ := sdk.AccAddressFromBech32(loan.Lender) + borrower, _ := sdk.AccAddressFromBech32(loan.Borrower) + if msg.Creator != loan.Borrower { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "Cannot repay: not the borrower") + } + amount, _ := sdk.ParseCoinsNormalized(loan.Amount) + fee, _ := sdk.ParseCoinsNormalized(loan.Fee) + collateral, _ := sdk.ParseCoinsNormalized(loan.Collateral) + err := k.bankKeeper.SendCoins(ctx, borrower, lender, amount) + if err != nil { + return nil, err + } + err = k.bankKeeper.SendCoins(ctx, borrower, lender, fee) + if err != nil { + return nil, err + } + err = k.bankKeeper.SendCoinsFromModuleToAccount(ctx, types.ModuleName, borrower, collateral) + if err != nil { + return nil, err + } + loan.State = "repayed" + k.SetLoan(ctx, loan) + return &types.MsgRepayLoanResponse{}, nil +} +``` + +`RepayLoan` takes in two arguments: a context and a pointer to a +`types.MsgRepayLoan` type. It returns a pointer to a +`types.MsgRepayLoanResponse` type and an `error`. + +The method first retrieves a loan from storage by passing the provided loan ID +to the `k.GetLoan` method. If the loan cannot be found, the method returns an +error wrapped in a `sdkerrors.ErrKeyNotFound` error. + +The method then checks that the state of the loan is "approved". 
If it is not, +the method returns an error wrapped in a `types.ErrWrongLoanState` error. + +Next, the method converts the lender and borrower addresses stored in the loan +struct to `sdk.AccAddress` types using the `sdk.AccAddressFromBech32` function. +It then checks that the transaction is signed by the borrower of the loan by +comparing the `msg.Creator` field to the borrower address stored in the loan +struct. If these do not match, the method returns an error wrapped in a +`sdkerrors.ErrUnauthorized` error. + +The method then parses the loan amount, fee, and collateral stored in the loan +struct as `sdk.Coins` using the `sdk.ParseCoinsNormalized` function. It then +uses the `k.bankKeeper.SendCoins` function to transfer the loan amount and fee +from the borrower to the lender. It then uses the +`k.bankKeeper.SendCoinsFromModuleToAccount` function to transfer the collateral +from the escrow account to the borrower. + +Finally, the method updates the state of the loan to "repayed" and stores the +updated loan in storage using the `k.SetLoan` method. The method returns a +`types.MsgRepayLoanResponse` and a `nil` error to indicate that the repayment +process was successful. diff --git a/docs/versioned_docs/version-v0.27/02-guide/05-loan/06-liquidate.md b/docs/versioned_docs/version-v0.27/02-guide/05-loan/06-liquidate.md new file mode 100644 index 0000000..2c4f67b --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/05-loan/06-liquidate.md @@ -0,0 +1,89 @@ +# Liquidate loan + +The `LiquidateLoan` method is a function that allows the lender to sell off the +collateral belonging to the borrower in the event that the borrower has failed +to repay the loan by the specified deadline. This process is known as +"liquidation" and is typically carried out as a way for the lender to recoup +their losses in the event that the borrower is unable to fulfill their repayment +obligations. 
+ +During the liquidation process, the collateral tokens that have been pledged by +the borrower as security for the loan are transferred from the borrower's +account to the lender's account. This transfer is initiated by the lender and is +typically triggered when the borrower fails to repay the loan by the agreed upon +deadline. Once the collateral has been transferred, the lender can then sell it +off in order to recoup their losses and compensate for the unpaid loan. + +## Keeper method + +```go title="x/loan/keeper/msg_server_liquidate_loan.go" +package keeper + +import ( + "context" + "strconv" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "loan/x/loan/types" +) + +func (k msgServer) LiquidateLoan(goCtx context.Context, msg *types.MsgLiquidateLoan) (*types.MsgLiquidateLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + loan, found := k.GetLoan(ctx, msg.Id) + if !found { + return nil, sdkerrors.Wrapf(sdkerrors.ErrKeyNotFound, "key %d doesn't exist", msg.Id) + } + if loan.Lender != msg.Creator { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "Cannot liquidate: not the lender") + } + if loan.State != "approved" { + return nil, sdkerrors.Wrapf(types.ErrWrongLoanState, "%v", loan.State) + } + lender, _ := sdk.AccAddressFromBech32(loan.Lender) + collateral, _ := sdk.ParseCoinsNormalized(loan.Collateral) + deadline, err := strconv.ParseInt(loan.Deadline, 10, 64) + if err != nil { + panic(err) + } + if ctx.BlockHeight() < deadline { + return nil, sdkerrors.Wrap(types.ErrDeadline, "Cannot liquidate before deadline") + } + err = k.bankKeeper.SendCoinsFromModuleToAccount(ctx, types.ModuleName, lender, collateral) + if err != nil { + return nil, err + } + loan.State = "liquidated" + k.SetLoan(ctx, loan) + return &types.MsgLiquidateLoanResponse{}, nil +} +``` + +`LiquidateLoan` takes in a context and a `types.MsgLiquidateLoan` message as input and returns a types.MsgLiquidateLoanResponse message 
and an error as output. + +The function first retrieves a loan using the `GetLoan` method and the `Id` field of the input message. If the loan is not found, it returns an error using the `sdkerrors.Wrap` function and the `sdkerrors.ErrKeyNotFound` error code. + +Next, the function checks that the `Creator` field of the input message is the same as the `Lender` field of the loan. If they are not the same, it returns an error using the `sdkerrors.Wrap` function and the `sdkerrors.ErrUnauthorized` error code. + +The function then checks that the State field of the loan is equal to "approved". If it is not, it returns an error using the `sdkerrors.Wrapf` function and the `types.ErrWrongLoanState` error code. + +The function then converts the Lender field of the loan to an address using the `sdk.AccAddressFromBech32` function and the `Collateral` field to coins using the `sdk.ParseCoinsNormalized` function. It also converts the `Deadline` field to an integer using the `strconv.ParseInt` function. If this function returns an error, it panics. + +Finally, the function checks that the current block height is greater than or equal to the deadline. If it is not, it returns an error using the `sdkerrors.Wrap` function and the `types.ErrDeadline` error code. If all checks pass, the function uses the `bankKeeper.SendCoinsFromModuleToAccount` method to transfer the collateral from the module account to the lender's account and updates the `State` field of the loan to `"liquidated"`. It then stores the updated loan using the `SetLoan` method and returns a `types.MsgLiquidateLoanResponse` message with no error. 
+ +## Register a custom error + +```go title="x/loan/types/errors.go" +package types + +import ( + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + +var ( + ErrWrongLoanState = sdkerrors.Register(ModuleName, 2, "wrong loan state") + // highlight-next-line + ErrDeadline = sdkerrors.Register(ModuleName, 3, "deadline") +) +``` diff --git a/docs/versioned_docs/version-v0.27/02-guide/05-loan/07-cancel.md b/docs/versioned_docs/version-v0.27/02-guide/05-loan/07-cancel.md new file mode 100644 index 0000000..06e7792 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/05-loan/07-cancel.md @@ -0,0 +1,73 @@ +# Cancel a loan + +As a borrower, you have the option to cancel a loan you have created if you no +longer want to proceed with it. However, this action is only possible if the +loan's current status is marked as "requested". + +If you decide to cancel the loan, the collateral tokens that were being held as +security for the loan will be transferred back to your account from the module +account. This means that you will regain possession of the collateral tokens you +had originally put up for the loan. 
+ +```go title="x/loan/keeper/msg_server_cancel_loan.go" +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "loan/x/loan/types" +) + +func (k msgServer) CancelLoan(goCtx context.Context, msg *types.MsgCancelLoan) (*types.MsgCancelLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + loan, found := k.GetLoan(ctx, msg.Id) + if !found { + return nil, sdkerrors.Wrapf(sdkerrors.ErrKeyNotFound, "key %d doesn't exist", msg.Id) + } + if loan.Borrower != msg.Creator { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "Cannot cancel: not the borrower") + } + if loan.State != "requested" { + return nil, sdkerrors.Wrapf(types.ErrWrongLoanState, "%v", loan.State) + } + borrower, _ := sdk.AccAddressFromBech32(loan.Borrower) + collateral, _ := sdk.ParseCoinsNormalized(loan.Collateral) + err := k.bankKeeper.SendCoinsFromModuleToAccount(ctx, types.ModuleName, borrower, collateral) + if err != nil { + return nil, err + } + loan.State = "cancelled" + k.SetLoan(ctx, loan) + return &types.MsgCancelLoanResponse{}, nil +} +``` + +`CancelLoan` takes in two arguments: a `context.Context` named `goCtx` and a +pointer to a `types.MsgCancelLoan` named `msg`. It returns a pointer to a +`types.MsgCancelLoanResponse` and an error. + +The function begins by using the `sdk.UnwrapSDKContext` method to get the +`sdk.Context` from the `context.Context` object. It then uses the `GetLoan` +method of the `msgServer` type to retrieve a loan identified by the `Id` field +of the `msg` argument. If the loan is not found, the function returns an error +using the `sdkerrors.ErrKeyNotFound` error wrapped with the `sdkerrors.Wrapf` method. + +Next, the function checks if the `Creator` field of the msg argument is the same +as the `Borrower` field of the loan. If they are not the same, the function +returns an error using the `sdkerrors.ErrUnauthorized` error wrapped with the +`sdkerrors.Wrap` method. 
+ +The function then checks if the `State` field of the loan is equal to the string +`"requested"`. If it is not, the function returns an error using the +`types.ErrWrongLoanState` error wrapped with the `sdkerrors.Wrapf` method. + +If the loan has the correct state and the creator of the message is the borrower +of the loan, the function proceeds to send the collateral coins held in the +`Collateral` field of the loan back to the borrower's account using the +`SendCoinsFromModuleToAccount` method of the `bankKeeper`. The function then +updates the State field of the loan to the string "cancelled" and sets the +updated loan using the `SetLoan` method. Finally, the function returns a +`types.MsgCancelLoanResponse` object and a nil error. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/05-loan/08-play.md b/docs/versioned_docs/version-v0.27/02-guide/05-loan/08-play.md new file mode 100644 index 0000000..f0b0bc9 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/05-loan/08-play.md @@ -0,0 +1,318 @@ +# Play + +Add `10000foocoin` to Alice's account. These tokens will be used as a loan +collateral. + +```yml title="config.yml" +version: 1 +accounts: + - name: alice + coins: + - 20000token + # highlight-next-line + - 10000foocoin + - 200000000stake + - name: bob + coins: + - 10000token + - 100000000stake +client: + openapi: + path: docs/static/openapi.yml +faucet: + name: bob + coins: + - 5token + - 100000stake +validators: + - name: alice + bonded: 100000000stake +``` + +Start a blockchain node: + +``` +ignite chain serve +``` + +## Repaying a loan + +Request a loan of `1000token` with `100token` as a fee and `1000foocoin` as a +collateral from Alice's account. 
The deadline is set to `500` blocks: + +``` +loand tx loan request-loan 1000token 100token 1000foocoin 500 --from alice +``` + +``` +loand q loan list-loan +``` + +```yml +Loan: +- amount: 1000token + borrower: cosmos153dk8qh56v4yg6e4uzrvvqjueu6d36fptlr2kw + collateral: 1000foocoin + deadline: "500" + fee: 100token + id: "0" + lender: "" + state: requested +``` + +Please be aware that the addresses displayed in your terminal window (such as those in the `borrower` field) will not match the ones provided in this tutorial. This is because Ignite generates new accounts each time a chain is started, unless an account has a mnemonic specified in the `config.yml` file. + +Approve the loan from Bob's account: + +``` +loand tx loan approve-loan 0 --from bob +``` + +``` +loand q loan list-loan +``` + +The `lender` field has been updated to Bob's address and the `state` field has +been updated to `approved`: + +```yml +Loan: +- amount: 1000token + borrower: cosmos153dk8qh56v4yg6e4uzrvvqjueu6d36fptlr2kw + collateral: 1000foocoin + deadline: "500" + fee: 100token + id: "0" + # highlight-start + lender: cosmos1qfzpxfhsu2qfy2exkukuanrkzrrexh9yeg2pr4 + state: approved + # highlight-end +``` + +``` +loand q bank balances $(loand keys show alice -a) +``` + +The `foocoin` balance has been updated to `9000`, because `1000foocoin` has been +transferred as collateral to the module account. 
The `token` balance has been +updated to `21000`, because `1000token` has been transferred from Bob's account +to Alice's account as a loan: + +```yml +balances: + # highlight-start +- amount: "9000" + denom: foocoin + # highlight-end +- amount: "100000000" + denom: stake + # highlight-start +- amount: "21000" + denom: token + # highlight-end +``` + +``` +loand q bank balances $(loand keys show bob -a) +``` + +The `token` balance has been updated to `9000`, because `1000token` has been +transferred from Bob's account to Alice's account as a loan: + +```yml +balances: +- amount: "100000000" + denom: stake + # highlight-start +- amount: "9000" + denom: token + # highlight-end +``` + +Repay the loan from Alice's account: + +``` +loand tx loan repay-loan 0 --from alice +``` + +``` +loand q loan list-loan +``` + +The `state` field has been updated to `repayed`: + +```yml +Loan: +- amount: 1000token + borrower: cosmos153dk8qh56v4yg6e4uzrvvqjueu6d36fptlr2kw + collateral: 1000foocoin + deadline: "500" + fee: 100token + id: "0" + lender: cosmos1qfzpxfhsu2qfy2exkukuanrkzrrexh9yeg2pr4 + # highlight-next-line + state: repayed +``` + +``` +loand q bank balances $(loand keys show alice -a) +``` + +The `foocoin` balance has been updated to `10000`, because `1000foocoin` has +been transferred from the module account to Alice's account. 
The `token` balance +has been updated to `19900`, because `1000token` has been transferred from +Alice's account to Bob's account as a repayment and `100token` has been +transferred from Alice's account to Bob's account as a fee: + +```yml +balances: + # highlight-start +- amount: "10000" + denom: foocoin + # highlight-end +- amount: "100000000" + denom: stake + # highlight-start +- amount: "19900" + denom: token + # highlight-end +``` + +``` +loand q bank balances $(loand keys show bob -a) +``` + +The `token` balance has been updated to `10100`, because `1000token` has been +transferred from Alice's account to Bob's account as a repayment and `100token` +has been transferred from Alice's account to Bob's account as a fee: + +```yml +balances: +- amount: "100000000" + denom: stake + # highlight-start +- amount: "10100" + denom: token + # highlight-end +``` + +## Liquidating a loan + +Request a loan of `1000token` with `100token` as a fee and `1000foocoin` as a +collateral from Alice's account. The deadline is set to `20` blocks. 
The +deadline is set to a very small value, so that the loan can be quickly +liquidated in the next step: + +``` +loand tx loan request-loan 1000token 100token 1000foocoin 20 --from alice +``` + +``` +loand q loan list-loan +``` + +A loan has been added to the list: + +```yml +Loan: +- amount: 1000token + borrower: cosmos153dk8qh56v4yg6e4uzrvvqjueu6d36fptlr2kw + collateral: 1000foocoin + deadline: "500" + fee: 100token + id: "0" + lender: cosmos1qfzpxfhsu2qfy2exkukuanrkzrrexh9yeg2pr4 + state: repayed + # highlight-start +- amount: 1000token + borrower: cosmos153dk8qh56v4yg6e4uzrvvqjueu6d36fptlr2kw + collateral: 1000foocoin + deadline: "20" + fee: 100token + id: "1" + lender: "" + state: requested + # highlight-end +``` + +Approve the loan from Bob's account: + +``` +loand tx loan approve-loan 1 --from bob +``` + +Liquidate the loan from Bob's account: + +``` +loand tx loan liquidate-loan 1 --from bob +``` + +``` +loand q loan list-loan +``` + +The loan has been liquidated: + +```yml +Loan: +- amount: 1000token + borrower: cosmos153dk8qh56v4yg6e4uzrvvqjueu6d36fptlr2kw + collateral: 1000foocoin + deadline: "500" + fee: 100token + id: "0" + lender: cosmos1qfzpxfhsu2qfy2exkukuanrkzrrexh9yeg2pr4 + state: repayed + # highlight-start +- amount: 1000token + borrower: cosmos153dk8qh56v4yg6e4uzrvvqjueu6d36fptlr2kw + collateral: 1000foocoin + deadline: "20" + fee: 100token + id: "1" + lender: cosmos1qfzpxfhsu2qfy2exkukuanrkzrrexh9yeg2pr4 + state: liquidated + # highlight-end +``` + +``` +loand q bank balances $(loand keys show alice -a) +``` + +The `foocoin` balance has been updated to `9000`, because `1000foocoin` has been +transferred from Alice's account to the module account as a collateral. 
Alice +has lost her collateral, but she has kept the loan amount: + +```yml +balances: + # highlight-start +- amount: "9000" + denom: foocoin + # highlight-end +- amount: "100000000" + denom: stake + # highlight-start +- amount: "20900" + denom: token + # highlight-end +``` + +``` +loand q bank balances $(loand keys show bob -a) +``` + +The `foocoin` balance has been updated to `1000`, because `1000foocoin` has been +transferred from the module account to Bob's account as a collateral. Bob has +gained the collateral, but he has lost the loan amount: + +```yml +balances: + # highlight-start +- amount: "1000" + denom: foocoin + # highlight-end +- amount: "100000000" + denom: stake +- amount: "9100" + denom: token +``` diff --git a/docs/versioned_docs/version-v0.27/02-guide/05-loan/_category_.json b/docs/versioned_docs/version-v0.27/02-guide/05-loan/_category_.json new file mode 100644 index 0000000..029e81b --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/05-loan/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Advanced Module: DeFi Loan", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/06-ibc.md b/docs/versioned_docs/version-v0.27/02-guide/06-ibc.md new file mode 100644 index 0000000..4a5a3c6 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/06-ibc.md @@ -0,0 +1,710 @@ +--- +sidebar_position: 7 +description: Build an understanding of how to create and send packets across blockchains and navigate between blockchains. +title: "Inter-Blockchain Communication: Basics" +--- + +# Inter-Blockchain Communication: Basics + +The Inter-Blockchain Communication protocol (IBC) is an important part of the +Cosmos SDK ecosystem. The Hello World tutorial is a time-honored tradition in +computer programming. This tutorial builds an understanding of how to create and +send packets across blockchain. This foundational knowledge helps you navigate +between blockchains with the Cosmos SDK. 
+ +**You will learn how to** + +- Use IBC to create and send packets between blockchains. +- Navigate between blockchains using the Cosmos SDK and the Ignite CLI Relayer. +- Create a basic blog post and save the post on another blockchain. + +## What is IBC? + +The Inter-Blockchain Communication protocol (IBC) allows blockchains to talk to +each other. IBC handles transport across different sovereign blockchains. This +end-to-end, connection-oriented, stateful protocol provides reliable, ordered, +and authenticated communication between heterogeneous blockchains. + +The [IBC protocol in the Cosmos +SDK](https://ibc.cosmos.network/main/ibc/overview) is the standard for the +interaction between two blockchains. The IBC module interface defines how packets +and messages are constructed to be interpreted by the sending and the receiving +blockchain. + +The IBC relayer lets you connect between sets of IBC-enabled chains. This +tutorial teaches you how to create two blockchains and then start and use the +relayer with Ignite CLI to connect two blockchains. + +This tutorial covers essentials like modules, IBC packets, relayer, and the +lifecycle of packets routed through IBC. + +## Create a blockchain + +Create a blockchain app with a blog module to write posts on other blockchains +that contain the Hello World message. For this tutorial, you can write posts for +the Cosmos SDK universe that contain Hello Mars, Hello Cosmos, and Hello Earth +messages. + +For this simple example, create an app that contains a blog module that has a +post transaction with title and text. + +After you define the logic, run two blockchains that have this module installed. + +- The chains can send posts between each other using IBC. + +- On the sending chain, save the `acknowledged` and `timed out` posts. + +After the transaction is acknowledged by the receiving chain, you know that the +post is saved on both blockchains. + +- The sending chain has the additional data `postID`. 
+ +- Sent posts that are acknowledged and timed out contain the title and the + target chain of the post. These identifiers are visible on the parameter + `chain`. The following chart shows the lifecycle of a packet that travels + through IBC. + +![The Lifecycle of an IBC packet](./images/packet_sendpost.png) + +## Build your blockchain app + +Use Ignite CLI to scaffold the blockchain app and the blog module. + +### Build a new blockchain + +To scaffold a new blockchain named `planet`: + +```bash +ignite scaffold chain planet --no-module +cd planet +``` + +A new directory named `planet` is created in your home directory. The `planet` +directory contains a working blockchain app. + +### Scaffold the blog module inside your blockchain + +Next, use Ignite CLI to scaffold a blog module with IBC capabilities. The blog +module contains the logic for creating blog posts and routing them through IBC +to the second blockchain. + +To scaffold a module named `blog`: + +```bash +ignite scaffold module blog --ibc +``` + +A new directory with the code for an IBC module is created in `planet/x/blog`. +Modules scaffolded with the `--ibc` flag include all the logic for the +scaffolded IBC module. + +### Generate CRUD actions for types + +Next, create the CRUD actions for the blog module types. + +Use the `ignite scaffold list` command to scaffold the boilerplate code for the +create, read, update, and delete (CRUD) actions. 
+ +These `ignite scaffold list` commands create CRUD code for the following +transactions: + +- Creating blog posts + + ```bash + ignite scaffold list post title content creator --no-message --module blog + ``` + +- Processing acknowledgments for sent posts + + ```bash + ignite scaffold list sentPost postID title chain creator --no-message --module blog + ``` + +- Managing post timeouts + + ```bash + ignite scaffold list timedoutPost title chain creator --no-message --module blog + ``` + +The scaffolded code includes proto files for defining data structures, messages, +messages handlers, keepers for modifying the state, and CLI commands. + +### Ignite CLI Scaffold List Command Overview + +``` +ignite scaffold list [typeName] [field1] [field2] ... [flags] +``` + +The first argument of the `ignite scaffold list [typeName]` command specifies +the name of the type being created. For the blog app, you created `post`, +`sentPost`, and `timedoutPost` types. + +The next arguments define the fields that are associated with the type. For the +blog app, you created `title`, `content`, `postID`, and `chain` fields. + +The `--module` flag defines which module the new transaction type is added to. +This optional flag lets you manage multiple modules within your Ignite CLI app. +When the flag is not present, the type is scaffolded in the module that matches +the name of the repo. + +When a new type is scaffolded, the default behavior is to scaffold messages that +can be sent by users for CRUD operations. The `--no-message` flag disables this +feature. Disable the messages option for the app since you want the posts to be +created upon reception of IBC packets and not directly created from a user's +messages. + +### Scaffold a sendable and interpretable IBC packet + +You must generate code for a packet that contains the title and the content of +the blog post. + +The `ignite packet` command creates the logic for an IBC packet that can be sent +to another blockchain. 
+

- The `title` and `content` are stored on the target chain.

- The `postID` is acknowledged on the sending chain.

To scaffold a sendable and interpretable IBC packet:

```bash
ignite scaffold packet ibcPost title content --ack postID --module blog
```

Notice the fields in the `ibcPost` packet match the fields in the `post` type
that you created earlier.

- The `--ack` flag defines which identifier is returned to the sending
  blockchain.

- The `--module` flag specifies to create the packet in a particular IBC module.

The `ignite packet` command also scaffolds the CLI command that is capable of
sending an IBC packet:

```bash
planetd tx blog send-ibcPost [portID] [channelID] [title] [content]
```

## Modify the source code

After you create the types and transactions, you must manually insert the logic
to manage updates in the database. Modify the source code to save the data as
specified earlier in this tutorial.

### Add creator to the blog post packet

Start with the proto file that defines the structure of the IBC packet.

To identify the creator of the post in the receiving blockchain, add the
`creator` field inside the packet. This field was not specified directly in the
command because it would automatically become a parameter in the `SendIbcPost`
CLI command.

```protobuf title="proto/planet/blog/packet.proto"
message IbcPostPacketData {
  string title = 1;
  string content = 2;
  // highlight-next-line
  string creator = 3;
}
```

To make sure the receiving chain has content on the creator of a blog post, add
the `msg.Creator` value to the IBC `packet`.

- The content of the `sender` of the message is automatically included in
  `SendIbcPost` message.
- The sender is verified as the signer of the message, so you can add the
  `msg.Creator` as the creator to the new packet before it is sent over IBC.

```go title="x/blog/keeper/msg_server_ibc_post.go"
package keeper

import (
	// ... 
+ "planet/x/blog/types" +) + +func (k msgServer) SendIbcPost(goCtx context.Context, msg *types.MsgSendIbcPost) (*types.MsgSendIbcPostResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // TODO: logic before transmitting the packet + + // Construct the packet + var packet types.IbcPostPacketData + + packet.Title = msg.Title + packet.Content = msg.Content + // highlight-next-line + packet.Creator = msg.Creator + + // Transmit the packet + err := k.TransmitIbcPostPacket( + ctx, + packet, + msg.Port, + msg.ChannelID, + clienttypes.ZeroHeight(), + msg.TimeoutTimestamp, + ) + if err != nil { + return nil, err + } + + return &types.MsgSendIbcPostResponse{}, nil +} +``` + +### Receive the post + +The methods for primary transaction logic are in the `x/blog/keeper/ibc_post.go` +file. Use these methods to manage IBC packets: + +- `TransmitIbcPostPacket` is called manually to send the packet over IBC. This + method also defines the logic before the packet is sent over IBC to another + blockchain app. +- `OnRecvIbcPostPacket` hook is automatically called when a packet is received + on the chain. This method defines the packet reception logic. +- `OnAcknowledgementIbcPostPacket` hook is called when a sent packet is + acknowledged on the source chain. This method defines the logic when the + packet has been received. +- `OnTimeoutIbcPostPacket` hook is called when a sent packet times out. This + method defines the logic when the packet is not received on the target chain + +You must modify the source code to add the logic inside those functions so that +the data tables are modified accordingly. + +On reception of the post message, create a new post with the title and the +content on the receiving chain. 
+ +To identify the blockchain app that a message is originating from and who +created the message, use an identifier in the following format: + +`<portID>-<channelID>-<creatorAddress>` + +Finally, the Ignite CLI-generated AppendPost function returns the ID of the new +appended post. You can return this value to the source chain through +acknowledgment. + +Append the type instance as `PostID` on receiving the packet: + +- The context `ctx` is an [immutable data + structure](https://docs.cosmos.network/main/learn/advanced/context#go-context-package) + that has header data from the transaction. See [how the context is + initiated](https://github.com/cosmos/cosmos-sdk/blob/main/types/context.go#L71) +- The identifier format that you defined earlier +- The `title` is the Title of the blog post +- The `content` is the Content of the blog post + +In the `x/blog/keeper/ibc_post.go` file, make sure to import `"strconv"` below +`"errors"`: + +```go title="x/blog/keeper/ibc_post.go" +import ( + //... + + "strconv" + +// ... +) +``` + +Then modify the `OnRecvIbcPostPacket` keeper function with the following code: + +```go +package keeper + +// ... + +func (k Keeper) OnRecvIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData) (packetAck types.IbcPostPacketAck, err error) { + // validate packet data upon receiving + if err := data.ValidateBasic(); err != nil { + return packetAck, err + } + + id := k.AppendPost( + ctx, + types.Post{ + Creator: packet.SourcePort + "-" + packet.SourceChannel + "-" + data.Creator, + Title: data.Title, + Content: data.Content, + }, + ) + + packetAck.PostID = strconv.FormatUint(id, 10) + + return packetAck, nil +} +``` + +### Receive the post acknowledgement + +On the sending blockchain, store a `sentPost` so you know that the post has been +received on the target chain. + +Store the title and the target to identify the post. 
+ +When a packet is scaffolded, the default type for the received acknowledgment +data is a type that identifies if the packet treatment has failed. The +`Acknowledgement_Error` type is set if `OnRecvIbcPostPacket` returns an error +from the packet. + +```go title="x/blog/keeper/ibc_post.go" +package keeper + +// ... + +// x/blog/keeper/ibc_post.go +func (k Keeper) OnAcknowledgementIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + // We will not treat acknowledgment error in this tutorial + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.IbcPostPacketAck + + if err := types.ModuleCdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + k.AppendSentPost( + ctx, + types.SentPost{ + Creator: data.Creator, + PostID: packetAck.PostID, + Title: data.Title, + Chain: packet.DestinationPort + "-" + packet.DestinationChannel, + }, + ) + + return nil + default: + return errors.New("the counter-party module does not implement the correct acknowledgment format") + } +} +``` + +### Store information about the timed-out packet + +Store posts that have not been received by target chains in `timedoutPost` +posts. This logic follows the same format as `sentPost`. + +```go title="x/blog/keeper/ibc_post.go" +func (k Keeper) OnTimeoutIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData) error { + k.AppendTimedoutPost( + ctx, + types.TimedoutPost{ + Creator: data.Creator, + Title: data.Title, + Chain: packet.DestinationPort + "-" + packet.DestinationChannel, + }, + ) + + return nil +} + +``` + +This last step completes the basic `blog` module setup. 
The blockchain is now +ready! + +## Use the IBC modules + +You can now spin up the blockchain and send a blog post from one blockchain app +to the other. Multiple terminal windows are required to complete these next +steps. + +### Test the IBC modules + +To test the IBC module, start two blockchain networks on the same machine. Both +blockchains use the same source code. Each blockchain has a unique chain ID. + +One blockchain is named `earth` and the other blockchain is named `mars`. + +The `earth.yml` and `mars.yml` files are required in the project directory: + +```yaml title="earth.yml" +version: 1 +build: + proto: + path: proto + third_party_paths: + - third_party/proto + - proto_vendor +accounts: +- name: alice + coins: + - 1000token + - 100000000stake +- name: bob + coins: + - 500token + - 100000000stake +faucet: + name: bob + coins: + - 5token + - 100000stake + host: 0.0.0.0:4500 +genesis: + chain_id: earth +validators: +- name: alice + bonded: 100000000stake + home: $HOME/.earth +``` + +```yaml title="mars.yml" +version: 1 +build: + proto: + path: proto + third_party_paths: + - third_party/proto + - proto_vendor +accounts: +- name: alice + coins: + - 1000token + - 1000000000stake +- name: bob + coins: + - 500token + - 100000000stake +faucet: + name: bob + coins: + - 5token + - 100000stake + host: :4501 +genesis: + chain_id: mars +validators: +- name: alice + bonded: 100000000stake + app: + api: + address: :1318 + grpc: + address: :9092 + grpc-web: + address: :9093 + config: + p2p: + laddr: :26658 + rpc: + laddr: :26659 + pprof_laddr: :6061 + home: $HOME/.mars +``` + +Open a terminal window and run the following command to start the `earth` +blockchain: + +```bash +ignite chain serve -c earth.yml +``` + +Open a different terminal window and run the following command to start the +`mars` blockchain: + +```bash +ignite chain serve -c mars.yml +``` + +### Remove Existing Relayer and Ignite CLI Configurations + +If you previously used the relayer, follow these 
steps to remove existing relayer
and Ignite CLI configurations:

- Stop your blockchains and delete previous configuration files:

  ```bash
  rm -rf ~/.ignite/relayer
  ```

If existing relayer configurations do not exist, the command returns `no matches
found` and no action is taken.

### Configure and start the relayer

First, configure the relayer. Use the Ignite CLI `configure` command with the
`--advanced` option:

```bash
ignite relayer configure -a \
  --source-rpc "http://0.0.0.0:26657" \
  --source-faucet "http://0.0.0.0:4500" \
  --source-port "blog" \
  --source-version "blog-1" \
  --source-gasprice "0.0000025stake" \
  --source-prefix "cosmos" \
  --source-gaslimit 300000 \
  --target-rpc "http://0.0.0.0:26659" \
  --target-faucet "http://0.0.0.0:4501" \
  --target-port "blog" \
  --target-version "blog-1" \
  --target-gasprice "0.0000025stake" \
  --target-prefix "cosmos" \
  --target-gaslimit 300000
```

When prompted, press Enter to accept the default values for `Source Account` and
`Target Account`.

The output looks like:

```
---------------------------------------------
Setting up chains
---------------------------------------------

🔐 Account on "source" is "cosmos1xcxgzq75yrxzd0tu2kwmwajv7j550dkj7m00za"

 |· received coins from a faucet
 |· (balance: 100000stake,5token)

🔐 Account on "target" is "cosmos1nxg8e4mfp5v7sea6ez23a65rvy0j59kayqr8cx"

 |· received coins from a faucet
 |· (balance: 100000stake,5token)

⛓ Configured chains: earth-mars
```

In a new terminal window, start the relayer process:

```bash
ignite relayer connect
```

Results:

```
------
Paths
------

earth-mars:
  earth > (port: blog) (channel: channel-0)
  mars > (port: blog) (channel: channel-0)

------
Listening and relaying packets between chains... 
+------ +``` + +### Send packets + +You can now send packets and verify the received posts: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Hello" "Hello Mars, I'm Alice from Earth" --from alice --chain-id earth --home ~/.earth +``` + +To verify that the post has been received on Mars: + +```bash +planetd q blog list-post --node tcp://localhost:26659 +``` + +The packet has been received: + +```yaml +Post: + - content: Hello Mars, I'm Alice from Earth + creator: blog-channel-0-cosmos1aew8dk9cs3uzzgeldatgzvm5ca2k4m98xhy20x + id: "0" + title: Hello +pagination: + next_key: null + total: "1" +``` + +To check if the packet has been acknowledged on Earth: + +```bash +planetd q blog list-sent-post +``` + +Output: + +```yaml +SentPost: + - chain: blog-channel-0 + creator: cosmos1aew8dk9cs3uzzgeldatgzvm5ca2k4m98xhy20x + id: "0" + postID: "0" + title: Hello +pagination: + next_key: null + total: "1" +``` + +To test timeout, set the timeout time of a packet to 1 nanosecond, verify that +the packet is timed out, and check the timed-out posts: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Sorry" "Sorry Mars, you will never see this post" --from alice --chain-id earth --home ~/.earth --packet-timeout-timestamp 1 +``` + +Check the timed-out posts: + +```bash +planetd q blog list-timedout-post +``` + +Results: + +```yaml +TimedoutPost: + - chain: blog-channel-0 + creator: cosmos1fhpcsxn0g8uask73xpcgwxlfxtuunn3ey5ptjv + id: "0" + title: Sorry +pagination: + next_key: null + total: "2" +``` + +You can also send a post from Mars: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Hello" "Hello Earth, I'm Alice from Mars" --from alice --chain-id mars --home ~/.mars --node tcp://localhost:26659 +``` + +List post on Earth: + +```bash +planetd q blog list-post +``` + +Results: + +```yaml +Post: + - content: Hello Earth, I'm Alice from Mars + creator: blog-channel-0-cosmos1xtpx43l826348s59au24p22pxg6q248638q2tf + id: "0" + title: Hello +pagination: + next_key: 
null + total: "1" +``` + +## Congratulations 🎉 + +By completing this tutorial, you've learned to use the Inter-Blockchain +Communication protocol (IBC). + +Here's what you accomplished in this tutorial: + +- Built two Hello blockchain apps as IBC modules +- Modified the generated code to add CRUD action logic +- Configured and used the Ignite CLI relayer to connect two blockchains with + each other +- Transferred IBC packets from one blockchain to another diff --git a/docs/versioned_docs/version-v0.27/02-guide/07-interchange/00-introduction.md b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/00-introduction.md new file mode 100644 index 0000000..8b7bc6c --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/00-introduction.md @@ -0,0 +1,49 @@ +--- +sidebar_position: 0 +slug: /guide/interchange +--- + +# Introduction + +The Interchain Exchange is a module to create buy and sell orders between blockchains. + +In this tutorial, you learn how to create a Cosmos SDK module that can create order pairs, buy orders, and sell orders. +You create order books and buy and sell orders across blockchains, which in turn enables you to swap token from one +blockchain to another. + +**Note:** The code in this tutorial is written specifically for this tutorial and is intended only for educational +purposes. This tutorial code is not intended to be used in production. + +If you want to see the end result, see the example implementation in +the [interchange repo](https://github.com/tendermint/interchange). 
+ +**You will learn how to:** + +- Create a blockchain with Ignite CLI +- Create a Cosmos SDK IBC module +- Create an order book that hosts buy and sell orders with a module +- Send IBC packets from one blockchain to another +- Deal with timeouts and acknowledgements of IBC packets + +## How the Interchange Exchange Module Works + +To build an exchange that works with two or more blockchains, follow the steps in this tutorial to create a Cosmos SDK +module called `dex`. + +The new `dex` module allows you to open an exchange order book for a pair of token: a token from one blockchain and a token +on another blockchain. The blockchains are required to have the `dex` module available. + +Token can be bought or sold with limit orders on a simple order book. In this tutorial, there is no notion of a +liquidity pool or automated market maker (AMM). + +The market is unidirectional: + +- The token sold on the source chain cannot be bought back as it is +- The token bought from the target chain cannot be sold back using the same pair. + +If a token on a source chain is sold, it can only be bought back by creating a new pair on the order book. +This workflow is due to the nature of the Inter-Blockchain Communication protocol (IBC) which creates a `voucher` +token on the target blockchain. There is a difference of a native blockchain token and a `voucher` token that is minted +on another blockchain. You must create a second order book pair in order to receive the native token back. + +In the next chapter, you learn details about the design of the interblockchain exchange. diff --git a/docs/versioned_docs/version-v0.27/02-guide/07-interchange/01-design.md b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/01-design.md new file mode 100644 index 0000000..448a0f5 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/01-design.md @@ -0,0 +1,108 @@ +--- +sidebar_position: 1 +description: Learn about the interchain exchange module design. 
+--- + +# App Design + +In this chapter, you learn how the interchain exchange module is designed. The module has order books, buy orders, and +sell orders. + +- First, create an order book for a pair of token. +- After an order book exists, you can create buy and sell orders for this pair of token. + +The module uses the Inter-Blockchain Communication +protocol [IBC](https://github.com/cosmos/ibc/blob/old/ibc/2_IBC_ARCHITECTURE.md). +By using IBC, the module can create order books so that multiple blockchains can interact and exchange their token. + +You create an order book pair with a token from one blockchain and another token from another blockchain. In this +tutorial, call the module you create the `dex` module. + +> When a user exchanges a token with the `dex` module, a `voucher` of that token is received on the other blockchain. +> This voucher is similar to how an `ibc-transfer` is constructed. Since a blockchain module does not have the rights +> to mint new token of a blockchain into existence, the token on the target chain is locked up, and the buyer receives +> a `voucher` of that token. + +This process can be reversed when the `voucher` gets burned to unlock the original token. This exchange process is +explained in more detail throughout the tutorial. + +## Assumption of the Design + +An order book can be created for the exchange of any tokens between any pair of chains. + +- Both blockchains require the `dex` module to be installed and running. +- There can only be one order book for a pair of token at the same time. + +<!-- There is no condition to check for open channels between two chains. --> + +A specific chain cannot mint new coins of its native token. + +<!-- The module is trustless, there is no condition to check when opening a channel between two chains. +Any pair of tokens can be exchanged between any pair of chains. 
--> + +This module is inspired by the [`ibc transfer`](https://github.com/cosmos/ibc-go/tree/main/modules/apps/transfer) +module on the Cosmos SDK. The `dex` module you create in this tutorial has similarities, like the `voucher` creation. + +However, the new `dex` module you are creating is more complex because it supports creation of: + +- Several types of packets to send +- Several types of acknowledgments to treat +- More complex logic on how to treat a packet on receipt, on timeout, and more + +## Interchain Exchange Overview + +Assume you have two blockchains: Venus and Mars. + +- The native token on Venus is `venuscoin`. +- The native token on Mars is `marscoin`. + +When a token is exchanged from Mars to Venus: + +- The Venus blockchain has an IBC `voucher` token with a denom that looks like `ibc/B5CB286...A7B21307F`. +- The long string of characters after `ibc/` is a denom trace hash of a token that was transferred using IBC. + +Using the blockchain's API you can get a denom trace from that hash. The denom trace consists of a `base_denom` and a +`path`. In our example: + +- The `base_denom` is `marscoin`. +- The `path` contains pairs of ports and channels through which the token has been transferred. + +For a single-hop transfer, the `path` is identified by `transfer/channel-0`. + +Learn more about token paths +in [ICS 20 Fungible Token Transfer](https://github.com/cosmos/ibc/tree/main/spec/app/ics-020-fungible-token-transfer). + +**Note:** This token `ibc/Venus/marscoin` cannot be sold back using the same order book. If you want to "reverse" the +exchange and receive the Mars token back, you must create and use a new order book for the `ibc/Venus/marscoin` to +`marscoin` transfer. + +## The Design of the Order Books + +As a typical exchange, a new pair implies the creation of an order book with orders to sell `marscoin` or orders to buy +`venuscoin`. Here, you have two chains and this data structure must be split between Mars and Venus. 
+ +- Users from chain Mars sell `marscoin`. +- Users from chain Venus buy `marscoin`. + +Therefore, we represent: + +- All orders to sell `marscoin` on chain Mars. +- All orders to buy `marscoin` on chain Venus. + +In this example, blockchain Mars holds the sell orders and blockchain Venus holds the buy orders. + +## Exchanging Tokens Back + +Like `ibc-transfer`, each blockchain keeps a trace of the token voucher that was created on the other blockchain. + +If blockchain Mars sells `marscoin` to chain Venus and `ibc/Venus/marscoin` is minted on Venus then, if +`ibc/Venus/marscoin` is sold back to Mars, the token is unlocked and the token that is received is `marscoin`. + +## Features + +The features supported by the interchain exchange module are: + +- Create an exchange order book for a token pair between two chains +- Send sell orders on source chain +- Send buy orders on target chain +- Cancel sell or buy orders diff --git a/docs/versioned_docs/version-v0.27/02-guide/07-interchange/02-init.md b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/02-init.md new file mode 100644 index 0000000..5261268 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/02-init.md @@ -0,0 +1,219 @@ +--- +sidebar_position: 2 +description: Create the blockchain for the interchain exchange app. +--- + +# App Init + +## Initialize the Blockchain + +In this chapter, you create the basic blockchain module for the interchain exchange app. You scaffold the blockchain, +the module, the transaction, the IBC packets, and messages. In later chapters, you integrate more code into each of the +transaction handlers. + +## Create the Blockchain + +Scaffold a new blockchain called `interchange`: + +```bash +ignite scaffold chain interchange --no-module +``` + +A new directory named `interchange` is created. 
+ +Change into this directory where you can scaffold modules, types, and maps: + +```bash +cd interchange +``` + +The `interchange` directory contains a working blockchain app. + +A local GitHub repository has been created for you with the initial scaffold. + +Next, create a new IBC module. + +## Create the dex Module + +Scaffold a module inside your blockchain named `dex` with IBC capabilities. + +The dex module contains the logic to create and maintain order books and route them through IBC to the second +blockchain. + +```bash +ignite scaffold module dex --ibc --ordering unordered --dep bank +``` + +## Create CRUD logic for Buy and Sell Order Books + +Scaffold two types with create, read, update, and delete (CRUD) actions. + +Run the following Ignite CLI `type` commands to create `sellOrderBook` and `buyOrderBook` types: + +```bash +ignite scaffold map sell-order-book amountDenom priceDenom --no-message --module dex +ignite scaffold map buy-order-book amountDenom priceDenom --no-message --module dex +``` + +The values are: + +- `amountDenom`: the token to be sold and in which quantity +- `priceDenom`: the token selling price + +The `--no-message` flag specifies to skip the message creation. Custom messages will be created in the next steps. + +The `--module dex` flag specifies to scaffold the type in the `dex` module. + +## Create the IBC Packets + +Create three packets for IBC: + +- An order book pair `createPair` +- A sell order `sellOrder` +- A buy order `buyOrder` + +```bash +ignite scaffold packet create-pair sourceDenom targetDenom --module dex +ignite scaffold packet sell-order amountDenom amount:int priceDenom price:int --ack remainingAmount:int,gain:int --module dex +ignite scaffold packet buy-order amountDenom amount:int priceDenom price:int --ack remainingAmount:int,purchase:int --module dex +``` + +The optional `--ack` flag defines field names and types of the acknowledgment returned after the packet has been +received by the target chain. 
The value of the `--ack` flag is a comma-separated list of names (no spaces). Append +optional types after a colon (`:`). + +## Cancel messages + +Cancelling orders is done locally in the network, there is no packet to send. + +Use the `message` command to create a message to cancel a sell or buy order: + +```bash +ignite scaffold message cancel-sell-order port channel amountDenom priceDenom orderID:int --desc "Cancel a sell order" --module dex +ignite scaffold message cancel-buy-order port channel amountDenom priceDenom orderID:int --desc "Cancel a buy order" --module dex +``` + +Use the optional `--desc` flag to define a description of the CLI command that is used to broadcast a transaction with +the message. + +## Trace the Denom + +The token denoms must have the same behavior as described in the `ibc-transfer` module: + +- An external token received from a chain has a unique `denom`, referred to as `voucher`. +- When a token is sent to a blockchain and then sent back and received, the chain can resolve the voucher and convert + it back to the original token denomination. + +`Voucher` tokens are represented as hashes, therefore you must store which original denomination is related to a +voucher. +You can do this with an indexed type. + +For a `voucher` you store, define the source port ID, source channel ID, and the original denom: + +```bash +ignite scaffold map denom-trace port channel origin --no-message --module dex +``` + +## Create the Configuration for Two Blockchains + +Add two config files `mars.yml` and `venus.yml` to test two blockchain networks with specific token for each. + +Add the config files in the `interchange` folder. + +The native denoms for Mars are `marscoin`, and for Venus `venuscoin`. 
+

Create the `mars.yml` file with your content:

```yaml title="mars.yml"
version: 1
build:
  proto:
    path: proto
    third_party_paths:
    - third_party/proto
    - proto_vendor
accounts:
- name: alice
  coins:
  - 1000token
  - 100000000stake
  - 1000marscoin
- name: bob
  coins:
  - 500token
  - 1000marscoin
  - 100000000stake
faucet:
  name: bob
  coins:
  - 5token
  - 100000stake
  host: 0.0.0.0:4500
genesis:
  chain_id: mars
validators:
- name: alice
  bonded: 100000000stake
  home: $HOME/.mars
```

Create the `venus.yml` file with your content:

```yaml title="venus.yml"
version: 1
build:
  proto:
    path: proto
    third_party_paths:
    - third_party/proto
    - proto_vendor
accounts:
- name: alice
  coins:
  - 1000token
  - 1000000000stake
  - 1000venuscoin
- name: bob
  coins:
  - 500token
  - 1000venuscoin
  - 100000000stake
faucet:
  name: bob
  coins:
  - 5token
  - 100000stake
  host: :4501
genesis:
  chain_id: venus
validators:
- name: alice
  bonded: 100000000stake
  app:
    api:
      address: :1318
    grpc:
      address: :9092
    grpc-web:
      address: :9093
  config:
    p2p:
      laddr: :26658
    rpc:
      laddr: :26659
      pprof_laddr: :6061
  home: $HOME/.venus
```

In order to run two blockchains side by side on a single machine, you need to
start them on different ports. `venus.yml` has a validators configuration that
starts the HTTP API, gRPC, P2P, and RPC services on custom ports.

After scaffolding, now is a good time to make a commit to the local GitHub repository that was created for you.

```bash
git add .
git commit -m "Scaffold module, maps, packages and messages for the dex"
```

Implement the code for the order book in the next chapter. 
diff --git a/docs/versioned_docs/version-v0.27/02-guide/07-interchange/03-walkthrough.md b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/03-walkthrough.md new file mode 100644 index 0000000..78a4a0f --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/03-walkthrough.md @@ -0,0 +1,687 @@ +--- +sidebar_position: 3 +description: Walkthrough of commands to use the interchain exchange module. +--- + +# Use the Interchain Exchange + +In this chapter, you will learn about the exchange and how it will function once +it is implemented. This will give you a better understanding of what you will be +building in the coming chapters. + +To achieve this, we will perform the following tasks: + +* Start two local blockchains +* Set up an IBC relayer between the two chains +* Create an exchange order book for a token pair on the two chains +* Submit sell orders on the Mars chain +* Submit buy orders on the Venus chain +* Cancel sell or buy orders + +Starting the two local blockchains and setting up the IBC relayer will allow us +to create an exchange order book between the two chains. This order book will +allow us to submit sell and buy orders, as well as cancel any orders that we no +longer want to maintain. + +It is important to note that the commands in this chapter will only work +properly if you have completed all the following chapters in this tutorial. By +the end of this chapter, you should have a good understanding of how the +exchange will operate. + +## Start blockchain nodes + +To start using the interchain exchange, you will need to start two separate +blockchains. This can be done by running the `ignite chain serve` command, +followed by the `-c` flag and the path to the configuration file for each +blockchain. 
For example, to start the `mars` blockchain, you would run: + +``` +ignite chain serve -c mars.yml +``` + +To start the `venus` blockchain, you would run a similar command, but with the +path to the `venus.yml` configuration file: + +``` +ignite chain serve -c venus.yml +``` + +Once both blockchains are running, you can proceed with configuring the relayer +to enable interchain exchange between the two chains. + +## Relayer + +Next, let's set up an IBC relayer between two chains. If you have used a relayer +in the past, reset the relayer configuration directory: + +``` +rm -rf ~/.ignite/relayer +``` + +Now you can use the `ignite relayer configure` command. This command allows you +to specify the source and target chains, along with their respective RPC +endpoints, faucet URLs, port numbers, versions, gas prices, and gas limits. + +``` +ignite relayer configure -a --source-rpc "http://0.0.0.0:26657" --source-faucet "http://0.0.0.0:4500" --source-port "dex" --source-version "dex-1" --source-gasprice "0.0000025stake" --source-prefix "cosmos" --source-gaslimit 300000 --target-rpc "http://0.0.0.0:26659" --target-faucet "http://0.0.0.0:4501" --target-port "dex" --target-version "dex-1" --target-gasprice "0.0000025stake" --target-prefix "cosmos" --target-gaslimit 300000 +``` + +To create a connection between the two chains, you can use the ignite relayer +connect command. This command will establish a connection between the source and +target chains, allowing you to transfer data and assets between them. + +``` +ignite relayer connect +``` + +Now that we have two separate blockchain networks up and running, and a relayer +connection established to facilitate communication between them, we are ready to +begin using the interchain exchange binary to interact with these networks. This +will allow us to create order books and buy/sell orders, enabling us to trade +assets between the two chains. 
+ +## Order Book + +To create an order book for a pair of tokens, you can use the following command: + +``` +interchanged tx dex send-create-pair dex channel-0 marscoin venuscoin --from alice --chain-id mars --home ~/.mars +``` + +This command will create an order book for the pair of tokens `marscoin` and +`venuscoin`. The command will be executed by the user `alice` on the Mars +blockchain. The `--home` parameter specifies the location of the configuration +directory for the Mars blockchain. + +Creating an order book affects state on the Mars blockchain to which the +transaction was broadcast and the Venus blockchain. + +On the Mars blockchain, the `send-create-pair` command creates an empty sell +order book. + +``` +interchanged q dex list-sell-order-book +``` + +```yml +sellOrderBook: +- amountDenom: marscoin + book: + idCount: 0 + orders: [] + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +On the Venus blockchain, the same `send-create-pair` command creates a buy order +book: + +``` +interchanged q dex list-buy-order-book --node tcp://localhost:26659 +``` + +```yml +buyOrderBook: +- amountDenom: marscoin + book: + idCount: 0 + orders: [] + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +In the `create-pair` command on the Mars blockchain, an IBC packet is sent to +the Venus chain. This packet contains information that is used to create a buy +order book on the Venus chain. + +When the Venus chain receives the IBC packet, it processes the information +contained in the packet and creates a buy order book. The Venus chain then sends +an acknowledgement back to the Mars chain to confirm that the buy order book has +been successfully created. + +Upon receiving the acknowledgement from the Venus chain, the Mars chain creates +a sell order book. This sell order book is associated with the buy order book on +the Venus chain, allowing users to trade assets between the two chains. 
+ +## Sell Order + +After creating an order book, the next step is to create a sell order. This can +be done using the `send-sell-order` command, which is used to broadcast a +transaction with a message that locks a specified amount of tokens and creates a +sell order on the Mars blockchain. + +``` +interchanged tx dex send-sell-order dex channel-0 marscoin 10 venuscoin 15 --from alice --chain-id mars --home ~/.mars +``` + +In the example provided, the `send-sell-order` command is used to create a sell +order for 10 `marscoin` token and 15 `venuscoin` token. This sell order will be +added to the order book on the Mars blockchain. + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +```yml +balances: +- amount: "990" # decreased from 1000 + denom: marscoin +- amount: "1000" + denom: token +``` + +``` +interchanged q dex list-sell-order-book +``` + +```yml +sellOrderBook: +- amountDenom: marscoin + book: + idCount: 1 + orders: # a new sell order is created + - amount: 10 + creator: cosmos14ntyzr6d2dx4ppds9tvenx53fn0xl5jcakrtm4 + id: 0 + price: 15 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +## Buy order + +After creating a sell order, the next step in the trading process is typically +to create a buy order. This can be done using the `send-buy-order` command, +which is used to lock a specified amount of tokens and create a buy order on the +Venus blockchain + +``` +interchanged tx dex send-buy-order dex channel-0 marscoin 10 venuscoin 5 --from alice --chain-id venus --home ~/.venus --node tcp://localhost:26659 +``` + +In the example provided, the `send-buy-order` command is used to create a buy +order for 10 `marscoin` token and 5 `venuscoin` token. This buy order will be +added to the order book on the Venus blockchain. 
+ +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +```yml +balances: +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "950" # decreased from 1000 + denom: venuscoin +``` + +``` +interchanged q dex list-buy-order-book --node tcp://localhost:26659 +``` + +```yml +buyOrderBook: +- amountDenom: marscoin + book: + idCount: 1 + orders: # a new buy order is created + - amount: 10 + creator: cosmos1mrrttwtdcp47pl4hq6sar3mwqpmtc7pcl9e6ss + id: 0 + price: 5 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +## Perform an Exchange with a Sell Order + +You currently have two open orders for `marscoin`: + +* A sell order on the Mars chain, where you are offering to sell 10 `marscoin` + for 15 `venuscoin`. +* A buy order on the Venus chain, where you are willing to buy 10 `marscoin` + for 5 `venuscoin` each. + +To perform an exchange, you can send a sell order to the Mars chain using the +following command: + +``` +interchanged tx dex send-sell-order dex channel-0 marscoin 5 venuscoin 3 --from alice --home ~/.mars +``` + +This sell order, offering to sell 5 `marscoin` for 3 `venuscoin`, will be filled +on the Venus chain by the existing buy order. This will result in the amount of +the buy order on the Venus chain being reduced by 5 `marscoin`. + +``` +interchanged q dex list-buy-order-book --node tcp://localhost:26659 +``` + +```yml +buyOrderBook: +- amountDenom: marscoin + book: + idCount: 1 + orders: + - amount: 5 # decreased from 10 + creator: cosmos1mrrttwtdcp47pl4hq6sar3mwqpmtc7pcl9e6ss + id: 0 + price: 5 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +The sender of the filled sell order traded 5 `marscoin` for 25 `venuscoin` +tokens. This means that the amount of the sell order (5 `marscoin`) was +multiplied by the price of the buy order (5 `venuscoin`) to determine the value +of the exchange. 
In this case, the value of the exchange was 25 `venuscoin` +vouchers. + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +```yml +balances: +- amount: "25" # increased from 0 + denom: ibc/BB38C24E9877 +- amount: "985" # decreased from 990 + denom: marscoin +- amount: "1000" + denom: token +``` + +The counterparty, or the sender of the buy `marscoin` order, will receive 5 +`marscoin` as a result of the exchange. + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +```yml +balances: +- amount: "5" # increased from 0 + denom: ibc/745B473BFE24 # marscoin voucher +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "950" + denom: venuscoin +``` + +The `venuscoin` balance has remained unchanged because the appropriate amount of +`venuscoin` (50) was already locked at the time the buy order was created in the +previous step. + + +## Perform an Exchange with a Buy Order + +To perform an exchange with a buy order, send a transaction to the decentralized +exchange to buy 5 `marscoin` for 15 `venuscoin`. This is done by running the +following command: + +``` +interchanged tx dex send-buy-order dex channel-0 marscoin 5 venuscoin 15 --from alice --home ~/.venus --node tcp://localhost:26659 +``` + +This buy order will be immediately filled on the Mars chain, and the creator of +the sell order will receive 75 `venuscoin` vouchers as payment. + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +```yml +balances: +- amount: "100" # increased from 25 + denom: ibc/BB38C24E9877 # venuscoin voucher +- amount: "985" + denom: marscoin +- amount: "1000" + denom: token +``` + +The amount of the sell order will be decreased by the amount of the filled buy +order, so in this case it will be decreased by 5 `marscoin`. 
+ +``` +interchanged q dex list-sell-order-book +``` + +```yml +sellOrderBook: +- amountDenom: marscoin + book: + idCount: 1 + orders: + - amount: 5 # decreased from 10 + creator: cosmos14ntyzr6d2dx4ppds9tvenx53fn0xl5jcakrtm4 + id: 0 + price: 15 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +The creator of the buy order receives 5 marscoin vouchers for 75 venuscoin +(5marscoin * 15venuscoin): + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +```yml +balances: +- amount: "10" # increased from 5 + denom: ibc/745B473BFE24 # marscoin vouchers +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "875" # decreased from 950 + denom: venuscoin +``` + +## Complete Exchange with a Partially Filled Sell Order + +To complete the exchange with a partially filled sell order, send a transaction +to the decentralized exchange to sell 10 `marscoin` for 3 `venuscoin`. This is +done by running the following command: + +``` +interchanged tx dex send-sell-order dex channel-0 marscoin 10 venuscoin 3 --from alice --home ~/.mars +``` + +In this scenario, the sell amount is 10 `marscoin`, but there is an existing buy +order for only 5 `marscoin`. The buy order will be filled completely and removed +from the order book. The author of the previously created buy order will receive +10 `marscoin` vouchers from the exchange. 
+ +To check the balances, she can run the following command: + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +```yml +balances: +- amount: "15" # increased from 5 + denom: ibc/745B473BFE24 # marscoin voucher +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "875" + denom: venuscoin +``` + +``` +interchanged q dex list-buy-order-book --node tcp://localhost:26659 +``` + +```yml +buyOrderBook: +- amountDenom: marscoin + book: + idCount: 1 + orders: [] # buy order with amount 5marscoin has been closed + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +The author of the sell order successfully exchanged 5 marscoin and received 25 +venuscoin vouchers. The other 5 `marscoin` created a sell order: + +```yml +balances: +- amount: "125" # increased from 100 + denom: ibc/BB38C24E9877 # venuscoin vouchers +- amount: "975" # decreased from 985 + denom: marscoin +- amount: "1000" + denom: token +``` + +``` +interchanged q dex list-sell-order-book +``` + +```yml +sellOrderBook: +- amountDenom: marscoin + book: + idCount: 2 + orders: + - amount: 5 # hasn't changed + creator: cosmos14ntyzr6d2dx4ppds9tvenx53fn0xl5jcakrtm4 + id: 0 + price: 15 + - amount: 5 # new order is created + creator: cosmos14ntyzr6d2dx4ppds9tvenx53fn0xl5jcakrtm4 + id: 1 + price: 3 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +## Complete Exchange with a Partially Filled Buy Order + +To complete the exchange with a partially filled buy order, send a transaction +to the decentralized exchange to buy 10 `marscoin` for 5 `venuscoin`. 
This is +done by running the following command: + +``` +interchanged tx dex send-buy-order dex channel-0 marscoin 10 venuscoin 5 --from alice --home ~/.venus --node tcp://localhost:26659 +``` + +In this scenario, the buy order is only partially filled for 5 `marscoin`. There +is an existing sell order for 5 `marscoin` (with a price of 3 `venuscoin`) on +the Mars chain, which is completely filled and removed from the order book. The +author of the closed sell order will receive 15 `venuscoin` vouchers as payment, +which is the product of 5 `marscoin` and 3 `venuscoin`. + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +```yml +balances: +- amount: "140" # increased from 125 + denom: ibc/BB38C24E9877 # venuscoin vouchers +- amount: "975" + denom: marscoin +- amount: "1000" + denom: token +``` + +``` +interchanged q dex list-sell-order-book +``` + +```yml +sellOrderBook: +- amountDenom: marscoin + book: + idCount: 2 + orders: + - amount: 5 # order hasn't changed + creator: cosmos14ntyzr6d2dx4ppds9tvenx53fn0xl5jcakrtm4 + id: 0 + price: 15 + # a sell order for 5 marscoin has been closed + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +In this scenario, the author of the buy order will receive 5 `marscoin` vouchers +as payment, which locks up 50 `venuscoin` of their token. The remaining 5 +`marscoin` that is not filled by the sell order will create a new buy order on +the Venus chain. This means that the author of the buy order is still interested +in purchasing 5 `marscoin`, and is willing to pay the specified price for it. +The new buy order will remain on the order book until it is filled by another +sell order, or it is cancelled by the buyer. 
+ +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +```yml +balances: +- amount: "20" # increased from 15 + denom: ibc/745B473BFE24 # marscoin vouchers +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "825" # decreased from 875 + denom: venuscoin +``` + +``` +interchanged q dex list-buy-order-book --node tcp://localhost:26659 +``` + +```yml +buyOrderBook: +- amountDenom: marscoin + book: + idCount: 2 + orders: + - amount: 5 # new buy order is created + creator: cosmos1mrrttwtdcp47pl4hq6sar3mwqpmtc7pcl9e6ss + id: 1 + price: 5 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +## Cancel an Order + +After the exchanges described, there are still two open orders: a sell order on +the Mars chain (5 `marscoin` for 15 `venuscoin`), and a buy order on the Venus +chain (5 `marscoin` for 5 `venuscoin`). + +To cancel an order on a blockchain, you can use the `cancel-sell-order` or +`cancel-buy-order` command, depending on the type of order you want to cancel. +The command takes several arguments, including the `channel-id` of the IBC +connection, the `amount-denom` and `price-denom` of the order, and the +`order-id` of the order you want to cancel. + +To cancel a sell order on the Mars chain, you would run the following command: + +``` +interchanged tx dex cancel-sell-order dex channel-0 marscoin venuscoin 0 --from alice --home ~/.mars +``` + +This will cancel the sell order and remove it from the order book. The balance +of Alice's `marscoin` will be increased by the amount of the cancelled sell +order. + +To check Alice's balances, including her updated `marscoin` balance, run the +following command: + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +This will return a list of Alice's balances, including her updated `marscoin` +balance. 
+ +```yml +balances: +- amount: "140" + denom: ibc/BB38C24E9877 +- amount: "980" # increased from 975 + denom: marscoin +- amount: "1000" + denom: token +``` + +After the sell order on the Mars chain has been cancelled, the sell order book +on that blockchain will be empty. This means that there are no longer any active +sell orders on the Mars chain, and anyone interested in purchasing `marscoin` +will need to create a new buy order. The sell order book will remain empty until +a new sell order is created and added to it. + +``` +interchanged q dex list-sell-order-book +``` + +```yml +sellOrderBook: +- amountDenom: marscoin + book: + idCount: 2 + orders: [] + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +To cancel a buy order on the `Venus` chain, you can run the following command: + +``` +interchanged tx dex cancel-buy-order dex channel-0 marscoin venuscoin 1 --from alice --home ~/.venus --node tcp://localhost:26659 +``` + +This will cancel the buy order and remove it from the order book. The balance of +Alice's `venuscoin` will be increased by the amount of the cancelled buy order. + +To check Alice's balances, including her updated `venuscoin` balance, you can +run the following command: + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +The amount of `venuscoin` is increased: + +```yml +balances: +- amount: "20" + denom: ibc/745B473BFE24 +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "850" # increased from 825 + denom: venuscoin +``` + +This will return a list of Alice's balances, including her updated `venuscoin` +balance. + +After canceling a buy order, the buy order book on the Venus blockchain will be +empty. This means that there are no longer any active buy orders on the chain, +and anyone interested in selling `marscoin` will need to create a new sell +order. 
The buy order book will remain empty until a new buy order is created and +added to it. + +``` +interchanged q dex list-buy-order-book --node tcp://localhost:26659 +``` + +```yml +buyOrderBook: +- amountDenom: marscoin + book: + idCount: 2 + orders: [] + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +In this walkthrough, we demonstrated how to set up an interchain exchange for +trading tokens between two different blockchain networks. This involved creating +an exchange order book for a specific token pair and establishing a fixed +exchange rate between the two. + +Once the exchange was set up, users could send sell orders on the Mars chain and +buy orders on the Venus chain. This allowed them to offer their tokens for sale +or purchase tokens from the exchange. In addition, users could also cancel their +orders if needed. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/07-interchange/04-creating-order-books.md b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/04-creating-order-books.md new file mode 100644 index 0000000..ffe5a2b --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/04-creating-order-books.md @@ -0,0 +1,480 @@ +--- +sidebar_position: 4 +description: Implement logic to create order books. +--- + +# Implement the Order Books + +In this chapter, you implement the logic to create order books. + +In the Cosmos SDK, the state is stored in a key-value store. Each order book is stored under a unique key that is +composed of four values: + +- Port ID +- Channel ID +- Source denom +- Target denom + +For example, an order book for marscoin and venuscoin could be stored under `dex-channel-4-marscoin-venuscoin`. + +First, define a function that returns an order book store key: + +```go +// x/dex/types/keys.go +package types + +import "fmt" + +// ... 
+func OrderBookIndex(portID string, channelID string, sourceDenom string, targetDenom string) string { + return fmt.Sprintf("%s-%s-%s-%s", portID, channelID, sourceDenom, targetDenom) +} +``` + +The `send-create-pair` command is used to create order books. This command: + +- Creates and broadcasts a transaction with a message of type `SendCreatePair`. +- The message gets routed to the `dex` module. +- Finally, a `SendCreatePair` keeper method is called. + +You need the `send-create-pair` command to do the following: + +- When processing `SendCreatePair` message on the source chain: + - Check that an order book with the given pair of denoms does not yet exist. + - Transmit an IBC packet with information about port, channel, source denoms, and target denoms. +- After the packet is received on the target chain: + - Check that an order book with the given pair of denoms does not yet exist on the target chain. + - Create a new order book for buy orders. + - Transmit an IBC acknowledgement back to the source chain. +- After the acknowledgement is received on the source chain: + - Create a new order book for sell orders. + +## Message Handling in SendCreatePair + +The `SendCreatePair` function was created during the IBC packet scaffolding. The function creates an IBC packet, +populates it with source and target denoms, and transmits this packet over IBC. + +Now, add the logic to check for an existing order book for a particular pair of denoms: + +```go +// x/dex/keeper/msg_server_create_pair.go + +package keeper + +import ( + "errors" + // ... 
+) + +func (k msgServer) SendCreatePair(goCtx context.Context, msg *types.MsgSendCreatePair) (*types.MsgSendCreatePairResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Get an order book index + pairIndex := types.OrderBookIndex(msg.Port, msg.ChannelID, msg.SourceDenom, msg.TargetDenom) + + // If an order book is found, return an error + _, found := k.GetSellOrderBook(ctx, pairIndex) + if found { + return &types.MsgSendCreatePairResponse{}, errors.New("the pair already exist") + } + + // Construct the packet + var packet types.CreatePairPacketData + + packet.SourceDenom = msg.SourceDenom + packet.TargetDenom = msg.TargetDenom + + // Transmit the packet + _, err := k.TransmitCreatePairPacket( + ctx, + packet, + msg.Port, + msg.ChannelID, + clienttypes.ZeroHeight(), + msg.TimeoutTimestamp, + ) + if err != nil { + return nil, err + } + + return &types.MsgSendCreatePairResponse{}, nil +} +``` + +## Lifecycle of an IBC Packet + +During a successful transmission, an IBC packet goes through these stages: + +1. Message processing before packet transmission on the source chain +2. Reception of a packet on the target chain +3. Acknowledgment of a packet on the source chain +4. Timeout of a packet on the source chain + +In the following section, implement the packet reception logic in the `OnRecvCreatePairPacket` function and the packet +acknowledgement logic in the `OnAcknowledgementCreatePairPacket` function. + +Leave the Timeout function empty. + +## Receive an IBC packet + +The protocol buffer definition defines the data that an order book contains. + +Add the `OrderBook` and `Order` messages to the `order.proto` file. + +First, add the proto buffer files to build the Go code files. You can modify these files for the purpose of your app. 
+ +Create a new `order.proto` file in the `proto/interchange/dex` directory and add the content: + +```protobuf +// proto/interchange/dex/order.proto + +syntax = "proto3"; + +package interchange.dex; + +option go_package = "interchange/x/dex/types"; + +message OrderBook { + int32 idCount = 1; + repeated Order orders = 2; +} + +message Order { + int32 id = 1; + string creator = 2; + int32 amount = 3; + int32 price = 4; +} +``` + +Modify the `buy_order_book.proto` file to have the fields for creating a buy order on the order book. +Don't forget to add the import as well. + +**Tip:** Don't forget to add the import as well. + +```protobuf +// proto/interchange/dex/buy_order_book.proto + +// ... + +import "interchange/dex/order.proto"; + +message BuyOrderBook { + // ... + OrderBook book = 4; +} +``` + +Modify the `sell_order_book.proto` file to add the order book into the buy order book. + +The proto definition for the `SellOrderBook` looks like: + +```protobuf +// proto/interchange/dex/sell_order_book.proto + +// ... +import "interchange/dex/order.proto"; + +message SellOrderBook { + // ... + OrderBook book = 4; +} +``` + +Now, use Ignite CLI to build the proto files for the `send-create-pair` command: + +```bash +ignite generate proto-go --yes +``` + +Start enhancing the functions for the IBC packets. + +Create a new file `x/dex/types/order_book.go`. 
+ +Add the new order book function to the corresponding Go file: + +```go +// x/dex/types/order_book.go + +package types + +func NewOrderBook() OrderBook { + return OrderBook{ + IdCount: 0, + } +} +``` + +To create a new buy order book type, define `NewBuyOrderBook` in a new file `x/dex/types/buy_order_book.go` : + +```go +// x/dex/types/buy_order_book.go + +package types + +func NewBuyOrderBook(AmountDenom string, PriceDenom string) BuyOrderBook { + book := NewOrderBook() + return BuyOrderBook{ + AmountDenom: AmountDenom, + PriceDenom: PriceDenom, + Book: &book, + } +} +``` + +When an IBC packet is received on the target chain, the module must check whether a book already exists. If not, then +create a buy order book for the specified denoms. + +```go +// x/dex/keeper/create_pair.go + +package keeper + +// ... + +func (k Keeper) OnRecvCreatePairPacket(ctx sdk.Context, packet channeltypes.Packet, data types.CreatePairPacketData) (packetAck types.CreatePairPacketAck, err error) { + // ... + + // Get an order book index + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.SourceDenom, data.TargetDenom) + + // If an order book is found, return an error + _, found := k.GetBuyOrderBook(ctx, pairIndex) + if found { + return packetAck, errors.New("the pair already exist") + } + + // Create a new buy order book for source and target denoms + book := types.NewBuyOrderBook(data.SourceDenom, data.TargetDenom) + + // Assign order book index + book.Index = pairIndex + + // Save the order book to the store + k.SetBuyOrderBook(ctx, book) + return packetAck, nil +} +``` + +## Receive an IBC Acknowledgement + +When an IBC acknowledgement is received on the source chain, the module must check whether a book already exists. If +not, +create a sell order book for the specified denoms. + +Create a new file `x/dex/types/sell_order_book.go`. +Insert the `NewSellOrderBook` function which creates a new sell order book. 
+ +```go +// x/dex/types/sell_order_book.go + +package types + +func NewSellOrderBook(AmountDenom string, PriceDenom string) SellOrderBook { + book := NewOrderBook() + return SellOrderBook{ + AmountDenom: AmountDenom, + PriceDenom: PriceDenom, + Book: &book, + } +} +``` + +Modify the Acknowledgement function in the `x/dex/keeper/create_pair.go` file: + +```go +// x/dex/keeper/create_pair.go + +package keeper + +// ... + +func (k Keeper) OnAcknowledgementCreatePairPacket(ctx sdk.Context, packet channeltypes.Packet, data types.CreatePairPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.CreatePairPacketAck + if err := types.ModuleCdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + // Set the sell order book + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.SourceDenom, data.TargetDenom) + book := types.NewSellOrderBook(data.SourceDenom, data.TargetDenom) + book.Index = pairIndex + k.SetSellOrderBook(ctx, book) + + return nil + default: + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("invalid acknowledgment format") + } +} +``` + +In this section, you implemented the logic behind the new `send-create-pair` command: + +- When an IBC packet is received on the target chain, `send-create-pair` command creates a buy order book. +- When an IBC acknowledgement is received on the source chain, the `send-create-pair` command creates a sell order book. 
+ +### Implement the appendOrder Function to Add Orders to the Order Book + +```go +// x/dex/types/order_book.go + +package types + +import ( + "errors" + "sort" +) + +func NewOrderBook() OrderBook { + return OrderBook{ + IdCount: 0, + } +} + +const ( + MaxAmount = int32(100000) + MaxPrice = int32(100000) +) + +type Ordering int + +const ( + Increasing Ordering = iota + Decreasing +) + +var ( + ErrMaxAmount = errors.New("max amount reached") + ErrMaxPrice = errors.New("max price reached") + ErrZeroAmount = errors.New("amount is zero") + ErrZeroPrice = errors.New("price is zero") + ErrOrderNotFound = errors.New("order not found") +) +``` + +The `AppendOrder` function initializes and appends a new order to an order book from the order information: + +```go +// x/dex/types/order_book.go + +func (book *OrderBook) appendOrder(creator string, amount int32, price int32, ordering Ordering) (int32, error) { + if err := checkAmountAndPrice(amount, price); err != nil { + return 0, err + } + + // Initialize the order + var order Order + order.Id = book.GetNextOrderID() + order.Creator = creator + order.Amount = amount + order.Price = price + + // Increment ID tracker + book.IncrementNextOrderID() + + // Insert the order + book.insertOrder(order, ordering) + return order.Id, nil +} +``` + +#### Implement the checkAmountAndPrice Function For an Order + +The `checkAmountAndPrice` function checks for the correct amount or price: + +```go +// x/dex/types/order_book.go + +func checkAmountAndPrice(amount int32, price int32) error { + if amount == int32(0) { + return ErrZeroAmount + } + if amount > MaxAmount { + return ErrMaxAmount + } + + if price == int32(0) { + return ErrZeroPrice + } + if price > MaxPrice { + return ErrMaxPrice + } + + return nil +} +``` + +#### Implement the GetNextOrderID Function + +The `GetNextOrderID` function gets the ID of the next order to append: + +```go +// x/dex/types/order_book.go + +func (book OrderBook) GetNextOrderID() int32 { + return book.IdCount 
+} +``` + +#### Implement the IncrementNextOrderID Function + +The `IncrementNextOrderID` function updates the ID count for orders: + +```go +// x/dex/types/order_book.go + +func (book *OrderBook) IncrementNextOrderID() { + // Even numbers to have different ID than buy orders + book.IdCount++ +} +``` + +#### Implement the insertOrder Function + +The `insertOrder` function inserts the order in the book with the provided order: + +```go +// x/dex/types/order_book.go + +func (book *OrderBook) insertOrder(order Order, ordering Ordering) { + if len(book.Orders) > 0 { + var i int + + // get the index of the new order depending on the provided ordering + if ordering == Increasing { + i = sort.Search(len(book.Orders), func(i int) bool { return book.Orders[i].Price > order.Price }) + } else { + i = sort.Search(len(book.Orders), func(i int) bool { return book.Orders[i].Price < order.Price }) + } + + // insert order + orders := append(book.Orders, &order) + copy(orders[i+1:], orders[i:]) + orders[i] = &order + book.Orders = orders + } else { + book.Orders = append(book.Orders, &order) + } +} +``` + +This completes the order book setup. + +Now is a good time to save the state of your implementation. +Because your project is in a local repository, you can use git. Saving your current state lets you jump back and forth +in case you introduce errors or need a break. + +```bash +git add . +git commit -m "Create Order Books" +``` + +In the next chapter, you learn how to deal with vouchers by minting and burning vouchers and locking and unlocking +native blockchain token in your app. 
diff --git a/docs/versioned_docs/version-v0.27/02-guide/07-interchange/05-mint-and-burn-voucher.md b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/05-mint-and-burn-voucher.md new file mode 100644 index 0000000..5b29c3c --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/05-mint-and-burn-voucher.md @@ -0,0 +1,351 @@ +--- +order: 5 +description: Mint vouchers and lock and unlock native token from a blockchain. +--- + +# Mint and Burn Vouchers + +In this chapter, you learn about vouchers. The `dex` module implementation mints vouchers and locks and unlocks native +token from a blockchain. + +There is a lot to learn from this `dex` module implementation: + +- You work with the `bank` keeper and use several methods it offers. +- You interact with another module and use the module account to lock tokens. + +This implementation can teach you how to use various interactions with module accounts or minting, locking or burning +tokens. + +## Create the SafeBurn Function to Burn Vouchers or Lock Tokens + +The `SafeBurn` function burns tokens if they are IBC vouchers (have an `ibc/` prefix) and locks tokens if they are +native to the chain. 
+ +Create a new `x/dex/keeper/mint.go` file: + +```go +// x/dex/keeper/mint.go + +package keeper + +import ( + "fmt" + "strings" + + sdkmath "cosmossdk.io/math" + sdk "github.com/cosmos/cosmos-sdk/types" + ibctransfertypes "github.com/cosmos/ibc-go/v6/modules/apps/transfer/types" + + "interchange/x/dex/types" +) + +// isIBCToken checks if the token came from the IBC module +// Each IBC token starts with an ibc/ denom, the check is rather simple +func isIBCToken(denom string) bool { + return strings.HasPrefix(denom, "ibc/") +} + +func (k Keeper) SafeBurn(ctx sdk.Context, port string, channel string, sender sdk.AccAddress, denom string, amount int32) error { + if isIBCToken(denom) { + // Burn the tokens + if err := k.BurnTokens(ctx, sender, sdk.NewCoin(denom, sdkmath.NewInt(int64(amount)))); err != nil { + return err + } + } else { + // Lock the tokens + if err := k.LockTokens(ctx, port, channel, sender, sdk.NewCoin(denom, sdkmath.NewInt(int64(amount)))); err != nil { + return err + } + } + + return nil +} +``` + +If the token comes from another blockchain as an IBC token, the burning method actually burns those IBC tokens on one +chain and unlocks them on the other chain. The native token are locked away. + +Now, implement the `BurnTokens` keeper method as used in the previous function. The `bankKeeper` has a useful function +for this: + +```go +// x/dex/keeper/mint.go + +package keeper + +// ... + +func (k Keeper) BurnTokens(ctx sdk.Context, sender sdk.AccAddress, tokens sdk.Coin) error { + // transfer the coins to the module account and burn them + if err := k.bankKeeper.SendCoinsFromAccountToModule(ctx, sender, types.ModuleName, sdk.NewCoins(tokens)); err != nil { + return err + } + + if err := k.bankKeeper.BurnCoins( + ctx, types.ModuleName, sdk.NewCoins(tokens), + ); err != nil { + // NOTE: should not happen as the module account was + // retrieved on the step above and it has enough balance + // to burn. 
+ panic(fmt.Sprintf("cannot burn coins after a successful send to a module account: %v", err)) + } + + return nil +} +``` + +Implement the `LockTokens` keeper method. + +To lock tokens from a native chain, you can send the native tokens to the Escrow Address: + +```go +// x/dex/keeper/mint.go + +package keeper + +// ... + +func (k Keeper) LockTokens(ctx sdk.Context, sourcePort string, sourceChannel string, sender sdk.AccAddress, tokens sdk.Coin) error { + // create the escrow address for the tokens + escrowAddress := ibctransfertypes.GetEscrowAddress(sourcePort, sourceChannel) + + // escrow source tokens. It fails if balance insufficient + if err := k.bankKeeper.SendCoins( + ctx, sender, escrowAddress, sdk.NewCoins(tokens), + ); err != nil { + return err + } + + return nil +} +``` + +`BurnTokens` and `LockTokens` use `SendCoinsFromAccountToModule`, `BurnCoins`, and `SendCoins` keeper methods of the +`bank` module. + +To start using these functions from the `dex` module, first add them to the `BankKeeper` interface in the +`x/dex/types/expected_keepers.go` file. + +```go +// x/dex/types/expected_keepers.go + +package types + +import sdk "github.com/cosmos/cosmos-sdk/types" + +// BankKeeper defines the expected bank keeper +type BankKeeper interface { + //... + SendCoinsFromAccountToModule(ctx sdk.Context, senderAddr sdk.AccAddress, recipientModule string, amt sdk.Coins) error + BurnCoins(ctx sdk.Context, moduleName string, amt sdk.Coins) error + SendCoins(ctx sdk.Context, fromAddr sdk.AccAddress, toAddr sdk.AccAddress, amt sdk.Coins) error +} +``` + +## SaveVoucherDenom + +The `SaveVoucherDenom` function saves the voucher denom to be able to convert it back later. 
+ +Create a new `x/dex/keeper/denom.go` file: + +```go +// x/dex/keeper/denom.go + +package keeper + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + ibctransfertypes "github.com/cosmos/ibc-go/v6/modules/apps/transfer/types" + + "interchange/x/dex/types" +) + +func (k Keeper) SaveVoucherDenom(ctx sdk.Context, port string, channel string, denom string) { + voucher := VoucherDenom(port, channel, denom) + + // Store the origin denom + _, saved := k.GetDenomTrace(ctx, voucher) + if !saved { + k.SetDenomTrace(ctx, types.DenomTrace{ + Index: voucher, + Port: port, + Channel: channel, + Origin: denom, + }) + } +} +``` + +Finally, the last function to implement is the `VoucherDenom` function that returns the voucher of the denom from the +port ID and channel ID: + +```go +// x/dex/keeper/denom.go + +package keeper + +// ... + +func VoucherDenom(port string, channel string, denom string) string { + // since SendPacket did not prefix the denomination, we must prefix denomination here + sourcePrefix := ibctransfertypes.GetDenomPrefix(port, channel) + + // NOTE: sourcePrefix contains the trailing "/" + prefixedDenom := sourcePrefix + denom + + // construct the denomination trace from the full raw denomination + denomTrace := ibctransfertypes.ParseDenomTrace(prefixedDenom) + voucher := denomTrace.IBCDenom() + return voucher[:16] +} +``` + +### Implement an OriginalDenom Function + +The `OriginalDenom` function returns back the original denom of the voucher. + +False is returned if the port ID and channel ID provided are not the origins of the voucher: + +```go +// x/dex/keeper/denom.go + +package keeper + +// ... 
+ +func (k Keeper) OriginalDenom(ctx sdk.Context, port string, channel string, voucher string) (string, bool) { + trace, exist := k.GetDenomTrace(ctx, voucher) + if exist { + // Check if original port and channel + if trace.Port == port && trace.Channel == channel { + return trace.Origin, true + } + } + + // Not the original chain + return "", false +} +``` + +### Implement a SafeMint Function + +If a token is an IBC token (has an `ibc/` prefix), the `SafeMint` function mints IBC token with `MintTokens`. +Otherwise, it unlocks native token with `UnlockTokens`. + +Go back to the `x/dex/keeper/mint.go` file and add the following code: + +```go +// x/dex/keeper/mint.go + +package keeper + +// ... + +func (k Keeper) SafeMint(ctx sdk.Context, port string, channel string, receiver sdk.AccAddress, denom string, amount int32) error { + if isIBCToken(denom) { + // Mint IBC tokens + if err := k.MintTokens(ctx, receiver, sdk.NewCoin(denom, sdkmath.NewInt(int64(amount)))); err != nil { + return err + } + } else { + // Unlock native tokens + if err := k.UnlockTokens( + ctx, + port, + channel, + receiver, + sdk.NewCoin(denom, sdkmath.NewInt(int64(amount))), + ); err != nil { + return err + } + } + + return nil +} +``` + +#### Implement a `MintTokens` Function + +You can use the `bankKeeper` function again to MintCoins. These token will then be sent to the receiver account: + +```go +// x/dex/keeper/mint.go + +package keeper + +// ... 
+ +func (k Keeper) MintTokens(ctx sdk.Context, receiver sdk.AccAddress, tokens sdk.Coin) error { + // mint new tokens if the source of the transfer is the same chain + if err := k.bankKeeper.MintCoins( + ctx, types.ModuleName, sdk.NewCoins(tokens), + ); err != nil { + return err + } + + // send to receiver + if err := k.bankKeeper.SendCoinsFromModuleToAccount( + ctx, types.ModuleName, receiver, sdk.NewCoins(tokens), + ); err != nil { + panic(fmt.Sprintf("unable to send coins from module to account despite previously minting coins to module account: %v", err)) + } + + return nil +} +``` + +Finally, add the function to unlock token after they are sent back to the native blockchain: + +```go +// x/dex/keeper/mint.go + +package keeper + +// ... + +func (k Keeper) UnlockTokens(ctx sdk.Context, sourcePort string, sourceChannel string, receiver sdk.AccAddress, tokens sdk.Coin) error { + // create the escrow address for the tokens + escrowAddress := ibctransfertypes.GetEscrowAddress(sourcePort, sourceChannel) + + // escrow source tokens. It fails if balance insufficient + if err := k.bankKeeper.SendCoins( + ctx, escrowAddress, receiver, sdk.NewCoins(tokens), + ); err != nil { + return err + } + + return nil +} +``` + +The `MintTokens` function uses two keeper methods from the `bank` module: `MintCoins` and `SendCoinsFromModuleToAccount` +. +To import these methods, add their signatures to the `BankKeeper` interface in the `x/dex/types/expected_keepers.go` +file: + +```go +// x/dex/types/expected_keepers.go + +package types + +// ... + +type BankKeeper interface { + // ... + MintCoins(ctx sdk.Context, moduleName string, amt sdk.Coins) error + SendCoinsFromModuleToAccount(ctx sdk.Context, senderModule string, recipientAddr sdk.AccAddress, amt sdk.Coins) error +} +``` + +## Summary + +You finished the mint and burn voucher logic. + +It is a good time to make another git commit to save the state of your work: + +```bash +git add . 
+git commit -m "Add Mint and Burn Voucher" +``` + +In the next chapter, you look into creating sell orders. diff --git a/docs/versioned_docs/version-v0.27/02-guide/07-interchange/06-creating-sell-orders.md b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/06-creating-sell-orders.md new file mode 100644 index 0000000..46f527d --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/06-creating-sell-orders.md @@ -0,0 +1,402 @@ +--- +sidebar_position: 6 +description: Implement logic to create sell orders. +--- + +# Create Sell Orders + +In this chapter, you implement the logic for creating sell orders. + +The packet proto file for a sell order is already generated. Add the seller information: + +```protobuf +// proto/dex/packet.proto + +message SellOrderPacketData { + // ... + string seller = 5; +} +``` + +Now, use Ignite CLI to build the proto files for the `send-sell-order` command. You used this command in a previous +chapter. + +```bash +ignite generate proto-go --yes +``` + +## Message Handling in SendSellOrder + +Sell orders are created using the `send-sell-order` command. This command creates a transaction with a `SendSellOrder` +message that triggers the `SendSellOrder` keeper method. + +The `SendSellOrder` command: + +* Checks that an order book for a specified denom pair exists. +* Safely burns or locks token. + * If the token is an IBC token, burn the token. + * If the token is a native token, lock the token. +* Saves the voucher that is received on the target chain to later resolve a denom. +* Transmits an IBC packet to the target chain. 
+ +```go +// x/dex/keeper/msg_server_sell_order.go + +package keeper + +import ( + "context" + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + clienttypes "github.com/cosmos/ibc-go/v6/modules/core/02-client/types" + + "interchange/x/dex/types" +) + +func (k msgServer) SendSellOrder(goCtx context.Context, msg *types.MsgSendSellOrder) (*types.MsgSendSellOrderResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // If an order book doesn't exist, throw an error + pairIndex := types.OrderBookIndex(msg.Port, msg.ChannelID, msg.AmountDenom, msg.PriceDenom) + _, found := k.GetSellOrderBook(ctx, pairIndex) + if !found { + return &types.MsgSendSellOrderResponse{}, errors.New("the pair doesn't exist") + } + + // Get sender's address + sender, err := sdk.AccAddressFromBech32(msg.Creator) + if err != nil { + return &types.MsgSendSellOrderResponse{}, err + } + + // Use SafeBurn to ensure no new native tokens are minted + if err := k.SafeBurn(ctx, msg.Port, msg.ChannelID, sender, msg.AmountDenom, msg.Amount); err != nil { + return &types.MsgSendSellOrderResponse{}, err + } + + // Save the voucher received on the other chain, to have the ability to resolve it into the original denom + k.SaveVoucherDenom(ctx, msg.Port, msg.ChannelID, msg.AmountDenom) + + var packet types.SellOrderPacketData + packet.Seller = msg.Creator + packet.AmountDenom = msg.AmountDenom + packet.Amount = msg.Amount + packet.PriceDenom = msg.PriceDenom + packet.Price = msg.Price + + // Transmit the packet + err = k.TransmitSellOrderPacket(ctx, packet, msg.Port, msg.ChannelID, clienttypes.ZeroHeight(), msg.TimeoutTimestamp) + if err != nil { + return nil, err + } + + return &types.MsgSendSellOrderResponse{}, nil +} +``` + +## On Receiving a Sell Order + +When a "sell order" packet is received on the target chain, you want the module to: + +* Update the sell order book +* Distribute sold token to the buyer +* Send the sell order to chain A after the fill attempt + +```go +// 
x/dex/keeper/sell_order.go + +package keeper + +// ... + +func (k Keeper) OnRecvSellOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.SellOrderPacketData) (packetAck types.SellOrderPacketAck, err error) { + if err := data.ValidateBasic(); err != nil { + return packetAck, err + } + + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.AmountDenom, data.PriceDenom) + book, found := k.GetBuyOrderBook(ctx, pairIndex) + if !found { + return packetAck, errors.New("the pair doesn't exist") + } + + // Fill sell order + remaining, liquidated, gain, _ := book.FillSellOrder(types.Order{ + Amount: data.Amount, + Price: data.Price, + }) + + // Return remaining amount and gains + packetAck.RemainingAmount = remaining.Amount + packetAck.Gain = gain + + // Before distributing sales, we resolve the denom + // First we check if the denom received comes from this chain originally + finalAmountDenom, saved := k.OriginalDenom(ctx, packet.DestinationPort, packet.DestinationChannel, data.AmountDenom) + if !saved { + // If it was not from this chain we use voucher as denom + finalAmountDenom = VoucherDenom(packet.SourcePort, packet.SourceChannel, data.AmountDenom) + } + + // Dispatch liquidated buy orders + for _, liquidation := range liquidated { + liquidation := liquidation + addr, err := sdk.AccAddressFromBech32(liquidation.Creator) + if err != nil { + return packetAck, err + } + + if err := k.SafeMint(ctx, packet.DestinationPort, packet.DestinationChannel, addr, finalAmountDenom, liquidation.Amount); err != nil { + return packetAck, err + } + } + + // Save the new order book + k.SetBuyOrderBook(ctx, book) + + return packetAck, nil +} +``` + +### Implement the FillSellOrder Function + +The `FillSellOrder` function tries to fill the buy order with the order book and returns all the side effects: + +```go +// x/dex/types/buy_order_book.go + +package types + +// ... 
+ +func (b *BuyOrderBook) FillSellOrder(order Order) ( + remainingSellOrder Order, + liquidated []Order, + gain int32, + filled bool, +) { + var liquidatedList []Order + totalGain := int32(0) + remainingSellOrder = order + + // Liquidate as long as there is match + for { + var match bool + var liquidation Order + remainingSellOrder, liquidation, gain, match, filled = b.LiquidateFromSellOrder( + remainingSellOrder, + ) + if !match { + break + } + + // Update gains + totalGain += gain + + // Update liquidated + liquidatedList = append(liquidatedList, liquidation) + + if filled { + break + } + } + + return remainingSellOrder, liquidatedList, totalGain, filled +} +``` + +### Implement The LiquidateFromSellOrder Function + +The `LiquidateFromSellOrder` function liquidates the first sell order of the book from the buy order. If no match is +found, return false for match: + +```go +// x/dex/types/buy_order_book.go + +package types + +// ... + +func (b *BuyOrderBook) LiquidateFromSellOrder(order Order) ( + remainingSellOrder Order, + liquidatedBuyOrder Order, + gain int32, + match bool, + filled bool, +) { + remainingSellOrder = order + + // No match if no order + orderCount := len(b.Book.Orders) + if orderCount == 0 { + return order, liquidatedBuyOrder, gain, false, false + } + + // Check if match + highestBid := b.Book.Orders[orderCount-1] + if order.Price > highestBid.Price { + return order, liquidatedBuyOrder, gain, false, false + } + + liquidatedBuyOrder = *highestBid + + // Check if sell order can be entirely filled + if highestBid.Amount >= order.Amount { + remainingSellOrder.Amount = 0 + liquidatedBuyOrder.Amount = order.Amount + gain = order.Amount * highestBid.Price + + // Remove the highest bid if it has been entirely liquidated + highestBid.Amount -= order.Amount + if highestBid.Amount == 0 { + b.Book.Orders = b.Book.Orders[:orderCount-1] + } else { + b.Book.Orders[orderCount-1] = highestBid + } + + return remainingSellOrder, liquidatedBuyOrder, gain, true, 
true + } + + // Not entirely filled + gain = highestBid.Amount * highestBid.Price + b.Book.Orders = b.Book.Orders[:orderCount-1] + remainingSellOrder.Amount -= highestBid.Amount + + return remainingSellOrder, liquidatedBuyOrder, gain, true, false +} +``` + +### Implement the OnAcknowledgement Function for Sell Order Packets + +After an IBC packet is processed on the target chain, an acknowledgement is returned to the source chain and processed +by the `OnAcknowledgementSellOrderPacket` function. + +The dex module on the source chain: + +* Stores the remaining sell order in the sell order book. +* Distributes sold tokens to the buyers. +* Distributes the price of the amount sold to the seller. +* On error, mints the burned tokens. + +```go +// x/dex/keeper/sell_order.go + +package keeper + +// ... + +func (k Keeper) OnAcknowledgementSellOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.SellOrderPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + // In case of error we mint back the native token + receiver, err := sdk.AccAddressFromBech32(data.Seller) + if err != nil { + return err + } + + if err := k.SafeMint(ctx, packet.SourcePort, packet.SourceChannel, receiver, data.AmountDenom, data.Amount); err != nil { + return err + } + + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.SellOrderPacketAck + if err := types.ModuleCdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + // Get the sell order book + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.AmountDenom, data.PriceDenom) + book, found := k.GetSellOrderBook(ctx, pairIndex) + if !found { + panic("sell order book must exist") + } + + // Append the 
remaining amount of the order + if packetAck.RemainingAmount > 0 { + _, err := book.AppendOrder(data.Seller, packetAck.RemainingAmount, data.Price) + if err != nil { + return err + } + + // Save the new order book + k.SetSellOrderBook(ctx, book) + } + + // Mint the gains + if packetAck.Gain > 0 { + receiver, err := sdk.AccAddressFromBech32(data.Seller) + if err != nil { + return err + } + + finalPriceDenom, saved := k.OriginalDenom(ctx, packet.SourcePort, packet.SourceChannel, data.PriceDenom) + if !saved { + // If it was not from this chain we use voucher as denom + finalPriceDenom = VoucherDenom(packet.DestinationPort, packet.DestinationChannel, data.PriceDenom) + } + + if err := k.SafeMint(ctx, packet.SourcePort, packet.SourceChannel, receiver, finalPriceDenom, packetAck.Gain); err != nil { + return err + } + } + + return nil + default: + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("invalid acknowledgment format") + } +} +``` + +```go +// x/dex/types/sell_order_book.go + +package types + +// ... + +func (s *SellOrderBook) AppendOrder(creator string, amount int32, price int32) (int32, error) { + return s.Book.appendOrder(creator, amount, price, Decreasing) +} +``` + +### Add the OnTimeout of a Sell Order Packet Function + +If a timeout occurs, mint back the native token: + +```go +// x/dex/keeper/sell_order.go + +package keeper + +// ... + +func (k Keeper) OnTimeoutSellOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.SellOrderPacketData) error { + // In case of error we mint back the native token + receiver, err := sdk.AccAddressFromBech32(data.Seller) + if err != nil { + return err + } + + if err := k.SafeMint(ctx, packet.SourcePort, packet.SourceChannel, receiver, data.AmountDenom, data.Amount); err != nil { + return err + } + + return nil +} +``` + +## Summary + +Great, you have completed the sell order logic. 
+ +It is a good time to make another git commit again to save the state of your work: + +```bash +git add . +git commit -m "Add Sell Orders" +``` diff --git a/docs/versioned_docs/version-v0.27/02-guide/07-interchange/07-creating-buy-orders.md b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/07-creating-buy-orders.md new file mode 100644 index 0000000..e1ed165 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/07-creating-buy-orders.md @@ -0,0 +1,440 @@ +--- +sidebar_position: 7 +description: Implement the buy order logic. +--- + +# Creating Buy Orders + +In this chapter, you implement the creation of buy orders. The logic is very similar to the sell order logic you +implemented in the previous chapter. + +## Modify the Proto Definition + +Add the buyer to the proto file definition: + +```protobuf +// proto/interchange/dex/packet.proto + +message BuyOrderPacketData { + // ... + string buyer = 5; +} +``` + +Now, use Ignite CLI to build the proto files for the `send-buy-order` command. You used this command in previous +chapters. 
+ +```bash +ignite generate proto-go --yes +``` + +## IBC Message Handling in SendBuyOrder + +* Check if the pair exists on the order book +* If the token is an IBC token, burn the tokens +* If the token is a native token, lock the tokens +* Save the voucher received on the target chain to later resolve a denom + +```go +// x/dex/keeper/msg_server_buy_order.go + +package keeper + +import ( + "context" + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "interchange/x/dex/types" +) + +func (k msgServer) SendBuyOrder(goCtx context.Context, msg *types.MsgSendBuyOrder) (*types.MsgSendBuyOrderResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Cannot send a order if the pair doesn't exist + pairIndex := types.OrderBookIndex(msg.Port, msg.ChannelID, msg.AmountDenom, msg.PriceDenom) + _, found := k.GetBuyOrderBook(ctx, pairIndex) + if !found { + return &types.MsgSendBuyOrderResponse{}, errors.New("the pair doesn't exist") + } + + // Lock the token to send + sender, err := sdk.AccAddressFromBech32(msg.Creator) + if err != nil { + return &types.MsgSendBuyOrderResponse{}, err + } + + // Use SafeBurn to ensure no new native tokens are minted + if err := k.SafeBurn(ctx, msg.Port, msg.ChannelID, sender, msg.PriceDenom, msg.Amount*msg.Price); err != nil { + return &types.MsgSendBuyOrderResponse{}, err + } + + // Save the voucher received on the other chain, to have the ability to resolve it into the original denom + k.SaveVoucherDenom(ctx, msg.Port, msg.ChannelID, msg.PriceDenom) + + // Construct the packet + var packet types.BuyOrderPacketData + + packet.Buyer = msg.Creator + packet.AmountDenom = msg.AmountDenom + packet.Amount = msg.Amount + packet.PriceDenom = msg.PriceDenom + packet.Price = msg.Price + + // Transmit the packet + err = k.TransmitBuyOrderPacket( + ctx, + packet, + msg.Port, + msg.ChannelID, + clienttypes.ZeroHeight(), + msg.TimeoutTimestamp, + ) + if err != nil { + return nil, err + } + + // Transmit an IBC packet... 
+ return &types.MsgSendBuyOrderResponse{}, nil +} +``` + +## On Receiving a Buy Order + +* Update the buy order book +* Distribute sold token to the buyer +* Send the sell order to chain A after the fill attempt + +```go +// x/dex/keeper/buy_order.go + +package keeper + +// ... + +func (k Keeper) OnRecvBuyOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.BuyOrderPacketData) (packetAck types.BuyOrderPacketAck, err error) { + // validate packet data upon receiving + if err := data.ValidateBasic(); err != nil { + return packetAck, err + } + + // Check if the sell order book exists + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.AmountDenom, data.PriceDenom) + book, found := k.GetSellOrderBook(ctx, pairIndex) + if !found { + return packetAck, errors.New("the pair doesn't exist") + } + + // Fill buy order + remaining, liquidated, purchase, _ := book.FillBuyOrder(types.Order{ + Amount: data.Amount, + Price: data.Price, + }) + + // Return remaining amount and gains + packetAck.RemainingAmount = remaining.Amount + packetAck.Purchase = purchase + + // Before distributing gains, we resolve the denom + // First we check if the denom received comes from this chain originally + finalPriceDenom, saved := k.OriginalDenom(ctx, packet.DestinationPort, packet.DestinationChannel, data.PriceDenom) + if !saved { + // If it was not from this chain we use voucher as denom + finalPriceDenom = VoucherDenom(packet.SourcePort, packet.SourceChannel, data.PriceDenom) + } + + // Dispatch liquidated buy order + for _, liquidation := range liquidated { + liquidation := liquidation + addr, err := sdk.AccAddressFromBech32(liquidation.Creator) + if err != nil { + return packetAck, err + } + + if err := k.SafeMint( + ctx, + packet.DestinationPort, + packet.DestinationChannel, + addr, + finalPriceDenom, + liquidation.Amount*liquidation.Price, + ); err != nil { + return packetAck, err + } + } + + // Save the new order book + k.SetSellOrderBook(ctx, 
book) + + return packetAck, nil +} +``` + +### Implement a FillBuyOrder Function + +The `FillBuyOrder` function tries to fill the sell order with the order book and returns all the side effects: + +```go +// x/dex/types/sell_order_book.go + +package types + +// ... + +func (s *SellOrderBook) FillBuyOrder(order Order) ( + remainingBuyOrder Order, + liquidated []Order, + purchase int32, + filled bool, +) { + var liquidatedList []Order + totalPurchase := int32(0) + remainingBuyOrder = order + + // Liquidate as long as there is match + for { + var match bool + var liquidation Order + remainingBuyOrder, liquidation, purchase, match, filled = s.LiquidateFromBuyOrder( + remainingBuyOrder, + ) + if !match { + break + } + + // Update gains + totalPurchase += purchase + + // Update liquidated + liquidatedList = append(liquidatedList, liquidation) + + if filled { + break + } + } + + return remainingBuyOrder, liquidatedList, totalPurchase, filled +} +``` + +### Implement a LiquidateFromBuyOrder Function + +The `LiquidateFromBuyOrder` function liquidates the first buy order of the book from the sell order. If no match is +found, return false for match: + +```go +// x/dex/types/sell_order_book.go + +package types + +// ... 
+ +func (s *SellOrderBook) LiquidateFromBuyOrder(order Order) ( + remainingBuyOrder Order, + liquidatedSellOrder Order, + purchase int32, + match bool, + filled bool, +) { + remainingBuyOrder = order + + // No match if no order + orderCount := len(s.Book.Orders) + if orderCount == 0 { + return order, liquidatedSellOrder, purchase, false, false + } + + // Check if match + lowestAsk := s.Book.Orders[orderCount-1] + if order.Price < lowestAsk.Price { + return order, liquidatedSellOrder, purchase, false, false + } + + liquidatedSellOrder = *lowestAsk + + // Check if buy order can be entirely filled + if lowestAsk.Amount >= order.Amount { + remainingBuyOrder.Amount = 0 + liquidatedSellOrder.Amount = order.Amount + purchase = order.Amount + + // Remove lowest ask if it has been entirely liquidated + lowestAsk.Amount -= order.Amount + if lowestAsk.Amount == 0 { + s.Book.Orders = s.Book.Orders[:orderCount-1] + } else { + s.Book.Orders[orderCount-1] = lowestAsk + } + + return remainingBuyOrder, liquidatedSellOrder, purchase, true, true + } + + // Not entirely filled + purchase = lowestAsk.Amount + s.Book.Orders = s.Book.Orders[:orderCount-1] + remainingBuyOrder.Amount -= lowestAsk.Amount + + return remainingBuyOrder, liquidatedSellOrder, purchase, true, false +} +``` + +## Receiving a Buy Order Acknowledgment + +After a buy order acknowledgement is received, chain `Mars`: + +* Stores the remaining sell order in the sell order book. +* Distributes sold `marscoin` to the buyers. +* Distributes to the seller the price of the amount sold. +* On error, mints back the burned tokens. + +```go +// x/dex/keeper/buy_order.go + +package keeper + +// ... 
+ +func (k Keeper) OnAcknowledgementBuyOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.BuyOrderPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + // In case of error we mint back the native token + receiver, err := sdk.AccAddressFromBech32(data.Buyer) + if err != nil { + return err + } + + if err := k.SafeMint( + ctx, + packet.SourcePort, + packet.SourceChannel, + receiver, + data.PriceDenom, + data.Amount*data.Price, + ); err != nil { + return err + } + + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.BuyOrderPacketAck + + if err := types.ModuleCdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + // Get the sell order book + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.AmountDenom, data.PriceDenom) + book, found := k.GetBuyOrderBook(ctx, pairIndex) + if !found { + panic("buy order book must exist") + } + + // Append the remaining amount of the order + if packetAck.RemainingAmount > 0 { + _, err := book.AppendOrder( + data.Buyer, + packetAck.RemainingAmount, + data.Price, + ) + if err != nil { + return err + } + + // Save the new order book + k.SetBuyOrderBook(ctx, book) + } + + // Mint the purchase + if packetAck.Purchase > 0 { + receiver, err := sdk.AccAddressFromBech32(data.Buyer) + if err != nil { + return err + } + + finalAmountDenom, saved := k.OriginalDenom(ctx, packet.SourcePort, packet.SourceChannel, data.AmountDenom) + if !saved { + // If it was not from this chain we use voucher as denom + finalAmountDenom = VoucherDenom(packet.DestinationPort, packet.DestinationChannel, data.AmountDenom) + } + + if err := k.SafeMint( + ctx, + packet.SourcePort, + packet.SourceChannel, + receiver, + 
finalAmountDenom, + packetAck.Purchase, + ); err != nil { + return err + } + } + + return nil + default: + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("invalid acknowledgment format") + } +} +``` + +`AppendOrder` appends an order in the buy order book. +Add the following function to the `x/dex/types/buy_order_book.go` file in the `types` directory. + +```go +// x/dex/types/buy_order_book.go + +package types + +// ... + +func (b *BuyOrderBook) AppendOrder(creator string, amount int32, price int32) (int32, error) { + return b.Book.appendOrder(creator, amount, price, Increasing) +} +``` + +## OnTimeout of a Buy Order Packet + +If a timeout occurs, mint back the native token: + +```go +// x/dex/keeper/buy_order.go + +package keeper + +// ... + +func (k Keeper) OnTimeoutBuyOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.BuyOrderPacketData) error { + // In case of error we mint back the native token + receiver, err := sdk.AccAddressFromBech32(data.Buyer) + if err != nil { + return err + } + + if err := k.SafeMint( + ctx, + packet.SourcePort, + packet.SourceChannel, + receiver, + data.PriceDenom, + data.Amount*data.Price, + ); err != nil { + return err + } + + return nil +} +``` + +## Summary + +Congratulations, you implemented the buy order logic. + +Again, it's a good time to save your current state to your local GitHub repository: + +```bash +git add . +git commit -m "Add Buy Orders" +``` diff --git a/docs/versioned_docs/version-v0.27/02-guide/07-interchange/08-cancelling-orders.md b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/08-cancelling-orders.md new file mode 100644 index 0000000..f6c44ee --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/08-cancelling-orders.md @@ -0,0 +1,200 @@ +--- +sidebar_position: 8 +description: Enable cancelling of buy and sell orders. +--- + +# Cancelling Orders + +You have implemented order books, buy and sell orders. 
In this chapter, you enable cancelling of buy and sell orders. + +## Cancel a Sell Order + +To cancel a sell order, you have to get the ID of the specific sell order. Then you can use the function +`RemoveOrderFromID` to remove the specific order from the order book and update the keeper accordingly. + +Move to the keeper directory and edit the `x/dex/keeper/msg_server_cancel_sell_order.go` file: + +```go +// x/dex/keeper/msg_server_cancel_sell_order.go + +package keeper + +import ( + "context" + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "interchange/x/dex/types" +) + +func (k msgServer) CancelSellOrder(goCtx context.Context, msg *types.MsgCancelSellOrder) (*types.MsgCancelSellOrderResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Retrieve the book + pairIndex := types.OrderBookIndex(msg.Port, msg.Channel, msg.AmountDenom, msg.PriceDenom) + s, found := k.GetSellOrderBook(ctx, pairIndex) + if !found { + return &types.MsgCancelSellOrderResponse{}, errors.New("the pair doesn't exist") + } + + // Check order creator + order, err := s.Book.GetOrderFromID(msg.OrderID) + if err != nil { + return &types.MsgCancelSellOrderResponse{}, err + } + + if order.Creator != msg.Creator { + return &types.MsgCancelSellOrderResponse{}, errors.New("canceller must be creator") + } + + // Remove order + if err := s.Book.RemoveOrderFromID(msg.OrderID); err != nil { + return &types.MsgCancelSellOrderResponse{}, err + } + + k.SetSellOrderBook(ctx, s) + + // Refund seller with remaining amount + seller, err := sdk.AccAddressFromBech32(order.Creator) + if err != nil { + return &types.MsgCancelSellOrderResponse{}, err + } + + if err := k.SafeMint(ctx, msg.Port, msg.Channel, seller, msg.AmountDenom, order.Amount); err != nil { + return &types.MsgCancelSellOrderResponse{}, err + } + + return &types.MsgCancelSellOrderResponse{}, nil +} +``` + +### Implement the GetOrderFromID Function + +The `GetOrderFromID` function gets an order from the book from its ID. 
+ +Add this function to the `x/dex/types/order_book.go` function in the `types` directory: + +```go +// x/dex/types/order_book.go + +func (book OrderBook) GetOrderFromID(id int32) (Order, error) { + for _, order := range book.Orders { + if order.Id == id { + return *order, nil + } + } + + return Order{}, ErrOrderNotFound +} +``` + +### Implement the RemoveOrderFromID Function + +The `RemoveOrderFromID` function removes an order from the book and keeps it ordered: + +```go +// x/dex/types/order_book.go + +package types + +// ... + +func (book *OrderBook) RemoveOrderFromID(id int32) error { + for i, order := range book.Orders { + if order.Id == id { + book.Orders = append(book.Orders[:i], book.Orders[i+1:]...) + return nil + } + } + + return ErrOrderNotFound +} +``` + +## Cancel a Buy Order + +To cancel a buy order, you have to get the ID of the specific buy order. Then you can use the function +`RemoveOrderFromID` to remove the specific order from the order book and update the keeper accordingly: + +```go +// x/dex/keeper/msg_server_cancel_buy_order.go + +package keeper + +import ( + "context" + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "interchange/x/dex/types" +) + +func (k msgServer) CancelBuyOrder(goCtx context.Context, msg *types.MsgCancelBuyOrder) (*types.MsgCancelBuyOrderResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Retrieve the book + pairIndex := types.OrderBookIndex(msg.Port, msg.Channel, msg.AmountDenom, msg.PriceDenom) + b, found := k.GetBuyOrderBook(ctx, pairIndex) + if !found { + return &types.MsgCancelBuyOrderResponse{}, errors.New("the pair doesn't exist") + } + + // Check order creator + order, err := b.Book.GetOrderFromID(msg.OrderID) + if err != nil { + return &types.MsgCancelBuyOrderResponse{}, err + } + + if order.Creator != msg.Creator { + return &types.MsgCancelBuyOrderResponse{}, errors.New("canceller must be creator") + } + + // Remove order + if err := b.Book.RemoveOrderFromID(msg.OrderID); err != nil { + 
return &types.MsgCancelBuyOrderResponse{}, err + } + + k.SetBuyOrderBook(ctx, b) + + // Refund buyer with remaining price amount + buyer, err := sdk.AccAddressFromBech32(order.Creator) + if err != nil { + return &types.MsgCancelBuyOrderResponse{}, err + } + + if err := k.SafeMint( + ctx, + msg.Port, + msg.Channel, + buyer, + msg.PriceDenom, + order.Amount*order.Price, + ); err != nil { + return &types.MsgCancelBuyOrderResponse{}, err + } + + return &types.MsgCancelBuyOrderResponse{}, nil +} +``` + +## Summary + +You have completed implementing the functions that are required for the `dex` module. In this chapter, you have +implemented the design for cancelling specific buy or sell orders. + +To test if your Ignite CLI blockchain builds correctly, use the `chain build` command: + +```bash +ignite chain build +``` + +Again, it is a good time (a great time!) to add your state to the local GitHub repository: + +```bash +git add . +git commit -m "Add Cancelling Orders" +``` + +Finally, it's now time to write test files. diff --git a/docs/versioned_docs/version-v0.27/02-guide/07-interchange/09-tests.md b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/09-tests.md new file mode 100644 index 0000000..8d3d933 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/09-tests.md @@ -0,0 +1,729 @@ +--- +sidebar_position: 9 +description: Add test files. +--- + +# Write Test Files + +To test your application, add the test files to your code. + +After you add the test files, change into the `interchange` directory with your terminal, then run: + +```bash +go test -timeout 30s ./x/dex/types +``` + +## Order Book Tests + +Create a new `x/dex/types/order_book_test.go` file in the `types` directory. 
+ +Add the following testsuite: + +```go +// x/dex/types/order_book_test.go + +package types_test + +import ( + "math/rand" + "testing" + + "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/stretchr/testify/require" + + "interchange/x/dex/types" +) + +func GenString(n int) string { + alpha := []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ") + + buf := make([]rune, n) + for i := range buf { + buf[i] = alpha[rand.Intn(len(alpha))] + } + + return string(buf) +} + +func GenAddress() string { + pk := ed25519.GenPrivKey().PubKey() + addr := pk.Address() + return sdk.AccAddress(addr).String() +} + +func GenAmount() int32 { + return int32(rand.Intn(int(types.MaxAmount)) + 1) +} + +func GenPrice() int32 { + return int32(rand.Intn(int(types.MaxPrice)) + 1) +} + +func GenPair() (string, string) { + return GenString(10), GenString(10) +} + +func GenOrder() (string, int32, int32) { + return GenLocalAccount(), GenAmount(), GenPrice() +} + +func GenLocalAccount() string { + return GenAddress() +} + +func MockAccount(str string) string { + return str +} + +func OrderListToOrderBook(list []types.Order) types.OrderBook { + listCopy := make([]*types.Order, len(list)) + for i, order := range list { + order := order + listCopy[i] = &order + } + + return types.OrderBook{ + IdCount: 0, + Orders: listCopy, + } +} + +func TestRemoveOrderFromID(t *testing.T) { + inputList := []types.Order{ + {Id: 3, Creator: MockAccount("3"), Amount: 2, Price: 10}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + + book := OrderListToOrderBook(inputList) + expectedList := []types.Order{ + {Id: 3, Creator: MockAccount("3"), Amount: 2, Price: 10}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + expectedBook := 
OrderListToOrderBook(expectedList) + err := book.RemoveOrderFromID(2) + require.NoError(t, err) + require.Equal(t, expectedBook, book) + + book = OrderListToOrderBook(inputList) + expectedList = []types.Order{ + {Id: 3, Creator: MockAccount("3"), Amount: 2, Price: 10}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + } + expectedBook = OrderListToOrderBook(expectedList) + err = book.RemoveOrderFromID(0) + require.NoError(t, err) + require.Equal(t, expectedBook, book) + + book = OrderListToOrderBook(inputList) + expectedList = []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + expectedBook = OrderListToOrderBook(expectedList) + err = book.RemoveOrderFromID(3) + require.NoError(t, err) + require.Equal(t, expectedBook, book) + + book = OrderListToOrderBook(inputList) + err = book.RemoveOrderFromID(4) + require.ErrorIs(t, err, types.ErrOrderNotFound) +} +``` + +## Buy Order Tests + +Create a new `x/dex/types/buy_order_book_test.go` file in the `types` directory to add the tests for the Buy Order Book: + +```go +// x/dex/types/buy_order_book_test.go + +package types_test + +import ( + "sort" + "testing" + + "github.com/stretchr/testify/require" + + "interchange/x/dex/types" +) + +func OrderListToBuyOrderBook(list []types.Order) types.BuyOrderBook { + listCopy := make([]*types.Order, len(list)) + for i, order := range list { + order := order + listCopy[i] = &order + } + + book := types.BuyOrderBook{ + AmountDenom: "foo", + PriceDenom: "bar", + Book: &types.OrderBook{ + IdCount: 0, + Orders: listCopy, + }, + } + return book +} + +func TestAppendOrder(t *testing.T) { + buyBook := types.NewBuyOrderBook(GenPair()) + + // Prevent zero amount + seller, amount, price := GenOrder() + _, err := buyBook.AppendOrder(seller, 0, price) + require.ErrorIs(t, 
err, types.ErrZeroAmount) + + // Prevent big amount + _, err = buyBook.AppendOrder(seller, types.MaxAmount+1, price) + require.ErrorIs(t, err, types.ErrMaxAmount) + + // Prevent zero price + _, err = buyBook.AppendOrder(seller, amount, 0) + require.ErrorIs(t, err, types.ErrZeroPrice) + + // Prevent big price + _, err = buyBook.AppendOrder(seller, amount, types.MaxPrice+1) + require.ErrorIs(t, err, types.ErrMaxPrice) + + // Can append buy orders + for i := 0; i < 20; i++ { + // Append a new order + creator, amount, price := GenOrder() + newOrder := types.Order{ + Id: buyBook.Book.IdCount, + Creator: creator, + Amount: amount, + Price: price, + } + orderID, err := buyBook.AppendOrder(creator, amount, price) + + // Checks + require.NoError(t, err) + require.Contains(t, buyBook.Book.Orders, &newOrder) + require.Equal(t, newOrder.Id, orderID) + } + + require.Len(t, buyBook.Book.Orders, 20) + require.True(t, sort.SliceIsSorted(buyBook.Book.Orders, func(i, j int) bool { + return buyBook.Book.Orders[i].Price < buyBook.Book.Orders[j].Price + })) +} + +type liquidateSellRes struct { + Book []types.Order + Remaining types.Order + Liquidated types.Order + Gain int32 + Match bool + Filled bool +} + +func simulateLiquidateFromSellOrder( + t *testing.T, + inputList []types.Order, + inputOrder types.Order, + expected liquidateSellRes, +) { + book := OrderListToBuyOrderBook(inputList) + expectedBook := OrderListToBuyOrderBook(expected.Book) + + require.True(t, sort.SliceIsSorted(book.Book.Orders, func(i, j int) bool { + return book.Book.Orders[i].Price < book.Book.Orders[j].Price + })) + require.True(t, sort.SliceIsSorted(expectedBook.Book.Orders, func(i, j int) bool { + return expectedBook.Book.Orders[i].Price < expectedBook.Book.Orders[j].Price + })) + + remaining, liquidated, gain, match, filled := book.LiquidateFromSellOrder(inputOrder) + + require.Equal(t, expectedBook, book) + require.Equal(t, expected.Remaining, remaining) + require.Equal(t, expected.Liquidated, liquidated) 
+ require.Equal(t, expected.Gain, gain) + require.Equal(t, expected.Match, match) + require.Equal(t, expected.Filled, filled) +} + +func TestLiquidateFromSellOrder(t *testing.T) { + // No match for empty book + inputOrder := types.Order{Id: 10, Creator: MockAccount("1"), Amount: 100, Price: 30} + book := OrderListToBuyOrderBook([]types.Order{}) + _, _, _, match, _ := book.LiquidateFromSellOrder(inputOrder) + require.False(t, match) + + // Buy book + inputBook := []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + + // Test no match if highest bid too low (25 < 30) + book = OrderListToBuyOrderBook(inputBook) + _, _, _, match, _ = book.LiquidateFromSellOrder(inputOrder) + require.False(t, match) + + // Entirely filled (30 < 50) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 22} + expected := liquidateSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 20, Price: 25}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 22}, + Liquidated: types.Order{Id: 0, Creator: MockAccount("0"), Amount: 30, Price: 25}, + Gain: int32(30 * 25), + Match: true, + Filled: true, + } + simulateLiquidateFromSellOrder(t, inputBook, inputOrder, expected) + + // Entirely filled and liquidated ( 50 = 50) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 50, Price: 15} + expected = liquidateSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 15}, + Liquidated: types.Order{Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + 
Gain: int32(50 * 25), + Match: true, + Filled: true, + } + simulateLiquidateFromSellOrder(t, inputBook, inputOrder, expected) + + // Not filled and entirely liquidated (60 > 50) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 10} + expected = liquidateSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 10, Price: 10}, + Liquidated: types.Order{Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + Gain: int32(50 * 25), + Match: true, + Filled: false, + } + simulateLiquidateFromSellOrder(t, inputBook, inputOrder, expected) +} + +type fillSellRes struct { + Book []types.Order + Remaining types.Order + Liquidated []types.Order + Gain int32 + Filled bool +} + +func simulateFillSellOrder( + t *testing.T, + inputList []types.Order, + inputOrder types.Order, + expected fillSellRes, +) { + book := OrderListToBuyOrderBook(inputList) + expectedBook := OrderListToBuyOrderBook(expected.Book) + + require.True(t, sort.SliceIsSorted(book.Book.Orders, func(i, j int) bool { + return book.Book.Orders[i].Price < book.Book.Orders[j].Price + })) + require.True(t, sort.SliceIsSorted(expectedBook.Book.Orders, func(i, j int) bool { + return expectedBook.Book.Orders[i].Price < expectedBook.Book.Orders[j].Price + })) + + remaining, liquidated, gain, filled := book.FillSellOrder(inputOrder) + + require.Equal(t, expectedBook, book) + require.Equal(t, expected.Remaining, remaining) + require.Equal(t, expected.Liquidated, liquidated) + require.Equal(t, expected.Gain, gain) + require.Equal(t, expected.Filled, filled) +} + +func TestFillSellOrder(t *testing.T) { + var inputBook []types.Order + + // Empty book + inputOrder := types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 30} + expected := fillSellRes{ + Book: []types.Order{}, + Remaining: inputOrder, + Liquidated: 
[]types.Order(nil), + Gain: int32(0), + Filled: false, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) + + // No match + inputBook = []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + expected = fillSellRes{ + Book: inputBook, + Remaining: inputOrder, + Liquidated: []types.Order(nil), + Gain: int32(0), + Filled: false, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) + + // First order liquidated, not filled + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 22} + expected = fillSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 10, Price: 22}, + Liquidated: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + }, + Gain: int32(50 * 25), + Filled: false, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) + + // Filled with two order + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 18} + expected = fillSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 190, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 18}, + Liquidated: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 10, Price: 20}, + }, + Gain: int32(50*25 + 10*20), + Filled: true, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) + + // Not filled, buy order book liquidated + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 300, Price: 10} + expected = fillSellRes{ + Book: []types.Order{}, + Remaining: types.Order{Id: 10, 
Creator: MockAccount("1"), Amount: 20, Price: 10}, + Liquidated: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + }, + Gain: int32(50*25 + 200*20 + 30*15), + Filled: false, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) +} +``` + +## Sell Order Tests + +Create a new testsuite for Sell Orders in a new file `x/dex/types/sell_order_book_test.go`: + +```go +// x/dex/types/sell_order_book_test.go + +package types_test + +import ( + "sort" + "testing" + + "github.com/stretchr/testify/require" + + "interchange/x/dex/types" +) + +func OrderListToSellOrderBook(list []types.Order) types.SellOrderBook { + listCopy := make([]*types.Order, len(list)) + for i, order := range list { + order := order + listCopy[i] = &order + } + + book := types.SellOrderBook{ + AmountDenom: "foo", + PriceDenom: "bar", + Book: &types.OrderBook{ + IdCount: 0, + Orders: listCopy, + }, + } + return book +} + +func TestSellOrderBook_AppendOrder(t *testing.T) { + sellBook := types.NewSellOrderBook(GenPair()) + + // Prevent zero amount + seller, amount, price := GenOrder() + _, err := sellBook.AppendOrder(seller, 0, price) + require.ErrorIs(t, err, types.ErrZeroAmount) + + // Prevent big amount + _, err = sellBook.AppendOrder(seller, types.MaxAmount+1, price) + require.ErrorIs(t, err, types.ErrMaxAmount) + + // Prevent zero price + _, err = sellBook.AppendOrder(seller, amount, 0) + require.ErrorIs(t, err, types.ErrZeroPrice) + + // Prevent big price + _, err = sellBook.AppendOrder(seller, amount, types.MaxPrice+1) + require.ErrorIs(t, err, types.ErrMaxPrice) + + // Can append sell orders + for i := 0; i < 20; i++ { + // Append a new order + creator, amount, price := GenOrder() + newOrder := types.Order{ + Id: sellBook.Book.IdCount, + Creator: creator, + Amount: amount, + Price: price, + } + orderID, err := sellBook.AppendOrder(creator, 
amount, price) + + // Checks + require.NoError(t, err) + require.Contains(t, sellBook.Book.Orders, &newOrder) + require.Equal(t, newOrder.Id, orderID) + } + require.Len(t, sellBook.Book.Orders, 20) + require.True(t, sort.SliceIsSorted(sellBook.Book.Orders, func(i, j int) bool { + return sellBook.Book.Orders[i].Price > sellBook.Book.Orders[j].Price + })) +} + +type liquidateBuyRes struct { + Book []types.Order + Remaining types.Order + Liquidated types.Order + Purchase int32 + Match bool + Filled bool +} + +func simulateLiquidateFromBuyOrder( + t *testing.T, + inputList []types.Order, + inputOrder types.Order, + expected liquidateBuyRes, +) { + book := OrderListToSellOrderBook(inputList) + expectedBook := OrderListToSellOrderBook(expected.Book) + require.True(t, sort.SliceIsSorted(book.Book.Orders, func(i, j int) bool { + return book.Book.Orders[i].Price > book.Book.Orders[j].Price + })) + require.True(t, sort.SliceIsSorted(expectedBook.Book.Orders, func(i, j int) bool { + return expectedBook.Book.Orders[i].Price > expectedBook.Book.Orders[j].Price + })) + + remaining, liquidated, purchase, match, filled := book.LiquidateFromBuyOrder(inputOrder) + + require.Equal(t, expectedBook, book) + require.Equal(t, expected.Remaining, remaining) + require.Equal(t, expected.Liquidated, liquidated) + require.Equal(t, expected.Purchase, purchase) + require.Equal(t, expected.Match, match) + require.Equal(t, expected.Filled, filled) +} + +func TestLiquidateFromBuyOrder(t *testing.T) { + // No match for empty book + inputOrder := types.Order{Id: 10, Creator: MockAccount("1"), Amount: 100, Price: 10} + book := OrderListToSellOrderBook([]types.Order{}) + _, _, _, match, _ := book.LiquidateFromBuyOrder(inputOrder) + require.False(t, match) + + // Sell book + inputBook := []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + } + + // Test no 
match if lowest ask too high (25 < 30) + book = OrderListToSellOrderBook(inputBook) + _, _, _, match, _ = book.LiquidateFromBuyOrder(inputOrder) + require.False(t, match) + + // Entirely filled (30 > 15) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 20, Price: 30} + expected := liquidateBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 2, Creator: MockAccount("2"), Amount: 10, Price: 15}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 30}, + Liquidated: types.Order{Id: 2, Creator: MockAccount("2"), Amount: 20, Price: 15}, + Purchase: int32(20), + Match: true, + Filled: true, + } + simulateLiquidateFromBuyOrder(t, inputBook, inputOrder, expected) + + // Entirely filled (30 = 30) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 30} + expected = liquidateBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 30}, + Liquidated: types.Order{Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + Purchase: int32(30), + Match: true, + Filled: true, + } + simulateLiquidateFromBuyOrder(t, inputBook, inputOrder, expected) + + // Not filled and entirely liquidated (60 > 30) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 30} + expected = liquidateBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 30}, + Liquidated: types.Order{Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + Purchase: int32(30), + Match: true, + Filled: false, + } + simulateLiquidateFromBuyOrder(t, 
inputBook, inputOrder, expected) +} + +type fillBuyRes struct { + Book []types.Order + Remaining types.Order + Liquidated []types.Order + Purchase int32 + Filled bool +} + +func simulateFillBuyOrder( + t *testing.T, + inputList []types.Order, + inputOrder types.Order, + expected fillBuyRes, +) { + book := OrderListToSellOrderBook(inputList) + expectedBook := OrderListToSellOrderBook(expected.Book) + + require.True(t, sort.SliceIsSorted(book.Book.Orders, func(i, j int) bool { + return book.Book.Orders[i].Price > book.Book.Orders[j].Price + })) + require.True(t, sort.SliceIsSorted(expectedBook.Book.Orders, func(i, j int) bool { + return expectedBook.Book.Orders[i].Price > expectedBook.Book.Orders[j].Price + })) + + remaining, liquidated, purchase, filled := book.FillBuyOrder(inputOrder) + + require.Equal(t, expectedBook, book) + require.Equal(t, expected.Remaining, remaining) + require.Equal(t, expected.Liquidated, liquidated) + require.Equal(t, expected.Purchase, purchase) + require.Equal(t, expected.Filled, filled) +} + +func TestFillBuyOrder(t *testing.T) { + var inputBook []types.Order + + // Empty book + inputOrder := types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 10} + expected := fillBuyRes{ + Book: []types.Order{}, + Remaining: inputOrder, + Liquidated: []types.Order(nil), + Purchase: int32(0), + Filled: false, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) + + // No match + inputBook = []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + } + expected = fillBuyRes{ + Book: inputBook, + Remaining: inputOrder, + Liquidated: []types.Order(nil), + Purchase: int32(0), + Filled: false, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) + + // First order liquidated, not filled + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 18} + expected = 
fillBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 18}, + Liquidated: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + }, + Purchase: int32(30), + Filled: false, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) + + // Filled with two order + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 22} + expected = fillBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 170, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 22}, + Liquidated: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 30, Price: 20}, + }, + Purchase: int32(30 + 30), + Filled: true, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) + + // Not filled, sell order book liquidated + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 300, Price: 30} + expected = fillBuyRes{ + Book: []types.Order{}, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 20, Price: 30}, + Liquidated: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + }, + Purchase: int32(30 + 200 + 50), + Filled: false, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) +} +``` + +## Successful Test Output + +When the tests are successful, your output is: + +``` +ok interchange/x/dex/types 0.550s +``` diff --git a/docs/versioned_docs/version-v0.27/02-guide/07-interchange/_category_.json b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/_category_.json new file mode 100644 
index 0000000..f427e86
--- /dev/null
+++ b/docs/versioned_docs/version-v0.27/02-guide/07-interchange/_category_.json
@@ -0,0 +1,5 @@
+{
+  "label": "Advanced Module: Interchange",
+  "position": 8,
+  "link": null
+ }
\ No newline at end of file
diff --git a/docs/versioned_docs/version-v0.27/02-guide/08-debug.md b/docs/versioned_docs/version-v0.27/02-guide/08-debug.md
new file mode 100644
index 0000000..eff25fc
--- /dev/null
+++ b/docs/versioned_docs/version-v0.27/02-guide/08-debug.md
@@ -0,0 +1,209 @@
+---
+description: Debugging your Cosmos SDK blockchain
+---
+
+# Debugging a chain
+
+Ignite chain debug command can help you find issues during development. It uses
+[Delve](https://github.com/go-delve/delve) debugger which enables you to
+interact with your blockchain app by controlling the execution of the process,
+evaluating variables, and providing information of thread / goroutine state, CPU
+register state and more.
+
+## Debug Command
+
+The debug command requires that the blockchain app binary is built with
+debugging support by removing optimizations and inlining. A debug binary is
+built by default by the `ignite chain serve` command or can optionally be
+created using the `--debug` flag when running `ignite chain init` or `ignite
+chain build` sub-commands.
+
+To start a debugging session in the terminal run:
+
+```
+ignite chain debug
+```
+
+The command runs your blockchain app in the background, attaches to it and
+launches a terminal debugger shell:
+
+```
+Type 'help' for list of commands.
+(dlv)
+```
+
+At this point the blockchain app blocks execution, so you can set one or more
+breakpoints before continuing execution.
+ +Use the +[break](https://github.com/go-delve/delve/blob/master/Documentation/cli/README.md#break) +(alias `b`) command to set any number of breakpoints using, for example the +`<filename>:<line>` notation: + +``` +(dlv) break x/hello/client/cli/query_say_hello.go:14 +``` + +This command adds a breakpoint to the `x/hello/client/cli/query_say_hello.go` +file at line 14. + +Once all breakpoints are set resume blockchain execution using the +[continue](https://github.com/go-delve/delve/blob/master/Documentation/cli/README.md#continue) +(alias `c`) command: + +``` +(dlv) continue +``` + +The debugger will launch the shell and stop blockchain execution again when a +breakpoint is triggered. + +Within the debugger shell use the `quit` (alias `q`) or `exit` commands to stop +the blockchain app and exit the debugger. + +## Debug Server + +A debug server can optionally be started in cases where the default terminal +client is not desirable. When the server starts it first runs the blockchain +app, attaches to it and finally waits for a client connection. The default +server address is *tcp://127.0.0.1:30500* and it accepts both JSON-RPC or DAP +client connections. + +To start a debug server use the following flag: + +``` +ignite chain debug --server +``` + +To start a debug server with a custom address use the following flags: + +``` +ignite chain debug --server --server-address 127.0.0.1:30500 +``` + +The debug server stops automatically when the client connection is closed. + +## Debugging Clients + +### Gdlv: Multiplatform Delve UI + +[Gdlv](https://github.com/aarzilli/gdlv) is a graphical frontend to Delve for +Linux, Windows and macOS. + +Using it as debugging client is straightforward as it doesn't require any +configuration. 
Once the debug server is running and listening for client +requests connect to it by running: + +``` +gdlv connect 127.0.0.1:30500 +``` + +Setting breakpoints and continuing execution is done in the same way as Delve, +by using the `break` and `continue` commands. + +### Visual Studio Code + +Using [Visual Studio Code](https://code.visualstudio.com/) as debugging client +requires an initial configuration to allow it to connect to the debug server. + +Make sure that the [Go](https://code.visualstudio.com/docs/languages/go) +extension is installed. + +VS Code debugging is configured using the `launch.json` file which is usually +located inside the `.vscode` folder in your workspace. + +You can use the following launch configuration to set up VS Code as debugging +client: + +```json title=launch.json +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Connect to Debug Server", + "type": "go", + "request": "attach", + "mode": "remote", + "remotePath": "${workspaceFolder}", + "port": 30500, + "host": "127.0.0.1" + } + ] +} +``` + +Alternatively it's possible to create a custom `launch.json` file from the "Run +and Debug" panel. When prompted choose the Go debugger option labeled "Go: +Connect to Server" and enter the debug host address and then the port number. + +## Example: Debugging a Blockchain App + +In this short example we will be using Ignite CLI to create a new blockchain and +a query to be able to trigger a debugging breakpoint when the query is called. 
+ +Create a new blockchain: + +``` +ignite scaffold chain hello +``` + +Scaffold a new query in the `hello` directory: + +``` +ignite scaffold query say-hello name --response name +``` + +The next step initializes the blockchain's data directory and compiles a debug +binary: + +``` +ignite chain init --debug +``` + +Once the initialization finishes launch the debugger shell: + +``` +ignite chain debug +``` + +Within the debugger shell create a breakpoint that will be triggered when the +`SayHello` function is called and then continue execution: + +``` +(dlv) break x/hello/keeper/query_say_hello.go:12 +(dlv) continue +``` + +From a different terminal use the `hellod` binary to call the query: + +``` +hellod query hello say-hello bob +``` + +A debugger shell will be launched when the breakpoint is triggered: + +``` + 7: "google.golang.org/grpc/codes" + 8: "google.golang.org/grpc/status" + 9: "hello/x/hello/types" + 10: ) + 11: +=> 12: func (k Keeper) SayHello(goCtx context.Context, req *types.QuerySayHelloRequest) (*types.QuerySayHelloResponse, error) { + 13: if req == nil { + 14: return nil, status.Error(codes.InvalidArgument, "invalid request") + 15: } + 16: + 17: ctx := sdk.UnwrapSDKContext(goCtx) +``` + +From then on you can use Delve commands like `next` (alias `n`) or `print` +(alias `p`) to control execution and print values. For example, to print the +*name* argument value use the `print` command followed by "req.Name": + +``` +(dlv) print req.Name +"bob" +``` + +Finally, use `quit` (alias `q`) to stop the blockchain app and finish the +debugging session. diff --git a/docs/versioned_docs/version-v0.27/02-guide/09-docker.md b/docs/versioned_docs/version-v0.27/02-guide/09-docker.md new file mode 100644 index 0000000..753e47b --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/09-docker.md @@ -0,0 +1,142 @@ +--- +description: Run Ignite CLI using a Docker container. 
+
+---
+
+# Running inside a Docker container
+
+You can run Ignite CLI inside a Docker container without installing the Ignite
+CLI binary directly on your machine.
+
+Running Ignite CLI in Docker can be useful for various reasons: isolating your
+test environment, running Ignite CLI on an unsupported operating system, or
+experimenting with a different version of Ignite CLI without installing it.
+
+Docker containers are like virtual machines because they provide an isolated
+environment to programs that run inside them. In this case, you can run Ignite
+CLI in an isolated environment.
+
+Experimentation and file system impact are limited to the Docker instance. The
+host machine is not impacted by changes to the container.
+
+## Prerequisites
+
+Docker must be installed. See [Get Started with
+Docker](https://www.docker.com/get-started).
+
+## Ignite CLI Commands in Docker
+
+After you scaffold and start a chain in your Docker container, all Ignite CLI
+commands are available. Just type the commands after `docker run -ti
+ignitehq/cli`. For example:
+
+```bash
+docker run -ti ignitehq/cli -h
+docker run -ti ignitehq/cli scaffold chain planet
+docker run -ti ignitehq/cli chain serve
+```
+
+## Scaffolding a chain
+
+When Docker is installed, you can build a blockchain with a single command.
+
+Ignite CLI, and the chains you serve with Ignite CLI, persist some files. When
+using the CLI binary directly, those files are located in `$HOME/.ignite` and
+`$HOME/.cache`, but in the context of Docker it's better to use a directory
+different from `$HOME`, so we use `$HOME/sdh`. This folder should be created
+manually prior to the docker commands below, or else Docker creates it with the
+root user.
+ +```bash +mkdir $HOME/sdh +``` + +To scaffold a blockchain `planet` in the `/apps` directory in the container, run +this command in a terminal window: + +```bash +docker run -ti -v $HOME/sdh:/home/tendermint -v $PWD:/apps ignitehq/cli:0.25.2 scaffold chain planet +``` + +Be patient, this command takes a minute or two to run because it does everything +for you: + +- Creates a container that runs from the `ignitehq/cli:0.25.2` image. +- Executes the Ignite CLI binary inside the image. +- `-v $HOME/sdh:/home/tendermint` maps the `$HOME/sdh` directory in your local + computer (the host machine) to the home directory `/home/tendermint` inside + the container. +- `-v $PWD:/apps` maps the current directory in the terminal window on the host + machine to the `/apps` directory in the container. You can optionally specify + an absolute path instead of `$PWD`. + + Using `-w` and `-v` together provides file persistence on the host machine. + The application source code on the Docker container is mirrored to the file + system of the host machine. + + **Note:** The directory name for the `-w` and `-v` flags can be a name other + than `/app`, but the same directory must be specified for both flags. If you + omit `-w` and `-v`, the changes are made in the container only and are lost + when that container is shut down. + +## Starting a blockchain + +To start the blockchain node in the Docker container you just created, run this +command: + +```bash +docker run -ti -v $HOME/sdh:/home/tendermint -v $PWD:/apps -p 1317:1317 -p 26657:26657 ignitehq/cli:0.25.2 chain serve -p planet +``` + +This command does the following: + +- `-v $HOME/sdh:/home/tendermint` maps the `$HOME/sdh` directory in your local + computer (the host machine) to the home directory `/home/tendermint` inside + the container. +- `-v $PWD:/apps` persists the scaffolded app in the container to the host + machine at current working directory. 
+- `serve -p planet` specifies to use the `planet` directory that contains the + source code of the blockchain. +- `-p 1317:1317` maps the API server port (cosmos-sdk) to the host machine to + forward port 1317 listening inside the container to port 1317 on the host + machine. +- `-p 26657:26657` maps RPC server port 26657 (tendermint) on the host machine + to port 26657 in Docker. +- After the blockchain is started, open `http://localhost:26657` to see the + Tendermint API. +- The `-v` flag specifies for the container to access the application's source + code from the host machine, so it can build and run it. + +## Versioning + +You can specify which version of Ignite CLI to install and run in your Docker +container. + +### Latest version + +- By default, `ignite/cli` resolves to `ignite/cli:latest`. +- The `latest` image tag is always the latest stable [Ignite CLI + release](https://github.com/ignite/cli/releases). + +For example, if latest release is +[v0.25.2](https://github.com/ignite/cli/releases/tag/v0.25.2), the `latest` tag +points to the `0.25.2` tag. + +### Specific version + +You can specify to use a specific version of Ignite CLI. All available tags are +in the [ignite/cli +image](https://hub.docker.com/r/ignitehq/cli/tags?page=1&ordering=last_updated) on +Docker Hub. + +For example: + +- Use `ignitehq/cli:0.25.2` (without the `v` prefix) to use version `0.25.2`. +- Use `ignitehq/cli` to use the latest version. +- Use `ignitehq/cli:main` to use the `main` branch, so you can experiment with + the upcoming version. + +To get the latest image, run `docker pull`. + +```bash +docker pull ignitehq/cli:main +``` diff --git a/docs/versioned_docs/version-v0.27/02-guide/10-simapp.md b/docs/versioned_docs/version-v0.27/02-guide/10-simapp.md new file mode 100644 index 0000000..dbf33c3 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/10-simapp.md @@ -0,0 +1,163 @@ +--- +sidebar_position: 10 +description: Test different scenarios for your chain. 
+
+---
+
+# Chain simulation
+
+The Ignite CLI chain simulator helps you run your chain with randomized inputs
+so you can perform fuzz testing and benchmark testing for your chain,
+simulating the messages, blocks, and accounts. You can scaffold a
+template to perform simulation testing in each module along with boilerplate
+simulation methods for each scaffolded message.
+
+## Module simulation
+
+Every new module that is scaffolded with Ignite CLI implements the Cosmos SDK
+[Module Simulation](https://docs.cosmos.network/main/learn/advanced/simulation).
+
+- Each new message creates a file with the simulation methods required for the
+  tests.
+- Scaffolding a `CRUD` type like a `list` or `map` creates a simulation file
+  with `create`, `update`, and `delete` simulation methods in the
+  `x/<module>/simulation` folder and registers these methods in
+  `x/<module>/module_simulation.go`.
+- Scaffolding a single message creates an empty simulation method to be
+  implemented by the user.
+
+We recommend that you keep the simulation methods up to date with each
+modification to the message keeper methods.
+
+Every simulation is weighted because the sender of the operation is assigned
+randomly. The weight defines how often the simulation calls the message.
+
+For reproducible randomization, you can define a random seed. A simulation with
+the same random seed is deterministic and produces the same output.
+
+## Scaffold a simulation
+
+To create a new chain:
+
+```
+ignite scaffold chain mars
+```
+
+Review the empty `x/mars/simulation` folder and the
+`x/mars/module_simulation.go` file to see that a simulation is not registered.
+
+Now, scaffold a new message:
+
+```
+ignite scaffold list user address balance:uint state
+```
+
+A new file `x/mars/simulation/user.go` is created and is registered with the
+weight in the `x/mars/module_simulation.go` file.
+
+Be sure to define the proper simulation weight with a minimum weight of 0 and a
+maximum weight of 100.
+
+For this example, change the `defaultWeightMsgDeleteUser` to 30 and the
+`defaultWeightMsgUpdateUser` to 50.
+
+Run the `BenchmarkSimulation` method in `app/simulation_test.go` to run
+simulation tests for all modules:
+
+```
+ignite chain simulate
+```
+
+You can also define flags that are provided by the simulation. Flags are defined
+by the method `simapp.GetSimulatorFlags()`:
+
+```
+ignite chain simulate -v --numBlocks 200 --blockSize 50 --seed 33
+```
+
+Wait for the entire simulation to finish and check the result of the messages.
+
+The default `go test` command works to run the simulation:
+
+```
+go test -v -benchmem -run=^$ -bench ^BenchmarkSimulation -cpuprofile cpu.out ./app -Commit=true
+```
+
+### Skip message
+
+Use logic to avoid sending a message without returning an error. Return only
+`simtypes.NoOpMsg(...)` from the simulation message handler.
+
+## Params
+
+Scaffolding a module with params automatically adds the module to the
+`module_simulation.go` file:
+
+```
+ignite s module earth --params channel:string,minLaunch:uint,maxLaunch:int
+```
+
+After the parameters are scaffolded, change the
+`x/<module>/module_simulation.go` file to set the random parameters in the
+`RandomizedParams` method. The simulation changes the params randomly each time
+the function is called.
+
+## Invariants
+
+Simulating a chain can help you prevent [chain invariants
+errors](https://docs.cosmos.network/main/build/building-modules/invariants). An
+invariant is a function called by the chain to check if something broke,
+invalidating the chain data. To create a new invariant and check the chain
+integrity, you must create a method to validate the invariants and register all
+invariants.
+ + +For example, in `x/earth/keeper/invariants.go`: + +```go title="x/earth/keeper/invariants.go" +package keeper + +import ( + "fmt" + + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/tendermint/spn/x/launch/types" +) + +const zeroLaunchTimestampRoute = "zero-launch-timestamp" + +// RegisterInvariants registers all module invariants +func RegisterInvariants(ir sdk.InvariantRegistry, k Keeper) { + ir.RegisterRoute(types.ModuleName, zeroLaunchTimestampRoute, + ZeroLaunchTimestampInvariant(k)) +} + +// ZeroLaunchTimestampInvariant invariant that checks if the +// `LaunchTimestamp is zero +func ZeroLaunchTimestampInvariant(k Keeper) sdk.Invariant { + return func(ctx sdk.Context) (string, bool) { + all := k.GetAllChain(ctx) + for _, chain := range all { + if chain.LaunchTimestamp == 0 { + return sdk.FormatInvariant( + types.ModuleName, zeroLaunchTimestampRoute, + "LaunchTimestamp is not set while LaunchTriggered is set", + ), true + } + } + return "", false + } +} +``` + +Now, register the keeper invariants into the `x/earth/module.go` file: + +```go +package earth + +// ... + +// RegisterInvariants registers the capability module's invariants. 
+func (am AppModule) RegisterInvariants(ir sdk.InvariantRegistry) { + keeper.RegisterInvariants(ir, am.keeper) +} +``` diff --git a/docs/versioned_docs/version-v0.27/02-guide/_category_.json b/docs/versioned_docs/version-v0.27/02-guide/_category_.json new file mode 100644 index 0000000..3c599cc --- /dev/null +++ b/docs/versioned_docs/version-v0.27/02-guide/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Develop a chain", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/02-guide/images/api.png b/docs/versioned_docs/version-v0.27/02-guide/images/api.png new file mode 100644 index 0000000..081df8f Binary files /dev/null and b/docs/versioned_docs/version-v0.27/02-guide/images/api.png differ diff --git a/docs/versioned_docs/version-v0.27/02-guide/images/packet_sendpost.png b/docs/versioned_docs/version-v0.27/02-guide/images/packet_sendpost.png new file mode 100644 index 0000000..0bb080c Binary files /dev/null and b/docs/versioned_docs/version-v0.27/02-guide/images/packet_sendpost.png differ diff --git a/docs/versioned_docs/version-v0.27/03-clients/01-go-client.md b/docs/versioned_docs/version-v0.27/03-clients/01-go-client.md new file mode 100644 index 0000000..b32cb41 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/03-clients/01-go-client.md @@ -0,0 +1,360 @@ +--- +description: Blockchain client in Go +title: Go client +--- + +# A client in the Go programming language + +In this tutorial, we will show you how to create a standalone Go program that +serves as a client for a blockchain. We will use the Ignite CLI to set up a +standard blockchain. To communicate with the blockchain, we will utilize the +`cosmosclient` package, which provides an easy-to-use interface for interacting +with the blockchain. You will learn how to use the `cosmosclient` package to +send transactions and query the blockchain. 
By the end of this tutorial, you +will have a good understanding of how to build a client for a blockchain using +Go and the `cosmosclient` package. + +## Create a blockchain + +To create a blockchain using the Ignite CLI, use the following command: + +``` +ignite scaffold chain blog +``` + +This will create a new Cosmos SDK blockchain called "blog". + +Once the blockchain has been created, you can generate code for a "blog" model +that will enable you to perform create, read, update, and delete (CRUD) +operations on blog posts. To do this, you can use the following command: + +``` +cd blog +ignite scaffold list post title body +``` + +This will generate the necessary code for the "blog" model, including functions +for creating, reading, updating, and deleting blog posts. With this code in +place, you can now use your blockchain to perform CRUD operations on blog posts. +You can use the generated code to create new blog posts, retrieve existing ones, +update their content, and delete them as needed. This will give you a fully +functional Cosmos SDK blockchain with the ability to manage blog posts. + +Start your blockchain node with the following command: + +``` +ignite chain serve +``` + +## Creating a blockchain client + +Create a new directory called `blogclient` on the same level as `blog` +directory. As the name suggests, `blogclient` will contain a standalone Go +program that acts as a client to your `blog` blockchain. + +```bash +mkdir blogclient +``` + +This command will create a new directory called `blogclient` in your current +location. If you type `ls` in your terminal window, you should see both the +`blog` and `blogclient` directories listed. + +To initialize a new Go package inside the `blogclient` directory, you can use +the following command: + +``` +cd blogclient +go mod init blogclient +``` + +This will create a `go.mod` file in the `blogclient` directory, which contains +information about the package and the Go version being used. 
+ +To import dependencies for your package, you can add the following code to the +`go.mod` file: + +```text title="blogclient/go.mod" +module blogclient + +go 1.20 + +require ( + blog v0.0.0-00010101000000-000000000000 + github.com/ignite/cli v0.27.2 +) + +replace blog => ../blog +replace github.com/gogo/protobuf => github.com/regen-network/protobuf v1.3.3-alpha.regen.1 +``` + +Your package will import two dependencies: + +* `blog`, which contains `types` of messages and a query client +* `ignite` for the `cosmosclient` package + +The `replace` directive uses the package from the local `blog` directory and is +specified as a relative path to the `blogclient` directory. + +Cosmos SDK uses a custom version of the `protobuf` package, so use the `replace` +directive to specify the correct dependency. + +Finally, install dependencies for your `blogclient`: + +```bash +go mod tidy +``` + +### Main logic of the client in `main.go` + +Create a `main.go` file inside the `blogclient` directory and add the following +code: + +```go title="blogclient/main.go" +package main + +import ( + "context" + "fmt" + "log" + + // Importing the general purpose Cosmos blockchain client + "github.com/ignite/cli/ignite/pkg/cosmosclient" + + // Importing the types package of your blog blockchain + "blog/x/blog/types" +) + +func main() { + ctx := context.Background() + addressPrefix := "cosmos" + + // Create a Cosmos client instance + client, err := cosmosclient.New(ctx, cosmosclient.WithAddressPrefix(addressPrefix)) + if err != nil { + log.Fatal(err) + } + + // Account `alice` was initialized during `ignite chain serve` + accountName := "alice" + + // Get account from the keyring + account, err := client.Account(accountName) + if err != nil { + log.Fatal(err) + } + + addr, err := account.Address(addressPrefix) + if err != nil { + log.Fatal(err) + } + + // Define a message to create a post + msg := &types.MsgCreatePost{ + Creator: addr, + Title: "Hello!", + Body: "This is the first post", + } 
+ + // Broadcast a transaction from account `alice` with the message + // to create a post and store the response in txResp + txResp, err := client.BroadcastTx(ctx, account, msg) + if err != nil { + log.Fatal(err) + } + + // Print response from broadcasting a transaction + fmt.Print("MsgCreatePost:\n\n") + fmt.Println(txResp) + + // Instantiate a query client for your `blog` blockchain + queryClient := types.NewQueryClient(client.Context()) + + // Query the blockchain using the client's `PostAll` method + // to get all posts store all posts in queryResp + queryResp, err := queryClient.PostAll(ctx, &types.QueryAllPostRequest{}) + if err != nil { + log.Fatal(err) + } + + // Print response from querying all the posts + fmt.Print("\n\nAll posts:\n\n") + fmt.Println(queryResp) +} +``` + +The code above creates a standalone Go program that acts as a client to the +`blog` blockchain. It begins by importing the required packages, including the +general purpose Cosmos blockchain client and the `types` package of the `blog` +blockchain. + +In the `main` function, the code creates a Cosmos client instance and sets the +address prefix to "cosmos". It then retrieves an account named `"alice"` from +the keyring and gets the address of the account using the address prefix. + +Next, the code defines a message to create a blog post with the title "Hello!" +and body "This is the first post". It then broadcasts a transaction from the +account "alice" with the message to create the post, and stores the response in +the variable `txResp`. + +The code then instantiates a query client for the blog blockchain and uses it to +query the blockchain to retrieve all the posts. It stores the response in the +variable `queryResp` and prints it to the console. + +Finally, the code prints the response from broadcasting the transaction to the +console. This allows the user to see the results of creating and querying a blog +post on the `blog` blockchain using the client. 
+
+To find out more about the `cosmosclient` package, you can refer to the Go
+package documentation for
+[`cosmosclient`](https://pkg.go.dev/github.com/ignite/cli/ignite/pkg/cosmosclient).
+This documentation provides information on how to use the `Client` type with
+`Options` and `KeyringBackend`.
+
+## Blockchain and Client App Are on Different Machines
+
+If the blockchain and the client app are not on the same machine, replace
+`../blog` with a reference pointing to your blog GitHub repository:
+
+Update the dependencies for your package in the
+`go.mod` file:
+
+```text title="blogclient/go.mod"
+...
+replace blog => github.com/<github-user-name>/blog v0.0.0-00010101000000-000000000000
+...
+```
+
+and `main.go` file:
+
+```go title="blogclient/main.go"
+ // Importing the types package of your blog blockchain
+ "github.com/<github-user-name>/blog/x/blog/types"
+```
+
+Then, update the dependencies again:
+
+```bash
+go mod tidy
+```
+
+## Using the Test Keyring Backend
+
+***Only for testing***
+
+Create a new directory inside the blog client named 'keyring-test'. Next, export the blockchain account keys of the user that you want to sign and broadcast the transaction with. After exporting, import the keys
+to the 'keyring-test' directory you just created in the root directory of your client app. You can use the following `ignite account import` command:
+
+```bash
+ignite account import alice --keyring-dir /path/to/client/blogclient/keyring-test
+```
+
+Define the path inside 'main.go':
+
+```go title="blogclient/main.go"
+// ...
+func main() {
+	ctx := context.Background()
+	addressPrefix := "cosmos"
+
+	// Create a Cosmos client instance
+	client, err := cosmosclient.New(ctx, cosmosclient.WithAddressPrefix(addressPrefix),
+		cosmosclient.WithKeyringBackend("test"), cosmosclient.WithKeyringDir("."))
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	// Account `alice` was initialized during `ignite chain serve`
+	accountName := "aliceAddress"
+	// ...
+} +``` + +## Run the blockchain and the client + +Make sure your blog blockchain is still running with `ignite chain serve`. + +Run the blockchain client: + +```bash +go run main.go +``` + +If the command is successful, the results of running the command will be printed +to the terminal. The output may include some warnings, which can be ignored. + +```yml +MsgCreatePost: + +code: 0 +codespace: "" +data: 12220A202F626C6F672E626C6F672E4D7367437265617465506F7374526573706F6E7365 +events: +- attributes: + - index: true + key: ZmVl + value: null + - index: true + key: ZmVlX3BheWVy + value: Y29zbW9zMWR6ZW13NzZ3enQ3cDBnajd3MzQyN2E0eHg3MjRkejAzd3hnOGhk + type: tx +- attributes: + - index: true + key: YWNjX3NlcQ== + value: Y29zbW9zMWR6ZW13NzZ3enQ3cDBnajd3MzQyN2E0eHg3MjRkejAzd3hnOGhkLzE= + type: tx +- attributes: + - index: true + key: c2lnbmF0dXJl + value: UWZncUJCUFQvaWxWVzJwNUJNTngzcDlvRzVpSXp0elhXdE9yMHcwVE00OEtlSkRqR0FEdU9VNjJiY1ZRNVkxTHdEbXNuYUlsTmc3VE9uMnJ2ZWRHSlE9PQ== + type: tx +- attributes: + - index: true + key: YWN0aW9u + value: L2Jsb2cuYmxvZy5Nc2dDcmVhdGVQb3N0 + type: message +gas_used: "52085" +gas_wanted: "300000" +height: "20" +info: "" +logs: +- events: + - attributes: + - key: action + value: /blog.blog.MsgCreatePost + type: message + log: "" + msg_index: 0 +raw_log: '[{"msg_index":0,"events":[{"type":"message","attributes":[{"key":"action","value":"/blog.blog.MsgCreatePost"}]}]}]' +timestamp: "" +tx: null +txhash: 4F53B75C18254F96EF159821DDD665E965DBB576A5AC2B94CE863EB62E33156A + +All posts: + +Post:<title:"Hello!" body:"This is the first post" creator:"cosmos1dzemw76wzt7p0gj7w3427a4xx724dz03wxg8hd" > pagination:<total:1 > +``` + +As you can see the client has successfully broadcasted a transaction and queried +the chain for blog posts. + +Please note, that some values in the output on your terminal (like transaction +hash and block height) might be different from the output above. 
+
+You can confirm the new post by using the `blogd q blog list-post` command:
+
+```yaml
+Post:
+- body: This is the first post
+  creator: cosmos1dzemw76wzt7p0gj7w3427a4xx724dz03wxg8hd
+  id: "0"
+  title: Hello!
+pagination:
+  next_key: null
+  total: "0"
+```
+
+Great job! You have successfully completed the process of creating a Go client
+for your Cosmos SDK blockchain, submitting a transaction, and querying the
+chain.
diff --git a/docs/versioned_docs/version-v0.27/03-clients/02-typescript.md b/docs/versioned_docs/version-v0.27/03-clients/02-typescript.md
new file mode 100644
index 0000000..e51bf24
--- /dev/null
+++ b/docs/versioned_docs/version-v0.27/03-clients/02-typescript.md
@@ -0,0 +1,431 @@
+---
+description: Information about the generated TypeScript client code.
+---
+
+# TypeScript frontend
+
+Ignite offers powerful functionality for generating client-side code for your
+blockchain. Think of this as a one-click client SDK generation tailored
+specifically for your blockchain.
+
+See [`ignite generate ts-client --help`](/references/cli#ignite-generate-ts-client) to learn
+more about how to use TypeScript code generation.
+
+## Starting a node
+
+Create a new blockchain with `ignite scaffold chain`. You can use an existing
+blockchain project if you have one, instead.
+ +``` +ignite scaffold chain example +``` + +For testing purposes add a new account to `config.yml` with a mnemonic: + +```yml title="config.yml" +accounts: + - name: frank + coins: ["1000token", "100000000stake"] + mnemonic: play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint +``` + +Run a command to generate TypeScript clients for both standard and custom Cosmos +SDK modules: + +``` +ignite generate ts-client --clear-cache +``` + +Run a command to start your blockchain node: + +``` +ignite chain serve -r +``` + +## Setting up a TypeScript frontend client + +The best way to get started building with the TypeScript client is by using +[Vite](https://vitejs.dev). Vite provides boilerplate code for +vanilla TS projects as well as React, Vue, Lit, Svelte and Preact frameworks. +You can find additional information at the [Vite Getting Started +guide](https://vitejs.dev/guide). + +You will also need to [polyfill](https://developer.mozilla.org/en-US/docs/Glossary/Polyfill) the client's dependencies. The following is an +example of setting up a vanilla TS project with the necessary polyfills: + +```bash +npm create vite@latest my-frontend-app -- --template vanilla-ts +cd my-frontend-app +npm install --save-dev @esbuild-plugins/node-globals-polyfill @rollup/plugin-node-resolve +``` + +You must then create the necessary `vite.config.ts` file. 
+
+```typescript title="my-frontend-app/vite.config.ts"
+import { nodeResolve } from "@rollup/plugin-node-resolve";
+import { NodeGlobalsPolyfillPlugin } from "@esbuild-plugins/node-globals-polyfill";
+import { defineConfig } from "vite";
+
+export default defineConfig({
+  plugins: [nodeResolve()],
+
+  optimizeDeps: {
+    esbuildOptions: {
+      define: {
+        global: "globalThis",
+      },
+      plugins: [
+        NodeGlobalsPolyfillPlugin({
+          buffer: true,
+        }),
+      ],
+    },
+  },
+});
+```
+
+You are then ready to use the generated client code inside this project directly
+or by publishing the client and installing it like any other `npm` package.
+
+After the chain starts, you will see Frank's address is
+`cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7`. We'll be using Frank's account
+for querying data and broadcasting transactions in the next section.
+
+## Querying
+
+The code generated in `ts-client` comes with a `package.json` file ready to
+publish which you can modify to suit your needs. To use `ts-client`, install the
+required dependencies:
+
+```
+cd ts-client
+npm install
+```
+
+The client is based on a modular architecture where you can configure a client
+class to support the modules you need and instantiate it.
+
+By default, the generated client exports a client class that includes all the
+Cosmos SDK, custom and 3rd party modules in use in your project.
+
+To instantiate the client you need to provide environment information (endpoints
+and chain prefix). For querying that's all you need:
+
+```typescript title="my-frontend-app/src/main.ts"
+import { Client } from "../../ts-client";
+
+const client = new Client(
+  {
+    apiURL: "http://localhost:1317",
+    rpcURL: "http://localhost:26657",
+    prefix: "cosmos",
+  }
+);
+```
+
+The example above uses `ts-client` from a local directory. If you have published
+your `ts-client` on `npm`, replace `../../ts-client` with a package name.
+
+The resulting client instance contains namespaces for each module, each with a
+`query` and `tx` namespace containing the module's relevant querying and
+transacting methods with full type and auto-completion support.
+
+To query for a balance of an address:
+
+```typescript
+const balances = await client.CosmosBankV1Beta1.query.queryAllBalances(
+  'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7'
+);
+```
+
+## Broadcasting a transaction
+
+Add signing capabilities to the client by creating a wallet from a mnemonic
+(we're using Frank's mnemonic added to `config.yml` earlier) and passing it
+as an optional argument to `Client()`. The wallet implements the CosmJS
+`OfflineSigner` interface.
+
+```typescript title="my-frontend-app/src/main.ts"
+import { Client } from "../../ts-client";
+// highlight-start
+import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing";
+
+const mnemonic =
+  "play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint";
+const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic);
+// highlight-end
+
+const client = new Client(
+  {
+    apiURL: "http://localhost:1317",
+    rpcURL: "http://localhost:26657",
+    prefix: "cosmos",
+  },
+  // highlight-next-line
+  wallet
+);
+```
+
+Broadcasting a transaction:
+
+```typescript title="my-frontend-app/src/main.ts"
+const tx_result = await client.CosmosBankV1Beta1.tx.sendMsgSend({
+  value: {
+    amount: [
+      {
+        amount: '200',
+        denom: 'token',
+      },
+    ],
+    fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7',
+    toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc',
+  },
+  fee: {
+    amount: [{ amount: '0', denom: 'stake' }],
+    gas: '200000',
+  },
+  memo: '',
+})
+```
+
+## Broadcasting a transaction with a custom message
+
+If your chain already has custom messages defined, you can use those. If not,
+we'll be using Ignite's scaffolded code as an example.
Create a post with CRUD +messages: + +``` +ignite scaffold list post title body +``` + +After adding messages to your chain you may need to re-generate the TypeScript +client: + +``` +ignite generate ts-client --clear-cache +``` + +Broadcast a transaction containing the custom `MsgCreatePost`: + +```typescript title="my-frontend-app/src/main.ts" +import { Client } from "../../ts-client"; +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; + +const mnemonic = + "play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint"; +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic); + +const client = new Client( + { + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos", + }, + wallet +); +// highlight-start +const tx_result = await client.ExampleExample.tx.sendMsgCreatePost({ + value: { + title: 'foo', + body: 'bar', + creator: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + }, + fee: { + amount: [{ amount: '0', denom: 'stake' }], + gas: '200000', + }, + memo: '', +}) +// highlight-end +``` + +## Lightweight client + +If you prefer, you can construct a lighter client using only the modules you are +interested in by importing the generic client class and expanding it with the +modules you need: + +```typescript title="my-frontend-app/src/main.ts" +// highlight-start +import { IgniteClient } from '../../ts-client/client' +import { Module as CosmosBankV1Beta1 } from '../../ts-client/cosmos.bank.v1beta1' +import { Module as CosmosStakingV1Beta1 } from '../../ts-client/cosmos.staking.v1beta1' +// highlight-end +import { DirectSecp256k1HdWallet } from '@cosmjs/proto-signing' + +const mnemonic = + 'play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint' +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic) +// 
highlight-next-line +const Client = IgniteClient.plugin([CosmosBankV1Beta1, CosmosStakingV1Beta1]) + +const client = new Client( + { + apiURL: 'http://localhost:1317', + rpcURL: 'http://localhost:26657', + prefix: 'cosmos', + }, + wallet, +) +``` + +## Broadcasting a multi-message transaction + +You can also construct TX messages separately and send them in a single TX using +a global signing client like so: + +```typescript title="my-frontend-app/src/main.ts" +const msg1 = await client.CosmosBankV1Beta1.tx.msgSend({ + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc', + }, +}) + +const msg2 = await client.CosmosBankV1Beta1.tx.msgSend({ + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc', + }, +}) + +const tx_result = await client.signAndBroadcast( + [msg1, msg2], + { + amount: [{ amount: '0', denom: 'stake' }], + gas: '200000', + }, + '', +) +``` + +Finally, for additional ease-of-use, apart from the modular client mentioned +above, each generated module is usable on its own in a stripped-down way by +exposing a separate txClient and queryClient. 
+ +```typescript title="my-frontend-app/src/main.ts" +import { txClient } from '../../ts-client/cosmos.bank.v1beta1' +import { DirectSecp256k1HdWallet } from '@cosmjs/proto-signing' + +const mnemonic = + 'play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint' +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic) + +const client = txClient({ + signer: wallet, + prefix: 'cosmos', + addr: 'http://localhost:26657', +}) + +const tx_result = await client.sendMsgSend({ + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc', + }, + fee: { + amount: [{ amount: '0', denom: 'stake' }], + gas: '200000', + }, + memo: '', +}) +``` + +## Usage with Keplr + +Normally, Keplr provides a wallet object implementing the `OfflineSigner` +interface, so you can simply replace the `wallet` argument in client +instantiation with `window.keplr.getOfflineSigner(chainId)`. However, Keplr +requires information about your chain, like chain ID, denoms, fees, etc. +[`experimentalSuggestChain()`](https://docs.keplr.app/api/guide/suggest-chain) is +a method Keplr provides to pass this information to the Keplr extension. + +The generated client makes this easier by offering a `useKeplr()` method that +automatically discovers the chain information and sets it up for you. 
Thus, you +can instantiate the client without a wallet and then call `useKeplr()` to enable +transacting via Keplr like so: + +```typescript title="my-frontend-app/src/main.ts" +import { Client } from '../../ts-client'; + +const client = new Client({ +  apiURL: "http://localhost:1317", +  rpcURL: "http://localhost:26657", +  prefix: "cosmos" +  } +); +await client.useKeplr(); +``` + +`useKeplr()` optionally accepts an object argument that contains one or more of +the same keys as the `ChainInfo` type argument of `experimentalSuggestChain()` +allowing you to override the auto-discovered values. + +For example, the default chain name and token precision (which are not recorded +on-chain) are set to `<chainId> Network` and `0` while the ticker for the denom +is set to the denom name in uppercase. If you want to override these, you can do +something like: + +```typescript title="my-frontend-app/src/main.ts" +import { Client } from '../../ts-client'; + +const client = new Client({ +  apiURL: "http://localhost:1317", +  rpcURL: "http://localhost:26657", +  prefix: "cosmos" +  } +); +await client.useKeplr({ +  chainName: 'My Great Chain', +  stakeCurrency: { +    coinDenom: 'TOKEN', +    coinMinimalDenom: 'utoken', +    coinDecimals: 6, +  }, +}) +``` + +## Wallet switching + +The client also allows you to switch out the wallet for a different one on an +already instantiated client like so: + +```typescript +import { Client } from '../../ts-client'; +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; + +const mnemonic = +  'play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint' +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic); + +const client = new Client({ +  apiURL: "http://localhost:1317", +  rpcURL: "http://localhost:26657", +  prefix: "cosmos" +  } +); +await client.useKeplr(); + +// broadcast transactions using the Keplr wallet + +client.useSigner(wallet); + 
+// broadcast transactions using the CosmJS wallet +``` diff --git a/docs/versioned_docs/version-v0.27/03-clients/03-vue.md b/docs/versioned_docs/version-v0.27/03-clients/03-vue.md new file mode 100644 index 0000000..074b642 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/03-clients/03-vue.md @@ -0,0 +1,174 @@ +# Vue frontend + +Welcome to this tutorial on using Ignite to develop a web application for your +blockchain with Vue 3. Ignite is a tool that simplifies the process of building +a blockchain application by providing a set of templates and generators that can +be used to get up and running quickly. + +One of the features of Ignite is its support for [Vue 3](https://vuejs.org/), a +popular JavaScript framework for building user interfaces. In this tutorial, you +will learn how to use Ignite to create a new blockchain and scaffold a Vue +frontend template. This will give you a basic foundation for your web +application and make it easier to get started building out the rest of your +application. + +Once you have your blockchain and Vue template set up, the next step is to +generate an API client. This will allow you to easily interact with your +blockchain from your web application, enabling you to retrieve data and make +transactions. By the end of this tutorial, you will have a fully functional web +application that is connected to your own blockchain. + +Prerequisites: + +* [Node.js](https://nodejs.org/en/) +* [Keplr](https://www.keplr.app/) Chrome extension + +## Create a blockchain and a Vue app + +Create a new blockchain project: + +``` +ignite scaffold chain example +``` + +To create a Vue frontend template, go to the `example` directory and run the +following command: + +``` +ignite scaffold vue +``` + +This will create a new Vue project in the `vue` directory. This project can be +used with any blockchain, but it depends on an API client to interact with the +blockchain. 
To generate an API client, run the following command in the +`example` directory: + +``` +ignite generate composables +``` + +This command generates two directories: + +* `ts-client`: a framework-agnostic TypeScript client that can be used to + interact with your blockchain. You can learn more about how to use this client + in the [TypeScript client tutorial](/clients/typescript). +* `vue/src/composables`: a collection of Vue 3 + [composables](https://vuejs.org/guide/reusability/composables.html) that wrap + the TypeScript client and make it easier to interact with your blockchain from + your Vue application. + +## Set up Keplr and an account + +Open your browser with the Keplr wallet extension installed. Follow [the +instructions](https://keplr.crunch.help/en/getting-started/creating-a-new-keplr-account) +to create a new account or use an existing one. Make sure to save the mnemonic +phrase as you will need it in the next step. + +Do not use a mnemonic phrase that is associated with an account that holds +assets you care about. If you do, you risk losing those assets. It's a good +practice to create a new account for development purposes. + +Add the account you're using in Keplr to your blockchain's `config.yml` file: + +```yml +accounts: + - name: alice + coins: [20000token, 200000000stake] + - name: bob + coins: [10000token, 100000000stake] + # highlight-start + - name: frank + coins: [10000token, 100000000stake] + mnemonic: struggle since inmate safe logic kite tag web win stay security wonder + # highlight-end +``` + +Replace the `struggle since...` mnemonic with the one you saved in the previous +step. + +Adding an account with a mnemonic to the config file will tell Ignite CLI to add +the account to the blockchain when you start it. This is useful for development +purposes, but you should not do this in production. 
+ +## Start a blockchain and a Vue app + +In the `example` directory run the following command to start your blockchain: + +``` +ignite chain serve +``` + +To start your Vue application, go to the `vue` directory and run the following +command in a separate terminal window: + +``` +npm install && npm run dev +``` + +It is recommended to run `npm install` before starting your app with `npm run +dev` to ensure that all dependencies are installed (including the ones that the +API client has, see `vue/postinstall.js`). + +Open your browser and navigate to +[http://localhost:5173/](http://localhost:5173/). + +![Web app](/img/web-1.png) + +Press "Connect wallet", enter your password into Keplr and press "Approve" to +add your blockchain to Keplr. + +<img src="/img/web-4.png" width="300"/> + +Make sure to select the account you're using for development purposes and the +"Example Network" in Keplr's blockchain dropdown. You should see a list of +assets in your Vue app. + +![Web app](/img/web-5.png) + +Congratulations! You have successfully created a client-side Vue application and +connected it to your blockchain. You can modify the source code of your Vue +application to build out the rest of your project. + +## Setting the address prefix + +It is necessary to set the correct address prefix in order for the Vue app to +properly interact with a Cosmos chain. The address prefix is used to identify +the chain that the app is connected to, and must match the prefix used by the +chain. + +By default, Ignite creates a chain with the `cosmos` prefix. If you have +created your chain with `ignite scaffold chain ... --address-prefix foo` or +manually changed the prefix in the source code of the chain, you need to set the +prefix in the Vue app. + +There are two ways to set the address prefix in a Vue app. + +### Using an environment variable + +You can set the `VITE_ADDRESS_PREFIX` environment variable to the correct +address prefix for your chain. 
This will override the default prefix used by the +app. + +To set the `VITE_ADDRESS_PREFIX` environment variable, you can use the following +command: + +```bash +export VITE_ADDRESS_PREFIX=your-prefix +``` + +Replace `your-prefix` with the actual address prefix for your chain. + +### Setting address prefix in the code + +Alternatively, you can manually set the correct address prefix by replacing the +fallback value of the `prefix` variable in the file `./vue/src/env.ts`. + +To do this, open the file `./vue/src/env.ts` and find the following line: + +```ts title="./vue/src/env.ts" +const prefix = process.env.VITE_ADDRESS_PREFIX || 'your-prefix'; +``` + +Replace `your-prefix` with the actual address prefix for your chain. + +Save the file and restart the Vue app to apply the changes. diff --git a/docs/versioned_docs/version-v0.27/03-clients/04-react.md b/docs/versioned_docs/version-v0.27/03-clients/04-react.md new file mode 100644 index 0000000..9d688d9 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/03-clients/04-react.md @@ -0,0 +1,130 @@ +# React frontend + +Welcome to this tutorial on using Ignite to develop a web application for your +blockchain with React. Ignite is a tool that simplifies the process of building +a blockchain application by providing a set of templates and generators that can +be used to get up and running quickly. + +One of the features of Ignite is its support for [React](https://reactjs.org/), a +popular JavaScript framework for building user interfaces. In this tutorial, you +will learn how to use Ignite to create a new blockchain and scaffold a React +frontend template. This will give you a basic foundation for your web +application and make it easier to get started building out the rest of your +application. + +Once you have your blockchain and React template set up, the next step is to +generate an API client. 
This will allow you to easily interact with your +blockchain from your web application, enabling you to retrieve data and make +transactions. By the end of this tutorial, you will have a fully functional web +application that is connected to your own blockchain. + +Prerequisites: + +* [Node.js](https://nodejs.org/en/) +* [Keplr](https://www.keplr.app/) Chrome extension + +## Create a blockchain and a React app + +Create a new blockchain project: + +``` +ignite scaffold chain example +``` + +To create a React frontend template, go to the `example` directory and run the +following command: + +``` +ignite scaffold react +``` + +This will create a new React project in the `react` directory. This project can be +used with any blockchain, but it depends on an API client to interact with the +blockchain. To generate an API client, run the following command in the +`example` directory: + +``` +ignite generate hooks +``` + +This command generates two directories: + +* `ts-client`: a framework-agnostic TypeScript client that can be used to + interact with your blockchain. You can learn more about how to use this client + in the [TypeScript client tutorial](/clients/typescript). +* `react/src/hooks`: a collection of + [React Hooks](https://reactjs.org/docs/hooks-intro.html) that wrap + the TypeScript client and make it easier to interact with your blockchain from + your React application. + +## Set up Keplr and an account + +Open your browser with the Keplr wallet extension installed. Follow [the +instructions](https://keplr.crunch.help/en/getting-started/creating-a-new-keplr-account) +to create a new account or use an existing one. Make sure to save the mnemonic +phrase as you will need it in the next step. + +Do not use a mnemonic phrase that is associated with an account that holds +assets you care about. If you do, you risk losing those assets. It's a good +practice to create a new account for development purposes. 
+ +Add the account you're using in Keplr to your blockchain's `config.yml` file: + +```yml +accounts: + - name: alice + coins: [20000token, 200000000stake] + - name: bob + coins: [10000token, 100000000stake] + # highlight-start + - name: frank + coins: [10000token, 100000000stake] + mnemonic: struggle since inmate safe logic kite tag web win stay security wonder + # highlight-end +``` + +Replace the `struggle since...` mnemonic with the one you saved in the previous +step. + +Adding an account with a mnemonic to the config file will tell Ignite CLI to add +the account to the blockchain when you start it. This is useful for development +purposes, but you should not do this in production. + +## Start a blockchain and a React app + +In the `example` directory run the following command to start your blockchain: + +``` +ignite chain serve +``` + +To start your React application, go to the `react` directory and run the following +command in a separate terminal window: + +``` +npm install && npm run dev +``` + +It is recommended to run `npm install` before starting your app with `npm run +dev` to ensure that all dependencies are installed (including the ones that the +API client has, see `react/postinstall.js`). + +Open your browser and navigate to +[http://localhost:5173/](http://localhost:5173/). + +![Web app](/img/web-1.png) + +Press "Connect wallet", enter your password into Keplr and press "Approve" to +add your blockchain to Keplr. + +<img src="/img/web-4.png" width="300"/> + +Make sure to select the account you're using for development purposes and the +"Example Network" in Keplr's blockchain dropdown. You should see a list of +assets in your React app. + +![Web app](/img/web-5.png) + +Congratulations! You have successfully created a client-side React application and +connected it to your blockchain. You can modify the source code of your React +application to build out the rest of your project. 
\ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/03-clients/_category_.json b/docs/versioned_docs/version-v0.27/03-clients/_category_.json new file mode 100644 index 0000000..036cfbe --- /dev/null +++ b/docs/versioned_docs/version-v0.27/03-clients/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Develop a client app", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/04-network/01-chain.md b/docs/versioned_docs/version-v0.27/04-network/01-chain.md new file mode 100644 index 0000000..9cec1e9 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/04-network/01-chain.md @@ -0,0 +1,237 @@ +--- +sidebar_position: 1 +description: Ignite Chain. +--- + +# Ignite Chain + +## Introduction + +_Ignite is a blockchain to help launch Cosmos SDK-based blockchains._ + +Using Cosmos SDK and Ignite CLI, developers can quickly create a crypto application that is decentralized, economical for usage, and scalable. The Cosmos SDK framework allows developers to create sovereign application-specific blockchains that become part of the wider [Cosmos ecosystem](https://cosmos.network/ecosystem/apps). Blockchains created with Cosmos SDK use a Proof-of-Stake (PoS) consensus protocol that requires validators to secure the chain. + +Even though tools like Ignite CLI simplify the development of a Cosmos SDK blockchain, launching a new chain is a highly complex process. One of the major challenges of developing and launching your own sovereign blockchain is ensuring the security of the underlying consensus. Since Cosmos SDK chains are based on the PoS consensus, each blockchain requires initial coin allocations and validators before they can be launched, which presents developers with significant challenges, such as determining their chain's tokenomics or coordinating a robust validator set. 
+ +The initial coin allocations and validators are described in a JSON-formatted genesis file that is shared among all initial nodes in the network. This genesis file defines the initial state of the application. Based on PoS, secure chains require the initial allocation of coins to be well distributed so that no single validator holds more than 1/3 of all tokens and receives a disproportionate amount of voting power. + +Along with ensuring the security of the underlying consensus, another highly difficult task in launching a new blockchain is attracting a diverse set of validators for the genesis file. Many promising projects fail to capture the attention of a sufficient number of trustworthy validators to secure their chains due to a lack of resources or experience. + +The Ignite Chain has, therefore, been conceived to facilitate the launch of Cosmos SDK blockchains by helping developers to navigate the complexities of launching a blockchain and coordinate the genesis of a new chain. Using the decentralized nature of blockchain, Ignite's coordination features help blockchain builders connect with validators and investors, speeding up the time to market of their projects and chances of success. + +Commands to interact with Ignite Chain are integrated into Ignite CLI and allow launching chains from it. Integration with Ignite Chain allows the CLI to support the developer in the entire lifecycle of realizing a Cosmos project, from the development and experimentation of the blockchain to the launch of its mainnet. + +## What is Ignite Chain + +Ignite Chain is a secure platform that simplifies the launch of Cosmos SDK-based chains, lending vital resources and support at the coordination, preparation, and launch stages. Ignite provides the tools that blockchain projects need to overcome the complexities of launching their chain, from validator coordination and token issuance to fundraising and community building. 
+ +Ignite facilitates the launch of new chains with an overall launch process during three phases: + +- Coordination +- Preparation +- Launch + +To reduce friction at each phase, Ignite provides an immutable and universal database for validator coordination. + +In the future, Ignite will also offer: + +- Token issuance: Ignite allows the issuance of tokens (called vouchers) that represent a share + allocation of a future mainnet network +- A fundraising platform for selling vouchers +- A permissionless framework to reward validator activities on a launched testnet network + +## Validator coordination + +To launch a chain in the Cosmos ecosystem, the validators must start nodes that connect to each other to create the new blockchain network. A node must be started from a file called the genesis file. The genesis file must be identical on all validator nodes before the new chain can be started. + +![genesis](./assets/genesis.png) + +The JSON-formatted genesis file contains information on the initial state of the chain, including coin allocations, the list of validators, various parameters for the chain like the maximum number of validators actively signing blocks, and the specific launch time. Because each validator has the same genesis file, the blockchain network starts automatically when the genesis time is reached. + +![launch](./assets/launch.png) + +### Ignite as a coordination source of truth + +Ignite Chain acts as a source of truth for new chains to coordinate a validator set and for validators to generate the genesis for a chain launch. The blockchain doesn’t directly store the final genesis file in its own ledger but rather stores information that allows generating the genesis file in a deterministic manner. + +The information stored on Ignite that supports deterministic generation of the genesis file for a specific chain launch is referred to as the _launch information_. 
When creating a new chain on Ignite, the coordinator provides the initial launch information. Then, through on-chain coordination, this launch information is updated by interacting with the blockchain by sending messages. When the chain is ready to be launched, the genesis file is generated by calling a genesis generation algorithm that uses the launch information. + +**GenesisGenerate(LaunchInformation) => genesis.json** + +The genesis generation algorithm is officially and formally specified. The official implementation of the genesis generation algorithm is developed in Go using Ignite CLI. However, any project is free to develop its own implementation of the algorithm as long as it complies with the specification of the algorithm. + +The genesis generation algorithm is not part of the on-chain protocol. In order to successfully launch a new chain, all validators must use the algorithm to generate their genesis using the launch information. The algorithm deterministically generates the genesis from the launch information that is stored on the Ignite chain. + +If any element of the launch information is censored, for example, removing an account balance, the launched chain reputation is negatively impacted and implies that the majority of validators agree on not using: + +- The tamper-proof launch information +- The official genesis generation algorithm + +Outside of the genesis generation, the genesis generation algorithm specification gives guidance on how to set up your network configuration. For example, the launch information can contain the addresses of the persistent peers of the blockchain network. + +![generation](./assets/generation.png) + +## Launch information + +Launch information can be created or updated in three different ways: + +1. Defined during chain creation but updatable by the coordinator after creation +2. Determined through coordination +3. 
Determined through specific on-chain logic not related to coordination + +### 1 - Launch information determined during chain creation: + +- `GenesisChainID`: The identifier for the network +- `SourceURL`: The URL of the git repository of the source code for building the blockchain + node binary +- `SourceHash`: The specific hash that identifies the release of the source code +- `InitialGenesis`: A multiformat structure that specifies the initial genesis for the chain + launch before running the genesis generation algorithm + +### 2 - Launch information determined through coordination: + +- `GenesisAccounts`: A list of genesis accounts for the chain, comprised of addresses with associated balances +- `VestingAccounts`: A list of genesis accounts with vesting options +- `GenesisValidators`: A list of the initial validators at chain launch +- `ParamChanges`: A list of module param changes in the genesis state + +### 3 - Launch information determined through on-chain logic: + +- `GenesisTime`: The timestamp for the network start, also referred to as LaunchTime + +### Initial genesis + +The launch information contains the initial genesis structure. This structure provides the information for generating the initial genesis before running the genesis generation algorithm and finalizing the genesis file. 
+ +The initial genesis structure can be: + +- `DefaultGenesis`: the default genesis file is generated by the chain binary init command +- `GenesisURL`: the initial genesis for a chain launch is an existing genesis file that is + fetched from a URL and then modified with the required algorithm - this initial genesis type should be used when the initial genesis state is extensive, + containing a lot of accounts for token distribution, containing records for an + airdrop +- `GenesisConfig`: the initial genesis for a chain launch is generated from an Ignite CLI + config that contains genesis accounts and module parameters - this initial genesis type should be used when the coordinator doesn’t have extensive state for the initial genesis but some module parameters must be customized. For example, the staking bond denom for the staking token + +## Coordination process + +The coordination process starts immediately after the chain is created and ends when the coordinator triggers the launch of the chain. + +The launch information is updated during the coordination process. + +During the coordination process, any entity can send requests to the network. A request is an object whose content specifies updates to the launch information. + +The chain coordinator approves or rejects the requests: + +- If a request is approved, the content is applied to the launch information +- If the request is rejected, no change is made to the launch information + +The request creator can also directly reject or cancel the request. + +Each chain contains a request pool that contains all requests. Each request has a status: + +- _PENDING_: Waiting for the approval of the coordinator +- _APPROVED_: Approved by the coordinator, its content has been applied to the launch + information +- _REJECTED_: Rejected by the coordinator or the request creator + +Approving or rejecting a request is irreversible. 
The only possible status transitions are: + +- _PENDING_ to _APPROVED_ +- _PENDING_ to _REJECTED_ + +To revert the effect on launch information from a request, a user must send the eventual opposite request (example: AddAccount → RemoveAccount). + +Since the coordinator is the sole approver for requests, each request created by the coordinator is immediately set to APPROVED and its content is applied to the launch information. + +![requests](./assets/requests.png) + +## Available requests + +Six types of requests can be sent to the Ignite chain: + +- `AddGenesisAccount` +- `AddVestingAccount` +- `AddGenesisValidator` +- `RemoveAccount` +- `RemoveValidator` +- `ChangeParam` + +**`AddGenesisAccount`** requests a new account for the chain genesis with a coin balance. This request content is composed of two fields: + +- Account address, must be unique in launch information +- Account balance + +The request automatically fails to be applied if a genesis account or a vesting account with an identical address is already specified in the launch information. + +**`AddVestingAccount`** requests a new account for the chain genesis with a coin balance and vesting options. This request content is composed of two fields: + +- Address of the account +- Vesting options of the account + +The currently supported vesting option is delayed vesting where the total balance of the account is specified and a number of tokens of the total balance of the account are vested only after an end time is reached. + +The request automatically fails to be applied if a genesis account or a vesting account with an identical address is already specified in the launch information. + +**`AddGenesisValidator`** requests a new genesis validator for the chain. A genesis validator in a Cosmos SDK blockchain represents an account with an existing balance in the genesis that self-delegates part of its balance during genesis initialization to become a bonded validator when the network starts. 
In most cases, the validator must first request an account with `AddGenesisAccount` before requesting to be a validator, unless they already have an account with a balance in the initial genesis of the chain. + +Self-delegation during genesis initialization is performed with a [Cosmos SDK module named genutils](https://pkg.go.dev/github.com/cosmos/cosmos-sdk/x/genutil). In the genesis, the _genutils_ module contains objects called gentx that represent transactions that were executed before the network launch. To be a validator when the network starts, a future validator must provide a gentx that contains the transaction for the self-delegation from their account. + +The request content is composed of five fields: + +- The gentx for the validator self-delegation +- The address of the validator +- The consensus public key of the validator node +- The self-delegation +- The peer information for the validator node + +The request automatically fails to be applied if a validator with the same address already exists in the launch information. + +**`RemoveAccount`** requests the removal of a genesis or vesting account from the launch information. The request content contains the address of the account to be removed. The request automatically fails to be applied if no genesis or vesting account with the specified address exists in the launch information. + +**`RemoveValidator`** requests the removal of a genesis validator from the launch information. The request content contains the address of the validator to be removed. The request automatically fails to be applied if no validator account with the specified address exists in the launch information. + +**`ChangeParam`** requests the modification of a module parameter in the genesis. Modules in a Cosmos SDK blockchain can have parameters that will configure the logic of the blockchain. The parameters can be changed through governance once the blockchain network is live. 
During the launch process, the initial parameters of the chain are set in the genesis. + +This request content is composed of three fields: + +- The name of the module +- The name of the parameter +- The value of the parameter represented as generic data + +### Request validity + +Some checks are verified on-chain when applying a request. For example, a genesis account can’t be added twice. However, some other validity properties can’t be checked on-chain. For example, because a gentx is represented through a generic byte array in the blockchain, an on-chain check is not possible to verify that the gentx is correctly signed or that the provided consensus public key that is stored on-chain corresponds to the consensus public key in the gentx. This gentx verification is the responsibility of the client interacting with the blockchain to ensure the requests have a valid format and allow for the start of the chain. Some validity checks are specified in the genesis generation algorithm. + +## Launch process + +The overall launch process of a chain through Ignite is composed of three phases: + +- Coordination phase +- Preparation phase +- Launch phase + +After the coordinator creates the chain on Ignite and provides the initial launch information, the launch process enters the coordination phase where users can send requests for the chain genesis. After the coordinator deems the chain as ready to be launched, they trigger the launch of the chain. During this operation, the coordinator provides the launch time, or genesis, time for the chain. + +Once the launch is triggered and before the launch time is reached, the chain launch process enters the preparation phase. During the preparation phase, requests can no longer be sent and the launch information of the chain is finalized. The validators run the genesis generation algorithm to get the final genesis of the chain and prepare their node. 
The remaining time must provide enough time for the validators to prepare their nodes. This launch time is set by the coordinator, although a specific range for the remaining time is imposed. + +Once the launch time is reached, the chain network is started and the chain launch process enters the launch phase. At this point, since the chain is live, no further action is required from the coordinator. However, under some circumstances, the chain might have failed to start. For example, a chain does not start if every validator in the genesis does not start their node. + +The coordinator has the ability to revert the chain launch. Reverting the chain launch sets the launch process back to the coordination phase where requests can be sent again to allow addressing the issue related to the launch failure. Reverting the launch has an effect only on Ignite. If the new chain is effectively launched, reverting the launch on Ignite has no effect on the chain liveness. Reverting the launch of the chain can be performed only by the coordinator after the launch time plus a delay called the revert delay. + +![process](./assets/process.png) + +## Genesis generation + +To ensure determinism, genesis generation rules must be rigorously specified depending on the launch information of the chain. 
+ +The general steps for the genesis generation are: + +- Building the blockchain node binary from source +- Generating the initial genesis +- Setting the chain ID +- Setting the genesis time +- Adding genesis accounts +- Adding genesis accounts with vesting options +- Adding gentxs for genesis validators +- Changing module params from param changes diff --git a/docs/versioned_docs/version-v0.27/04-network/02-introduction.md b/docs/versioned_docs/version-v0.27/04-network/02-introduction.md new file mode 100644 index 0000000..dc711fe --- /dev/null +++ b/docs/versioned_docs/version-v0.27/04-network/02-introduction.md @@ -0,0 +1,75 @@ +--- +sidebar_position: 2 +description: Introduction to Ignite Network commands. +--- + +# Ignite Network commands + +The `ignite network` commands allow to coordinate the launch of sovereign Cosmos blockchains by interacting with the +Ignite Chain. + +To launch a Cosmos blockchain you need someone to be a coordinator and others to be validators. These are just roles, +anyone can be a coordinator or a validator. + +- A coordinator publishes information about a chain to be launched on the Ignite blockchain, approves validator requests + and coordinates the launch. +- Validators send requests to join a chain and start their nodes when a blockchain is ready for launch. + +## Launching a chain on Ignite + +Launching with the CLI can be as simple as a few short commands with the CLI using `ignite network` command +namespace. + +> **NOTE:** `ignite n` can also be used as a shortcut for `ignite network`. + +To publish the information about your chain as a coordinator, run the following command (the URL should point to a +repository with a Cosmos SDK chain): + +``` +ignite network chain publish github.com/ignite/example +``` + +This command will return the launch identifier you will be using in the following +commands. Let's say this identifier is 42. +Next, ask validators to initialize their nodes and request to join the network. 
+For a testnet you can use the default values suggested by the +CLI. + +``` +ignite network chain init 42 +ignite network chain join 42 --amount 95000000stake +``` + +As a coordinator, list all validator requests: + +``` +ignite network request list 42 +``` + +Approve validator requests: + +``` +ignite network request approve 42 1,2 +``` + +Once you've approved all validators you need in the validator set, announce that +the chain is ready for launch: + +``` +ignite network chain launch 42 +``` + +Validators can now prepare their nodes for launch: + +``` +ignite network chain prepare 42 +``` + +The output of this command will show a command that a validator would use to +launch their node, for example `exampled --home ~/.example`. After enough +validators launch their nodes, a blockchain will be live. + +--- + +The next two sections provide more information on the process of coordinating a chain launch from a coordinator and +participating in a chain launch as a validator. diff --git a/docs/versioned_docs/version-v0.27/04-network/03-coordinator.md b/docs/versioned_docs/version-v0.27/04-network/03-coordinator.md new file mode 100644 index 0000000..76d5997 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/04-network/03-coordinator.md @@ -0,0 +1,146 @@ +--- +sidebar_position: 3 +description: Ignite Network commands for coordinators. +--- + +# Coordinator Guide + +Coordinators organize and launch new chains on Ignite Chain. + +--- + +## Publish a chain + +The first step in the process of a chain launch is for the coordinator to publish the intention of launching a chain. +The `publish` command publishes the intention of launching a chain on Ignite from a project git repository. 
+ +```shell +ignite n chain publish https://github.com/ignite/example +``` + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Chain's binary built +✔ Blockchain initialized +✔ Genesis initialized +✔ Network published +⋆ Launch ID: 3 +``` + +`LaunchID` identifies the published blockchain on Ignite blockchain. + +### Specify an initial genesis + +During coordination, new genesis accounts and genesis validators are added into the chain genesis. +The initial genesis where these accounts are added is by default the default genesis generated by the chain binary. + +The coordinator can specify a custom initial genesis for the chain launch with the `--genesis-url` flag. This custom initial +genesis can contain additional default genesis accounts and custom params for the chain modules. + +A URL must be provided for the `--genesis-url` flag. This can either directly point to a JSON genesis file or a tarball +containing a genesis file. + +```shell +ignite n chain publish https://github.com/ignite/example --genesis-url https://raw.githubusercontent.com/ignite/example/master/genesis/gen.json +``` + +## Approve validator requests + +When coordinating for a chain launch, validators send requests. These represent requests to be part of the genesis as a +validator for the chain. + +The coordinator can list these requests: + +``` +ignite n request list 3 +``` + +> **NOTE:** here "3" is specifying the `LaunchID`. 
+ +**Output** + +``` +Id Status Type Content +1 APPROVED Add Genesis Account spn1daefnhnupn85e8vv0yc5epmnkcr5epkqncn2le, 100000000stake +2 APPROVED Add Genesis Validator e3d3ca59d8214206839985712282967aaeddfb01@84.118.211.157:26656, spn1daefnhnupn85e8vv0yc5epmnkcr5epkqncn2le, 95000000stake +3 PENDING Add Genesis Account spn1daefnhnupn85e8vv0yc5epmnkcr5epkqncn2le, 95000000stake +4 PENDING Add Genesis Validator b10f3857133907a14dca5541a14df9e8e3389875@84.118.211.157:26656, spn1daefnhnupn85e8vv0yc5epmnkcr5epkqncn2le, 95000000stake +``` + +The coordinator can either approve or reject these requests. + +To approve the requests: + +``` +ignite n request approve 3 3,4 +``` + +> **NOTE:** when selecting a list of requests, both syntaxes can be used: `1,2,3,4` and `1-3,4`. + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Requests format verified +✔ Blockchain initialized +✔ Genesis initialized +✔ Genesis built +✔ The network can be started +✔ Request(s) #3, #4 verified +✔ Request(s) #3, #4 approved +``` + +Ignite CLI automatically verifies that the requests can be applied for the genesis, the approved requests don't generate +an invalid genesis. + +To reject the requests: + +``` +ignite n request reject 3 3,4 +``` + +**Output** + +``` +✔ Request(s) #3, #4 rejected +``` + +--- + +## Initiate the launch of a chain + +When enough validators are approved for the genesis and the coordinator deems the chain ready to be launched, the +coordinator can initiate the launch of the chain. + +This action will finalize the genesis of chain, meaning that no new requests can be approved for the chain. + +This action also sets the launch time (or genesis time) for the chain, the time when the blockchain network will go +live. + +``` +ignite n chain launch 3 +``` + +**Output** + +``` +✔ Chain 3 will be launched on 2022-10-01 09:00:00.000000 +0200 CEST +``` + +This example output shows the launch time of the chain on the network. 
+ +### Set a custom launch time + +By default, the launch time will be set to the earliest date possible. In practice, the validators should have time to +prepare their node for the network launch. If a validator fails to be online, they can get jailed for inactivity in the +validator set. + +The coordinator can specify a custom time with the `--launch-time` flag. + +``` +ignite n chain launch 3 --launch-time 2022-01-01T00:00:00Z +``` diff --git a/docs/versioned_docs/version-v0.27/04-network/04-validator.md b/docs/versioned_docs/version-v0.27/04-network/04-validator.md new file mode 100644 index 0000000..51a55e7 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/04-network/04-validator.md @@ -0,0 +1,161 @@ +--- +sidebar_position: 4 +description: Ignite Network commands for validators. +--- + +# Validator Guide + +Validators join as genesis validators for chain launches on Ignite Chain. + +--- + +## List all published chains + +Validators can list and explore published chains to be launched on Ignite. + +``` +ignite n chain list +``` + +**Output** + +``` +Launch Id Chain Id Source Phase + +3 example-1 https://github.com/ignite/example coordinating +2 spn-10 https://github.com/tendermint/spn launched +1 example-20 https://github.com/tendermint/spn launching +``` + +- `Launch ID` is the unique identifier of the chain on Ignite. This is the ID used to interact with the chain launch. +- `Chain ID` represents the identifier of the chain network once it is launched. It should be a unique identifier in + practice but doesn't need to be unique on Ignite. +- `Source` is the repository URL of the project. +- `Phase` is the current phase of the chain launch. 
A chain can have 3 different phases: + - `coordinating`: means the chain is open to receive requests from validators + - `launching`: means the chain no longer receives requests but it hasn't been launched yet + - `launched`: means the chain network has been launched + +--- + +## Request network participation + +When the chain is in the coordination phase, validators can request to be a genesis validator for the chain. +Ignite CLI supports an automatic workflow that can setup a node for the validator and a workflow for advanced users with +a specific setup for their node. + +### Simple Flow + +`ignite` can handle validator setup automatically. Initialize the node and generate a gentx file with default values: + +``` +ignite n chain init 3 +``` + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Blockchain initialized +✔ Genesis initialized +? Staking amount 95000000stake +? Commission rate 0.10 +? Commission max rate 0.20 +? Commission max change rate 0.01 +⋆ Gentx generated: /Users/lucas/spn/3/config/gentx/gentx.json +``` + +Now, create and broadcast a request to join a chain as a validator: + +``` +ignite n chain join 3 --amount 100000000stake +``` + +The join command accepts a `--amount` flag with a comma-separated list of tokens. If the flag is provided, the +command will broadcast a request to add the validator’s address as an account to the genesis with the specific amount. + +**Output** + +``` +? Peer's address 192.168.0.1:26656 +✔ Source code fetched +✔ Blockchain set up +✔ Account added to the network by the coordinator! +✔ Validator added to the network by the coordinator! +``` + +--- + +### Advanced Flow + +Using a more advanced setup (e.g. 
custom `gentx`), validators must provide an additional flag to their command +to point to the custom file: + +``` +ignite n chain join 3 --amount 100000000stake --gentx ~/chain/config/gentx/gentx.json +``` + +--- + +## Launch the network + +### Simple Flow + +Generate the final genesis and config of the node: + +``` +ignite n chain prepare 3 +``` + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Chain's binary built +✔ Genesis initialized +✔ Genesis built +✔ Chain is prepared for launch +``` + +Next, start the node: + +``` +exampled start --home ~/spn/3 +``` + +--- + +### Advanced Flow + +Fetch the final genesis for the chain: + +``` +ignite n chain show genesis 3 +``` + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Blockchain initialized +✔ Genesis initialized +✔ Genesis built +⋆ Genesis generated: ./genesis.json +``` + +Next, fetch the persistent peer list: + +``` +ignite n chain show peers 3 +``` + +**Output** + +``` +⋆ Peer list generated: ./peers.txt +``` + +The fetched genesis file and peer list can be used for a manual node setup. diff --git a/docs/versioned_docs/version-v0.27/04-network/05-coordination.md b/docs/versioned_docs/version-v0.27/04-network/05-coordination.md new file mode 100644 index 0000000..eb26b71 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/04-network/05-coordination.md @@ -0,0 +1,72 @@ +--- +sidebar_position: 5 +description: Other commands for coordination. +--- + +# Other commands for coordination + +Ignite CLI offers various other commands to coordinate chain launches that can be used by coordinators, validators, or other participants. + +The requests follow the same logic as the request for validator participation; they must be approved by the chain coordinator to be effective in the genesis. + +--- + +## Request a genesis account + +Any participant can request a genesis account with an associated balance for the chain. 
+The participant must provide an address with a comma-separated list of token balances. + +Any prefix can be used for the Bech32 address, it is automatically converted into `spn` on the Ignite Chain. + +``` +ignite n request add-account 3 spn1pe5h2gelhu8aukmrnj0clmec56aspxzuxcy99y 1000stake +``` + +**Output** + +``` +Source code fetched +Blockchain set up +⋆ Request 10 to add account to the network has been submitted! +``` +--- + +## Request to remove a genesis account + +Any participant can request to remove a genesis account from the chain genesis. +It might be the case if, for example, a user suggests an account balance that is so high it could harm the network. +The participant must provide the address of the account. + +Any prefix can be used for the Bech32 address, it is automatically converted into `spn` on the Ignite Chain. + +``` +ignite n request remove-account 3 spn1pe5h2gelhu8aukmrnj0clmec56aspxzuxcy99y +``` + +**Output** + +``` +Request 11 to remove account from the network has been submitted! +``` +--- + +## Request to remove a genesis validator + +Any participant can request to remove a genesis validator (gentx) from the chain genesis. +It might be the case if, for example, a chain failed to launch because of some validators, and they must be removed from genesis. +The participant must provide the address of the validator account (same format as genesis account). + +Any prefix can be used for the Bech32 address, it is automatically converted into `spn` on the Ignite Chain. + +The request removes only the gentx from the genesis but not the associated account balance. + +``` +ignite n request remove-validator 429 spn1pe5h2gelhu8aukmrnj0clmec56aspxzuxcy99y +``` + +**Output** + +``` +Request 12 to remove validator from the network has been submitted! 
+``` +--- \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/04-network/_category_.json b/docs/versioned_docs/version-v0.27/04-network/_category_.json new file mode 100644 index 0000000..c45c6eb --- /dev/null +++ b/docs/versioned_docs/version-v0.27/04-network/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Launch a chain", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/04-network/assets/generation.png b/docs/versioned_docs/version-v0.27/04-network/assets/generation.png new file mode 100644 index 0000000..c10cac2 Binary files /dev/null and b/docs/versioned_docs/version-v0.27/04-network/assets/generation.png differ diff --git a/docs/versioned_docs/version-v0.27/04-network/assets/genesis.png b/docs/versioned_docs/version-v0.27/04-network/assets/genesis.png new file mode 100644 index 0000000..b640db4 Binary files /dev/null and b/docs/versioned_docs/version-v0.27/04-network/assets/genesis.png differ diff --git a/docs/versioned_docs/version-v0.27/04-network/assets/launch.png b/docs/versioned_docs/version-v0.27/04-network/assets/launch.png new file mode 100644 index 0000000..4ea50b2 Binary files /dev/null and b/docs/versioned_docs/version-v0.27/04-network/assets/launch.png differ diff --git a/docs/versioned_docs/version-v0.27/04-network/assets/process.png b/docs/versioned_docs/version-v0.27/04-network/assets/process.png new file mode 100644 index 0000000..a5c21d2 Binary files /dev/null and b/docs/versioned_docs/version-v0.27/04-network/assets/process.png differ diff --git a/docs/versioned_docs/version-v0.27/04-network/assets/requests.png b/docs/versioned_docs/version-v0.27/04-network/assets/requests.png new file mode 100644 index 0000000..d097e68 Binary files /dev/null and b/docs/versioned_docs/version-v0.27/04-network/assets/requests.png differ diff --git a/docs/versioned_docs/version-v0.27/05-contributing/01-docs.md b/docs/versioned_docs/version-v0.27/05-contributing/01-docs.md new file mode 100644 
index 0000000..4c05ed2 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/05-contributing/01-docs.md @@ -0,0 +1,105 @@ +--- +sidebar_position: 1 +slug: /contributing +--- + +# Improving documentation + +Thank you for visiting our repository and considering making contributions. We +appreciate your interest in helping us to create and maintain awesome tutorials +and documentation. + +## Using this repo + +Review existing [Ignite CLI issues](https://github.com/ignite/cli/issues) to see +if your question has already been asked and answered. + +- To provide feedback, file an issue and provide generous details to help us + understand how we can make it better. +- To provide a fix, make a direct contribution. If you're not a member or + maintainer, fork the repo and then submit a pull request (PR) from your forked + repo to the `main` branch. +- Start by creating a draft pull request. Create your draft PR early, even if + your work is just beginning or incomplete. Your draft PR indicates to the + community that you're working on something and provides a space for + conversations early in the development process. Merging is blocked for `Draft` + PRs, so they provide a safe place to experiment and invite comments. + +## Reviewing technical content PRs + +Some of the best content contributions come during the PR review cycles. Follow +best practices for technical content PR reviews just like you do for code +reviews. + +- For in-line suggestions, use the [GitHub suggesting + feature](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/commenting-on-a-pull-request) + . +- The PR owner can merge in your suggested commits one at a time or in batch + (preferred). +- When you are providing a more granular extensive review that results in more + than 20 in-line suggestions, go ahead and check out the branch and make the + changes yourself. 
+ +## Writing and contributing + +We welcome contributions to the docs and tutorials. + +Our technical content follows the [Google developer documentation style +guide](https://developers.google.com/style). Highlights to help you get started: + +- [Highlights](https://developers.google.com/style/highlights) +- [Word list](https://developers.google.com/style/word-list) +- [Style and tone](https://developers.google.com/style/tone) +- [Writing for a global + audience](https://developers.google.com/style/translation) +- [Cross-references](https://developers.google.com/style/cross-references) +- [Present tense](https://developers.google.com/style/tense) + +The Google guidelines include more material than is listed here and are used as +a guide that enables easy decision-making about proposed content changes. + +Other useful resources: + +- [Google Technical Writing Courses](https://developers.google.com/tech-writing) +- [GitHub Guides Mastering + Markdown](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax) + +## Where can I find the tutorials and docs? + +Technical content includes knowledge base articles and interactive tutorials. + +- The Ignite CLI Developer Tutorials content is in the `docs/guide` folder. +- The Knowledge Base content is in the `docs/kb` folder. +- Upgrade information is in the `docs/migration` folder. + +Note: The CLI docs are auto-generated and do not support doc updates. + +Locations and folders for other content can vary. Explore the self-describing +folders for the content that you are interested in. Some articles and tutorials +reside in a single Markdown file while sub-folders might be present for other +tutorials. + +As always, work-in-progress content might be happening in other locations and +repos. + +## Who works on the tutorials? + +The Ignite product team developers are focused on building Ignite CLI and +improving the developer experience. 
The Ignite Ecosystem Development team owns +the technical content and tutorials and manages developer onboarding. + +Meet the [people behind Ignite CLI and our +contributors](https://github.com/ignite/cli/graphs/contributors). + +## Viewing docs builds + +Use a preview to see what your changes will look like in production before the +updated pages are published. + +- While a PR is in draft mode, you can rely on using the preview feature in + Markdown. +- After the PR moves from **Draft** to **Ready for review**, the CI status + checks generate a deployment preview. This preview stays up to date as you + continue to work and commit new changes to the same branch. A `Docs Deploy + Preview / build_and_deploy (pull_request)` preview on a GitHub actions URL is + unique for that PR. diff --git a/docs/versioned_docs/version-v0.27/05-contributing/_category_.json b/docs/versioned_docs/version-v0.27/05-contributing/_category_.json new file mode 100644 index 0000000..094b1f3 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/05-contributing/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Contribute to Ignite", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/06-migration/_category_.json b/docs/versioned_docs/version-v0.27/06-migration/_category_.json new file mode 100644 index 0000000..9460d57 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/06-migration/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Migration", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/06-migration/readme.md b/docs/versioned_docs/version-v0.27/06-migration/readme.md new file mode 100644 index 0000000..978e90e --- /dev/null +++ b/docs/versioned_docs/version-v0.27/06-migration/readme.md @@ -0,0 +1,14 @@ +--- +sidebar_position: 0 +--- + +# Migration Guides + +Welcome to the section on upgrading to a newer version of Ignite CLI! 
If you're +looking to update to the latest version, you'll want to start by checking the +documentation to see if there are any special considerations or instructions you +need to follow. + +If there is no documentation for the latest version of Ignite CLI, it's +generally safe to assume that there were no breaking changes, and you can +proceed with using the latest version with your project. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/06-migration/v0.18.md b/docs/versioned_docs/version-v0.27/06-migration/v0.18.md new file mode 100644 index 0000000..c516986 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/06-migration/v0.18.md @@ -0,0 +1,458 @@ +--- +sidebar_position: 999 +title: v0.18.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.18, changes are required to use Ignite CLI v0.18. +--- + +# Upgrading a Blockchain to use Ignite CLI v0.18 + +Ignite CLI v0.18 comes with Cosmos SDK v0.44. This version of Cosmos SDK introduced changes that are not compatible with +chains that were scaffolded with Ignite CLI versions lower than v0.18. + +**Important:** After upgrading from Ignite CLI v0.17.3 to Ignite CLI v0.18, you must update the default blockchain +template to use blockchains that were scaffolded with earlier versions. + +These instructions are written for a blockchain that was scaffolded with the following command: + +``` +ignite scaffold chain github.com/username/mars +``` + +If you used a different module path, replace `username` and `mars` with the correct values for your blockchain. + +## Blockchain + +For each file listed, make the required changes to the source code of the blockchain template. 
+ +### go.mod + +``` +module github.com/username/mars + +go 1.16 + +require ( + github.com/cosmos/cosmos-sdk v0.44.0 + github.com/cosmos/ibc-go v1.2.0 + github.com/gogo/protobuf v1.3.3 + github.com/google/go-cmp v0.5.6 // indirect + github.com/gorilla/mux v1.8.0 + github.com/grpc-ecosystem/grpc-gateway v1.16.0 + github.com/spf13/cast v1.3.1 + github.com/spf13/cobra v1.1.3 + github.com/stretchr/testify v1.7.0 + github.com/tendermint/spm v0.1.6 + github.com/tendermint/tendermint v0.34.13 + github.com/tendermint/tm-db v0.6.4 + google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83 + google.golang.org/grpc v1.40.0 +) + +replace ( + github.com/99designs/keyring => github.com/cosmos/keyring v1.1.7-0.20210622111912-ef00f8ac3d76 + github.com/gogo/protobuf => github.com/regen-network/protobuf v1.3.3-alpha.regen.1 + google.golang.org/grpc => google.golang.org/grpc v1.33.2 +) +``` + +### app/app.go + +```go +package app + +import ( + //... + // Add the following packages: + "github.com/cosmos/cosmos-sdk/x/feegrant" + feegrantkeeper "github.com/cosmos/cosmos-sdk/x/feegrant/keeper" + feegrantmodule "github.com/cosmos/cosmos-sdk/x/feegrant/module" + + "github.com/cosmos/ibc-go/modules/apps/transfer" + ibctransferkeeper "github.com/cosmos/ibc-go/modules/apps/transfer/keeper" + ibctransfertypes "github.com/cosmos/ibc-go/modules/apps/transfer/types" + ibc "github.com/cosmos/ibc-go/modules/core" + ibcclient "github.com/cosmos/ibc-go/modules/core/02-client" + ibcporttypes "github.com/cosmos/ibc-go/modules/core/05-port/types" + ibchost "github.com/cosmos/ibc-go/modules/core/24-host" + ibckeeper "github.com/cosmos/ibc-go/modules/core/keeper" + // Remove the following packages: + // transfer "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer" + // ibctransferkeeper "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer/keeper" + // ibctransfertypes "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer/types" + // ibc "github.com/cosmos/cosmos-sdk/x/ibc/core" + // 
ibcclient "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client" + // porttypes "github.com/cosmos/cosmos-sdk/x/ibc/core/05-port/types" + // ibchost "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" + // ibckeeper "github.com/cosmos/cosmos-sdk/x/ibc/core/keeper" +) + +var ( + //... + ModuleBasics = module.NewBasicManager( + //... + slashing.AppModuleBasic{}, + // Add feegrantmodule.AppModuleBasic{}, + feegrantmodule.AppModuleBasic{}, // <-- + ibc.AppModuleBasic{}, + //... + ) + //... +) + +type App struct { + //... + // Replace codec.Marshaler with codec.Codec + appCodec codec.Codec // <-- + // Add FeeGrantKeeper + FeeGrantKeeper feegrantkeeper.Keeper // <-- +} + +func New( /*...*/ ) { + //bApp.SetAppVersion(version.Version) + bApp.SetVersion(version.Version) // <-- + + keys := sdk.NewKVStoreKeys( + //... + upgradetypes.StoreKey, + // Add feegrant.StoreKey + feegrant.StoreKey, // <-- + evidencetypes.StoreKey, + //... + ) + + app.FeeGrantKeeper = feegrantkeeper.NewKeeper(appCodec, keys[feegrant.StoreKey], app.AccountKeeper) // <-- + // Add app.BaseApp as the last argument to upgradekeeper.NewKeeper + app.UpgradeKeeper = upgradekeeper.NewKeeper(skipUpgradeHeights, keys[upgradetypes.StoreKey], appCodec, homePath, app.BaseApp) + + app.IBCKeeper = ibckeeper.NewKeeper( + // Add app.UpgradeKeeper + appCodec, keys[ibchost.StoreKey], app.GetSubspace(ibchost.ModuleName), app.StakingKeeper, app.UpgradeKeeper, scopedIBCKeeper, + ) + + govRouter.AddRoute(govtypes.RouterKey, govtypes.ProposalHandler). + //... + // Replace NewClientUpdateProposalHandler with NewClientProposalHandler + AddRoute(ibchost.RouterKey, ibcclient.NewClientProposalHandler(app.IBCKeeper.ClientKeeper)) + + // Replace porttypes with ibcporttypes + ibcRouter := ibcporttypes.NewRouter() + + app.mm.SetOrderBeginBlockers( + upgradetypes.ModuleName, + // Add capabilitytypes.ModuleName, + capabilitytypes.ModuleName, + minttypes.ModuleName, + //... 
+ // Add feegrant.ModuleName, + feegrant.ModuleName, + ) + + // Add app.appCodec as an argument to module.NewConfigurator: + app.mm.RegisterServices(module.NewConfigurator(app.appCodec, app.MsgServiceRouter(), app.GRPCQueryRouter())) + + // Replace: + // app.SetAnteHandler( + // ante.NewAnteHandler( + // app.AccountKeeper, app.BankKeeper, ante.DefaultSigVerificationGasConsumer, + // encodingConfig.TxConfig.SignModeHandler(), + // ), + // ) + + // With the following: + anteHandler, err := ante.NewAnteHandler( + ante.HandlerOptions{ + AccountKeeper: app.AccountKeeper, + BankKeeper: app.BankKeeper, + SignModeHandler: encodingConfig.TxConfig.SignModeHandler(), + FeegrantKeeper: app.FeeGrantKeeper, + SigGasConsumer: ante.DefaultSigVerificationGasConsumer, + }, + ) + if err != nil { + panic(err) + } + app.SetAnteHandler(anteHandler) + + // Remove the following: + // ctx := app.BaseApp.NewUncachedContext(true, tmproto.Header{}) + // app.CapabilityKeeper.InitializeAndSeal(ctx) +} + +func (app *App) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci.ResponseInitChain { + var genesisState GenesisState + if err := tmjson.Unmarshal(req.AppStateBytes, &genesisState); err != nil { + panic(err) + } + // Add the following: + app.UpgradeKeeper.SetModuleVersionMap(ctx, app.mm.GetVersionMap()) + return app.mm.InitGenesis(ctx, app.appCodec, genesisState) +} + +// Replace Marshaler with Codec +func (app *App) AppCodec() codec.Codec { + return app.appCodec +} + +// Replace BinaryMarshaler with BinaryCodec +func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey sdk.StoreKey) paramskeeper.Keeper { + //... +} +``` + +### app/genesis.go + +```go +// Replace codec.JSONMarshaler with codec.JSONCodec +func NewDefaultGenesisState(cdc codec.JSONCodec) GenesisState { + // ... 
+} +``` + +### testutil/keeper/mars.go + +Add the following code: + +```go +package keeper + +import ( + "testing" + + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/store" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/stretchr/testify/require" + "github.com/tendermint/tendermint/libs/log" + tmproto "github.com/tendermint/tendermint/proto/tendermint/types" + tmdb "github.com/tendermint/tm-db" + "github.com/username/mars/x/mars/keeper" + "github.com/username/mars/x/mars/types" +) + +func MarsKeeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + + db := tmdb.NewMemDB() + stateStore := store.NewCommitMultiStore(db) + stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + require.NoError(t, stateStore.LoadLatestVersion()) + + registry := codectypes.NewInterfaceRegistry() + k := keeper.NewKeeper( + codec.NewProtoCodec(registry), + storeKey, + memStoreKey, + ) + + ctx := sdk.NewContext(stateStore, tmproto.Header{}, false, log.NewNopLogger()) + return k, ctx +} +``` + +If `mars` is an IBC-enabled module, add the following code, instead: + +```go +package keeper + +import ( + "testing" + + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/store" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + capabilitykeeper "github.com/cosmos/cosmos-sdk/x/capability/keeper" + typesparams "github.com/cosmos/cosmos-sdk/x/params/types" + ibckeeper "github.com/cosmos/ibc-go/modules/core/keeper" + "github.com/stretchr/testify/require" + "github.com/tendermint/tendermint/libs/log" + tmproto 
"github.com/tendermint/tendermint/proto/tendermint/types" + tmdb "github.com/tendermint/tm-db" + "github.com/username/test/x/mars/keeper" + "github.com/username/test/x/mars/types" +) + +func MarsKeeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + logger := log.NewNopLogger() + + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + + db := tmdb.NewMemDB() + stateStore := store.NewCommitMultiStore(db) + stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + require.NoError(t, stateStore.LoadLatestVersion()) + + registry := codectypes.NewInterfaceRegistry() + appCodec := codec.NewProtoCodec(registry) + capabilityKeeper := capabilitykeeper.NewKeeper(appCodec, storeKey, memStoreKey) + + amino := codec.NewLegacyAmino() + ss := typesparams.NewSubspace(appCodec, + amino, + storeKey, + memStoreKey, + "MarsSubSpace", + ) + IBCKeeper := ibckeeper.NewKeeper( + appCodec, + storeKey, + ss, + nil, + nil, + capabilityKeeper.ScopeToModule("MarsIBCKeeper"), + ) + + k := keeper.NewKeeper( + codec.NewProtoCodec(registry), + storeKey, + memStoreKey, + IBCKeeper.ChannelKeeper, + &IBCKeeper.PortKeeper, + capabilityKeeper.ScopeToModule("MarsScopedKeeper"), + ) + + ctx := sdk.NewContext(stateStore, tmproto.Header{}, false, logger) + return k, ctx +} +``` + +### testutil/network/network.go + +```go +func DefaultConfig() network.Config { + // ... + return network.Config{ + // ... + // Add sdk.DefaultPowerReduction + AccountTokens: sdk.TokensFromConsensusPower(1000, sdk.DefaultPowerReduction), + StakingTokens: sdk.TokensFromConsensusPower(500, sdk.DefaultPowerReduction), + BondedTokens: sdk.TokensFromConsensusPower(100, sdk.DefaultPowerReduction), + // ... 
+ } +} +``` + +### testutil/sample/sample.go + +Add the following code: + +```go +package sample + +import ( + "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" + sdk "github.com/cosmos/cosmos-sdk/types" +) + +// AccAddress returns a sample account address +func AccAddress() string { + pk := ed25519.GenPrivKey().PubKey() + addr := pk.Address() + return sdk.AccAddress(addr).String() +} +``` + +### BandChain Support + +If your module includes integration with BandChain, added manually or scaffolded with `ignite scaffold band`, upgrade +the `github.com/bandprotocol/bandchain-packet` package to `v0.0.2` in `go.mod`. + +## Module + +### x/mars/keeper/keeper.go + +```go +package keeper + +// ... + +type ( + Keeper struct { + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec + //... + } +) + +func NewKeeper( + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec, + // ... +) *Keeper { + // ... +} +``` + +### x/mars/keeper/msg_server_test.go + +```go +package keeper_test + +import ( + //... 
+ // Add the following: + keepertest "github.com/username/mars/testutil/keeper" + "github.com/username/mars/x/mars/keeper" +) + +func setupMsgServer(t testing.TB) (types.MsgServer, context.Context) { + // Replace + // keeper, ctx := setupKeeper(t) + // return NewMsgServerImpl(*keeper), sdk.WrapSDKContext(ctx) + + // With the following: + k, ctx := keepertest.MarsKeeper(t) + return keeper.NewMsgServerImpl(*k), sdk.WrapSDKContext(ctx) +} +``` + +### x/mars/module.go + +```go +package mars + +type AppModuleBasic struct { + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec +} + +// Replace Marshaler with BinaryCodec +func NewAppModuleBasic(cdc codec.BinaryCodec) AppModuleBasic { + return AppModuleBasic{cdc: cdc} +} + +// Replace JSONMarshaler with JSONCodec +func (AppModuleBasic) DefaultGenesis(cdc codec.JSONCodec) json.RawMessage { + return cdc.MustMarshalJSON(types.DefaultGenesis()) +} + +// Replace JSONMarshaler with JSONCodec +func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncodingConfig, bz json.RawMessage) error { + //... +} + +// Replace codec.Marshaller with codec.Codec +func NewAppModule(cdc codec.Codec, keeper keeper.Keeper) AppModule { + //... +} + +// Replace JSONMarshaler with JSONCodec +func (am AppModule) InitGenesis(ctx sdk.Context, cdc codec.JSONCodec, gs json.RawMessage) []abci.ValidatorUpdate { + //... +} + +// Replace JSONMarshaler with JSONCodec +func (am AppModule) ExportGenesis(ctx sdk.Context, cdc codec.JSONCodec) json.RawMessage { + //... 
+} + +// Add the following +func (AppModule) ConsensusVersion() uint64 { return 2 } +``` diff --git a/docs/versioned_docs/version-v0.27/06-migration/v0.19.2.md b/docs/versioned_docs/version-v0.27/06-migration/v0.19.2.md new file mode 100644 index 0000000..92936fa --- /dev/null +++ b/docs/versioned_docs/version-v0.27/06-migration/v0.19.2.md @@ -0,0 +1,26 @@ +--- +sidebar_position: 998 +title: v0.19.2 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.19.2, changes are required to use Ignite CLI v0.19.2. +--- + +# Upgrading a blockchain to use Ignite CLI v0.19.2 + +Ignite CLI v0.19.2 comes with IBC v2.0.2. + +With Ignite CLI v0.19.2, the contents of the deprecated Ignite CLI Modules `tendermint/spm` repo are moved to the +official Ignite CLI repo which introduces breaking changes. + +To migrate your chain that was scaffolded with Ignite CLI versions lower than v0.19.2: + +1. IBC upgrade: Use + the [IBC migration documents](https://github.com/cosmos/ibc-go/blob/main/docs/docs/05-migrations/03-v1-to-v2.md) + +2. In your chain's `go.mod` file, remove `tendermint/spm` and add the v0.19.2 version of `tendermint/starport`. 
If your + chain uses these packages, change the import paths as shown: + + - `github.com/tendermint/spm/ibckeeper` moved to `github.com/tendermint/starport/starport/pkg/cosmosibckeeper` + - `github.com/tendermint/spm/cosmoscmd` moved to `github.com/tendermint/starport/starport/pkg/cosmoscmd` + - `github.com/tendermint/spm/openapiconsole` moved to `github.com/tendermint/starport/starport/pkg/openapiconsole` + - `github.com/tendermint/spm/testutil/sample` moved + to `github.com/tendermint/starport/starport/pkg/cosmostestutil/sample` diff --git a/docs/versioned_docs/version-v0.27/06-migration/v0.20.0.md b/docs/versioned_docs/version-v0.27/06-migration/v0.20.0.md new file mode 100644 index 0000000..197dafc --- /dev/null +++ b/docs/versioned_docs/version-v0.27/06-migration/v0.20.0.md @@ -0,0 +1,12 @@ +--- +sidebar_position: 997 +title: v0.20.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.20.0, changes are required to use Ignite CLI v0.20.0. +--- + +# Upgrading a blockchain to use Ignite CLI v0.20.2 + +1. Upgrade your Cosmos SDK version to [v0.45.3](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.45.3). + +2. Update your `SetOrderBeginBlockers` and `SetOrderEndBlockers` in your `app/app.go` to explicitly add entries for all + the modules you use in your chain. diff --git a/docs/versioned_docs/version-v0.27/06-migration/v0.22.0.md b/docs/versioned_docs/version-v0.27/06-migration/v0.22.0.md new file mode 100644 index 0000000..e2d82e6 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/06-migration/v0.22.0.md @@ -0,0 +1,36 @@ +--- +sidebar_position: 996 +title: v0.22.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.22.0, changes are required to use Ignite CLI v0.22.0. +--- + +# Upgrading a blockchain to use Ignite CLI v0.22.0 + +Ignite CLI v0.22.2 changed the GitHub username from "ignite-hq" to "ignite", which means the imports must be fixed to +reflect this change. + +1. 
In your `go.mod` file find the require line for Ignite CLI that starts with `github.com/ignite-hq/cli` and is + followed by a version. + It looks something like `github.com/ignite-hq/cli v0.22.0`, and replace it by `github.com/ignite/cli v0.22.2`. + +2. Make a bulk find and replace in the import statements for `github.com/ignite-hq/cli` to be replaced + by `github.com/ignite/cli`. + +3. Finally, run `go mod tidy` and ensure there's no mention if `ignite-hq/cli` in your `go.sum` file. + +This update includes an upgrade to the `ibc-go` packages. Please make the according changes: + +1. Upgrade your IBC version to [v3](https://github.com/cosmos/ibc-go/releases/tag/v3.0.0). + + 1. Search for `github.com/cosmos/ibc-go/v2` in the import statements of your `.go` files and replace `v2` in the end + with `v3` + + 1. Open your `app.go`, + + - Update your transfer keeper by adding another `app.IBCKeeper.ChannelKeeper` as an argument + after `app.IBCKeeper.ChannelKeeper` + + - Define `var transferIBCModule = transfer.NewIBCModule(app.TransferKeeper)` in your `New()` func, and update + your existent IBC router to use it: `ibcRouter.AddRoute(ibctransfertypes.ModuleName, transferIBCModule)` + + 3. Open your `go.mod` and change the IBC line with `github.com/cosmos/ibc-go/v3 v3.0.0` diff --git a/docs/versioned_docs/version-v0.27/06-migration/v0.24.0.md b/docs/versioned_docs/version-v0.27/06-migration/v0.24.0.md new file mode 100644 index 0000000..ccce0e1 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/06-migration/v0.24.0.md @@ -0,0 +1,330 @@ +--- +sidebar_position: 995 +title: v0.24.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.24, changes are required to use Ignite CLI v0.24.0. +--- + +## Cosmos SDK v0.46 upgrade notes + +### Update dependencies + +Cosmos SDK v0.46 is compatible with the latest version of IBC Go v5. If you have a chain that is using an older version, +update the dependencies in your project. 
+ +Throughout the code you might see the following dependencies: + +```go +package pkg_name + +import ( + "github.com/cosmos/ibc-go/v3/..." +) +``` + +Where `v3` is the version of IBC Go and `...` are different IBC Go packages. + +To upgrade the version to `v5`, a global find-and-replace should work. Replace `cosmos/ibc-go/v3` (or whicherver version +you're using) with `cosmos/ibc-go/v5` only in `*.go` files (to exclude unwated changes to files like `go,sum`). + +### Module keeper + +Add an import: + +```go +// x/{moduleName}/keeper/keeper.go + +package keeper + +// ... + +import ( + //... + storetypes "github.com/cosmos/cosmos-sdk/store/types" +) +``` + +In the `Keeper` struct replace `sdk.StoreKey` with `storetypes.StoreKey`: + +```go +// x/{moduleName}/keeper/keeper.go + +package keeper + +// ... + +type ( + Keeper struct { + cdc codec.BinaryCodec + storeKey storetypes.StoreKey + memKey storetypes.StoreKey + paramstore paramtypes.Subspace + } +) +``` + +In the argument list of the `NewKeeper` function definition: + +```go +package keeper + +// ... + +// x/{moduleName}/keeper/keeper.go + +func NewKeeper( + //... + memKey storetypes.StoreKey, +) +``` + +Store type aliases have been removed from the Cosmos SDK `types` package and now have to be imported from `store/types`, +instead. + +In the `testutil/keeper/{moduleName}.go` replace `types.StoreKey` with `storetypes.StoreKey` and `types.MemStoreKey` +with `storetypes.MemStoreKey`. + +```go +// testutil/keeper/{moduleName}.go + +package keeper + +// ... + +func {moduleName}Keeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(storetypes.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(storetypes.MemStoreKey) + //... +} +``` + +### Testutil network package + +Add the `require` package for testing and `pruningtypes` and remove `storetypes`: + +```go +// testutil/network/network.go + +package network + +// ... 
+ +import ( + pruningtypes "github.com/cosmos/cosmos-sdk/pruning/types" + "github.com/stretchr/testify/require" + // storetypes "github.com/cosmos/cosmos-sdk/store/types" <-- remove this line +) +``` + +In the `DefaultConfig` function replace `storetypes.NewPruningOptionsFromString` +with `pruningtypes.NewPruningOptionsFromString` + +```go +// testutil/network/network.go + +package network + +// ... + +func DefaultConfig() network.Config { + //... + return network.Config{ + AppConstructor: func(val network.Validator) servertypes.Application { + return app.New( + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.AppConfig.Pruning)), + //... + ) + }, + //... + } +} +``` + +The `New` function in the Cosmos SDK `testutil/network` package now +accepts [three arguments](https://github.com/cosmos/cosmos-sdk/blob/v0.46.0/testutil/network/network.go#L206) instead of +two. + +In the `New` function add `t.TempDir()` as the second argument to `network.New()` and test that no error is thrown +with `require.NoError(t, err)`: + +```go +// testutil/network/network.go + +package network + +// ... + +func New(t *testing.T, configs ...network.Config) *network.Network { + //... + net, err := network.New(t, t.TempDir(), cfg) + require.NoError(t, err) + //... +} +``` + +### Testutil keeper package + +In the `{moduleName}Keeper` function make the following replacements: + +- `storetypes.StoreKey` → `types.StoreKey` +- `storetypes.MemStoreKey` → `types.MemStoreKey` +- `sdk.StoreTypeIAVL` → `storetypes.StoreTypeIAVL` +- `sdk.StoreTypeMemory` → `storetypes.StoreTypeMemory` + +```go +// testutil/keeper/{moduleName}.go + +package keeper + +// ... + +func {moduleName}Keeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + //... 
+ stateStore.MountStoreWithDB(storeKey, storetypes.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, storetypes.StoreTypeMemory, nil) + //... +} +``` + +### IBC modules + +If you have IBC-enabled modules (for example, added with `ignite scaffold module ... --ibc` or created manually), make +the following changes to the source code. + +Cosmos SDK expects IBC modules +to [implement the `IBCModule` interface](https://ibc.cosmos.network/main/ibc/apps/ibcmodule/). Create a `IBCModule` +type that embeds the module's keeper and a method that returns a new `IBCModule`. Methods in this file will be defined +on this type. + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +type IBCModule struct { + keeper keeper.Keeper +} + +func NewIBCModule(k keeper.Keeper) IBCModule { + return IBCModule{ + keeper: k, + } +} +``` + +Replace receivers for all methods in this file from `(am AppModule)` to `(im IBCModule)`. Replace all instances of `am.` +with `im.` to fix the errors. + +`OnChanOpenInit` now returns to values: a `string` and an `error`: + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +func (im IBCModule) OnChanOpenInit( /*...*/ ) (string, error) +``` + +Ensure that all return statements (five, in the default template) in `OnChanOpenInit` return two values. For example: + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +func (im IBCModule) OnChanOpenInit( /*...*/ ) (string, error) { + //... + return "", sdkerrors.Wrapf(porttypes.ErrInvalidPort, "invalid port: %s, expected %s", portID, boundPort) + //... +} +``` + +Error acknowledgments returned from Transfer `OnRecvPacket` now include a deterministic ABCI code and error message. +Remove the `.Error()` call: + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +func (im IBCModule) OnRecvPacket( /*...*/ ) { + //... 
+ if err := modulePacketData.Unmarshal(modulePacket.GetData()); err != nil { + // return channeltypes.NewErrorAcknowledgement(sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "cannot unmarshal packet data: %s", err.Error()).Error()) + return channeltypes.NewErrorAcknowledgement(sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "cannot unmarshal packet data: %s", err.Error())) + } + + // ... + + // Dispatch packet + switch packet := modulePacketData.Packet.(type) { + // ... + default: + // errMsg := fmt.Sprintf("unrecognized %s packet type: %T", types.ModuleName, packet) + // return channeltypes.NewErrorAcknowledgement(errMsg) + err := fmt.Errorf("unrecognized %s packet type: %T", types.ModuleName, packet) + return channeltypes.NewErrorAcknowledgement(err) + } +} +``` + +After switching to using both `AppModule` and `IBCModule`, modifying the following line: + +```go +// x/{moduleName}/module.go + +package module_name + +// ... + +var ( + //... + _ porttypes.IBCModule = IBCModule{} // instead of "= AppModule{}" +) +``` + +### Main + +The `Execute` function in Cosmos SDK `server/cmd` package now +accepts [three arguments](https://github.com/cosmos/cosmos-sdk/blob/v0.46.0/server/cmd/execute.go#L20) instead of two. + +```go +// cmd/{{projectName}}d/main.go + +package projectNamed + +// ... + +func main() { + //... + if err := svrcmd.Execute(rootCmd, "", app.DefaultNodeHome); err != nil { + os.Exit(1) + } +} +``` + +### Handler + +Cosmos SDK v0.46 no longer needs a `NewHandler` function that was used to handle messages and call appropriate keeper +methods based on message types. Feel free to remove `x/{moduleName}/handler.go` file. + +Since there is no `NewHandler` now, modify the deprecated `Route` function to return `sdk.Route{}`: + +```go +// x/{moduleName}/module.go + +package module_name + +// ... 
+ +func (am AppModule) Route() sdk.Route { return sdk.Route{} } +``` diff --git a/docs/versioned_docs/version-v0.27/06-migration/v0.25.0.md b/docs/versioned_docs/version-v0.27/06-migration/v0.25.0.md new file mode 100644 index 0000000..66ec75c --- /dev/null +++ b/docs/versioned_docs/version-v0.27/06-migration/v0.25.0.md @@ -0,0 +1,1187 @@ +--- +sidebar_position: 994 +title: v0.25.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.25.0. changes are required to use Ignite CLI v0.25.0. +--- + +## Protobuf directory migration + +`v0.25.0` changes the location of scaffolded `.proto` files. Previously, `.proto` files were located in `./proto/{moduleName}/`, +where `moduleName` is the same name of the Cosmos SDK module found in `./x/{moduleName}/`. This new version of `ignite` +modifies the scaffolded protobuf files so that they are now generated in `./proto/{appName}/{moduleName}`. + +The only change that is needed to be made is to create an `{appName}` folder in the `proto` directory, and then place the +sub-directories within it. 
An example below demonstrates this change: + +### Previous Directory Structure + +This example shows a chain that was generated using `ignite` with `v0.24.0` using the following command: + +```bash +ignite s chain github.com/cosmos/planet --no-module +ignite s module mars +``` + +```bash +├── app +├── cmd +├── docs +├── proto +│ ├── mars +├── x +│ ├── mars +├── README.md +├── config.yml +├── go.mod +├── go.sum +└── .gitignore +``` + +### `v0.25.0` Directory Structure + +This example shows a chain that was generated using `ignite` with `v0.25.0` using the following command: + +```bash +ignite s chain github.com/cosmos/planet --no-module +ignite s module mars +``` + +```bash +├── app +├── cmd +├── docs +├── proto +│ ├── planet +│ │ ├── mars +├── x +│ ├── mars +├── README.md +├── config.yml +├── go.mod +├── go.sum +└── .gitignore +``` + +The only difference is the additional directory `planet` which is the name of the application. The name of the app can +be verified by checking the package in the `go.mod` file. In this example, the package is `github.com/cosmos/planet` +where `planet` is the app name. + + --- + +## Removing `cosmoscmd` + +`v0.25.0` removes the `cosmoscmd` package from scaffolded chains. This package provided utility for creating +commands and starting up their application. The `cosmoscmd` package is now deprecated, and it is suggested that chains +implement this functionality in their codebase so they can be more easily upgraded and customized. + +The main functionality of `cosmoscmd` will be moved to the `app` package of your chain. Some imports in these +examples contain the sample string, `{ModulePath}`. Replace this string with the Go module path of your blockchain. +For example, if your blockchain module path is `github.com/planet/mars`, `{ModulePath}/app/params` would be become +`github.com/planet/mars/app/params`. 
+ +#### Migration in `app` package + +To begin, create a new file, `./app/params/encoding.go`, containing the following code: + +```go +package params + +import ( + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/codec/types" +) + +// EncodingConfig specifies the concrete encoding types to use for a given app. +// This is provided for compatibility between protobuf and amino implementations. +type EncodingConfig struct { + InterfaceRegistry types.InterfaceRegistry + Marshaler codec.Codec + TxConfig client.TxConfig + Amino *codec.LegacyAmino +} +``` + +Next, create a new file, `./app/encoding.go`, containing the following code: + +```go +package app + +import ( + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/std" + "github.com/cosmos/cosmos-sdk/x/auth/tx" + + "{ModulePath}/app/params" +) + +// makeEncodingConfig creates an EncodingConfig for an amino based test configuration. 
+func makeEncodingConfig() params.EncodingConfig { + amino := codec.NewLegacyAmino() + interfaceRegistry := types.NewInterfaceRegistry() + marshaler := codec.NewProtoCodec(interfaceRegistry) + txCfg := tx.NewTxConfig(marshaler, tx.DefaultSignModes) + + return params.EncodingConfig{ + InterfaceRegistry: interfaceRegistry, + Marshaler: marshaler, + TxConfig: txCfg, + Amino: amino, + } +} + +// MakeEncodingConfig creates an EncodingConfig for testing +func MakeEncodingConfig() params.EncodingConfig { + encodingConfig := makeEncodingConfig() + std.RegisterLegacyAminoCodec(encodingConfig.Amino) + std.RegisterInterfaces(encodingConfig.InterfaceRegistry) + ModuleBasics.RegisterLegacyAminoCodec(encodingConfig.Amino) + ModuleBasics.RegisterInterfaces(encodingConfig.InterfaceRegistry) + return encodingConfig +} +``` + +Next, modify `./app/simulation_test.go` so that it looks like the following: + +```go +package app_test + +import ( + "os" + "testing" + "time" + + "github.com/cosmos/cosmos-sdk/simapp" + simulationtypes "github.com/cosmos/cosmos-sdk/types/simulation" + "github.com/cosmos/cosmos-sdk/x/simulation" + "github.com/stretchr/testify/require" + abci "github.com/tendermint/tendermint/abci/types" + tmproto "github.com/tendermint/tendermint/proto/tendermint/types" + tmtypes "github.com/tendermint/tendermint/types" + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" + + // highlight-next-line + "{ModulePath}/app" +) + +// remove-start +type SimApp interface { + cosmoscmd.App + GetBaseApp() *baseapp.BaseApp + AppCodec() codec.Codec + SimulationManager() *module.SimulationManager + ModuleAccountAddrs() map[string]bool + Name() string + LegacyAmino() *codec.LegacyAmino + BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) + abci.ResponseBeginBlock + EndBlocker(ctx sdk.Context, req abci.RequestEndBlock) + abci.ResponseEndBlock + InitChainer(ctx sdk.Context, req abci.RequestInitChain) + abci.ResponseInitChain +} + +// remove-end + +// ... 
+ +// BenchmarkSimulation run the chain simulation +// Running using starport command: +// `starport chain simulate -v --numBlocks 200 --blockSize 50` +// Running as go benchmark test: +// `go test -benchmem -run=^$ -bench ^BenchmarkSimulation ./app -NumBlocks=200 -BlockSize 50 -Commit=true -Verbose=true -Enabled=true` +func BenchmarkSimulation(b *testing.B) { + + // ... + + // remove-next-line + encoding := cosmoscmd.MakeEncodingConfig(app.ModuleBasics) + // highlight-next-line + encoding := app.MakeEncodingConfig() + + app := app.New( + logger, + db, + nil, + true, + map[int64]bool{}, + app.DefaultNodeHome, + 0, + encoding, + simapp.EmptyAppOptions{}, + ) + + // remove-start + simApp, ok := app.(SimApp) + require.True(b, ok, "can't use simapp") + // remove-end + + // Run randomized simulations + _, simParams, simErr := simulation.SimulateFromSeed( + b, + os.Stdout, + // highlight-next-line + app.BaseApp, + // highlight-next-line + simapp.AppStateFn(app.AppCodec(), app.SimulationManager()), + simulationtypes.RandomAccounts, + // highlight-next-line + simapp.SimulationOperations(app, app.AppCodec(), config), + // highlight-next-line + app.ModuleAccountAddrs(), + config, + // highlight-next-line + app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + // highlight-next-line + err = simapp.CheckExportSimulation(app, config, simParams) + require.NoError(b, err) + require.NoError(b, simErr) + + // ... +} +``` + +The main changes here are that the `SimApp` interface has been removed and is being replaced with `app`. + +The final modification in the `app` package is in `app/app.go`: + +```go +package app + +import ( + // ... + + // this line is used by starport scaffolding # stargate/app/moduleImport + + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" + + // highlight-start + appparams "{ModulePath}/app/params" + "{ModulePath}/docs" + // highlight-end +) + +// ... 
+ +var ( + // remove-next-line + _ cosmoscmd.App = (*App)(nil) + _ servertypes.Application = (*App)(nil) + _ simapp.App = (*App)(nil) +) + +// ... + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + // highlight-next-line + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), + // highlight-next-line +) *App { + appCodec := encodingConfig.Marshaler + cdc := encodingConfig.Amino + interfaceRegistry := encodingConfig.InterfaceRegistry + + bApp := baseapp.NewBaseApp( + Name, + logger, + db, + encodingConfig.TxConfig.TxDecoder(), + baseAppOptions..., + ) + + // ... + +} + +// ... + +// Name returns the name of the App +func (app *App) Name() string { return app.BaseApp.Name() } + +// remove-start +// GetBaseApp returns the base app of the application +func (app App) GetBaseApp() *baseapp.BaseApp { return app.BaseApp } + +// remove-end + +// BeginBlocker application updates every begin block +func (app *App) BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) abci.ResponseBeginBlock { + return app.mm.BeginBlock(ctx, req) +} + +// ... +``` + +Again, here we are removing the use of `cosmoscmd` and replacing it with `app`. + +#### Migration in `cmd` package + +Some imports in these +examples contain the sample string, `{binaryNamePrefix}d`. Replace this string with the binary name of your blockchain. +For example, if your blockchain module path is `github.com/planet/mars`, `./cmd/{binaryNamePrefix}d/cmd/` would be +become `./cmd/marsd/cmd/`. 
+ +First, create the new file `./cmd/{binaryNamePrefix}d/cmd/config.go` with the following code: + +```go +package cmd + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + + "{ModulePath}/app" +) + +func initSDKConfig() { + // Set prefixes + accountPubKeyPrefix := app.AccountAddressPrefix + "pub" + validatorAddressPrefix := app.AccountAddressPrefix + "valoper" + validatorPubKeyPrefix := app.AccountAddressPrefix + "valoperpub" + consNodeAddressPrefix := app.AccountAddressPrefix + "valcons" + consNodePubKeyPrefix := app.AccountAddressPrefix + "valconspub" + + // Set and seal config + config := sdk.GetConfig() + config.SetBech32PrefixForAccount(app.AccountAddressPrefix, accountPubKeyPrefix) + config.SetBech32PrefixForValidator(validatorAddressPrefix, validatorPubKeyPrefix) + config.SetBech32PrefixForConsensusNode(consNodeAddressPrefix, consNodePubKeyPrefix) + config.Seal() +} +``` + +Next, create the new file `./cmd/{binaryNamePrefix}d/cmd/genaccounts.go` with the following code: + +```go +package cmd + +import ( + "bufio" + "encoding/json" + "errors" + "fmt" + + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/crypto/keyring" + "github.com/cosmos/cosmos-sdk/server" + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + authvesting "github.com/cosmos/cosmos-sdk/x/auth/vesting/types" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/cosmos/cosmos-sdk/x/genutil" + genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" + "github.com/spf13/cobra" +) + +const ( + flagVestingStart = "vesting-start-time" + flagVestingEnd = "vesting-end-time" + flagVestingAmt = "vesting-amount" +) + +// AddGenesisAccountCmd returns add-genesis-account cobra Command. 
+func AddGenesisAccountCmd(defaultNodeHome string) *cobra.Command { + cmd := &cobra.Command{ + Use: "add-genesis-account [address_or_key_name] [coin][,[coin]]", + Short: "Add a genesis account to genesis.json", + Long: `Add a genesis account to genesis.json. The provided account must specify +the account address or key name and a list of initial coins. If a key name is given, +the address will be looked up in the local Keybase. The list of initial tokens must +contain valid denominations. Accounts may optionally be supplied with vesting parameters. +`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + clientCtx := client.GetClientContextFromCmd(cmd) + cdc := clientCtx.Codec + + serverCtx := server.GetServerContextFromCmd(cmd) + config := serverCtx.Config + + config.SetRoot(clientCtx.HomeDir) + + coins, err := sdk.ParseCoinsNormalized(args[1]) + if err != nil { + return fmt.Errorf("failed to parse coins: %w", err) + } + + addr, err := sdk.AccAddressFromBech32(args[0]) + if err != nil { + inBuf := bufio.NewReader(cmd.InOrStdin()) + keyringBackend, err := cmd.Flags().GetString(flags.FlagKeyringBackend) + if err != nil { + return err + } + + // attempt to lookup address from Keybase if no address was provided + kb, err := keyring.New(sdk.KeyringServiceName(), keyringBackend, clientCtx.HomeDir, inBuf, cdc) + if err != nil { + return err + } + + info, err := kb.Key(args[0]) + if err != nil { + return fmt.Errorf("failed to get address from Keybase: %w", err) + } + + addr, err = info.GetAddress() + if err != nil { + return fmt.Errorf("failed to get address from Keybase: %w", err) + } + } + + vestingStart, err := cmd.Flags().GetInt64(flagVestingStart) + if err != nil { + return err + } + vestingEnd, err := cmd.Flags().GetInt64(flagVestingEnd) + if err != nil { + return err + } + vestingAmtStr, err := cmd.Flags().GetString(flagVestingAmt) + if err != nil { + return err + } + + vestingAmt, err := sdk.ParseCoinsNormalized(vestingAmtStr) + if 
err != nil { + return fmt.Errorf("failed to parse vesting amount: %w", err) + } + + // create concrete account type based on input parameters + var genAccount authtypes.GenesisAccount + + balances := banktypes.Balance{Address: addr.String(), Coins: coins.Sort()} + baseAccount := authtypes.NewBaseAccount(addr, nil, 0, 0) + + if !vestingAmt.IsZero() { + baseVestingAccount := authvesting.NewBaseVestingAccount(baseAccount, vestingAmt.Sort(), vestingEnd) + + if (balances.Coins.IsZero() && !baseVestingAccount.OriginalVesting.IsZero()) || + baseVestingAccount.OriginalVesting.IsAnyGT(balances.Coins) { + return errors.New("vesting amount cannot be greater than total amount") + } + + switch { + case vestingStart != 0 && vestingEnd != 0: + genAccount = authvesting.NewContinuousVestingAccountRaw(baseVestingAccount, vestingStart) + + case vestingEnd != 0: + genAccount = authvesting.NewDelayedVestingAccountRaw(baseVestingAccount) + + default: + return errors.New("invalid vesting parameters; must supply start and end time or end time") + } + } else { + genAccount = baseAccount + } + + if err := genAccount.Validate(); err != nil { + return fmt.Errorf("failed to validate new genesis account: %w", err) + } + + genFile := config.GenesisFile() + appState, genDoc, err := genutiltypes.GenesisStateFromGenFile(genFile) + if err != nil { + return fmt.Errorf("failed to unmarshal genesis state: %w", err) + } + + authGenState := authtypes.GetGenesisStateFromAppState(cdc, appState) + + accs, err := authtypes.UnpackAccounts(authGenState.Accounts) + if err != nil { + return fmt.Errorf("failed to get accounts from any: %w", err) + } + + if accs.Contains(addr) { + return fmt.Errorf("cannot add account at existing address %s", addr) + } + + // Add the new account to the set of genesis accounts and sanitize the + // accounts afterwards. 
+ accs = append(accs, genAccount) + accs = authtypes.SanitizeGenesisAccounts(accs) + + genAccs, err := authtypes.PackAccounts(accs) + if err != nil { + return fmt.Errorf("failed to convert accounts into any's: %w", err) + } + authGenState.Accounts = genAccs + + authGenStateBz, err := cdc.MarshalJSON(&authGenState) + if err != nil { + return fmt.Errorf("failed to marshal auth genesis state: %w", err) + } + + appState[authtypes.ModuleName] = authGenStateBz + + bankGenState := banktypes.GetGenesisStateFromAppState(cdc, appState) + bankGenState.Balances = append(bankGenState.Balances, balances) + bankGenState.Balances = banktypes.SanitizeGenesisBalances(bankGenState.Balances) + + bankGenStateBz, err := cdc.MarshalJSON(bankGenState) + if err != nil { + return fmt.Errorf("failed to marshal bank genesis state: %w", err) + } + + appState[banktypes.ModuleName] = bankGenStateBz + + appStateJSON, err := json.Marshal(appState) + if err != nil { + return fmt.Errorf("failed to marshal application genesis state: %w", err) + } + + genDoc.AppState = appStateJSON + return genutil.ExportGenesisFile(genDoc, genFile) + }, + } + + cmd.Flags().String(flags.FlagKeyringBackend, flags.DefaultKeyringBackend, "Select keyring's backend (os|file|kwallet|pass|test)") + cmd.Flags().String(flags.FlagHome, defaultNodeHome, "The application home directory") + cmd.Flags().String(flagVestingAmt, "", "amount of coins for vesting accounts") + cmd.Flags().Int64(flagVestingStart, 0, "schedule start time (unix epoch) for vesting accounts") + cmd.Flags().Int64(flagVestingEnd, 0, "schedule end time (unix epoch) for vesting accounts") + flags.AddQueryFlagsToCmd(cmd) + + return cmd +} +``` + +This command allows one to generate new accounts: `appd add-genesis-account`. 
+ +Next, create the new file `./cmd/{binaryNamePrefix}d/cmd/root.go` with the following code: + +```go +package cmd + +import ( + "errors" + "io" + "os" + "path/filepath" + "strings" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/config" + "github.com/cosmos/cosmos-sdk/client/debug" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/client/keys" + "github.com/cosmos/cosmos-sdk/client/rpc" + "github.com/cosmos/cosmos-sdk/server" + serverconfig "github.com/cosmos/cosmos-sdk/server/config" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + "github.com/cosmos/cosmos-sdk/snapshots" + snapshottypes "github.com/cosmos/cosmos-sdk/snapshots/types" + "github.com/cosmos/cosmos-sdk/store" + sdk "github.com/cosmos/cosmos-sdk/types" + authcmd "github.com/cosmos/cosmos-sdk/x/auth/client/cli" + "github.com/cosmos/cosmos-sdk/x/auth/types" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/cosmos/cosmos-sdk/x/crisis" + genutilcli "github.com/cosmos/cosmos-sdk/x/genutil/client/cli" + "github.com/ignite/cli/ignite/services/network" + "github.com/spf13/cast" + "github.com/spf13/cobra" + "github.com/spf13/pflag" + tmcfg "github.com/tendermint/tendermint/config" + tmcli "github.com/tendermint/tendermint/libs/cli" + "github.com/tendermint/tendermint/libs/log" + dbm "github.com/tendermint/tm-db" + // this line is used by starport scaffolding # root/moduleImport + + "{ModulePath}/app" + appparams "{ModulePath}/app/params" +) + +// NewRootCmd creates a new root command for a Cosmos SDK application +func NewRootCmd() (*cobra.Command, appparams.EncodingConfig) { + encodingConfig := app.MakeEncodingConfig() + initClientCtx := client.Context{}. + WithCodec(encodingConfig.Marshaler). + WithInterfaceRegistry(encodingConfig.InterfaceRegistry). + WithTxConfig(encodingConfig.TxConfig). + WithLegacyAmino(encodingConfig.Amino). + WithInput(os.Stdin). 
+ WithAccountRetriever(types.AccountRetriever{}). + WithHomeDir(app.DefaultNodeHome). + WithViper("") + + rootCmd := &cobra.Command{ + Use: app.Name + "d", + Short: "Stargate CosmosHub App", + PersistentPreRunE: func(cmd *cobra.Command, _ []string) error { + // set the default command outputs + cmd.SetOut(cmd.OutOrStdout()) + cmd.SetErr(cmd.ErrOrStderr()) + initClientCtx, err := client.ReadPersistentCommandFlags(initClientCtx, cmd.Flags()) + if err != nil { + return err + } + initClientCtx, err = config.ReadFromClientConfig(initClientCtx) + if err != nil { + return err + } + + if err := client.SetCmdClientContextHandler(initClientCtx, cmd); err != nil { + return err + } + + customAppTemplate, customAppConfig := initAppConfig() + customTMConfig := initTendermintConfig() + return server.InterceptConfigsPreRunHandler( + cmd, customAppTemplate, customAppConfig, customTMConfig, + ) + }, + } + + initRootCmd(rootCmd, encodingConfig) + overwriteFlagDefaults(rootCmd, map[string]string{ + flags.FlagChainID: strings.ReplaceAll(app.Name, "-", ""), + flags.FlagKeyringBackend: "test", + }) + + return rootCmd, encodingConfig +} + +// initTendermintConfig helps to override default Tendermint Config values. +// return tmcfg.DefaultConfig if no custom configuration is required for the application. 
+func initTendermintConfig() *tmcfg.Config { + cfg := tmcfg.DefaultConfig() + return cfg +} + +func initRootCmd( + rootCmd *cobra.Command, + encodingConfig appparams.EncodingConfig, +) { + // Set config + initSDKConfig() + + rootCmd.AddCommand( + genutilcli.InitCmd(app.ModuleBasics, app.DefaultNodeHome), + genutilcli.CollectGenTxsCmd(banktypes.GenesisBalancesIterator{}, app.DefaultNodeHome), + genutilcli.MigrateGenesisCmd(), + genutilcli.GenTxCmd( + app.ModuleBasics, + encodingConfig.TxConfig, + banktypes.GenesisBalancesIterator{}, + app.DefaultNodeHome, + ), + genutilcli.ValidateGenesisCmd(app.ModuleBasics), + AddGenesisAccountCmd(app.DefaultNodeHome), + tmcli.NewCompletionCmd(rootCmd, true), + debug.Cmd(), + config.Cmd(), + // this line is used by starport scaffolding # root/commands + ) + + a := appCreator{ + encodingConfig, + } + + // add server commands + server.AddCommands( + rootCmd, + app.DefaultNodeHome, + a.newApp, + a.appExport, + addModuleInitFlags, + ) + + // add keybase, auxiliary RPC, query, and tx child commands + rootCmd.AddCommand( + rpc.StatusCommand(), + queryCommand(), + txCommand(), + keys.Commands(app.DefaultNodeHome), + ) +} + +// queryCommand returns the sub-command to send queries to the app +func queryCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "query", + Aliases: []string{"q"}, + Short: "Querying subcommands", + DisableFlagParsing: true, + SuggestionsMinimumDistance: 2, + RunE: client.ValidateCmd, + } + + cmd.AddCommand( + authcmd.GetAccountCmd(), + rpc.ValidatorCommand(), + rpc.BlockCommand(), + authcmd.QueryTxsByEventsCmd(), + authcmd.QueryTxCmd(), + ) + + app.ModuleBasics.AddQueryCommands(cmd) + cmd.PersistentFlags().String(flags.FlagChainID, "", "The network chain ID") + + return cmd +} + +// txCommand returns the sub-command to send transactions to the app +func txCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "tx", + Short: "Transactions subcommands", + DisableFlagParsing: true, + 
SuggestionsMinimumDistance: 2, + RunE: client.ValidateCmd, + } + + cmd.AddCommand( + authcmd.GetSignCommand(), + authcmd.GetSignBatchCommand(), + authcmd.GetMultiSignCommand(), + authcmd.GetValidateSignaturesCommand(), + flags.LineBreak, + authcmd.GetBroadcastCommand(), + authcmd.GetEncodeCommand(), + authcmd.GetDecodeCommand(), + ) + + app.ModuleBasics.AddTxCommands(cmd) + cmd.PersistentFlags().String(flags.FlagChainID, "", "The network chain ID") + + return cmd +} + +func addModuleInitFlags(startCmd *cobra.Command) { + crisis.AddModuleInitFlags(startCmd) + // this line is used by starport scaffolding # root/arguments +} + +func overwriteFlagDefaults(c *cobra.Command, defaults map[string]string) { + set := func(s *pflag.FlagSet, key, val string) { + if f := s.Lookup(key); f != nil { + f.DefValue = val + f.Value.Set(val) + } + } + for key, val := range defaults { + set(c.Flags(), key, val) + set(c.PersistentFlags(), key, val) + } + for _, c := range c.Commands() { + overwriteFlagDefaults(c, defaults) + } +} + +type appCreator struct { + encodingConfig appparams.EncodingConfig +} + +// newApp creates a new Cosmos SDK app +func (a appCreator) newApp( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + appOpts servertypes.AppOptions, +) servertypes.Application { + var cache sdk.MultiStorePersistentCache + + if cast.ToBool(appOpts.Get(server.FlagInterBlockCache)) { + cache = store.NewCommitKVStoreCacheManager() + } + + skipUpgradeHeights := make(map[int64]bool) + for _, h := range cast.ToIntSlice(appOpts.Get(server.FlagUnsafeSkipUpgrades)) { + skipUpgradeHeights[int64(h)] = true + } + + pruningOpts, err := server.GetPruningOptionsFromFlags(appOpts) + if err != nil { + panic(err) + } + + snapshotDir := filepath.Join(cast.ToString(appOpts.Get(flags.FlagHome)), "data", "snapshots") + snapshotDB, err := dbm.NewDB("metadata", dbm.GoLevelDBBackend, snapshotDir) + if err != nil { + panic(err) + } + snapshotStore, err := snapshots.NewStore(snapshotDB, snapshotDir) + if 
err != nil { + panic(err) + } + + snapshotOptions := snapshottypes.NewSnapshotOptions( + cast.ToUint64(appOpts.Get(server.FlagStateSyncSnapshotInterval)), + cast.ToUint32(appOpts.Get(server.FlagStateSyncSnapshotKeepRecent)), + ) + + return app.New( + logger, + db, + traceStore, + true, + skipUpgradeHeights, + cast.ToString(appOpts.Get(flags.FlagHome)), + cast.ToUint(appOpts.Get(server.FlagInvCheckPeriod)), + a.encodingConfig, + appOpts, + baseapp.SetPruning(pruningOpts), + baseapp.SetMinGasPrices(cast.ToString(appOpts.Get(server.FlagMinGasPrices))), + baseapp.SetMinRetainBlocks(cast.ToUint64(appOpts.Get(server.FlagMinRetainBlocks))), + baseapp.SetHaltHeight(cast.ToUint64(appOpts.Get(server.FlagHaltHeight))), + baseapp.SetHaltTime(cast.ToUint64(appOpts.Get(server.FlagHaltTime))), + baseapp.SetInterBlockCache(cache), + baseapp.SetTrace(cast.ToBool(appOpts.Get(server.FlagTrace))), + baseapp.SetIndexEvents(cast.ToStringSlice(appOpts.Get(server.FlagIndexEvents))), + baseapp.SetSnapshot(snapshotStore, snapshotOptions), + ) +} + +// appExport creates a new simapp (optionally at a given height) +func (a appCreator) appExport( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + height int64, + forZeroHeight bool, + jailAllowedAddrs []string, + appOpts servertypes.AppOptions, +) (servertypes.ExportedApp, error) { + homePath, ok := appOpts.Get(flags.FlagHome).(string) + if !ok || homePath == "" { + return servertypes.ExportedApp{}, errors.New("application home not set") + } + + app := app.New( + logger, + db, + traceStore, + height == -1, // -1: no height provided + map[int64]bool{}, + homePath, + uint(1), + a.encodingConfig, + appOpts, + ) + + if height != -1 { + if err := app.LoadHeight(height); err != nil { + return servertypes.ExportedApp{}, err + } + } + + return app.ExportAppStateAndValidators(forZeroHeight, jailAllowedAddrs) +} + +// initAppConfig helps to override default appConfig template and configs. 
+// return "", nil if no custom configuration is required for the application. +func initAppConfig() (string, interface{}) { + // The following code snippet is just for reference. + + // WASMConfig defines configuration for the wasm module. + type WASMConfig struct { + // This is the maximum sdk gas (wasm and storage) that we allow for any x/wasm "smart" queries + QueryGasLimit uint64 `mapstructure:"query_gas_limit"` + + // Address defines the gRPC-web server to listen on + LruSize uint64 `mapstructure:"lru_size"` + } + + type CustomAppConfig struct { + serverconfig.Config + + WASM WASMConfig `mapstructure:"wasm"` + } + + // Optionally allow the chain developer to overwrite the SDK's default + // server config. + srvCfg := serverconfig.DefaultConfig() + // The SDK's default minimum gas price is set to "" (empty value) inside + // app.toml. If left empty by validators, the node will halt on startup. + // However, the chain developer can set a default app.toml value for their + // validators here. + // + // In summary: + // - if you leave srvCfg.MinGasPrices = "", all validators MUST tweak their + // own app.toml config, + // - if you set srvCfg.MinGasPrices non-empty, validators CAN tweak their + // own app.toml to override, or use this default value. + // + // In simapp, we set the min gas prices to 0. 
+ srvCfg.MinGasPrices = "0stake" + + customAppConfig := CustomAppConfig{ + Config: *srvCfg, + WASM: WASMConfig{ + LruSize: 1, + QueryGasLimit: 300000, + }, + } + + customAppTemplate := serverconfig.DefaultConfigTemplate + ` +[wasm] +# This is the maximum sdk gas (wasm and storage) that we allow for any x/wasm "smart" queries +query_gas_limit = 300000 +# This is the number of wasm vm instances we keep cached in memory for speed-up +# Warning: this is currently unstable and may lead to crashes, best to keep for 0 unless testing locally +lru_size = 0` + + return customAppTemplate, customAppConfig +} +``` + +Finally, modify `./cmd/{binaryNamePrefix}d/main.go` to include the new changes: + +```go +package main + +import ( + "os" + + "github.com/cosmos/cosmos-sdk/server" + svrcmd "github.com/cosmos/cosmos-sdk/server/cmd" + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" + + "{ModulePath}/app" + "{ModulePath}/cmd/{BinaryNamePrefix}d/cmd" +) + +func main() { + // highlight-start + rootCmd, _ := cmd.NewRootCmd() + if err := svrcmd.Execute(rootCmd, "", app.DefaultNodeHome); err != nil { + switch e := err.(type) { + case server.ErrorCode: + os.Exit(e.Code) + + default: + os.Exit(1) + } + } + // highlight-end +} +``` + +#### Migration in `testutil` package + +Modify `./testutil/network/network.go` to include the new changes: + + +```go +package network + +import ( + "fmt" + "testing" + "time" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/crypto/hd" + "github.com/cosmos/cosmos-sdk/crypto/keyring" + pruningtypes "github.com/cosmos/cosmos-sdk/pruning/types" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + "github.com/cosmos/cosmos-sdk/simapp" + "github.com/cosmos/cosmos-sdk/testutil/network" + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + "github.com/stretchr/testify/require" + tmrand "github.com/tendermint/tendermint/libs/rand" + tmdb "github.com/tendermint/tm-db" + + 
// highlight-next-line + "{ModulePath}/app" + + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" +) + +// ... + +// DefaultConfig will initialize config for the network with custom application, +// genesis and single validator. All other parameters are inherited from cosmos-sdk/testutil/network.DefaultConfig +func DefaultConfig() network.Config { + // highlight-next-line + encoding := app.MakeEncodingConfig() + // remove-next-line + encoding := cosmoscmd.MakeEncodingConfig(app.ModuleBasics) + return network.Config{ + Codec: encoding.Marshaler, + TxConfig: encoding.TxConfig, + LegacyAmino: encoding.Amino, + InterfaceRegistry: encoding.InterfaceRegistry, + AccountRetriever: authtypes.AccountRetriever{}, + AppConstructor: func(val network.Validator) servertypes.Application { + return app.New( + val.Ctx.Logger, tmdb.NewMemDB(), nil, true, map[int64]bool{}, val.Ctx.Config.RootDir, 0, + encoding, + simapp.EmptyAppOptions{}, + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.AppConfig.Pruning)), + baseapp.SetMinGasPrices(val.AppConfig.MinGasPrices), + ) + }, + GenesisState: app.ModuleBasics.DefaultGenesis(encoding.Marshaler), + TimeoutCommit: 2 * time.Second, + ChainID: "chain-" + tmrand.NewRand().Str(6), + NumValidators: 1, + BondDenom: sdk.DefaultBondDenom, + MinGasPrices: fmt.Sprintf("0.000006%s", sdk.DefaultBondDenom), + AccountTokens: sdk.TokensFromConsensusPower(1000, sdk.DefaultPowerReduction), + StakingTokens: sdk.TokensFromConsensusPower(500, sdk.DefaultPowerReduction), + BondedTokens: sdk.TokensFromConsensusPower(100, sdk.DefaultPowerReduction), + PruningStrategy: pruningtypes.PruningOptionNothing, + CleanupDir: true, + SigningAlgo: string(hd.Secp256k1Type), + KeyringOptions: []keyring.Option{}, + } +} +``` + + --- + +## Fix ICA controller keeper wiring + +Related issue: https://github.com/ignite/cli/issues/2867 + +Apply the following changes to `app/app.go` file : + +```go +package app + +import ( + + // highlight-start + 
icacontrollerkeeper "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts/controller/keeper" + icacontrollertypes "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts/controller/types" + // highlight-end + // ... +) + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + + // ... + + keys := sdk.NewKVStoreKeys( + authtypes.StoreKey, authz.ModuleName, banktypes.StoreKey, + stakingtypes.StoreKey, + minttypes.StoreKey, distrtypes.StoreKey, slashingtypes.StoreKey, + govtypes.StoreKey, + paramstypes.StoreKey, ibchost.StoreKey, upgradetypes.StoreKey, + feegrant.StoreKey, evidencetypes.StoreKey, + ibctransfertypes.StoreKey, icahosttypes.StoreKey, + capabilitytypes.StoreKey, group.StoreKey, + // highlight-next-line + icacontrollertypes.StoreKey, + yourchainmoduletypes.StoreKey, + // this line is used by starport scaffolding # stargate/app/storeKey + ) + + // ... + + // remove-next-line + icaModule := ica.NewAppModule(nil, &app.ICAHostKeeper) + // highlight-start + icaControllerKeeper := icacontrollerkeeper.NewKeeper( + appCodec, keys[icacontrollertypes.StoreKey], + app.GetSubspace(icacontrollertypes.SubModuleName), + app.IBCKeeper.ChannelKeeper, // may be replaced with middleware such as ics29 fee + app.IBCKeeper.ChannelKeeper, &app.IBCKeeper.PortKeeper, + scopedICAControllerKeeper, app.MsgServiceRouter(), + ) + icaModule := ica.NewAppModule(&icaControllerKeeper, &app.ICAHostKeeper) + // highlight-end + icaHostIBCModule := icahost.NewIBCModule(app.ICAHostKeeper) + + // ... +} + +// ... 
+
+// initParamsKeeper init params keeper and its subspaces
+func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey storetypes.StoreKey) paramskeeper.Keeper {
+  paramsKeeper := paramskeeper.NewKeeper(appCodec, legacyAmino, key, tkey)
+
+  paramsKeeper.Subspace(authtypes.ModuleName)
+  paramsKeeper.Subspace(banktypes.ModuleName)
+  paramsKeeper.Subspace(stakingtypes.ModuleName)
+  paramsKeeper.Subspace(minttypes.ModuleName)
+  paramsKeeper.Subspace(distrtypes.ModuleName)
+  paramsKeeper.Subspace(slashingtypes.ModuleName)
+  paramsKeeper.Subspace(govtypes.ModuleName).WithKeyTable(govv1.ParamKeyTable())
+  paramsKeeper.Subspace(crisistypes.ModuleName)
+  paramsKeeper.Subspace(ibctransfertypes.ModuleName)
+  paramsKeeper.Subspace(ibchost.ModuleName)
+  // highlight-next-line
+  paramsKeeper.Subspace(icacontrollertypes.SubModuleName)
+  paramsKeeper.Subspace(icahosttypes.SubModuleName)
+  paramsKeeper.Subspace(yourchainmoduletypes.ModuleName)
+  // this line is used by starport scaffolding # stargate/app/paramSubspace
+
+  return paramsKeeper
+}
+```
+
+ ---
+
+## Fix capability keeper not sealed
+
+Related issue: https://github.com/ignite/cli/issues/1921
+
+Apply the following change to the `app/app.go` file:
+
+```go
+package app
+
+// New returns a reference to an initialized blockchain app
+func New(
+  logger log.Logger,
+  db dbm.DB,
+  traceStore io.Writer,
+  loadLatest bool,
+  skipUpgradeHeights map[int64]bool,
+  homePath string,
+  invCheckPeriod uint,
+  encodingConfig appparams.EncodingConfig,
+  appOpts servertypes.AppOptions,
+  baseAppOptions ...func(*baseapp.BaseApp),
+) *App {
+
+  // ...
+
+  // this line is used by starport scaffolding # stargate/app/keeperDefinition
+
+  // highlight-start
+  // Sealing prevents other modules from creating scoped sub-keepers
+  app.CapabilityKeeper.Seal()
+  // highlight-end
+
+  // Create static IBC router, add transfer route, then set and seal it
+
+  // ...
+
+}
+```
diff --git a/docs/versioned_docs/version-v0.27/06-migration/v0.25.1.md b/docs/versioned_docs/version-v0.27/06-migration/v0.25.1.md
new file mode 100644
index 0000000..f3d1cc2
--- /dev/null
+++ b/docs/versioned_docs/version-v0.27/06-migration/v0.25.1.md
@@ -0,0 +1,67 @@
+---
+sidebar_position: 993
+title: v0.25.1
+description: For chains that were scaffolded with Ignite CLI versions lower than v0.25.1, changes are required to use Ignite CLI v0.25.1.
+---
+
+## Dragonberry fix
+
+`v0.25.1` contains the Dragonberry fix; update your `go.mod` as follows:
+
+```sh
+require (
+  // remove-next-line
+  github.com/ignite/cli v0.24.0
+  // highlight-next-line
+  github.com/ignite/cli v0.25.1
+)
+
+// highlight-next-line
+replace github.com/confio/ics23/go => github.com/cosmos/cosmos-sdk/ics23/go v0.8.0
+```
+
+Then run:
+
+```
+$ go mod tidy
+```
+
+As a result, you should see `cosmos-sdk` and `ibc-go` upgraded as well.
+
+Finally, apply the following change to `app/app.go`:
+
+```go
+package app
+
+// New returns a reference to an initialized blockchain app
+func New(
+  logger log.Logger,
+  db dbm.DB,
+  traceStore io.Writer,
+  loadLatest bool,
+  skipUpgradeHeights map[int64]bool,
+  homePath string,
+  invCheckPeriod uint,
+  encodingConfig appparams.EncodingConfig,
+  appOpts servertypes.AppOptions,
+  baseAppOptions ...func(*baseapp.BaseApp),
+) *App {
+
+  // ...
+
+  app.ICAHostKeeper = icahostkeeper.NewKeeper(
+    appCodec, keys[icahosttypes.StoreKey],
+    app.GetSubspace(icahosttypes.SubModuleName),
+    app.IBCKeeper.ChannelKeeper,
+    // highlight-next-line
+    app.IBCKeeper.ChannelKeeper,
+    &app.IBCKeeper.PortKeeper,
+    app.AccountKeeper,
+    scopedICAHostKeeper,
+    app.MsgServiceRouter(),
+  )
+
+  // ...
+
+}
+```
diff --git a/docs/versioned_docs/version-v0.27/06-migration/v0.26.0.md b/docs/versioned_docs/version-v0.27/06-migration/v0.26.0.md
new file mode 100644
index 0000000..0106a14
--- /dev/null
+++ b/docs/versioned_docs/version-v0.27/06-migration/v0.26.0.md
@@ -0,0 +1,262 @@
+---
+sidebar_position: 992
+title: v0.26.0
+description: For chains that were scaffolded with Ignite CLI versions lower than v0.26.0, changes are required to use Ignite CLI v0.26.0.
+---
+
+Ignite CLI `v0.26.0` is fully compatible with chains that are compatible with `v0.25.1`. Please follow the existing
+migration guides if your chain is not upgraded to `v0.25.1` support.
+
+## Go Version
+
+Chains that are newly scaffolded with Ignite CLI `v0.26.0` now require `go 1.19` in their `go.mod` files. It is
+recommended that chains scaffolded with an older version of Ignite CLI also bump their required `go` version and update
+their tooling to the latest version.
+
+## ibc-go v6
+
+Chains that are newly scaffolded with Ignite CLI `v0.26.0` now use `ibc-go/v6` for ibc functionality. It is not
+necessary, but recommended to upgrade to the newest version of `ibc-go`. Most migrations can be done by following the
+`ibc-go` [migration guide](https://github.com/cosmos/ibc-go/blob/main/docs/docs/05-migrations/07-v5-to-v6.md), but there are some
+specific changes that will need to be followed for Ignite scaffolded chains.
+
+### Removing `cosmosibckeeper`
+
+Ignite CLI `v0.26.0` has deprecated `pkg/cosmosibckeeper`. This package contained interfaces for ibc-related keepers.
+Newly scaffolded chains now include the interface files in their `./x/{moduleName}/types` directory in a new `expected_ibc_keeper.go`
+file. To migrate, create the following file for each module:
+
+```go title="x/{moduleName}/types/expected_ibc_keeper.go"
+package types
+
+import (
+  sdk "github.com/cosmos/cosmos-sdk/types"
+  capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types"
+  clienttypes "github.com/cosmos/ibc-go/v6/modules/core/02-client/types"
+  channeltypes "github.com/cosmos/ibc-go/v6/modules/core/04-channel/types"
+)
+
+// ChannelKeeper defines the expected IBC channel keeper.
+type ChannelKeeper interface {
+  GetChannel(ctx sdk.Context, portID, channelID string) (channeltypes.Channel, bool)
+  GetNextSequenceSend(ctx sdk.Context, portID, channelID string) (uint64, bool)
+  SendPacket(
+    ctx sdk.Context,
+    channelCap *capabilitytypes.Capability,
+    sourcePort string,
+    sourceChannel string,
+    timeoutHeight clienttypes.Height,
+    timeoutTimestamp uint64,
+    data []byte,
+  ) (uint64, error)
+  ChanCloseInit(ctx sdk.Context, portID, channelID string, chanCap *capabilitytypes.Capability) error
+}
+
+// PortKeeper defines the expected IBC port keeper.
+type PortKeeper interface {
+  BindPort(ctx sdk.Context, portID string) *capabilitytypes.Capability
+}
+
+// ScopedKeeper defines the expected IBC scoped keeper.
+type ScopedKeeper interface {
+  GetCapability(ctx sdk.Context, name string) (*capabilitytypes.Capability, bool)
+  AuthenticateCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) bool
+  ClaimCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) error
+}
+```
+
+Next, make the following updates to each `x/{moduleName}/keeper/keeper.go` file for each ibc-enabled
+module in your project:
+
+```go title="x/{moduleName}/keeper/keeper.go"
+package keeper
+
+import (
+  "fmt"
+
+  // remove-start
+  "{appName}/x/{moduleName}/types"
+  "github.com/cosmos/cosmos-sdk/codec"
+  storetypes "github.com/cosmos/cosmos-sdk/store/types"
+  sdk "github.com/cosmos/cosmos-sdk/types"
+  paramtypes "github.com/cosmos/cosmos-sdk/x/params/types"
+  "github.com/ignite/cli/ignite/pkg/cosmosibckeeper"
+  "github.com/tendermint/tendermint/libs/log"
+  // remove-end
+  // highlight-start
+  "github.com/cosmos/cosmos-sdk/codec"
+  storetypes "github.com/cosmos/cosmos-sdk/store/types"
+  sdk "github.com/cosmos/cosmos-sdk/types"
+  sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
+  capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types"
+  paramtypes "github.com/cosmos/cosmos-sdk/x/params/types"
+  channeltypes "github.com/cosmos/ibc-go/v6/modules/core/04-channel/types"
+  host "github.com/cosmos/ibc-go/v6/modules/core/24-host"
+  "github.com/cosmos/ibc-go/v6/modules/core/exported"
+  "github.com/tendermint/tendermint/libs/log"
+
+  "{appName}/x/{moduleName}/types"
+  // highlight-end
+)
+
+type (
+  Keeper struct {
+    // remove-next-line
+    *cosmosibckeeper.Keeper
+    cdc        codec.BinaryCodec
+    storeKey   storetypes.StoreKey
+    memKey     storetypes.StoreKey
+    paramstore paramtypes.Subspace
+
+    // highlight-start
+    channelKeeper types.ChannelKeeper
+    portKeeper    types.PortKeeper
+    scopedKeeper  exported.ScopedKeeper
+    // highlight-end
+  }
+)
+
+func NewKeeper(
+  cdc codec.BinaryCodec,
+  storeKey,
+  memKey storetypes.StoreKey,
+  ps paramtypes.Subspace,
+  // highlight-start
+  channelKeeper types.ChannelKeeper,
+  portKeeper types.PortKeeper,
+  scopedKeeper types.ScopedKeeper,
+  // highlight-end
+) *Keeper {
+  // set KeyTable if it has not already been set
+  if !ps.HasKeyTable() {
+    ps = ps.WithKeyTable(types.ParamKeyTable())
+  }
+
+  return &Keeper{
+    // remove-start
+    Keeper: cosmosibckeeper.NewKeeper(
+      types.PortKey,
+      storeKey,
+      channelKeeper,
+      portKeeper,
+      scopedKeeper,
+    ),
+    // remove-end
+    cdc:        cdc,
+    storeKey:   storeKey,
+    memKey:     memKey,
+    paramstore: ps,
+    // highlight-start
+    channelKeeper: channelKeeper,
+    portKeeper:    portKeeper,
+    scopedKeeper:  scopedKeeper,
+    // highlight-end
+  }
+}
+
+// highlight-start
+// ----------------------------------------------------------------------------
+// IBC Keeper Logic
+// ----------------------------------------------------------------------------
+
+// ChanCloseInit defines a wrapper function for the channel Keeper's function.
+func (k Keeper) ChanCloseInit(ctx sdk.Context, portID, channelID string) error {
+  capName := host.ChannelCapabilityPath(portID, channelID)
+  chanCap, ok := k.scopedKeeper.GetCapability(ctx, capName)
+  if !ok {
+    return sdkerrors.Wrapf(channeltypes.ErrChannelCapabilityNotFound, "could not retrieve channel capability at: %s", capName)
+  }
+  return k.channelKeeper.ChanCloseInit(ctx, portID, channelID, chanCap)
+}
+
+// IsBound checks if the IBC app module is already bound to the desired port
+func (k Keeper) IsBound(ctx sdk.Context, portID string) bool {
+  _, ok := k.scopedKeeper.GetCapability(ctx, host.PortPath(portID))
+  return ok
+}
+
+// BindPort defines a wrapper function for the port Keeper's function in
+// order to expose it to module's InitGenesis function
+func (k Keeper) BindPort(ctx sdk.Context, portID string) error {
+  cap := k.portKeeper.BindPort(ctx, portID)
+  return k.ClaimCapability(ctx, cap, host.PortPath(portID))
+}
+
+// GetPort returns the portID for the IBC app module. Used in ExportGenesis
+func (k Keeper) GetPort(ctx sdk.Context) string {
+  store := ctx.KVStore(k.storeKey)
+  return string(store.Get(types.PortKey))
+}
+
+// SetPort sets the portID for the IBC app module. Used in InitGenesis
+func (k Keeper) SetPort(ctx sdk.Context, portID string) {
+  store := ctx.KVStore(k.storeKey)
+  store.Set(types.PortKey, []byte(portID))
+}
+
+// AuthenticateCapability wraps the scopedKeeper's AuthenticateCapability function
+func (k Keeper) AuthenticateCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) bool {
+  return k.scopedKeeper.AuthenticateCapability(ctx, cap, name)
+}
+
+// ClaimCapability allows the IBC app module to claim a capability that core IBC
+// passes to it
+func (k Keeper) ClaimCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) error {
+  return k.scopedKeeper.ClaimCapability(ctx, cap, name)
+}
+
+// highlight-end
+
+func (k Keeper) Logger(ctx sdk.Context) log.Logger {
+  return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName))
+}
+```
+
+### Remaining migration
+
+After all uses of `cosmosibckeeper` have been removed, you can follow any remaining steps in the `ibc-go` [migration guide](https://github.com/cosmos/ibc-go/blob/main/docs/docs/05-migrations/07-v5-to-v6.md).
+
+## Scaffolded Release Workflow
+
+The `develop` branch of the CLI has been deprecated. To continue using the release workflow that uses the CLI to
+automatically build and release your chain's binaries, replace `develop` with `main` in the following lines:
+
+```yaml title=".github/workflows/release.yml"
+...
+ +jobs: + might_release: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Prepare Release Variables + id: vars + // highlight-next-line + uses: ignite/cli/actions/release/vars@main + - name: Issue Release Assets + // highlight-next-line + uses: ignite/cli/actions/cli@main + if: ${{ steps.vars.outputs.should_release == 'true' }} + with: + args: chain build --release --release.prefix ${{ steps.vars.outputs.tarball_prefix }} -t linux:amd64 -t darwin:amd64 -t darwin:arm64 + - name: Delete the "latest" Release + uses: dev-drprasad/delete-tag-and-release@v0.2.0 + if: ${{ steps.vars.outputs.is_release_type_latest == 'true' }} + with: + tag_name: ${{ steps.vars.outputs.tag_name }} + delete_release: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Publish the Release + uses: softprops/action-gh-release@v1 + if: ${{ steps.vars.outputs.should_release == 'true' }} + with: + tag_name: ${{ steps.vars.outputs.tag_name }} + files: release/* + prerelease: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} +``` diff --git a/docs/versioned_docs/version-v0.27/07-packages/_category_.json b/docs/versioned_docs/version-v0.27/07-packages/_category_.json new file mode 100644 index 0000000..6dbb883 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/07-packages/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Packages", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/07-packages/cosmostxcollector.md b/docs/versioned_docs/version-v0.27/07-packages/cosmostxcollector.md new file mode 100644 index 0000000..df0fccd --- /dev/null +++ b/docs/versioned_docs/version-v0.27/07-packages/cosmostxcollector.md @@ -0,0 +1,200 @@ +--- +sidebar_position: 0 +title: cosmostxcollector +slug: /packages/cosmostxcollector +--- + +# cosmostxcollector + +The package implements support for collecting transactions and events from Cosmos blockchains +into a data backend and it also adds 
support for querying the collected data. + +## Transaction and event data collecting + +Transactions and events can be collected using the `cosmostxcollector.Collector` type. This +type uses a `cosmosclient.Client` instance to fetch the data from each block and a data backend +adapter to save the data. + +### Data backend adapters + +Data backend adapters are used to query and save the collected data into different types of data +backends and must implement the `cosmostxcollector.adapter.Adapter` interface. + +An adapter for PostgreSQL is already implemented in `cosmostxcollector.adapter.postgres.Adapter`. +This is the one used in the examples. + +### Example: Data collection + +The data collection example assumes that there is a PostgreSQL database running in the local +environment containing an empty database named "cosmos". + +The required database tables will be created automatically by the collector the first time it is run. + +When the application is run it will fetch all the transactions and events starting from one of the +recent blocks until the current block height and populate the database: + +```go +package main + +import ( + "context" + "log" + + "github.com/ignite/cli/ignite/pkg/clictx" + "github.com/ignite/cli/ignite/pkg/cosmosclient" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/adapter/postgres" +) + +const ( + // Name of a local PostgreSQL database + dbName = "cosmos" + + // Cosmos RPC address + rpcAddr = "https://rpc.cosmos.network:443" +) + +func collect(ctx context.Context, db postgres.Adapter) error { + // Make sure that the data backend schema is up to date + if err := db.Init(ctx); err != nil { + return err + } + + // Init the Cosmos client + client, err := cosmosclient.New(ctx, cosmosclient.WithNodeAddress(rpcAddr)) + if err != nil { + return err + } + + // Get the latest block height + latestHeight, err := client.LatestBlockHeight(ctx) + if err != nil { + return err + } + + // 
Collect transactions and events starting from a block height. + // The collector stops at the latest height available at the time of the call. + collector := cosmostxcollector.New(db, client) + if err := collector.Collect(ctx, latestHeight-50); err != nil { + return err + } + + return nil +} + +func main() { + ctx := clictx.From(context.Background()) + + // Init an adapter for a local PostgreSQL database running with the default values + params := map[string]string{"sslmode": "disable"} + db, err := postgres.NewAdapter(dbName, postgres.WithParams(params)) + if err != nil { + log.Fatal(err) + } + + if err := collect(ctx, db); err != nil { + log.Fatal(err) + } +} +``` + +## Queries + +Collected data can be queried through the data backend adapters using event queries or +cursor-based queries. + +Queries support sorting, paging and filtering by using different options during creation. +The cursor-based ones also support the selection of specific fields or properties and also +passing arguments in cases where the query is a function. + +By default no sorting, filtering nor paging is applied to the queries. + +### Event queries + +The event queries return events and their attributes as `[]cosmostxcollector.query.Event`. + +### Example: Query events + +The example reads transfer events from Cosmos' bank module and paginates the results. 
+ +```go +import ( + "context" + + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/adapter/postgres" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/query" +) + +func queryBankTransferEvents(ctx context.Context, db postgres.Adapter) ([]query.Event, error) { + // Create an event query that returns events of type "transfer" + qry := query.NewEventQuery( + query.WithFilters( + // Filter transfer events from Cosmos' bank module + postgres.FilterByEventType(banktypes.EventTypeTransfer), + ), + query.WithPageSize(10), + query.AtPage(1), + ) + + // Execute the query + return db.QueryEvents(ctx, qry) +} +``` + +### Cursor-based queries + +This type of query is meant to be used in contexts where the Event queries are not +useful. + +Cursor-based queries can query a single "entity" which can be a table, view or function +in relational databases or a collection or function in non-relational data backends. + +The result of these types of queries is a cursor that implements the `cosmostxcollector.query.Cursor` +interface. 
+ +### Example: Query events using cursors + +```go +import ( + "context" + + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/adapter/postgres" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/query" +) + +func queryBankTransferEventIDs(ctx context.Context, db postgres.Adapter) (ids []int64, err error) { + // Create a query that returns the IDs for events of type "transfer" + qry := query.New( + "event", + query.Fields("id"), + query.WithFilters( + // Filter transfer events from Cosmos' bank module + postgres.NewFilter("type", banktypes.EventTypeTransfer), + ), + query.WithPageSize(10), + query.AtPage(1), + query.SortByFields(query.SortOrderAsc, "id"), + ) + + // Execute the query + cr, err := db.Query(ctx, qry) + if err != nil { + return nil, err + } + + // Read the results + for cr.Next() { + var eventID int64 + + if err := cr.Scan(&eventID); err != nil { + return nil, err + } + + ids = append(ids, eventID) + } + + return ids, nil +} +``` diff --git a/docs/versioned_docs/version-v0.27/08-references/01-cli.md b/docs/versioned_docs/version-v0.27/08-references/01-cli.md new file mode 100644 index 0000000..6d8146c --- /dev/null +++ b/docs/versioned_docs/version-v0.27/08-references/01-cli.md @@ -0,0 +1,4146 @@ +--- +description: Ignite CLI docs. +--- + +# CLI commands + +Documentation for Ignite CLI. +## ignite + +Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + +**Synopsis** + +Ignite CLI is a tool for creating sovereign blockchains built with Cosmos SDK, the world’s +most popular modular blockchain framework. Ignite CLI offers everything you need to scaffold, +test, build, and launch your blockchain. 
+ +To get started, create a blockchain: + + ignite scaffold chain example + + +**Options** + +``` + -h, --help help for ignite +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node +* [ignite completion](#ignite-completion) - Generate the autocompletion script for the specified shell +* [ignite docs](#ignite-docs) - Show Ignite CLI docs +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite node](#ignite-node) - Make requests to a live blockchain node +* [ignite plugin](#ignite-plugin) - Handle plugins +* [ignite relayer](#ignite-relayer) - Connect blockchains with an IBC relayer +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more +* [ignite tools](#ignite-tools) - Tools for advanced users +* [ignite version](#ignite-version) - Print the current build information + + +## ignite account + +Create, delete, and show Ignite accounts + +**Synopsis** + +Commands for managing Ignite accounts. An Ignite account is a private/public +keypair stored in a keyring. Currently Ignite accounts are used when interacting +with Ignite relayer commands and when using "ignite network" commands. + +Note: Ignite account commands are not for managing your chain's keys and accounts. Use +your chain's binary to manage accounts from "config.yml". For example, if your +blockchain is called "mychain", use "mychaind keys" to manage keys for the +chain. 
+ + +**Options** + +``` + -h, --help help for account + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite account create](#ignite-account-create) - Create a new account +* [ignite account delete](#ignite-account-delete) - Delete an account by name +* [ignite account export](#ignite-account-export) - Export an account as a private key +* [ignite account import](#ignite-account-import) - Import an account by using a mnemonic or a private key +* [ignite account list](#ignite-account-list) - Show a list of all accounts +* [ignite account show](#ignite-account-show) - Show detailed information about a particular account + + +## ignite account create + +Create a new account + +``` +ignite account create [name] [flags] +``` + +**Options** + +``` + -h, --help help for create +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account delete + +Delete an account by name + +``` +ignite account delete [name] [flags] +``` + +**Options** + +``` + -h, --help help for delete +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account export + +Export an account as a private key + +``` +ignite account export [name] 
[flags] +``` + +**Options** + +``` + -h, --help help for export + --non-interactive do not enter into interactive mode + --passphrase string passphrase to encrypt the exported key + --path string path to export private key. default: ./key_[name] +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account import + +Import an account by using a mnemonic or a private key + +``` +ignite account import [name] [flags] +``` + +**Options** + +``` + -h, --help help for import + --non-interactive do not enter into interactive mode + --passphrase string passphrase to decrypt the imported key (ignored when secret is a mnemonic) + --secret string Your mnemonic or path to your private key (use interactive mode instead to securely pass your mnemonic) +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account list + +Show a list of all accounts + +``` +ignite account list [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + -h, --help help for list +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account show + +Show detailed 
information about a particular account + +``` +ignite account show [name] [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite chain + +Build, init and start a blockchain node + +**Synopsis** + +Commands in this namespace let you build, initialize, and start your +blockchain node locally for development purposes. + +To run these commands you should be inside the project's directory so that +Ignite can find the source code. To ensure that you are, run "ls", you should +see the following files in the output: "go.mod", "x", "proto", "app", etc. + +By default the "build" command will identify the "main" package of the project, +install dependencies if necessary, set build flags, compile the project into a +binary and install the binary. The "build" command is useful if you just want +the compiled binary, for example, to initialize and start the chain manually. It +can also be used to release your chain's binaries automatically as part of +continuous integration workflow. + +The "init" command will build the chain's binary and use it to initialize a +local validator node. By default the validator node will be initialized in your +$HOME directory in a hidden directory that matches the name of your project. +This directory is called a data directory and contains a chain's genesis file +and a validator key. This command is useful if you want to quickly build and +initialize the data directory and use the chain's binary to manually start the +blockchain. The "init" command is meant only for development purposes, not +production. 
+ +The "serve" command builds, initializes, and starts your blockchain locally with +a single validator node for development purposes. "serve" also watches the +source code directory for file changes and intelligently +re-builds/initializes/starts the chain, essentially providing "code-reloading". +The "serve" command is meant only for development purposes, not production. + +To distinguish between production and development consider the following. + +In production, blockchains often run the same software on many validator nodes +that are run by different people and entities. To launch a blockchain in +production, the validator entities coordinate the launch process to start their +nodes simultaneously. + +During development, a blockchain can be started locally on a single validator +node. This convenient process lets you restart a chain quickly and iterate +faster. Starting a chain on a single node in development is similar to starting +a traditional web application on a local server. + +The "faucet" command lets you send tokens to an address from the "faucet" +account defined in "config.yml". Alternatively, you can use the chain's binary +to send token from any other account that exists on chain. + +The "simulate" command helps you start a simulation testing process for your +chain. 
+ + +**Options** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -h, --help help for chain + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite chain build](#ignite-chain-build) - Build a node binary +* [ignite chain debug](#ignite-chain-debug) - Launch a debugger for a blockchain app +* [ignite chain faucet](#ignite-chain-faucet) - Send coins to an account +* [ignite chain init](#ignite-chain-init) - Initialize your chain +* [ignite chain serve](#ignite-chain-serve) - Start a blockchain node in development +* [ignite chain simulate](#ignite-chain-simulate) - Run simulation testing for the blockchain + + +## ignite chain build + +Build a node binary + +**Synopsis** + + +The build command compiles the source code of the project into a binary and +installs the binary in the $(go env GOPATH)/bin directory. + +You can customize the output directory for the binary using a flag: + + ignite chain build --output dist + +To compile the binary Ignite first compiles protocol buffer (proto) files into +Go source code. Proto files contain required type and service definitions. If +you're using another program to compile proto files, you can use a flag to tell +Ignite to skip the proto compilation step: + + ignite chain build --skip-proto + +Afterwards, Ignite installs dependencies specified in the go.mod file. By default +Ignite doesn't check that dependencies of the main module stored in the module +cache have not been modified since they were downloaded. To enforce dependency +checking (essentially, running "go mod verify") use a flag: + + ignite chain build --check-dependencies + +Next, Ignite identifies the "main" package of the project. By default the "main" +package is located in "cmd/{app}d" directory, where "{app}" is the name of the +scaffolded project and "d" stands for daemon. 
If your project contains more +than one "main" package, specify the path to the one that Ignite should compile +in config.yml: + + build: + main: custom/path/to/main + +By default the binary name will match the top-level module name (specified in +go.mod) with a suffix "d". This can be customized in config.yml: + + build: + binary: mychaind + +You can also specify custom linker flags: + + build: + ldflags: + - "-X main.Version=development" + - "-X main.Date=01/05/2022T19:54" + +To build binaries for a release, use the --release flag. The binaries for one or +more specified release targets are built in a "release/" directory in the +project's source directory. Specify the release targets with GOOS:GOARCH build +tags. If the optional --release.targets is not specified, a binary is created +for your current environment. + + ignite chain build --release -t linux:amd64 -t darwin:amd64 -t darwin:arm64 + + +``` +ignite chain build [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --debug build a debug binary + -h, --help help for build + -o, --output string binary output path + -p, --path string path of the app (default ".") + --release build for a release + --release.prefix string tarball prefix for each release target. Available only with --release flag + -t, --release.targets strings release targets. 
Available only with --release flag + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain debug + +Launch a debugger for a blockchain app + +**Synopsis** + +The debug command starts a debug server and launches a debugger. + +Ignite uses the Delve debugger by default. Delve enables you to interact with +your program by controlling the execution of the process, evaluating variables, +and providing information of thread / goroutine state, CPU register state and +more. + +A debug server can optionally be started in cases where default terminal client +is not desirable. When the server starts it first runs the blockchain app, +attaches to it and finally waits for a client connection. It accepts both +JSON-RPC or DAP client connections. + +To start a debug server use the following flag: + + ignite chain debug --server + +To start a debug server with a custom address use the following flags: + + ignite chain debug --server --server-address 127.0.0.1:30500 + +The debug server stops automatically when the client connection is closed. 
+ + +``` +ignite chain debug [flags] +``` + +**Options** + +``` + -h, --help help for debug + -p, --path string path of the app (default ".") + --server start a debug server + --server-address string debug server address (default "127.0.0.1:30500") +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain faucet + +Send coins to an account + +``` +ignite chain faucet [address] [coin<,...>] [flags] +``` + +**Options** + +``` + -h, --help help for faucet + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain init + +Initialize your chain + +**Synopsis** + +The init command compiles and installs the binary (like "ignite chain build") +and uses that binary to initialize the blockchain's data directory for one +validator. To learn how the build process works, refer to "ignite chain build +--help". + +By default, the data directory will be initialized in $HOME/.mychain, where +"mychain" is the name of the project. To set a custom data directory use the +--home flag or set the value in config.yml: + + validators: + - name: alice + bonded: '100000000stake' + home: "~/.customdir" + +The data directory contains three files in the "config" directory: app.toml, +config.toml, client.toml. These files let you customize the behavior of your +blockchain node and the client executable. 
When a chain is re-initialized the +data directory can be reset. To make some values in these files persistent, set +them in config.yml: + + validators: + - name: alice + bonded: '100000000stake' + app: + minimum-gas-prices: "0.025stake" + config: + consensus: + timeout_commit: "5s" + timeout_propose: "5s" + client: + output: "json" + +The configuration above changes the minimum gas price of the validator (by +default the gas price is set to 0 to allow "free" transactions), sets the block +time to 5s, and changes the output format to JSON. To see what kind of values +this configuration accepts see the generated TOML files in the data directory. + +As part of the initialization process Ignite creates on-chain accounts with +token balances. By default, config.yml has two accounts in the top-level +"accounts" property. You can add more accounts and change their token balances. +Refer to config.yml guide to see which values you can set. + +One of these accounts is a validator account and the amount of self-delegated +tokens can be set in the top-level "validator" property. + +One of the most important components of an initialized chain is the genesis +file, the 0th block of the chain. The genesis file is stored in the data +directory "config" subdirectory and contains the initial state of the chain, +including consensus and module parameters. You can customize the values of the +genesis in config.yml: + + genesis: + app_state: + staking: + params: + bond_denom: "foo" + +The example above changes the staking token to "foo". If you change the staking +denom, make sure the validator account has the right tokens. + +The init command is meant to be used ONLY FOR DEVELOPMENT PURPOSES. Under the +hood it runs commands like "appd init", "appd add-genesis-account", "appd +gentx", and "appd collect-gentx". For production, you may want to run these +commands manually to ensure a production-level node initialization. 
+ + +``` +ignite chain init [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --debug build a debug binary + -h, --help help for init + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + --skip-proto skip file generation from proto +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain serve + +Start a blockchain node in development + +**Synopsis** + +The serve command compiles and installs the binary (like "ignite chain build"), +uses that binary to initialize the blockchain's data directory for one validator +(like "ignite chain init"), and starts the node locally for development purposes +with automatic code reloading. + +Automatic code reloading means Ignite starts watching the project directory. +Whenever a file change is detected, Ignite automatically rebuilds, reinitializes +and restarts the node. + +Whenever possible Ignite will try to keep the current state of the chain by +exporting and importing the genesis file. + +To force Ignite to start from a clean slate even if a genesis file exists, use +the following flag: + + ignite chain serve --reset-once + +To force Ignite to reset the state every time the source code is modified, use +the following flag: + + ignite chain serve --force-reset + +With Ignite it's possible to start more than one blockchain from the same source +code using different config files. This is handy if you're building +inter-blockchain functionality and, for example, want to try sending packets +from one blockchain to another. 
To start a node using a specific config file: + + ignite chain serve --config mars.yml + +The serve command is meant to be used ONLY FOR DEVELOPMENT PURPOSES. Under the +hood, it runs "appd start", where "appd" is the name of your chain's binary. For +production, you may want to run "appd start" manually. + + +``` +ignite chain serve [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + -f, --force-reset force reset of the app state on start and every source change + --generate-clients generate code for the configured clients on reset or source code change + -h, --help help for serve + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + --quit-on-fail quit program if the app fails to start + -r, --reset-once reset the app state once on init + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain simulate + +Run simulation testing for the blockchain + +**Synopsis** + +Run simulation testing for the blockchain. 
It sends many randomized-input messages of each module to a simulated node and checks if invariants break + +``` +ignite chain simulate [flags] +``` + +**Options** + +``` + --blockSize int operations per block (default 30) + --exportParamsHeight int height to which export the randomly generated params + --exportParamsPath string custom file path to save the exported params JSON + --exportStatePath string custom file path to save the exported app state JSON + --exportStatsPath string custom file path to save the exported simulation statistics JSON + --genesis string custom simulation genesis file; cannot be used with params file + --genesisTime int override genesis UNIX time instead of using a random UNIX time + -h, --help help for simulate + --initialBlockHeight int initial block to start the simulation (default 1) + --lean lean simulation log output + --numBlocks int number of new blocks to simulate from the initial block height (default 200) + --params string custom simulation params file which overrides any random params; cannot be used with genesis + --period uint run slow invariants only once every period assertions + --printAllInvariants print all invariants if a broken invariant is found + --seed int simulation random seed (default 42) + --simulateEveryOperation run slow invariants every operation + -v, --verbose verbose log output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite completion + +Generate the autocompletion script for the specified shell + +**Synopsis** + +Generate the autocompletion script for ignite for the specified shell. +See each sub-command's help for details on how to use the generated script. 
+ + +**Options** + +``` + -h, --help help for completion +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite completion bash](#ignite-completion-bash) - Generate the autocompletion script for bash +* [ignite completion fish](#ignite-completion-fish) - Generate the autocompletion script for fish +* [ignite completion powershell](#ignite-completion-powershell) - Generate the autocompletion script for powershell +* [ignite completion zsh](#ignite-completion-zsh) - Generate the autocompletion script for zsh + + +## ignite completion bash + +Generate the autocompletion script for bash + +**Synopsis** + +Generate the autocompletion script for the bash shell. + +This script depends on the 'bash-completion' package. +If it is not installed already, you can install it via your OS's package manager. + +To load completions in your current shell session: + + source <(ignite completion bash) + +To load completions for every new session, execute once: + +**#### Linux:** + + ignite completion bash > /etc/bash_completion.d/ignite + +**#### macOS:** + + ignite completion bash > $(brew --prefix)/etc/bash_completion.d/ignite + +You will need to start a new shell for this setup to take effect. + + +``` +ignite completion bash +``` + +**Options** + +``` + -h, --help help for bash + --no-descriptions disable completion descriptions +``` + +**SEE ALSO** + +* [ignite completion](#ignite-completion) - Generate the autocompletion script for the specified shell + + +## ignite completion fish + +Generate the autocompletion script for fish + +**Synopsis** + +Generate the autocompletion script for the fish shell. + +To load completions in your current shell session: + + ignite completion fish | source + +To load completions for every new session, execute once: + + ignite completion fish > ~/.config/fish/completions/ignite.fish + +You will need to start a new shell for this setup to take effect. 
+ + +``` +ignite completion fish [flags] +``` + +**Options** + +``` + -h, --help help for fish + --no-descriptions disable completion descriptions +``` + +**SEE ALSO** + +* [ignite completion](#ignite-completion) - Generate the autocompletion script for the specified shell + + +## ignite completion powershell + +Generate the autocompletion script for powershell + +**Synopsis** + +Generate the autocompletion script for powershell. + +To load completions in your current shell session: + + ignite completion powershell | Out-String | Invoke-Expression + +To load completions for every new session, add the output of the above command +to your powershell profile. + + +``` +ignite completion powershell [flags] +``` + +**Options** + +``` + -h, --help help for powershell + --no-descriptions disable completion descriptions +``` + +**SEE ALSO** + +* [ignite completion](#ignite-completion) - Generate the autocompletion script for the specified shell + + +## ignite completion zsh + +Generate the autocompletion script for zsh + +**Synopsis** + +Generate the autocompletion script for the zsh shell. + +If shell completion is not already enabled in your environment you will need +to enable it. You can execute the following once: + + echo "autoload -U compinit; compinit" >> ~/.zshrc + +To load completions in your current shell session: + + source <(ignite completion zsh); compdef _ignite ignite + +To load completions for every new session, execute once: + +**#### Linux:** + + ignite completion zsh > "${fpath[1]}/_ignite" + +**#### macOS:** + + ignite completion zsh > $(brew --prefix)/share/zsh/site-functions/_ignite + +You will need to start a new shell for this setup to take effect. 
+ + +``` +ignite completion zsh [flags] +``` + +**Options** + +``` + -h, --help help for zsh + --no-descriptions disable completion descriptions +``` + +**SEE ALSO** + +* [ignite completion](#ignite-completion) - Generate the autocompletion script for the specified shell + + +## ignite docs + +Show Ignite CLI docs + +``` +ignite docs [flags] +``` + +**Options** + +``` + -h, --help help for docs +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + + +## ignite generate + +Generate clients, API docs from source code + +**Synopsis** + +Generate clients, API docs from source code. + +Such as compiling protocol buffer files into Go or implement particular +functionality, for example, generating an OpenAPI spec. + +Produced source code can be regenerated by running a command again and is not +meant to be edited by hand. + + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for generate + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite generate composables](#ignite-generate-composables) - TypeScript frontend client and Vue 3 composables +* [ignite generate hooks](#ignite-generate-hooks) - TypeScript frontend client and React hooks +* [ignite generate openapi](#ignite-generate-openapi) - OpenAPI spec for your chain +* [ignite generate proto-go](#ignite-generate-proto-go) - Compile protocol buffer files to Go source code required by Cosmos SDK +* [ignite generate ts-client](#ignite-generate-ts-client) - TypeScript frontend client +* [ignite generate vuex](#ignite-generate-vuex) - *DEPRECATED* TypeScript frontend client and Vuex stores + + +## ignite generate composables + +TypeScript frontend client and Vue 3 composables + +``` +ignite generate composables [flags] +``` + +**Options** + +``` + -h, --help help 
for composables + -o, --output string Vue 3 composables output path + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate hooks + +TypeScript frontend client and React hooks + +``` +ignite generate hooks [flags] +``` + +**Options** + +``` + -h, --help help for hooks + -o, --output string React hooks output path + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate openapi + +OpenAPI spec for your chain + +``` +ignite generate openapi [flags] +``` + +**Options** + +``` + -h, --help help for openapi + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate proto-go + +Compile protocol buffer files to Go source code required by Cosmos SDK + +``` +ignite generate proto-go [flags] +``` + +**Options** + +``` + -h, --help help for proto-go + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate ts-client + +TypeScript frontend 
client + +**Synopsis** + +Generate a framework agnostic TypeScript client for your blockchain project. + +By default the TypeScript client is generated in the "ts-client/" directory. You +can customize the output directory in config.yml: + + client: + typescript: + path: new-path + +Output can also be customized by using a flag: + + ignite generate ts-client --output new-path + +TypeScript client code can be automatically regenerated on reset or source code +changes when the blockchain is started with a flag: + + ignite chain serve --generate-clients + + +``` +ignite generate ts-client [flags] +``` + +**Options** + +``` + -h, --help help for ts-client + -o, --output string TypeScript client output path + --use-cache use build cache to speed-up generation + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate vuex + +*DEPRECATED* TypeScript frontend client and Vuex stores + +``` +ignite generate vuex [flags] +``` + +**Options** + +``` + -h, --help help for vuex + -o, --output string Vuex store output path + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite network + +Launch a blockchain in production + +**Synopsis** + + +Ignite Network commands allow to coordinate the launch of sovereign Cosmos blockchains. + +To launch a Cosmos blockchain you need someone to be a coordinator and others to +be validators. These are just roles, anyone can be a coordinator or a validator. 
+A coordinator publishes information about a chain to be launched on the Ignite +blockchain, approves validator requests and coordinates the launch. Validators +send requests to join a chain and start their nodes when a blockchain is ready +for launch. + +To publish the information about your chain as a coordinator run the following +command (the URL should point to a repository with a Cosmos SDK chain): + + ignite network chain publish github.com/ignite/example + +This command will return a launch identifier you will be using in the following +commands. Let's say this identifier is 42. + +Next, ask validators to initialize their nodes and request to join the network +as validators. For a testnet you can use the default values suggested by the +CLI. + + ignite network chain init 42 + + ignite network chain join 42 --amount 95000000stake + +As a coordinator list all validator requests: + + ignite network request list 42 + +Approve validator requests: + + ignite network request approve 42 1,2 + +Once you've approved all validators you need in the validator set, announce that +the chain is ready for launch: + + ignite network chain launch 42 + +Validators can now prepare their nodes for launch: + + ignite network chain prepare 42 + +The output of this command will show a command that a validator would use to +launch their node, for example “exampled --home ~/.example”. After enough +validators launch their nodes, a blockchain will be live. 
+
+
+**Options**
+
+```
+  -h, --help                        help for network
+      --local                       Use local SPN network
+      --nightly                     Use nightly SPN network
+      --spn-faucet-address string   SPN faucet address (default "https://faucet.devnet.ignite.com:443")
+      --spn-node-address string     SPN node address (default "https://rpc.devnet.ignite.com:443")
+```
+
+**SEE ALSO**
+
+* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain
+* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch
+* [ignite network coordinator](#ignite-network-coordinator) - Show and update a coordinator profile
+* [ignite network profile](#ignite-network-profile) - Show the address profile info
+* [ignite network project](#ignite-network-project) - Handle projects
+* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests
+* [ignite network reward](#ignite-network-reward) - Manage network rewards
+* [ignite network tool](#ignite-network-tool) - Commands to run subsidiary tools
+* [ignite network validator](#ignite-network-validator) - Show and update a validator profile
+* [ignite network version](#ignite-network-version) - Version of the plugin
+
+
+## ignite network chain
+
+Publish a chain, join as a validator and prepare node for launch
+
+**Synopsis**
+
+The "chain" namespace features the most commonly used commands for launching
+blockchains with Ignite.
+
+As a coordinator you "publish" your blockchain to Ignite. When enough validators
+are approved for the genesis and no changes are expected to be made to the
+genesis, a coordinator announces that the chain is ready for launch with the
+"launch" command. In the case of an unsuccessful launch, the coordinator can revert it
+using the "revert-launch" command.
+
+As a validator, you "init" your node and apply to become a validator for a
+blockchain with the "join" command.
After the launch of the chain is announced, +validators can generate the finalized genesis and download the list of peers with the +"prepare" command. + +The "install" command can be used to download, compile the source code and +install the chain's binary locally. The binary can be used, for example, to +initialize a validator node or to interact with the chain after it has been +launched. + +All chains published to Ignite can be listed by using the "list" command. + + +**Options** + +``` + -h, --help help for chain +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network chain init](#ignite-network-chain-init) - Initialize a chain from a published chain ID +* [ignite network chain install](#ignite-network-chain-install) - Install chain binary for a launch +* [ignite network chain join](#ignite-network-chain-join) - Request to join a network as a validator +* [ignite network chain launch](#ignite-network-chain-launch) - Trigger the launch of a chain +* [ignite network chain list](#ignite-network-chain-list) - List published chains +* [ignite network chain prepare](#ignite-network-chain-prepare) - Prepare the chain for launch +* [ignite network chain publish](#ignite-network-chain-publish) - Publish a new chain to start a new network +* [ignite network chain revert-launch](#ignite-network-chain-revert-launch) - Revert launch of a network as a coordinator +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain init + +Initialize a chain from a published chain ID + +**Synopsis** + +Ignite network chain init is a command used by validators to 
initialize a +validator node for a blockchain from the information stored on the Ignite chain. + + ignite network chain init 42 + +This command fetches the information about a chain with launch ID 42. The source +code of the chain is cloned in a temporary directory, and the node's binary is +compiled from the source. The binary is then used to initialize the node. By +default, Ignite uses "~/spn/[launch-id]/" as the home directory for the blockchain. + +An important part of initializing a validator node is creation of the gentx (a +transaction that adds a validator at the genesis of the chain). + +The "init" command will prompt for values like self-delegation and commission. +These values will be used in the validator's gentx. You can use flags to provide +the values in non-interactive mode. + +Use the "--home" flag to choose a different path for the home directory of the +blockchain: + + ignite network chain init 42 --home ~/mychain + +The end result of the "init" command is a validator home directory with a +genesis validator transaction (gentx) file. + +``` +ignite network chain init [launch-id] [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for init + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --validator-account string account for the chain validator (default "default") + --validator-details string details about the validator + --validator-gas-price string validator gas price + --validator-identity string validator identity signature (ex. 
UPort or Keybase) + --validator-moniker string custom validator moniker + --validator-security-contact string validator security contact email + --validator-self-delegation string validator minimum self delegation + --validator-website string associate a website with the validator + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain install + +Install chain binary for a launch + +``` +ignite network chain install [launch-id] [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for install +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain join + +Request to join a network as a validator + +**Synopsis** + +The "join" command is used by validators to send a request to join a blockchain. +The required argument is a launch ID of a blockchain. 
The "join" command expects
+that the validator has already set up a home directory for the blockchain and has
+a gentx either by running "ignite network chain init" or initializing the data
+directory manually with the chain's binary.
+
+By default the "join" command just sends the request to join as a validator.
+However, often a validator also needs to request a genesis account with a token
+balance to afford self-delegation.
+
+The following command will send a request to join blockchain with launch ID 42
+as a validator and request to be added as an account with a token balance of
+95000000 STAKE.
+
+	ignite network chain join 42 --amount 95000000stake
+
+A request to join as a validator contains a gentx file. Ignite looks for gentx
+in a home directory used by "ignite network chain init" by default. To use a
+different directory, use the "--home" flag or pass a gentx file directly with
+the "--gentx" flag.
+
+To join a chain as a validator, you must provide the IP address of your node so
+that other validators can connect to it. The join command will ask you for the
+IP address and will attempt to automatically detect and fill in the value. If
+you want to manually specify the IP address, you can use the "--peer-address"
+flag:
+
+	ignite network chain join 42 --peer-address 0.0.0.0
+
+Since "join" broadcasts a transaction to the Ignite blockchain, you will need an
+account on the Ignite blockchain. During the testnet phase, however, Ignite
+automatically requests tokens from a faucet.
+
+
+```
+ignite network chain join [launch-id] [flags]
+```
+
+**Options**
+
+```
+      --amount string            amount of coins for account request (ignored if coordinator has fixed the account balances or if --no-account flag is set)
+      --check-dependencies       verify that cached dependencies have not been modified since they were downloaded
+      --from string              account name to use for sending transactions to SPN (default "default")
+      --gentx string             path to a gentx json file
+  -h, --help                     help for join
+      --home string              home directory used for blockchains
+      --keyring-backend string   keyring backend to store your account keys (default "test")
+      --keyring-dir string       accounts keyring directory (default "/home/runner/.ignite/accounts")
+      --no-account               prevent sending a request for a genesis account
+      --peer-address string      peer's address
+  -y, --yes                      answers interactive yes/no questions with yes
+```
+
+**Options inherited from parent commands**
+
+```
+      --local                       Use local SPN network
+      --nightly                     Use nightly SPN network
+      --spn-faucet-address string   SPN faucet address (default "https://faucet.devnet.ignite.com:443")
+      --spn-node-address string     SPN node address (default "https://rpc.devnet.ignite.com:443")
+```
+
+**SEE ALSO**
+
+* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch
+
+
+## ignite network chain launch
+
+Trigger the launch of a chain
+
+**Synopsis**
+
+The launch command communicates to the world that the chain is ready to be
+launched.
+
+Only the coordinator of the chain can execute the launch command.
+
+	ignite network chain launch 42
+
+After the launch command is executed no changes to the genesis are accepted. For
+example, validators will no longer be able to successfully execute the "ignite
+network chain join" command to apply as a validator.
+
+The launch command sets the date and time after which the chain will start. By
+default, the current time is set.
To give validators more time to prepare for +the launch, set the time with the "--launch-time" flag: + + ignite network chain launch 42 --launch-time 2023-01-01T00:00:00Z + +After the launch command is executed, validators can generate the finalized +genesis and prepare their nodes for the launch. For example, validators can run +"ignite network chain prepare" to generate the genesis and populate the peer +list. + +If you want to change the launch time or open up the genesis file for changes +you can use "ignite network chain revert-launch" to make it possible, for +example, to accept new validators and add accounts. + + +``` +ignite network chain launch [launch-id] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for launch + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --launch-time string timestamp the chain is effectively launched (example "2022-01-01T00:00:00Z") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain list + +List published chains + +``` +ignite network chain list [flags] +``` + +**Options** + +``` + --advanced show advanced information about the chains + -h, --help help for list + --limit uint limit of results per page (default 100) + --page uint page for chain list result (default 1) +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN 
network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain prepare + +Prepare the chain for launch + +**Synopsis** + +The prepare command prepares a validator node for the chain launch by generating +the final genesis and adding IP addresses of peers to the validator's +configuration file. + + ignite network chain prepare 42 + +By default, Ignite uses "$HOME/spn/LAUNCH_ID" as the data directory. If you used +a different data directory when initializing the node, use the "--home" flag and +set the correct path to the data directory. + +Ignite generates the genesis file in "config/genesis.json" and adds peer IPs by +modifying "config/config.toml". + +The prepare command should be executed after the coordinator has triggered the +chain launch and finalized the genesis with "ignite network chain launch". You +can force Ignite to run the prepare command without checking if the launch has +been triggered with the "--force" flag (this is not recommended). + +After the prepare command is executed the node is ready to be started. 
+ + +``` +ignite network chain prepare [launch-id] [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + -f, --force force the prepare command to run even if the chain is not launched + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for prepare + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain publish + +Publish a new chain to start a new network + +**Synopsis** + +To begin the process of launching a blockchain with Ignite, a coordinator needs +to publish the information about a blockchain. The only required bit of +information is the URL of the source code of the blockchain. + +The following command publishes the information about an example blockchain: + + ignite network chain publish github.com/ignite/example + +This command fetches the source code of the blockchain, compiles the binary, +verifies that a blockchain can be started with the binary, and publishes the +information about the blockchain to Ignite. Currently, only public repositories +are supported. The command returns an integer number that acts as an identifier +of the chain on Ignite. 
+ +By publishing a blockchain on Ignite you become the "coordinator" of this +blockchain. A coordinator is an account that has the authority to approve and +reject validator requests, set parameters of the blockchain and trigger the +launch of the chain. + +The default Git branch is used when publishing a chain. If you want to use a +specific branch, tag or a commit hash, use "--branch", "--tag", or "--hash" +flags respectively. + +The repository name is used as the default chain ID. Ignite does not ensure that +chain IDs are unique, but they have to have a valid format: [string]-[integer]. +To set a custom chain ID use the "--chain-id" flag. + + ignite network chain publish github.com/ignite/example --chain-id foo-1 + +Once the chain is published users can request accounts with coin balances to be +added to the chain's genesis. By default, users are free to request any number +of tokens. If you want all users requesting tokens to get the same amount, use +the "--account-balance" flag with a list of coins. 
+ + ignite network chain publish github.com/ignite/example --account-balance 2000foocoin + + +``` +ignite network chain publish [source-url] [flags] +``` + +**Options** + +``` + --account-balance string balance for each approved genesis account for the chain + --amount string amount of coins for account request + --branch string Git branch to use for the repo + --chain-id string chain ID to use for this network + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + --genesis-config string name of an Ignite config file in the repo for custom Genesis + --genesis-url string URL to a custom Genesis + --hash string Git hash to use for the repo + -h, --help help for publish + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --mainnet initialize a mainnet project + --metadata string add chain metadata + --no-check skip verifying chain's integrity + --project uint project ID to use for this network + --reward.coins string reward coins + --reward.height int last reward height + --shares string add shares for the project + --tag string Git tag to use for the repo + --total-supply string add a total of the mainnet of a project + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and 
prepare node for launch + + +## ignite network chain revert-launch + +Revert launch of a network as a coordinator + +**Synopsis** + +The revert launch command reverts the previously scheduled launch of a chain. + +Only the coordinator of the chain can execute the launch command. + + ignite network chain revert-launch 42 + +After the revert launch command is executed, changes to the genesis of the chain +are allowed again. For example, validators will be able to request to join the +chain. Revert launch also resets the launch time. + + +``` +ignite network chain revert-launch [launch-id] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for revert-launch + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain show + +Show details of a chain + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch +* [ignite network chain show 
accounts](#ignite-network-chain-show-accounts) - Show all vesting and genesis accounts of the chain +* [ignite network chain show genesis](#ignite-network-chain-show-genesis) - Show the chain genesis file +* [ignite network chain show info](#ignite-network-chain-show-info) - Show info details of the chain +* [ignite network chain show peers](#ignite-network-chain-show-peers) - Show peers list of the chain +* [ignite network chain show validators](#ignite-network-chain-show-validators) - Show all validators of the chain + + +## ignite network chain show accounts + +Show all vesting and genesis accounts of the chain + +``` +ignite network chain show accounts [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for accounts + --prefix string account address prefix (default "spn") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show genesis + +Show the chain genesis file + +``` +ignite network chain show genesis [launch-id] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for genesis + --out string path to output Genesis file (default "./genesis.json") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show info + 
+Show info details of the chain + +``` +ignite network chain show info [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for info +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show peers + +Show peers list of the chain + +``` +ignite network chain show peers [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for peers + --out string path to output peers list (default "./peers.txt") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show validators + +Show all validators of the chain + +``` +ignite network chain show validators [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for validators + --prefix string account address prefix (default "spn") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network coordinator + +Show and update a coordinator 
profile + +**Options** + +``` + -h, --help help for coordinator +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network coordinator set](#ignite-network-coordinator-set) - Set an information in a coordinator profile +* [ignite network coordinator show](#ignite-network-coordinator-show) - Show a coordinator profile + + +## ignite network coordinator set + +Set an information in a coordinator profile + +**Synopsis** + +Coordinators on Ignite can set a profile containing a description for the coordinator. +The coordinator set command allows to set information for the coordinator. +The following information can be set: +- details: general information about the coordinator. +- identity: a piece of information to verify the identity of the coordinator with a system like Keybase or Veramo. +- website: website of the coordinator. 
+ + +``` +ignite network coordinator set details|identity|website [value] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for set + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network coordinator](#ignite-network-coordinator) - Show and update a coordinator profile + + +## ignite network coordinator show + +Show a coordinator profile + +``` +ignite network coordinator show [address] [flags] +``` + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network coordinator](#ignite-network-coordinator) - Show and update a coordinator profile + + +## ignite network profile + +Show the address profile info + +``` +ignite network profile [project-id] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for profile + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN 
network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production + + +## ignite network project + +Handle projects + +**Options** + +``` + -h, --help help for project +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network project account](#ignite-network-project-account) - Handle project accounts +* [ignite network project create](#ignite-network-project-create) - Create a project +* [ignite network project list](#ignite-network-project-list) - List published projects +* [ignite network project show](#ignite-network-project-show) - Show published project +* [ignite network project update](#ignite-network-project-update) - Update details of the project + + +## ignite network project account + +Handle project accounts + +**Options** + +``` + -h, --help help for account +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network project](#ignite-network-project) - Handle projects +* [ignite network project account list](#ignite-network-project-account-list) - Show all mainnet and mainnet vesting accounts of the project + + +## ignite network 
project account list + +Show all mainnet and mainnet vesting accounts of the project + +``` +ignite network project account list [project-id] [flags] +``` + +**Options** + +``` + -h, --help help for list +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network project account](#ignite-network-project-account) - Handle project accounts + + +## ignite network project create + +Create a project + +``` +ignite network project create [name] [total-supply] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for create + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --metadata string Add a metadata to the chain +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network project](#ignite-network-project) - Handle projects + + +## ignite network project list + +List published projects + +``` +ignite network project list [flags] +``` + +**Options** + +``` + -h, --help help for list +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string 
SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network project](#ignite-network-project) - Handle projects + + +## ignite network project show + +Show published project + +``` +ignite network project show [project-id] [flags] +``` + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network project](#ignite-network-project) - Handle projects + + +## ignite network project update + +Update details of the project + +``` +ignite network project update [project-id] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for update + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --metadata string update the project metadata + --name string update the project name + --total-supply string update the total supply of the mainnet of a project +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network project](#ignite-network-project) - Handle projects + + +## ignite network request + +Create, show, reject and approve requests + +**Synopsis** + +The "request" namespace contains commands for creating, showing, approving, and +rejecting requests. 
+ +A request is a mechanism in Ignite that allows changes to be made to the genesis +file like adding accounts with token balances and validators. Anyone can submit +a request, but only the coordinator of a chain can approve or reject a request. + +Each request has a status: + +* Pending: waiting for the approval of the coordinator +* Approved: approved by the coordinator, its content has been applied to the + launch information +* Rejected: rejected by the coordinator or the request creator + + +**Options** + +``` + -h, --help help for request +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network request add-account](#ignite-network-request-add-account) - Send request to add account +* [ignite network request approve](#ignite-network-request-approve) - Approve requests +* [ignite network request change-param](#ignite-network-request-change-param) - Send request to change a module param +* [ignite network request list](#ignite-network-request-list) - List all requests for a chain +* [ignite network request reject](#ignite-network-request-reject) - Reject requests +* [ignite network request remove-account](#ignite-network-request-remove-account) - Send request to remove a genesis account +* [ignite network request remove-validator](#ignite-network-request-remove-validator) - Send request to remove a validator +* [ignite network request show](#ignite-network-request-show) - Show detailed information about a request +* [ignite network request verify](#ignite-network-request-verify) - Verify the request and simulate the chain genesis from them + + +## ignite network request add-account + +Send request 
to add account + +**Synopsis** + +The "add account" command creates a new request to add an account with a given +address and a specified coin balance to the genesis of the chain. + +The request automatically fails to be applied if a genesis account or a vesting +account with an identical address is already specified in the launch +information. + +If a coordinator has specified that all genesis accounts on a chain should have +the same balance (useful for testnets, for example), the "add account" expects +only an address as an argument. Attempt to provide a token balance will result +in an error. + + +``` +ignite network request add-account [launch-id] [address] [coins] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for add-account + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request approve + +Approve requests + +**Synopsis** + +The "approve" command is used by a chain's coordinator to approve requests. +Multiple requests can be approved using a comma-separated list and/or using a +dash syntax. + + ignite network request approve 42 1,2,3-6,7,8 + +The command above approves requests with IDs from 1 to 8 included on a chain +with a launch ID 42. 
+ +When requests are approved Ignite applies the requested changes and simulates +initializing and launching the chain locally. If the chain starts successfully, +requests are considered to be "verified" and are approved. If one or more +requested changes stop the chain from launching locally, the verification +process fails and the approval of all requests is canceled. To skip the +verification process use the "--no-verification" flag. + +Note that Ignite will try to approve requests in the same order as request IDs +are submitted to the "approve" command. + +``` +ignite network request approve [launch-id] [number<,...>] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for approve + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --no-verification approve the requests without verifying them +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request change-param + +Send request to change a module param + +``` +ignite network request change-param [launch-id] [module-name] [param-name] [value (json, string, number)] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for change-param + --home string home directory 
used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request list + +List all requests for a chain + +``` +ignite network request list [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for list + --prefix string account address prefix (default "spn") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request reject + +Reject requests + +**Synopsis** + +The "reject" command is used by a chain's coordinator to reject requests. + + ignite network request reject 42 1,2,3-6,7,8 + +The syntax of the "reject" command is similar to that of the "approve" command. 
+ + +``` +ignite network request reject [launch-id] [number<,...>] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for reject + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request remove-account + +Send request to remove a genesis account + +``` +ignite network request remove-account [launch-id] [address] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for remove-account + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request remove-validator + +Send request to remove a 
validator + +``` +ignite network request remove-validator [launch-id] [address] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for remove-validator + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request show + +Show detailed information about a request + +``` +ignite network request show [launch-id] [request-id] [flags] +``` + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request verify + +Verify the request and simulate the chain genesis from them + +**Synopsis** + +The "verify" command applies selected requests to the genesis of a chain locally +to verify that approving these requests will result in a valid genesis that +allows a chain to launch without issues. This command does not approve requests, +only checks them. 
+ + +``` +ignite network request verify [launch-id] [number<,...>] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for verify + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network reward + +Manage network rewards + +**Options** + +``` + -h, --help help for reward +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network reward release](#ignite-network-reward-release) - Connect the monitoring modules of launched chains with SPN +* [ignite network reward set](#ignite-network-reward-set) - set a network chain reward + + +## ignite network reward release + +Connect the monitoring modules of launched chains with SPN + +``` +ignite network reward release [launch-id] [chain-rpc] [flags] +``` + +**Options** + +``` + --create-client-only only create the network client id + --from string account name to use for sending transactions to SPN (default 
"default") + -h, --help help for release + --keyring-backend string keyring backend to store your account keys (default "test") + --spn-gaslimit int gas limit used for transactions on SPN (default 400000) + --spn-gasprice string gas price used for transactions on SPN (default "0.0000025uspn") + --testnet-account string testnet chain account (default "default") + --testnet-faucet string faucet address of the testnet chain + --testnet-gaslimit int gas limit used for transactions on testnet chain (default 400000) + --testnet-gasprice string gas price used for transactions on testnet chain (default "0.0000025stake") + --testnet-prefix string address prefix of the testnet chain (default "cosmos") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network reward](#ignite-network-reward) - Manage network rewards + + +## ignite network reward set + +set a network chain reward + +``` +ignite network reward set [launch-id] [last-reward-height] [coins] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for set + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network 
reward](#ignite-network-reward) - Manage network rewards + + +## ignite network tool + +Commands to run subsidiary tools + +**Options** + +``` + -h, --help help for tool +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network tool proxy-tunnel](#ignite-network-tool-proxy-tunnel) - Setup a proxy tunnel via HTTP + + +## ignite network tool proxy-tunnel + +Setup a proxy tunnel via HTTP + +**Synopsis** + +Starts an HTTP proxy server and HTTP proxy clients for each node that +needs HTTP tunneling. + +HTTP tunneling is activated **ONLY** if SPN_CONFIG_FILE has "tunneled_peers" +field inside with a list of tunneled peers/nodes. + +If you're using SPN as coordinator and do not want to allow HTTP tunneling +feature at all, you can prevent the "spn.yml" file from being generated by not +approving validator requests that have HTTP tunneling enabled instead of plain +TCP connections. 
+ +``` +ignite network tool proxy-tunnel SPN_CONFIG_FILE [flags] +``` + +**Options** + +``` + -h, --help help for proxy-tunnel +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network tool](#ignite-network-tool) - Commands to run subsidiary tools + + +## ignite network validator + +Show and update a validator profile + +**Options** + +``` + -h, --help help for validator +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network validator set](#ignite-network-validator-set) - Set an information in a validator profile +* [ignite network validator show](#ignite-network-validator-show) - Show a validator profile + + +## ignite network validator set + +Set an information in a validator profile + +**Synopsis** + +Validators on Ignite can set a profile containing a description for the validator. +The validator set command allows to set information for the validator. +The following information can be set: +- details: general information about the validator. +- identity: piece of information to verify identity of the validator with a system like Keybase or Veramo. +- website: website of the validator. +- security: security contact for the validator. 
+ + +``` +ignite network validator set details|identity|website|security [value] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for set + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network validator](#ignite-network-validator) - Show and update a validator profile + + +## ignite network validator show + +Show a validator profile + +``` +ignite network validator show [address] [flags] +``` + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network validator](#ignite-network-validator) - Show and update a validator profile + + +## ignite network version + +Version of the plugin + +**Synopsis** + +The version of the plugin to use to interact with a chain might be specified by the coordinator. 
+ + +``` +ignite network version [flags] +``` + +**Options** + +``` + -h, --help help for version +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production + + +## ignite node + +Make requests to a live blockchain node + +**Options** + +``` + -h, --help help for node + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite node query](#ignite-node-query) - Querying subcommands +* [ignite node tx](#ignite-node-tx) - Transactions subcommands + + +## ignite node query + +Querying subcommands + +**Options** + +``` + -h, --help help for query +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node](#ignite-node) - Make requests to a live blockchain node +* [ignite node query bank](#ignite-node-query-bank) - Querying commands for the bank module +* [ignite node query tx](#ignite-node-query-tx) - Query for transaction by hash + + +## ignite node query bank + +Querying commands for the bank module + +**Options** + +``` + -h, --help help for bank +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node query](#ignite-node-query) - Querying subcommands +* [ignite node query bank 
balances](#ignite-node-query-bank-balances) - Query for account balances by account name or address + + +## ignite node query bank balances + +Query for account balances by account name or address + +``` +ignite node query bank balances [from_account_or_address] [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + --count-total count total number of records in all balances to query for + -h, --help help for balances + --home string directory where the blockchain node is initialized + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --limit uint pagination limit of all balances to query for (default 100) + --offset uint pagination offset of all balances to query for + --page uint pagination page of all balances to query for. This sets offset to a multiple of limit (default 1) + --page-key string pagination page-key of all balances to query for + --reverse results are sorted in descending order +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node query bank](#ignite-node-query-bank) - Querying commands for the bank module + + +## ignite node query tx + +Query for transaction by hash + +``` +ignite node query tx [hash] [flags] +``` + +**Options** + +``` + -h, --help help for tx +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node query](#ignite-node-query) - Querying subcommands + + +## ignite node tx + +Transactions subcommands + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + --fees string fees to pay along with transaction; eg: 
10uatom + --gas string gas limit to set per-transaction; set to "auto" to calculate sufficient gas automatically (default "auto") + --gas-prices string gas prices in decimal format to determine the transaction fee (e.g. 0.1uatom) + --generate-only build an unsigned transaction and write it to STDOUT + -h, --help help for tx + --home string directory where the blockchain node is initialized + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node](#ignite-node) - Make requests to a live blockchain node +* [ignite node tx bank](#ignite-node-tx-bank) - Bank transaction subcommands + + +## ignite node tx bank + +Bank transaction subcommands + +**Options** + +``` + -h, --help help for bank +``` + +**Options inherited from parent commands** + +``` + --address-prefix string account address prefix (default "cosmos") + --fees string fees to pay along with transaction; eg: 10uatom + --gas string gas limit to set per-transaction; set to "auto" to calculate sufficient gas automatically (default "auto") + --gas-prices string gas prices in decimal format to determine the transaction fee (e.g. 
0.1uatom) + --generate-only build an unsigned transaction and write it to STDOUT + --home string directory where the blockchain node is initialized + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node tx](#ignite-node-tx) - Transactions subcommands +* [ignite node tx bank send](#ignite-node-tx-bank-send) - Send funds from one account to another. + + +## ignite node tx bank send + +Send funds from one account to another. + +``` +ignite node tx bank send [from_account_or_address] [to_account_or_address] [amount] [flags] +``` + +**Options** + +``` + -h, --help help for send +``` + +**Options inherited from parent commands** + +``` + --address-prefix string account address prefix (default "cosmos") + --fees string fees to pay along with transaction; eg: 10uatom + --gas string gas limit to set per-transaction; set to "auto" to calculate sufficient gas automatically (default "auto") + --gas-prices string gas prices in decimal format to determine the transaction fee (e.g. 
0.1uatom) + --generate-only          build an unsigned transaction and write it to STDOUT + --home string            directory where the blockchain node is initialized + --keyring-backend string   keyring backend to store your account keys (default "test") + --keyring-dir string       accounts keyring directory (default "/home/runner/.ignite/accounts") + --node string            <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.network:443") +``` + +**SEE ALSO** + +* [ignite node tx bank](#ignite-node-tx-bank) - Bank transaction subcommands + + +## ignite plugin + +Handle plugins + +**Options** + +``` + -h, --help   help for plugin +``` + +**SEE ALSO** + +* [ignite](#ignite)	 - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite plugin add](#ignite-plugin-add)	 - Adds a plugin declaration to a plugin configuration +* [ignite plugin describe](#ignite-plugin-describe)	 - Output information about a registered plugin +* [ignite plugin list](#ignite-plugin-list)	 - List declared plugins and status +* [ignite plugin remove](#ignite-plugin-remove)	 - Removes a plugin declaration from a chain's plugin configuration +* [ignite plugin scaffold](#ignite-plugin-scaffold)	 - Scaffold a new plugin +* [ignite plugin update](#ignite-plugin-update)	 - Update plugins + + +## ignite plugin add + +Adds a plugin declaration to a plugin configuration + +**Synopsis** + +Adds a plugin declaration to a plugin configuration. +Respects key value pairs declared after the plugin path to be added to the +generated configuration definition. +Example: + ignite plugin add github.com/org/my-plugin/ foo=bar baz=qux + +``` +ignite plugin add [path] [key=value]... 
[flags] +``` + +**Options** + +``` + -g, --global   use global plugins configuration ($HOME/.ignite/plugins/plugins.yml) + -h, --help     help for add +``` + +**SEE ALSO** + +* [ignite plugin](#ignite-plugin)	 - Handle plugins + + +## ignite plugin describe + +Output information about a registered plugin + +**Synopsis** + +Output information about a registered plugin's commands and hooks. + +``` +ignite plugin describe [path] [flags] +``` + +**Options** + +``` + -h, --help   help for describe +``` + +**SEE ALSO** + +* [ignite plugin](#ignite-plugin)	 - Handle plugins + + +## ignite plugin list + +List declared plugins and status + +**Synopsis** + +Prints status and information of declared plugins + +``` +ignite plugin list [flags] +``` + +**Options** + +``` + -h, --help   help for list +``` + +**SEE ALSO** + +* [ignite plugin](#ignite-plugin)	 - Handle plugins + + +## ignite plugin remove + +Removes a plugin declaration from a chain's plugin configuration + +``` +ignite plugin remove [path] [flags] +``` + +**Options** + +``` + -g, --global   use global plugins configuration ($HOME/.ignite/plugins/plugins.yml) + -h, --help     help for remove +``` + +**SEE ALSO** + +* [ignite plugin](#ignite-plugin)	 - Handle plugins + + +## ignite plugin scaffold + +Scaffold a new plugin + +**Synopsis** + +Scaffolds a new plugin in the current directory with the given repository path configured. A git repository will be created with the given module name, unless the current directory is already a git repository. + +``` +ignite plugin scaffold [github.com/org/repo] [flags] +``` + +**Options** + +``` + -h, --help   help for scaffold +``` + +**SEE ALSO** + +* [ignite plugin](#ignite-plugin)	 - Handle plugins + + +## ignite plugin update + +Update plugins + +**Synopsis** + +Updates a plugin specified by path. 
If no path is specified all declared plugins are updated + +``` +ignite plugin update [path] [flags] +``` + +**Options** + +``` + -h, --help help for update +``` + +**SEE ALSO** + +* [ignite plugin](#ignite-plugin) - Handle plugins + + +## ignite relayer + +Connect blockchains with an IBC relayer + +**Options** + +``` + -h, --help help for relayer +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite relayer configure](#ignite-relayer-configure) - Configure source and target chains for relaying +* [ignite relayer connect](#ignite-relayer-connect) - Link chains associated with paths and start relaying tx packets in between + + +## ignite relayer configure + +Configure source and target chains for relaying + +``` +ignite relayer configure [flags] +``` + +**Options** + +``` + -a, --advanced advanced configuration options for custom IBC modules + -h, --help help for configure + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --ordered set the channel as ordered + -r, --reset reset the relayer config + --source-account string source Account + --source-client-id string use a custom client id for source + --source-faucet string faucet address of the source chain + --source-gaslimit int gas limit used for transactions on source chain + --source-gasprice string gas price used for transactions on source chain + --source-port string IBC port ID on the source chain + --source-prefix string address prefix of the source chain + --source-rpc string RPC address of the source chain + --source-version string module version on the source chain + --target-account string target Account + --target-client-id string use a custom client id for target + --target-faucet string faucet address of the target chain + --target-gaslimit int gas limit used for transactions on 
target chain + --target-gasprice string gas price used for transactions on target chain + --target-port string IBC port ID on the target chain + --target-prefix string address prefix of the target chain + --target-rpc string RPC address of the target chain + --target-version string module version on the target chain +``` + +**SEE ALSO** + +* [ignite relayer](#ignite-relayer) - Connect blockchains with an IBC relayer + + +## ignite relayer connect + +Link chains associated with paths and start relaying tx packets in between + +``` +ignite relayer connect [<path>,...] [flags] +``` + +**Options** + +``` + -h, --help help for connect + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite relayer](#ignite-relayer) - Connect blockchains with an IBC relayer + + +## ignite scaffold + +Create a new blockchain, module, message, query, and more + +**Synopsis** + +Scaffolding is a quick way to generate code for major pieces of your +application. + +For details on each scaffolding target (chain, module, message, etc.) run the +corresponding command with a "--help" flag, for example, "ignite scaffold chain +--help". + +The Ignite team strongly recommends committing the code to a version control +system before running scaffolding commands. This will make it easier to see the +changes to the source code as well as undo the command if you've decided to roll +back the changes. + +This blockchain you create with the chain scaffolding command uses the modular +Cosmos SDK framework and imports many standard modules for functionality like +proof of stake, token transfer, inter-blockchain connectivity, governance, and +more. Custom functionality is implemented in modules located by convention in +the "x/" directory. By default, your blockchain comes with an empty custom +module. 
Use the module scaffolding command to create an additional module. + +An empty custom module doesn't do much, it's basically a container for logic +that is responsible for processing transactions and changing the application +state. Cosmos SDK blockchains work by processing user-submitted signed +transactions, which contain one or more messages. A message contains data that +describes a state transition. A module can be responsible for handling any +number of messages. + +A message scaffolding command will generate the code for handling a new type of +Cosmos SDK message. Message fields describe the state transition that the +message is intended to produce if processed without errors. + +Scaffolding messages is useful to create individual "actions" that your module +can perform. Sometimes, however, you want your blockchain to have the +functionality to create, read, update and delete (CRUD) instances of a +particular type. Depending on how you want to store the data there are three +commands that scaffold CRUD functionality for a type: list, map, and single. +These commands create four messages (one for each CRUD action), and the logic to +add, delete, and fetch the data from the store. If you want to scaffold only the +logic, for example, you've decided to scaffold messages separately, you can do +that as well with the "--no-message" flag. + +Reading data from a blockchain happens with a help of queries. Similar to how +you can scaffold messages to write data, you can scaffold queries to read the +data back from your blockchain application. + +You can also scaffold a type, which just produces a new protocol buffer file +with a proto message description. Note that proto messages produce (and +correspond with) Go types whereas Cosmos SDK messages correspond to proto "rpc" +in the "Msg" service. + +If you're building an application with custom IBC logic, you might need to +scaffold IBC packets. An IBC packet represents the data sent from one blockchain +to another. 
You can only scaffold IBC packets in IBC-enabled modules scaffolded +with an "--ibc" flag. Note that the default module is not IBC-enabled. + + +**Options** + +``` + -h, --help help for scaffold +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite scaffold chain](#ignite-scaffold-chain) - New Cosmos SDK blockchain +* [ignite scaffold list](#ignite-scaffold-list) - CRUD for data stored as an array +* [ignite scaffold map](#ignite-scaffold-map) - CRUD for data stored as key-value pairs +* [ignite scaffold message](#ignite-scaffold-message) - Message to perform state transition on the blockchain +* [ignite scaffold module](#ignite-scaffold-module) - Custom Cosmos SDK module +* [ignite scaffold packet](#ignite-scaffold-packet) - Message for sending an IBC packet +* [ignite scaffold query](#ignite-scaffold-query) - Query for fetching data from a blockchain +* [ignite scaffold react](#ignite-scaffold-react) - React web app template +* [ignite scaffold single](#ignite-scaffold-single) - CRUD for data stored in a single location +* [ignite scaffold type](#ignite-scaffold-type) - Type definition +* [ignite scaffold vue](#ignite-scaffold-vue) - Vue 3 web app template + + +## ignite scaffold chain + +New Cosmos SDK blockchain + +**Synopsis** + +Create a new application-specific Cosmos SDK blockchain. + +For example, the following command will create a blockchain called "hello" in +the "hello/" directory: + + ignite scaffold chain hello + +A project name can be a simple name or a URL. The name will be used as the Go +module path for the project. Examples of project names: + + ignite scaffold chain foo + ignite scaffold chain foo/bar + ignite scaffold chain example.org/foo + ignite scaffold chain github.com/username/foo + +A new directory with source code files will be created in the current directory. +To use a different path use the "--path" flag. 
+ +Most of the logic of your blockchain is written in custom modules. Each module +effectively encapsulates an independent piece of functionality. Following the +Cosmos SDK convention, custom modules are stored inside the "x/" directory. By +default, Ignite creates a module with a name that matches the name of the +project. To create a blockchain without a default module use the "--no-module" +flag. Additional modules can be added after a project is created with "ignite +scaffold module" command. + +Account addresses on Cosmos SDK-based blockchains have string prefixes. For +example, the Cosmos Hub blockchain uses the default "cosmos" prefix, so that +addresses look like this: "cosmos12fjzdtqfrrve7zyg9sv8j25azw2ua6tvu07ypf". To +use a custom address prefix use the "--address-prefix" flag. For example: + + ignite scaffold chain foo --address-prefix bar + +By default when compiling a blockchain's source code Ignite creates a cache to +speed up the build process. To clear the cache when building a blockchain use +the "--clear-cache" flag. It is very unlikely you will ever need to use this +flag. + +The blockchain is using the Cosmos SDK modular blockchain framework. Learn more +about Cosmos SDK on https://docs.cosmos.network + + +``` +ignite scaffold chain [name] [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + --clear-cache clear the build cache (advanced) + -h, --help help for chain + --no-module create a project without a default module + -p, --path string create a project in a specific path + --skip-git skip Git repository initialization +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold list + +CRUD for data stored as an array + +**Synopsis** + +The "list" scaffolding command is used to generate files that implement the +logic for storing and interacting with data stored as a list in the blockchain +state. 
+ +The command accepts a NAME argument that will be used as the name of a new type +of data. It also accepts a list of FIELDs that describe the type. + +The interaction with the data follows the create, read, update, and delete +(CRUD) pattern. For each type three Cosmos SDK messages are defined for writing +data to the blockchain: MsgCreate{Name}, MsgUpdate{Name}, MsgDelete{Name}. For +reading data two queries are defined: {Name} and {Name}All. The type, messages, +and queries are defined in the "proto/" directory as protocol buffer messages. +Messages and queries are mounted in the "Msg" and "Query" services respectively. + +When messages are handled, the appropriate keeper methods are called. By +convention, the methods are defined in +"x/{moduleName}/keeper/msg_server_{name}.go". Helpful methods for getting, +setting, removing, and appending are defined in the same "keeper" package in +"{name}.go". + +The "list" command essentially allows you to define a new type of data and +provides the logic to create, read, update, and delete instances of the type. +For example, let's review a command that generates the code to handle a list of +posts and each post has "title" and "body" fields: + + ignite scaffold list post title body + +This provides you with a "Post" type, MsgCreatePost, MsgUpdatePost, +MsgDeletePost and two queries: Post and PostAll. The compiled CLI, let's say the +binary is "blogd" and the module is "blog", has commands to query the chain (see +"blogd q blog") and broadcast transactions with the messages above (see "blogd +tx blog"). + +The code generated with the list command is meant to be edited and tailored to +your application needs. Consider the code to be a "skeleton" for the actual +business logic you will implement next. + +By default, all fields are assumed to be strings. If you want a field of a +different type, you can specify it after a colon ":". 
The following types are +supported: string, bool, int, uint, coin, array.string, array.int, array.uint, +array.coin. An example of using field types: + + ignite scaffold list pool amount:coin tags:array.string height:int + +Supported types: + +| Type | Alias | Index | Code Type | Description | +|--------------|---------|-------|-----------|---------------------------------| +| string | - | yes | string | Text type | +| array.string | strings | no | []string | List of text type | +| bool | - | yes | bool | Boolean type | +| int | - | yes | int32 | Integer type | +| array.int | ints | no | []int32 | List of integers types | +| uint | - | yes | uint64 | Unsigned integer type | +| array.uint | uints | no | []uint64 | List of unsigned integers types | +| coin | - | no | sdk.Coin | Cosmos SDK coin type | +| array.coin | coins | no | sdk.Coins | List of Cosmos SDK coin types | + +"Index" indicates whether the type can be used as an index in +"ignite scaffold map". + +Ignite also supports custom types: + + ignite scaffold list product-details name desc + ignite scaffold list product price:coin details:ProductDetails + +In the example above the "ProductDetails" type was defined first, and then used +as a custom type for the "details" field. Ignite doesn't support arrays of +custom types yet. + +Your chain will accept custom types in JSON-notation: + + exampled tx example create-product 100coin '{"name": "x", "desc": "y"}' --from alice + +By default the code will be scaffolded in the module that matches your project's +name. If you have several modules in your project, you might want to specify a +different module: + + ignite scaffold list post title body --module blog + +By default, each message comes with a "creator" field that represents the +address of the transaction signer. You can customize the name of this field with +a flag: + + ignite scaffold list post title body --signer author + +It's possible to scaffold just the getter/setter logic without the CRUD +messages. 
This is useful when you want the methods to handle a type, but would +like to scaffold messages manually. Use a flag to skip message scaffolding: + + ignite scaffold list post title body --no-message + +The "creator" field is not generated if a list is scaffolded with the +"--no-message" flag. + + +``` +ignite scaffold list NAME [field]... [flags] +``` + +**Options** + +``` + --clear-cache     clear the build cache (advanced) + -h, --help            help for list + --module string   specify which module to generate code in + --no-message      skip generating message handling logic + --no-simulation   skip simulation logic + -p, --path string     path of the app (default ".") + --signer string   label for the message signer (default: creator) + -y, --yes             answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold)	 - Create a new blockchain, module, message, query, and more + + +## ignite scaffold map + +CRUD for data stored as key-value pairs + +**Synopsis** + +The "map" scaffolding command is used to generate files that implement the logic +for storing and interacting with data stored as key-value pairs (or a +dictionary) in the blockchain state. + +The "map" command is very similar to "ignite scaffold list" with the main +difference in how values are indexed. With "list" values are indexed by an +incrementing integer, whereas "map" values are indexed by a user-provided value +(or multiple values). + +Let's use the same blog post example: + + ignite scaffold map post title body + +This command scaffolds a "Post" type and CRUD functionality to create, read, +update, and delete posts. 
However, when creating a new post with your chain's +binary (or by submitting a transaction through the chain's API) you will be +required to provide an "index": + + blogd tx blog create-post [index] [title] [body] + blogd tx blog create-post hello "My first post" "This is the body" + +This command will create a post and store it in the blockchain's state under the +"hello" index. You will be able to fetch back the value of the post by querying +for the "hello" key. + + blogd q blog show-post hello + +To customize the index, use the "--index" flag. Multiple indices can be +provided, which simplifies querying values. For example: + + ignite scaffold map product price desc --index category,guid + +With this command, you would get a "Product" value indexed by both a category +and a GUID (globally unique ID). This will let you programmatically fetch +product values that have the same category but are using different GUIDs. + +Since the behavior of "list" and "map" scaffolding is very similar, you can use +the "--no-message", "--module", "--signer" flags as well as the colon syntax for +custom types. + + +``` +ignite scaffold map NAME [field]... 
[flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for map + --index strings fields that index the value (default [index]) + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold message + +Message to perform state transition on the blockchain + +**Synopsis** + +Message scaffolding is useful for quickly adding functionality to your +blockchain to handle specific Cosmos SDK messages. + +Messages are objects whose end goal is to trigger state transitions on the +blockchain. A message is a container for fields of data that affect how the +blockchain's state will change. You can think of messages as "actions" that a +user can perform. + +For example, the bank module has a "Send" message for token transfers between +accounts. The send message has three fields: from address (sender), to address +(recipient), and a token amount. When this message is successfully processed, +the token amount will be deducted from the sender's account and added to the +recipient's account. + +Ignite's message scaffolding lets you create new types of messages and add them +to your chain. For example: + + ignite scaffold message add-pool amount:coins denom active:bool --module dex + +The command above will create a new message MsgAddPool with three fields: amount +(in tokens), denom (a string), and active (a boolean). The message will be added +to the "dex" module. + +By default, the message is defined as a proto message in the +"proto/{app}/{module}/tx.proto" and registered in the "Msg" service. 
A CLI command to +create and broadcast a transaction with MsgAddPool is created in the module's +"cli" package. Additionally, Ignite scaffolds a message constructor and the code +to satisfy the sdk.Msg interface and register the message in the module. + +Most importantly in the "keeper" package Ignite scaffolds an "AddPool" function. +Inside this function, you can implement message handling logic. + +When successfully processed a message can return data. Use the --response flag to +specify response fields and their types. For example + + ignite scaffold message create-post title body --response id:int,title + +The command above will scaffold MsgCreatePost which returns both an ID (an +integer) and a title (a string). + +Message scaffolding follows the same rules as "ignite scaffold list/map/single" and +supports fields with standard and custom types. See "ignite scaffold list --help" +for details. + + +``` +ignite scaffold message [name] [field1] [field2] ... [flags] +``` + +**Options** + +``` + --clear-cache        clear the build cache (advanced) + -d, --desc string        description of the command + -h, --help               help for message + --module string      module to add the message into. Default: app's main module + --no-simulation      disable CRUD simulation scaffolding + -p, --path string        path of the app (default ".") + -r, --response strings   response fields + --signer string      label for the message signer (default: creator) + -y, --yes                answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold)	 - Create a new blockchain, module, message, query, and more + + +## ignite scaffold module + +Custom Cosmos SDK module + +**Synopsis** + +Scaffold a new Cosmos SDK module. + +Cosmos SDK is a modular framework and each independent piece of functionality is +implemented in a separate module. By default your blockchain imports a set of +standard Cosmos SDK modules. 
To implement custom functionality of your +blockchain, scaffold a module and implement the logic of your application. + +This command does the following: + +* Creates a directory with module's protocol buffer files in "proto/" +* Creates a directory with module's boilerplate Go code in "x/" +* Imports the newly created module by modifying "app/app.go" +* Creates a file in "testutil/keeper/" that contains logic to create a keeper + for testing purposes + +This command will proceed with module scaffolding even if "app/app.go" doesn't +have the required default placeholders. If the placeholders are missing, you +will need to modify "app/app.go" manually to import the module. If you want the +command to fail if it can't import the module, use the "--require-registration" +flag. + +To scaffold an IBC-enabled module use the "--ibc" flag. An IBC-enabled module is +like a regular module with the addition of IBC-specific logic and placeholders +to scaffold IBC packets with "ignite scaffold packet". + +A module can depend on one or more other modules and import their keeper +methods. To scaffold a module with a dependency use the "--dep" flag + +For example, your new custom module "foo" might have functionality that requires +sending tokens between accounts. The method for sending tokens is defined in +the "bank" module's keeper. You can scaffold a "foo" module with the dependency +on "bank" with the following command: + + ignite scaffold module foo --dep bank + +You can then define which methods you want to import from the "bank" keeper in +"expected_keepers.go". + +You can also scaffold a module with a list of dependencies that can include both +standard and custom modules (provided they exist): + + ignite scaffold module bar --dep foo,mint,account,FeeGrant + +Note: the "--dep" flag doesn't install third-party modules into your +application, it just generates extra code that specifies which existing modules +your new custom module depends on. 
+ +A Cosmos SDK module can have parameters (or "params"). Params are values that +can be set at the genesis of the blockchain and can be modified while the +blockchain is running. An example of a param is "Inflation rate change" of the +"mint" module. A module can be scaffolded with params using the "--params" flag +that accepts a list of param names. By default params are of type "string", but +you can specify a type for each param. For example: + + ignite scaffold module foo --params baz:uint,bar:bool + +Refer to Cosmos SDK documentation to learn more about modules, dependencies and +params. + + +``` +ignite scaffold module [name] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --dep strings add a dependency on another module + -h, --help help for module + --ibc add IBC functionality + --ordering string channel ordering of the IBC module [none|ordered|unordered] (default "none") + --params strings add module parameters + -p, --path string path of the app (default ".") + --require-registration fail if module can't be registered + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold packet + +Message for sending an IBC packet + +**Synopsis** + +Scaffold an IBC packet in a specific IBC-enabled Cosmos SDK module + +``` +ignite scaffold packet [packetName] [field1] [field2] ... --module [moduleName] [flags] +``` + +**Options** + +``` + --ack strings custom acknowledgment type (field1,field2,...) 
+ --clear-cache clear the build cache (advanced) + -h, --help help for packet + --module string IBC Module to add the packet into + --no-message disable send message scaffolding + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold query + +Query for fetching data from a blockchain + +``` +ignite scaffold query [name] [request_field1] [request_field2] ... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -d, --desc string description of the CLI to broadcast a tx with the message + -h, --help help for query + --module string module to add the query into. Default: app's main module + --paginated define if the request can be paginated + -p, --path string path of the app (default ".") + -r, --response strings response fields + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold react + +React web app template + +``` +ignite scaffold react [flags] +``` + +**Options** + +``` + -h, --help help for react + -p, --path string path to scaffold content of the React app (default "./react") + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold single + +CRUD for data stored in a single location + +``` +ignite scaffold single NAME [field]... 
[flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for single + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold type + +Type definition + +``` +ignite scaffold type NAME [field]... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for type + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold vue + +Vue 3 web app template + +``` +ignite scaffold vue [flags] +``` + +**Options** + +``` + -h, --help help for vue + -p, --path string path to scaffold content of the Vue.js app (default "./vue") + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite tools + +Tools for advanced users + +**Options** + +``` + -h, --help help for tools +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite tools ibc-relayer](#ignite-tools-ibc-relayer) - TypeScript implementation of an IBC relayer +* [ignite tools 
ibc-setup](#ignite-tools-ibc-setup) - Collection of commands to quickly setup a relayer +* [ignite tools protoc](#ignite-tools-protoc) - Execute the protoc command + + +## ignite tools ibc-relayer + +TypeScript implementation of an IBC relayer + +``` +ignite tools ibc-relayer [--] [...] [flags] +``` + +**Examples** + +``` +ignite tools ibc-relayer -- -h +``` + +**Options** + +``` + -h, --help help for ibc-relayer +``` + +**SEE ALSO** + +* [ignite tools](#ignite-tools) - Tools for advanced users + + +## ignite tools ibc-setup + +Collection of commands to quickly setup a relayer + +``` +ignite tools ibc-setup [--] [...] [flags] +``` + +**Examples** + +``` +ignite tools ibc-setup -- -h +ignite tools ibc-setup -- init --src relayer_test_1 --dest relayer_test_2 +``` + +**Options** + +``` + -h, --help help for ibc-setup +``` + +**SEE ALSO** + +* [ignite tools](#ignite-tools) - Tools for advanced users + + +## ignite tools protoc + +Execute the protoc command + +**Synopsis** + +The protoc command. You don't need to setup the global protoc include folder with -I, it's automatically handled + +``` +ignite tools protoc [--] [...] 
[flags] +``` + +**Examples** + +``` +ignite tools protoc -- --version +``` + +**Options** + +``` + -h, --help help for protoc +``` + +**SEE ALSO** + +* [ignite tools](#ignite-tools) - Tools for advanced users + + +## ignite version + +Print the current build information + +``` +ignite version [flags] +``` + +**Options** + +``` + -h, --help help for version +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + diff --git a/docs/versioned_docs/version-v0.27/08-references/02-config.md b/docs/versioned_docs/version-v0.27/08-references/02-config.md new file mode 100644 index 0000000..5cc484c --- /dev/null +++ b/docs/versioned_docs/version-v0.27/08-references/02-config.md @@ -0,0 +1,263 @@ +--- +sidebar_position: 3 +description: Primary configuration file to describe the development environment for your blockchain. +title: Configuration file +--- + +# Configuration file reference + +The `config.yml` file generated in your blockchain folder uses key-value pairs +to describe the development environment for your blockchain. + +Only a default set of parameters is provided. If more nuanced configuration is +required, you can add these parameters to the `config.yml` file. + +## Accounts + +A list of user accounts created during genesis of the blockchain. + +```yml +accounts: + - name: alice + coins: ['20000token', '200000000stake'] + - name: bob + coins: ['10000token', '100000000stake'] +``` + +Ignite uses information from `accounts` when initializing the chain with `ignite +chain init` and `ignite chain start`. In the example above Ignite will add two +accounts to the `genesis.json` file of the chain. + +`name` is a local name of a key pair associated with an account. Once the chain +is initialized and started, you will be able to use `name` when signing +transactions. 
With the configuration above, you'd be able to sign transactions +both with Alice's and Bob's accounts like so `exampled tx bank send ... --from +alice`. + +`coins` is a list of token balances for the account. If a token denomination is +in this list, it will exist in the genesis balance and will be a valid token. +When initialized with the config file above, a chain will only have two accounts +at genesis (Alice and Bob) and two native tokens (with denominations `token` and +`stake`). + +By default, every time a chain is re-initialized, Ignite will create a new key +pair for each account. So even though the account name can remain the same +(`bob`), every time the chain is reinitialized it will have a different mnemonic and address. + +If you want an account to have a specific address, provide the `address` field +with a valid bech32 address. The prefix (by default, `cosmos`) should match the +one expected by your chain. When an account is provided with an `address`, a key +pair will not be generated, because it's impossible to derive a key from an +address. An account with a given address will be added to the genesis file (with +an associated token balance), but because there is no key pair, you will not be +able to broadcast transactions from that address. This is useful when you have +generated a key pair outside of Ignite (for example, using your chain's CLI or +in an extension wallet) and want to have a token balance associated with the +address of this key pair. + +```yml +accounts: + - name: bob + coins: ['20000token', '200000000stake'] + address: cosmos1s39200s6v4c96ml2xzuh389yxpd0guk2mzn3mz +``` + +If you want an account to be initialized from a specific mnemonic, provide the +`mnemonic` field with a valid mnemonic. A private key, a public key and an +address will be derived from a mnemonic. 
+ +```yml +accounts: + - name: bob + coins: ['20000token', '200000000stake'] + mnemonic: cargo ramp supreme review change various throw air figure humble soft steel slam pole betray inhale already dentist enough away office apple sample glue +``` + +You cannot have both `address` and `mnemonic` defined for a single account. + +Some accounts are used as validator accounts (see `validators` section). +Validator accounts cannot have an `address` field, because Ignite needs to be +able to derive a private key (either from a random mnemonic or from a specific +one provided in the `mnemonic` field). Validator accounts should have enough +tokens of the staking denomination for self-delegation. + +By default, the `alice` account is used as a validator account, its key is +derived from a mnemonic generated randomly at genesis, the staking denomination +is `stake`, and this account has enough `stake` for self-delegation. + +If your chain is using its own +[cointype](https://github.com/satoshilabs/slips/blob/master/slip-0044.md), you +can use the `cointype` field to provide the integer value + +```yml +accounts: + - name: bob + coins: ['20000token', '200000000stake'] + cointype: 7777777 +``` + +## Validators + +Commands like `ignite chain init` and `ignite chain serve` initialize and launch +a validator node for development purposes. + +```yml +validators: + - name: alice + bonded: '100000000stake' +``` + +`name` refers to key name in the `accounts` list. + +`bonded` is the self-delegation amount of a validator. The `bonded` amount +should not be lower than `1000000` nor higher than the account's +balance in the `account` list. + +Validators store their node configuration files in the data directory. By +default, Ignite uses the name of the project as the name of the data directory, +for example, `$HOME/.example/`. To use a different path for the data directory +you can customize the `home` property. + +Configuration in the data directory is reset frequently by Ignite. 
To persist +some changes to configuration files you can use `app`, `config` and `client` +properties that correspond to `$HOME/.example/config/app.toml`, +`$HOME/.example/config/config.toml` and `$HOME/.example/config/client.toml`. + +```yml +validators: + - name: alice + bonded: '100000000stake' + home: "~/.mychain" + app: + pruning: "nothing" + config: + moniker: "mychain" + client: + output: "json" +``` + +To see which properties are available for `config.toml`, `app.toml` and +`client.toml`, initialize a chain with `ignite chain init` and open the file you +want to know more about. + +Currently, Ignite starts only one validator node, so the first item in the +`validators` list is used (the rest is ignored). Support for multiple validators +is in progress. + +## Build + +The `build` property lets you customize how Ignite builds your chain's binary. + +By default, Ignite builds the `main` package from `cmd/PROJECT_NAME/main.go`. If +you have more than one `main` package in your project, or you have renamed the +directory, use the `main` property to provide the path to the `main` Go package: + +```yml +build: + main: cmd/hello/cmd +``` + +Ignite compiles your project into a binary and uses the project's name with a +`d` suffix as name for the binary. To customize the binary name use the `binary` +property: + +```yml +build: + binary: "helloworldd" +``` + +To customize the linker flags used in the build process: + +```yml +build: + ldflags: [ "-X main.Version=development", "-X main.Date=01/05/2022T19:54" ] +``` + +By default, custom protocol buffer (proto) files are located in the `proto` +directory. If your project keeps proto files in a different directory, you +should tell Ignite about this: + +```yml +build: + proto: + path: "myproto" +``` + +Ignite comes with required third-party proto out of the box. Ignite also looks +into `third_party/proto` and `proto_vendor` directories for extra proto files. 
+If your project keeps third-party proto files in a different directory, you +should tell Ignite about this: + +```yml +build: + proto: + third_party_paths: ["my_third_party/proto"] +``` + +## Faucet + +The faucet service sends tokens to addresses. + +```yml +faucet: + name: bob + coins: ["5token", "100000stake"] +``` + +`name` refers to a key name in the `accounts` list. This is a required property. + +`coins` is the amount of tokens that will be sent to a user by the faucet. This +is a required property. + +`coins_max` is a maximum amount of tokens that can be sent to a single address. +To reset the token limit use the `rate_limit_window` property (in seconds). + +By default, the faucet works on port `4500`. To use a different port number use +the `port` property. + +```yml +faucet: + name: faucet + coins: [ "100token", "5foo" ] + coins_max: [ "2000token", "1000foo" ] + port: 4500 + rate_limit_window: 3600 +``` + +## Genesis + +Genesis file is the initial block in the blockchain. It is required to launch a +blockchain, because it contains important information like token balances, and +modules' state. Genesis is stored in `$DATA_DIR/config/genesis.json`. + +Since the genesis file is reinitialized frequently during development, you can +set persistent options in the `genesis` property: + +```yml +genesis: + app_state: + staking: + params: + bond_denom: "denom" +``` + +To know which properties a genesis file supports, initialize a chain and look up +the genesis file in the data directory. + +## Client code generation + +Ignite can generate client-side code for interacting with your chain with the +`ignite generate` set of commands. Use the following properties to customize the +paths where the client-side code is generated. 
+ +```yml +client: + openapi: + path: "docs/static/openapi.yml" + typescript: + path: "ts-client" + composables: + path: "vue/src/composables" + hooks: + path: "react/src/hooks" +``` diff --git a/docs/versioned_docs/version-v0.27/08-references/_category_.json b/docs/versioned_docs/version-v0.27/08-references/_category_.json new file mode 100644 index 0000000..3bcc076 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/08-references/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "References", + "link": null, + "collapsed": false +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.27/plugins/01-using-plugins.md b/docs/versioned_docs/version-v0.27/plugins/01-using-plugins.md new file mode 100644 index 0000000..ed3b261 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/plugins/01-using-plugins.md @@ -0,0 +1,43 @@ +--- +description: Using and Developing plugins +--- + +# Using Plugins + +Ignite plugins offer a way to extend the functionality of the Ignite CLI. There +are two core concepts within plugins: `Commands` and `Hooks`. Where `Commands` +extend the cli's functionality, and `Hooks` extend existing command +functionality. + +Plugins are registered in an Ignite scaffolded Blockchain project through the +`plugins.yml`, or globally through `$HOME/.ignite/plugins/plugins.yml`. + +To use a plugin within your project, execute the following command inside the +project directory: + +```sh +ignite plugin add github.com/project/cli-plugin +``` + +The plugin will be available only when running `ignite` inside the project +directory. + +To use a plugin globally on the other hand, execute the following command: + +```sh +ignite plugin add -g github.com/project/cli-plugin +``` + +The command will compile the plugin and make it immediately available to the +`ignite` command lists. + +## Listing installed plugins + +When in an ignite scaffolded blockchain you can use the command `ignite plugin +list` to list all plugins and their statuses. 
+ +## Updating plugins + +When a plugin in a remote repository releases updates, running `ignite plugin +update <path/to/plugin>` will update a specific plugin declared in your +project's `config.yml`. diff --git a/docs/versioned_docs/version-v0.27/plugins/02-dev-plugins.md b/docs/versioned_docs/version-v0.27/plugins/02-dev-plugins.md new file mode 100644 index 0000000..819025e --- /dev/null +++ b/docs/versioned_docs/version-v0.27/plugins/02-dev-plugins.md @@ -0,0 +1,244 @@ +--- +description: Using and Developing plugins +--- + +# Developing Plugins + +It's easy to create a plugin and use it immediately in your project. First +choose a directory outside your project and run: + +```sh +$ ignite plugin scaffold my-plugin +``` + +This will create a new directory `my-plugin` that contains the plugin's code, +and will output some instructions about how to use your plugin with the +`ignite` command. Indeed, a plugin path can be a local directory, which has +several benefits: + +- you don't need to use a git repository during the development of your plugin. +- the plugin is recompiled each time you run the `ignite` binary in your +project, if the source files are older than the plugin binary. + +Thus, the plugin development workflow is as simple as: + +1. scaffold a plugin with `ignite plugin scaffold my-plugin` +2. add it to your config via `ignite plugin add -g /path/to/my-plugin` +3. update plugin code +4. run `ignite my-plugin` binary to compile and run the plugin. +5. go back to 3. + +Once your plugin is ready, you can publish it to a git repository, and the +community can use it by calling `ignite plugin add github.com/foo/my-plugin`. + +Now let's detail how to update your plugin's code. + +## The plugin interface + +The `ignite` plugin system uses `github.com/hashicorp/go-plugin` under the hood, +which requires implementing a predefined interface: + +```go title=ignite/services/plugin/interface.go +// An ignite plugin must implement the Plugin interface. 
+type Interface interface { + // Manifest declares the plugin's Command(s) and Hook(s). + Manifest() (Manifest, error) + + // Execute will be invoked by ignite when a plugin Command is executed. + // It is global for all commands declared in Manifest, if you have declared + // multiple commands, use cmd.Path to distinguish them. + Execute(cmd ExecutedCommand) error + + // ExecuteHookPre is invoked by ignite when a command specified by the Hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + ExecuteHookPre(hook ExecutedHook) error + + // ExecuteHookPost is invoked by ignite when a command specified by the hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + ExecuteHookPost(hook ExecutedHook) error + + // ExecuteHookCleanUp is invoked by ignite when a command specified by the + // hook path is invoked. Unlike ExecuteHookPost, it is invoked regardless of + // execution status of the command and hooks. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + ExecuteHookCleanUp(hook ExecutedHook) error +} +``` + +The code scaffolded already implements this interface, you just need to update +the methods' body. + + +## Defining plugin's manifest + +Here is the `Manifest` struct : + +```go title=ignite/services/plugin/interface.go +type Manifest struct { + Name string + // Commands contains the commands that will be added to the list of ignite + // commands. Each commands are independent, for nested commands use the + // inner Commands field. + Commands []Command + // Hooks contains the hooks that will be attached to the existing ignite + // commands. + Hooks []Hook + // SharedHost enables sharing a single plugin server across all running instances + // of a plugin. 
Useful if a plugin adds or extends long running commands + // + // Example: if a plugin defines a hook on `ignite chain serve`, a plugin server is instantiated + // when the command is run. Now if you want to interact with that instance from commands + // defined in that plugin, you need to enable `SharedHost`, or else the commands will just + // instantiate separate plugin servers. + // + // When enabled, all plugins of the same `Path` loaded from the same configuration will + // attach its rpc client to an existing rpc server. + // + // If a plugin instance has no other running plugin servers, it will create one and it will be the host. + SharedHost bool `yaml:"shared_host"` +} +``` + +In your plugin's code, the `Manifest` method already returns a predefined +`Manifest` struct as an example. Adapt it according to your need. + +If your plugin adds one or more new commands to `ignite`, feed the `Commands` +field. + +If your plugin adds features to existing commands, feed the `Hooks` field. + +Of course a plugin can declare `Commands` *and* `Hooks`. + +A plugin may also share a host process by setting `SharedHost` to `true`. +`SharedHost` is desirable if a plugin hooks into, or declares long running commands. +Commands executed from the same plugin context interact with the same plugin server. +This allows all executing commands to share the same server instance, giving shared execution context. + +## Adding new command + +Plugin commands are custom commands added to the ignite cli by a registered +plugin. Commands can be of any path not defined already by ignite. All plugin +commands will extend the command root `ignite`. 
+ +For instance, let's say your plugin adds a new `oracle` command to `ignite +scaffold`, the `Manifest()` method will look like : + +```go +func (p) Manifest() (plugin.Manifest, error) { + return plugin.Manifest{ + Name: "oracle", + Commands: []plugin.Command{ + { + Use: "oracle [name]", + Short: "Scaffold an oracle module", + Long: "Long description goes here...", + // Optionnal flags is required + Flags: []plugin.Flag{ + {Name: "source", Type: plugin.FlagTypeString, Usage: "the oracle source"}, + }, + // Attach the command to `scaffold` + PlaceCommandUnder: "ignite scaffold", + }, + }, + }, nil +} +``` + +To update the plugin execution, you have to change the plugin `Execute` command, +for instance : + +```go +func (p) Execute(cmd plugin.ExecutedCommand) error { + if len(cmd.Args) == 0 { + return fmt.Errorf("oracle name missing") + } + var ( + name = cmd.Args[0] + source, _ = cmd.Flags().GetString("source") + ) + // Read chain information + c, err := getChain(cmd) + if err != nil { + return err + } + + //... +} +``` + +Then, run `ignite scaffold oracle` to execute the plugin. + +## Adding hooks + +Plugin `Hooks` allow existing ignite commands to be extended with new +functionality. Hooks are useful when you want to streamline functionality +without needing to run custom scripts after or before a command has been run. +this can streamline processes that where once error prone or forgotten all +together. + +The following are hooks defined which will run on a registered `ignite` commands + +| Name | Description | +| -------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- | +| Pre | Runs before a commands main functionality is invoked in the `PreRun` scope | +| Post | Runs after a commands main functionality is invoked in the `PostRun` scope | +| Clean Up | Runs after a commands main functionality is invoked. 
if the command returns an error it will run before the error is returned to guarantee execution. | + +*Note*: If a hook causes an error in the pre step the command will not run +resulting in `post` and `clean up` not executing. + +The following is an example of a `hook` definition. + +```go +func (p) Manifest() (plugin.Manifest, error) { + return plugin.Manifest{ + Name: "oracle", + Hooks: []plugin.Hook{ + { + Name: "my-hook", + PlaceHookOn: "ignite chain build", + }, + }, + }, nil +} + +func (p) ExecuteHookPre(hook plugin.ExecutedHook) error { + switch hook.Name { + case "my-hook": + fmt.Println("I'm executed before ignite chain build") + default: + return fmt.Errorf("hook not defined") + } + return nil +} + +func (p) ExecuteHookPost(hook plugin.ExecutedHook) error { + switch hook.Name { + case "my-hook": + fmt.Println("I'm executed after ignite chain build (if no error)") + default: + return fmt.Errorf("hook not defined") + } + return nil +} + +func (p) ExecuteHookCleanUp(hook plugin.ExecutedHook) error { + switch hook.Name { + case "my-hook": + fmt.Println("I'm executed after ignite chain build (regardless errors)") + default: + return fmt.Errorf("hook not defined") + } + return nil +} +``` + +Above we can see a similar definition to `Command` where a hook has a `Name` and +a `PlaceHookOn`. You'll notice that the `Execute*` methods map directly to each +life cycle of the hook. All hooks defined within the plugin will invoke these +methods. 
diff --git a/docs/versioned_docs/version-v0.27/plugins/_category_.json b/docs/versioned_docs/version-v0.27/plugins/_category_.json new file mode 100644 index 0000000..6596f50 --- /dev/null +++ b/docs/versioned_docs/version-v0.27/plugins/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Plugins", + "position": 7, + "link": null +} diff --git a/docs/versioned_docs/version-v28/01-welcome/01-index.md b/docs/versioned_docs/version-v28/01-welcome/01-index.md new file mode 100644 index 0000000..5a366aa --- /dev/null +++ b/docs/versioned_docs/version-v28/01-welcome/01-index.md @@ -0,0 +1,60 @@ +--- +slug: /welcome +--- + +import ProjectsTable from '@site/src/components/ProjectsTable'; + +# Introduction to Ignite CLI: Your Gateway to Blockchain Innovation + +[Ignite CLI](https://github.com/ignite/cli) is a powerful tool that simplifies the journey of building, testing, and launching diverse blockchain applications. Developed on top of the [Cosmos SDK](https://docs.cosmos.network), the leading framework for blockchain technology, Ignite CLI is pivotal in streamlining the development process. It enables developers to focus on the unique aspects of their projects, from DeFi and NFTs to supply chain solutions and smart contracts. +Beyond these, Ignite has been instrumental in a wide array of blockchain applications, ranging from VPNs and gaming platforms to blogs, oracle systems, and innovative consensus mechanisms. This demonstrates its versatility in supporting a broad spectrum of blockchain-based solutions. + +## Key Features of Ignite CLI + +- **Simplified Blockchain Development:** Ignite CLI, leveraging Cosmos SDK, makes building sovereign application-specific blockchains intuitive and efficient. +- **Comprehensive Scaffolding:** Easily scaffold modules, messages, CRUD operations, IBC packets, and more, expediting the development of complex functionalities. 
+- **Development with Live Reloading:** Start and test your blockchain node with real-time updates, enhancing your development workflow. +- **Frontend Flexibility:** Utilize pre-built templates for Vue.js, React, Typescript or Go, catering to diverse frontend development needs. +- **Inter-Blockchain Communication (IBC):** Seamlessly connect and interact with other blockchains using an integrated IBC relayer, a key feature of the Cosmos SDK. +- **CometBFT Integration:** Built with the CometBFT consensus engine (formerly Tendermint), ensuring robust consensus mechanisms in your blockchain solutions. +- **Cross-Domain Applications:** Ignite is perfectly suited for developing a diverse array of use cases across various sectors. These include DeFi, NFTs, supply chain management, smart contracts (both EVM and WASM), and decentralized exchanges (DEXes). + +## Install Ignite CLI + +Get started with Ignite CLI by running this simple installation command: + +``` +curl https://get.ignite.com/cli! | bash +``` + +## Embracing the Cosmos Ecosystem + +Ignite CLI is your entry point into the vibrant Cosmos ecosystem, a hub of innovation where you can explore a range of applications, from wallets and explorers to smart contracts and DEXes, all powered by CometBFT and the Cosmos SDK. +This ecosystem is home to over [$50 billion worth of blockchain projects](https://cosmos.network/ecosystem/tokens/), showcasing the scalability and versatility of the technologies at play. + +## Projects using Tendermint and Cosmos SDK + +Many projects already showcase the Tendermint BFT consensus engine and the Cosmos SDK. Explore +the [Cosmos ecosystem](https://cosmos.network/ecosystem/apps) to discover a wide variety of apps, blockchains, wallets, +and explorers that are built in the Cosmos ecosystem. 
+ +## Projects building with Ignite CLI + +<ProjectsTable data={[ + { name: "Stride Labs", logo: "img/logo/stride.svg"}, + { name: "KYVE Network", logo: "img/logo/kyve.svg"}, + { name: "Umee", logo: "img/logo/umee.svg"}, + { name: "MediBloc Core", logo: "img/logo/medibloc.svg"}, + { name: "Cudos", logo: "img/logo/cudos.svg"}, + { name: "Firma Chain", logo: "img/logo/firmachain.svg"}, + { name: "BitCanna", logo: "img/logo/bitcanna.svg"}, + { name: "Source Protocol", logo: "img/logo/source.svg"}, + { name: "Sonr", logo: "img/logo/sonr.svg"}, + { name: "Neutron", logo: "img/logo/neutron.svg"}, + { name: "OKP4 Blockchain", logo: "img/logo/okp4.svg"}, + { name: "Dymension Hub", logo: "img/logo/dymension.svg"}, + { name: "Electra Blockchain", logo: "img/logo/electra.svg"}, + { name: "OLLO Station", logo: "img/logo/ollostation.svg"}, + { name: "Mun", logo: "img/logo/mun.svg"}, + { name: "Aura Network", logo: "img/logo/aura.svg"}, +]}/> diff --git a/docs/versioned_docs/version-v28/01-welcome/02-install.md b/docs/versioned_docs/version-v28/01-welcome/02-install.md new file mode 100644 index 0000000..178200b --- /dev/null +++ b/docs/versioned_docs/version-v28/01-welcome/02-install.md @@ -0,0 +1,114 @@ +--- +sidebar_position: 1 +description: Steps to install Ignite CLI on your local computer. +--- + +# Install Ignite CLI + +You can run [Ignite CLI](https://github.com/ignite/cli) in a web-based Gitpod IDE or you can install Ignite CLI on your +local computer. + +## Prerequisites + +Be sure you have met the prerequisites before you install and use Ignite CLI. + +### Operating systems + +Ignite CLI is supported for the following operating systems: + +- GNU/Linux +- macOS +- Windows Subsystem for Linux (WSL) + +### Go + +Ignite CLI is written in the Go programming language. 
To use Ignite CLI on a local system: + +- Install [Go](https://golang.org/doc/install) (**version 1.23** or higher) +- Ensure the Go environment variables are [set properly](https://golang.org/doc/gopath_code#GOPATH) on your system + +## Verify your Ignite CLI version + +To verify the version of Ignite CLI you have installed, run the following command: + +```bash +ignite version +``` + +## Installing Ignite CLI + +To install the latest version of the `ignite` binary use the following command. + +```bash +curl https://get.ignite.com/cli! | bash +``` + +This command invokes `curl` to download the installation script and pipes the output to `bash` to perform the +installation. The `ignite` binary is installed in `/usr/local/bin`. + +To learn more or customize the installation process, see the [installer docs](https://github.com/ignite/installer) on +GitHub. + +### Write permission + +Ignite CLI installation requires write permission to the `/usr/local/bin/` directory. If the installation fails because +you do not have write permission to `/usr/local/bin/`, run the following command: + +```bash +curl https://get.ignite.com/cli | bash +``` + +Then run this command to move the `ignite` executable to `/usr/local/bin/`: + +```bash +sudo mv ignite /usr/local/bin/ +``` + +On some machines, a permissions error occurs: + +```bash +mv: rename ./ignite to /usr/local/bin/ignite: Permission denied +============ +Error: mv failed +``` + +In this case, use sudo before `curl` and before `bash`: + +```bash +sudo curl https://get.ignite.com/cli | sudo bash +``` + +## Upgrading your Ignite CLI installation {#upgrade} + +Before you install a new version of Ignite CLI, remove all existing Ignite CLI installations. + +To remove the current Ignite CLI installation: + +1. On your terminal window, press `Ctrl+C` to stop the chain that you started with `ignite chain serve`. +2. Remove the Ignite CLI binary with `rm $(which ignite)`. 
+ Depending on your user permissions, run the command with or without `sudo`. +3. Repeat this step until all `ignite` installations are removed from your system. + +After all existing Ignite CLI installations are removed, follow the [Installing Ignite CLI](#installing-ignite-cli) +instructions. + +For details on version features and changes, see +the [changelog.md](https://github.com/ignite/cli/blob/main/changelog.md) +in the repo. + +## Build from source + +To experiment with the source code, you can build from source: + +```bash +git clone https://github.com/ignite/cli --depth=1 +cd cli && make install +``` + +## Summary + +- Verify the prerequisites. +- To set up a local development environment, install Ignite CLI locally on your computer. +- Install Ignite CLI by fetching the binary using cURL or by building from source. +- The latest version is installed by default. You can install previous versions of the precompiled `ignite` binary. +- Stop the chain and remove existing versions before installing a new version. diff --git a/docs/versioned_docs/version-v28/01-welcome/_category_.json b/docs/versioned_docs/version-v28/01-welcome/_category_.json new file mode 100644 index 0000000..ac625fc --- /dev/null +++ b/docs/versioned_docs/version-v28/01-welcome/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Welcome", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/02-guide/00-introduction.md b/docs/versioned_docs/version-v28/02-guide/00-introduction.md new file mode 100644 index 0000000..ecdc5ff --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/00-introduction.md @@ -0,0 +1,37 @@ +--- +sidebar_position: 0 +title: Introduction +slug: /guide +--- + +# Introduction to Ignite's Developer Tutorials + +Welcome to the Ignite Developer Tutorials, your gateway to mastering blockchain development. 
These comprehensive tutorials are designed for learners at all levels, from beginners to seasoned developers, offering both foundational knowledge and hands-on experience. + +## What You Will Learn + +- **Getting Started with Ignite CLI**: Install the Ignite CLI and set up your development environment. This foundational step is necessary for all the tutorials that follow. + +- **Create and Run Your First Blockchain**: Learn to create and run your own blockchain, understanding how to start and manage a node locally for development purposes. + +- **Hello World Tutorial**: Engage in the excitement of blockchain development by making your blockchain respond with "Hello, World!" This includes learning to scaffold a Cosmos SDK query and modify keeper methods. + +- **Blog Tutorial**: Step into decentralized applications (dApps) with the ability to write and read blog posts on your blockchain. This tutorial covers everything from defining new types in protocol buffer files to writing and reading data from the store. + +- **DeFi Loan Tutorial**: Dive into Decentralized Finance (DeFi) by building a blockchain for managing loans. Gain insights into CRUD logic, module method integration, and token transaction management. + +- **Token Factory Tutorial**: Master the creation and management of digital assets on your blockchain by building a token factory module, learning module development, CRUD operations without delete functionality, and native denomination integration. + +- **Inter-blockchain Communication (IBC) Basics**: Explore the interconnected world of blockchains with the IBC protocol. Learn how to scaffold an IBC-enabled module, manage IBC packets, and configure a built-in IBC relayer. + +- **Interchange Module**: Advance your IBC knowledge by building a module for decentralized token exchanges and order books. + +- **Debugging a Blockchain**: Develop essential skills in debugging to maintain efficient and effective blockchain development. 
+ +- **Running in a Docker Container**: Learn how to use Docker to containerize your blockchain environment, ensuring consistency and portability across development stages. + +- **Chain Simulation**: Understand the importance and method of simulating blockchain environments for testing and validating functionality under various scenarios. + +Each tutorial builds upon the previous, enhancing your understanding and skills progressively. By completing these tutorials, you will gain a robust understanding of blockchain principles, the Cosmos SDK, and practical experience in developing and managing blockchain projects. + +Embark on your journey to become a proficient blockchain developer with Ignite's Developer Tutorials! diff --git a/docs/versioned_docs/version-v28/02-guide/02-getting-started.md b/docs/versioned_docs/version-v28/02-guide/02-getting-started.md new file mode 100644 index 0000000..fb2d68f --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/02-getting-started.md @@ -0,0 +1,150 @@ +--- +sidebar_position: 2 +--- + +# Getting started + +In this tutorial, we will be using Ignite CLI to create a new blockchain. Ignite +CLI is a command line interface that allows users to quickly and easily create +blockchain networks. By using Ignite CLI, we can quickly create a new blockchain +without having to manually set up all the necessary components. + +Once we have created our blockchain with Ignite CLI, we will take a look at the +directory structure and files that were created. This will give us an +understanding of how the blockchain is organized and how the different +components of the blockchain interact with each other. + +By the end of this tutorial, you will have a basic understanding of how to use +Ignite CLI to create a new blockchain, and you will have a high-level +understanding of the directory structure and files that make up a blockchain. +This knowledge will be useful as you continue to explore the world of blockchain +development. 
+ +## Creating a new blockchain + +To create a new blockchain project with Ignite, you will need to run the +following command: + +``` +ignite scaffold chain example +``` + +The `ignite scaffold chain` command will create a new blockchain in a new +directory `example`. + +The new blockchain is built using the Cosmos SDK framework and imports several +standard modules to provide a range of functionality. These modules include +`staking`, which enables a delegated Proof-of-Stake consensus mechanism, `bank` +for facilitating fungible token transfers between accounts, and `gov` for +on-chain governance. In addition to these modules, the blockchain also imports +other modules from the Cosmos SDK framework. + +The `example` directory contains the generated files and directories that make +up the structure of a Cosmos SDK blockchain. This directory includes files for +the chain's configuration, application logic, and tests, among others. It +provides a starting point for developers to quickly set up a new Cosmos SDK +blockchain and build their desired functionality on top of it. + +By default, Ignite creates a new empty custom module with the same name as the +blockchain being created (in this case, `example`) in the `x/` directory. This +module doesn't have any functionality by itself, but can serve as a starting +point for building out the features of your application. If you don't want to +create this module, you can use the `--no-module` flag to skip it. + +## Directory structure + +In order to understand what the Ignite CLI has generated for your project, you +can inspect the contents of the `example/` directory. + +The `app/` directory contains the files that connect the different parts of the +blockchain together. The most important file in this directory is `app.go`, +which includes the type definition of the blockchain and functions for creating +and initializing it. 
This file is responsible for wiring together the various +components of the blockchain and defining how they will interact with each +other. + +The `cmd/` directory contains the main package responsible for the command-line +interface (CLI) of the compiled binary. This package defines the commands that +can be run from the CLI and how they should be executed. It is an important part +of the blockchain project as it provides a way for developers and users to +interact with the blockchain and perform various tasks, such as querying the +blockchain state or sending transactions. + +The `docs/` directory is used for storing project documentation. By default, +this directory includes an OpenAPI specification file, which is a +machine-readable format for defining the API of a software project. The OpenAPI +specification can be used to automatically generate human-readable documentation +for the project, as well as provide a way for other tools and services to +interact with the API. The `docs/` directory can be used to store any additional +documentation that is relevant to the project. + +The `proto/` directory contains protocol buffer files, which are used to +describe the data structure of the blockchain. Protocol buffers are a language- +and platform-neutral mechanism for serializing structured data, and are often +used in the development of distributed systems, such as blockchain networks. The +protocol buffer files in the `proto/` directory define the data structures and +messages that are used by the blockchain, and are used to generate code for +various programming languages that can be used to interact with the blockchain. +In the context of the Cosmos SDK, protocol buffer files are used to define the +specific types of data that can be sent and received by the blockchain, as well +as the specific RPC endpoints that can be used to access the blockchain's +functionality. + +The `testutil/` directory contains helper functions that are used for testing. 
+These functions provide a convenient way to perform common tasks that are needed +when writing tests for the blockchain, such as creating test accounts, +generating transactions, and checking the state of the blockchain. By using the +helper functions in the `testutil/` directory, developers can write tests more +quickly and efficiently, and can ensure that their tests are comprehensive and +effective. + +The `x/` directory contains custom Cosmos SDK modules that have been added to +the blockchain. Standard Cosmos SDK modules are pre-built components that +provide common functionality for Cosmos SDK-based blockchains, such as support +for staking and governance. Custom modules, on the other hand, are modules that +have been developed specifically for the blockchain project and provide +project-specific functionality. + +The `config.yml` file is a configuration file that can be used to customize the +blockchain during development. This file includes settings that control various +aspects of the blockchain, such as the network's ID, account balances, and the +node parameters. + +The `.github` directory contains a GitHub Actions workflow that can be used to +automatically build and release a blockchain binary. GitHub Actions is a tool +that allows developers to automate their software development workflows, +including building, testing, and deploying their projects. The workflow in the +`.github` directory is used to automate the process of building the blockchain +binary and releasing it, which can save time and effort for developers. + +The `readme.md` file is a readme file that provides an overview of the +blockchain project. This file typically includes information such as the +project's name and purpose, as well as instructions on how to build and run the +blockchain. By reading the `readme.md` file, developers and users can quickly +understand the purpose and capabilities of the blockchain project and get +started using it. 
+ +## Starting a blockchain node + +To start a blockchain node in development, you can run the following command: + +``` +ignite chain serve +``` + +The `ignite chain serve` command is used to start a blockchain node in +development mode. It first compiles and installs the binary using the +`ignite chain build` command, then initializes the blockchain's data directory +for a single validator using the `ignite chain init` command. After that, it +starts the node locally and enables automatic code reloading so that changes to +the code can be reflected in the running blockchain without having to restart +the node. This allows for faster development and testing of the blockchain. + +Congratulations! 🥳 You have successfully created a brand-new Cosmos blockchain +using the Ignite CLI. This blockchain uses the delegated proof of stake (DPoS) +consensus algorithm, and comes with a set of standard modules for token +transfers, governance, and inflation. Now that you have a basic understanding of +your Cosmos blockchain, it's time to start building custom functionality. In the +following tutorials, you will learn how to build custom modules and add new +features to your blockchain, allowing you to create a unique and powerful +decentralized application. diff --git a/docs/versioned_docs/version-v28/02-guide/03-hello-world.md b/docs/versioned_docs/version-v28/02-guide/03-hello-world.md new file mode 100644 index 0000000..f0ef4c2 --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/03-hello-world.md @@ -0,0 +1,100 @@ +--- +description: Build your first blockchain and your first Cosmos SDK query. +title: Hello World +--- + +# "Hello world!" Blockchain Tutorial with Ignite CLI + +**Introduction** + +In this tutorial, you'll build a simple blockchain using Ignite CLI that responds to a custom query with "Hello %s!", where "%s" is a name passed in the query. +This will enhance your understanding of creating custom queries in a Cosmos SDK blockchain. 
+ +## Setup and Scaffold + +1. **Create a New Blockchain:** + +```bash +ignite scaffold chain hello +``` + +2. **Navigate to the Blockchain Directory:** + +```bash +cd hello +``` + +## Adding a Custom Query + +- **Scaffold the Query:** + +```bash +ignite scaffold query say-hello name --response name +``` + +This command generates code for a new query, `say-hello`, which accepts a name, an input, and returns it in the response. + +- **Understanding the Scaffolded Code:** + + - `proto/hello/hello/query.proto`: Defines the request and response structure. + - `x/hello/client/cli/query_say_hello.go`: Contains the CLI commands for the query. + - `x/hello/keeper/query_say_hello.go`: Houses the logic for the query response. + + +## Customizing the Query Response + +In the Cosmos SDK, queries are requests for information from the blockchain, used to access data like the ledger's current state or transaction details. While the SDK offers several built-in query methods, developers can also craft custom queries for specific data retrieval or complex operations. + +- **Modify `query_say_hello.go`:** + +Update the `SayHello` function in `x/hello/keeper/query_say_hello.go` to return a personalized greeting query. + +```go title="x/hello/keeper/query_say_hello.go" +package keeper + +import ( + "context" + "fmt" + + "hello/x/hello/types" + + sdk "github.com/cosmos/cosmos-sdk/types" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func (q queryServer) SayHello(ctx context.Context, req *types.QuerySayHelloRequest) (*types.QuerySayHelloResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + // Validation and Context unwrapping + sdkCtx := sdk.UnwrapSDKContext(ctx) + + _ = sdkCtx + // Custom Response + return &types.QuerySayHelloResponse{Name: fmt.Sprintf("Hello %s!", req.Name)}, nil +} +``` + +## Running the Blockchain + +1. **Start the Blockchain:** + +```bash +ignite chain serve +``` + +2. 
**Test the Query:** + +Use the command-line interface to submit a query. + +``` +hellod q hello say-hello world +``` + +Expect a response: `Hello world!` + +## Conclusion + +Congratulations! 🎉 You've successfully created a blockchain module with a custom query using Ignite CLI. Through this tutorial, you've learned how to scaffold a chain, add a custom query, and modify the logic for personalized responses. This experience illustrates the power of Ignite CLI in streamlining blockchain development and the importance of understanding the underlying code for customization. \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/02-guide/04-blog.md b/docs/versioned_docs/version-v28/02-guide/04-blog.md new file mode 100644 index 0000000..95418c3 --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/04-blog.md @@ -0,0 +1,415 @@ +--- +description: Explore the essentials of module development while creating a dynamic blogging platform on your blockchain, where users can seamlessly submit and access blog posts, gaining practical experience in decentralized application functionalities. +title: Blog tutorial +--- + +# Build a Blog on a Blockchain with Ignite CLI + +## Introduction + +This tutorial guides you through creating a blog application as a Cosmos SDK blockchain using Ignite CLI. You'll learn how to set up types, messages, queries, and write logic for creating, reading, updating, and deleting blog posts. + +## Creating the Blog Blockchain + +1. **Initialize the Blockchain:** + +```bash +ignite scaffold chain blog +cd blog +``` + +2. **Define the Post Type:** + +```bash +ignite scaffold type post title body creator id:uint +``` +This step creates a Post type with title (string), body (string), creator (string), and id (unsigned integer) fields. + +## Implementing CRUD operations + +**Creating Posts** + +1. 
**Scaffold Create Message** + +```bash +ignite scaffold message create-post title body --response id:uint +``` + +This message allows users to create posts with a title and body. + +2. **Append Posts to the Store:** + +Create the file `x/blog/keeper/post.go`. + +Implement `AppendPost` and the following functions in `x/blog/keeper/post.go` to add posts to the store. + +```go title="x/blog/keeper/post.go" +package keeper + +import ( + "encoding/binary" + + "cosmossdk.io/store/prefix" + "github.com/cosmos/cosmos-sdk/runtime" + sdk "github.com/cosmos/cosmos-sdk/types" + + "blog/x/blog/types" +) + +func (k Keeper) AppendPost(ctx sdk.Context, post types.Post) uint64 { + count := k.GetPostCount(ctx) + post.Id = count + storeAdapter := runtime.KVStoreAdapter(k.storeService.OpenKVStore(ctx)) + store := prefix.NewStore(storeAdapter, types.KeyPrefix(types.PostKey)) + appendedValue := k.cdc.MustMarshal(&post) + store.Set(GetPostIDBytes(post.Id), appendedValue) + k.SetPostCount(ctx, count+1) + return count +} + +func (k Keeper) GetPostCount(ctx sdk.Context) uint64 { + storeAdapter := runtime.KVStoreAdapter(k.storeService.OpenKVStore(ctx)) + store := prefix.NewStore(storeAdapter, []byte{}) + byteKey := types.KeyPrefix(types.PostCountKey) + bz := store.Get(byteKey) + if bz == nil { + return 0 + } + return binary.BigEndian.Uint64(bz) +} + +func GetPostIDBytes(id uint64) []byte { + bz := make([]byte, 8) + binary.BigEndian.PutUint64(bz, id) + return bz +} + +func (k Keeper) SetPostCount(ctx sdk.Context, count uint64) { + storeAdapter := runtime.KVStoreAdapter(k.storeService.OpenKVStore(ctx)) + store := prefix.NewStore(storeAdapter, []byte{}) + byteKey := types.KeyPrefix(types.PostCountKey) + bz := make([]byte, 8) + binary.BigEndian.PutUint64(bz, count) + store.Set(byteKey, bz) +} + +func (k Keeper) GetPost(ctx sdk.Context, id uint64) (val types.Post, found bool) { + storeAdapter := runtime.KVStoreAdapter(k.storeService.OpenKVStore(ctx)) + store := prefix.NewStore(storeAdapter, 
types.KeyPrefix(types.PostKey)) + b := store.Get(GetPostIDBytes(id)) + if b == nil { + return val, false + } + k.cdc.MustUnmarshal(b, &val) + return val, true +} +``` + +3. **Add Post key prefix:** + +Add the `PostKey` and `PostCountKey` functions to the `x/blog/types/keys.go` file: + +```go title="x/blog/types/keys.go" + // PostKey is used to uniquely identify posts within the system. + // It will be used as the beginning of the key for each post, followed by their unique ID + PostKey = "Post/value/" + + // This key will be used to keep track of the ID of the latest post added to the store. + PostCountKey = "Post/count/" +``` + +4. **Update Create Post:** + +Update the `x/blog/keeper/msg_server_create_post.go` file with the `CreatePost` function: + +```go title="x/blog/keeper/msg_server_create_post.go" +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "blog/x/blog/types" +) + +func (k msgServer) CreatePost(goCtx context.Context, msg *types.MsgCreatePost) (*types.MsgCreatePostResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + var post = types.Post{ + Creator: msg.Creator, + Title: msg.Title, + Body: msg.Body, + } + id := k.AppendPost( + ctx, + post, + ) + return &types.MsgCreatePostResponse{ + Id: id, + }, nil +} +``` + +**Updating Posts** + +1. **Scaffold Update Message:** + +```bash +ignite scaffold message update-post title body id:uint +``` + +This command allows for updating existing posts specified by their ID. + +2. **Update Logic** + +Implement `SetPost` in `x/blog/keeper/post.go` for updating posts in the store. 
+
+```go title="x/blog/keeper/post.go"
+func (k Keeper) SetPost(ctx sdk.Context, post types.Post) {
+	storeAdapter := runtime.KVStoreAdapter(k.storeService.OpenKVStore(ctx))
+	store := prefix.NewStore(storeAdapter, types.KeyPrefix(types.PostKey))
+	b := k.cdc.MustMarshal(&post)
+	store.Set(GetPostIDBytes(post.Id), b)
+}
+```
+
+Refine the `UpdatePost` function in `x/blog/keeper/msg_server_update_post.go`.
+
+```go title="x/blog/keeper/msg_server_update_post.go"
+package keeper
+
+import (
+	"context"
+	"fmt"
+
+	errorsmod "cosmossdk.io/errors"
+	sdk "github.com/cosmos/cosmos-sdk/types"
+	sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
+
+	"blog/x/blog/types"
+)
+
+func (k msgServer) UpdatePost(goCtx context.Context, msg *types.MsgUpdatePost) (*types.MsgUpdatePostResponse, error) {
+	ctx := sdk.UnwrapSDKContext(goCtx)
+	var post = types.Post{
+		Creator: msg.Creator,
+		Id:      msg.Id,
+		Title:   msg.Title,
+		Body:    msg.Body,
+	}
+	val, found := k.GetPost(ctx, msg.Id)
+	if !found {
+		return nil, errorsmod.Wrap(sdkerrors.ErrKeyNotFound, fmt.Sprintf("key %d doesn't exist", msg.Id))
+	}
+	if msg.Creator != val.Creator {
+		return nil, errorsmod.Wrap(sdkerrors.ErrUnauthorized, "incorrect owner")
+	}
+	k.SetPost(ctx, post)
+	return &types.MsgUpdatePostResponse{}, nil
+}
+```
+
+**Deleting Posts**
+
+1. **Scaffold Delete Message:**
+
+```bash
+ignite scaffold message delete-post id:uint
+```
+
+This command enables the deletion of posts by their ID.
+
+2. **Delete Logic:**
+
+Implement `RemovePost` in `x/blog/keeper/post.go` to delete posts from the store.
+
+```go title="x/blog/keeper/post.go"
+func (k Keeper) RemovePost(ctx sdk.Context, id uint64) {
+	storeAdapter := runtime.KVStoreAdapter(k.storeService.OpenKVStore(ctx))
+	store := prefix.NewStore(storeAdapter, types.KeyPrefix(types.PostKey))
+	store.Delete(GetPostIDBytes(id))
+}
+```
+
+Add the corresponding logic to `x/blog/keeper/msg_server_delete_post.go`.
+ +```go title="x/blog/keeper/msg_server_delete_post.go" +package keeper + +import ( + "context" + "fmt" + + errorsmod "cosmossdk.io/errors" + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "blog/x/blog/types" +) + +func (k msgServer) DeletePost(goCtx context.Context, msg *types.MsgDeletePost) (*types.MsgDeletePostResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + val, found := k.GetPost(ctx, msg.Id) + if !found { + return nil, errorsmod.Wrap(sdkerrors.ErrKeyNotFound, fmt.Sprintf("key %d doesn't exist", msg.Id)) + } + if msg.Creator != val.Creator { + return nil, errorsmod.Wrap(sdkerrors.ErrUnauthorized, "incorrect owner") + } + k.RemovePost(ctx, msg.Id) + return &types.MsgDeletePostResponse{}, nil +} +``` + +**Reading Posts** + +1. **Scaffold Query Messages:** + +```bash title="proto/blog/blog/query.proto" +ignite scaffold query show-post id:uint --response post:Post +ignite scaffold query list-post --response post:Post --paginated +``` + +These queries allow for retrieving a single post by ID and listing all posts with pagination. + +2. **Query Implementation:** + +Implement `ShowPost` in `x/blog/keeper/query_show_post.go`. + +```go title="x/blog/keeper/query_show_post.go" +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "blog/x/blog/types" +) + +func (k Keeper) ShowPost(goCtx context.Context, req *types.QueryShowPostRequest) (*types.QueryShowPostResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + ctx := sdk.UnwrapSDKContext(goCtx) + post, found := k.GetPost(ctx, req.Id) + if !found { + return nil, sdkerrors.ErrKeyNotFound + } + + return &types.QueryShowPostResponse{Post: &post}, nil +} +``` + +Implement `ListPost` in `x/blog/keeper/query_list_post.go`. 
+ +```go title="x/blog/keeper/query_list_post.go" +package keeper + +import ( + "context" + + "cosmossdk.io/store/prefix" + "github.com/cosmos/cosmos-sdk/runtime" + "github.com/cosmos/cosmos-sdk/types/query" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "blog/x/blog/types" +) + +func (k Keeper) ListPost(ctx context.Context, req *types.QueryListPostRequest) (*types.QueryListPostResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + storeAdapter := runtime.KVStoreAdapter(k.storeService.OpenKVStore(ctx)) + store := prefix.NewStore(storeAdapter, types.KeyPrefix(types.PostKey)) + + var posts []types.Post + pageRes, err := query.Paginate(store, req.Pagination, func(key []byte, value []byte) error { + var post types.Post + if err := k.cdc.Unmarshal(value, &post); err != nil { + return err + } + + posts = append(posts, post) + return nil + }) + + if err != nil { + return nil, status.Error(codes.Internal, err.Error()) + } + + return &types.QueryListPostResponse{Post: posts, Pagination: pageRes}, nil +} +``` + +3. **Proto Implementation:** + +Add a `repeated` keyword to return a list of posts in `QueryListPostResponse` and include the option +`[(gogoproto.nullable) = false]` in `QueryShowPostResponse` and `QueryListPostResponse` to generate the field without a pointer. + +```proto title="proto/blog/blog/query.proto" +message QueryShowPostResponse { + Post post = 1 [(gogoproto.nullable) = false]; +} + +message QueryListPostResponse { + // highlight-next-line + repeated Post post = 1 [(gogoproto.nullable) = false]; + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} +``` + +Build the blockchain: + +``` +ignite chain build +``` + +Start the blockchain: + +``` +ignite chain serve +``` + +**Interacting with the Blog** + +1. **Create a Post:** + +```bash +blogd tx blog create-post hello world --from alice --chain-id blog +``` + +2. 
**View a Post:** + +```bash +blogd q blog show-post 0 +``` + +3. **List All Posts:** + +```bash +blogd q blog list-post +``` + +4. **Update a Post:** + +```bash +blogd tx blog update-post "Hello" "Cosmos" 0 --from alice --chain-id blog +``` + +5. **Delete a Post:** + +```bash +blogd tx blog delete-post 0 --from alice --chain-id blog +``` + +## Conclusion + +Congratulations on completing the Blog tutorial! You've successfully built a functional blockchain application using Ignite and Cosmos SDK. This tutorial equipped you with the skills to generate code for key blockchain operations and implement business-specific logic in a blockchain context. Continue developing your skills and expanding your blockchain applications with the next tutorials. diff --git a/docs/versioned_docs/version-v28/02-guide/05-loan.md b/docs/versioned_docs/version-v28/02-guide/05-loan.md new file mode 100644 index 0000000..8c103a7 --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/05-loan.md @@ -0,0 +1,510 @@ +# DeFi Loan + +## Introduction + +Decentralized finance (DeFi) is a rapidly growing sector that is transforming the way we think about financial instruments and provides an array of inventive financial products and services. These include lending, borrowing, spot trading, margin trading, and flash loans, all of which are available to anyone possessing an internet connection. + +A DeFi loan represents a financial contract where the borrower is granted a certain asset, like currency or digital tokens. +In return, the borrower agrees to pay an additional fee and repay the loan within a set period of time. +To secure a loan, the borrower provides collateral that the lender can claim in the event of default. + +## You Will Learn + +In this tutorial you will learn how to: + +- **Scaffold a DeFi Module:** Learn how to use Ignite CLI to scaffold the basic structure of a DeFi module tailored for loan services. 
+- **Implement Loan Transactions:** Walk through coding the logic for initiating, managing, and closing loans.
+- **Create Custom Tokens:** Understand how to create and manage custom tokens within your DeFi ecosystem, vital for lending and borrowing mechanisms.
+- **Integrate Interest Rate Models:** Dive into implementing interest rate models to calculate loan interests dynamically.
+- **Ensure Security and Compliance:** Focus on security, ensure your DeFi module is resistant to common vulnerabilities by validating inputs.
+- **Test and Debug:** Learn effective strategies for testing your DeFi module and debugging issues that arise during development.
+
+## Setup and Scaffold
+
+1. **Create a New Blockchain:**
+
+```bash
+ignite scaffold chain loan --no-module && cd loan
+```
+
+Notice the `--no-module` flag; in the next step we make sure the `bank` dependency is included when scaffolding the module.
+
+2. **Create a Module:**
+
+Create a new "loan" module that is based on the standard Cosmos SDK `bank` module.
+
+```bash
+ignite scaffold module loan --dep bank
+```
+
+3. **Define the loan Module:**
+
+The "list" scaffolding command is used to generate files that implement the logic for storing and interacting with data stored as a list in the blockchain state.
+
+```bash
+ignite scaffold list loan amount fee collateral deadline state borrower lender --no-message
+```
+
+4. **Scaffold the Messages:**
+
+Scaffold the code for handling the messages for requesting, approving, repaying, liquidating, and cancelling loans.
+
+- Handling Loan Requests
+
+```bash
+ignite scaffold message request-loan amount fee collateral deadline
+```
+
+- Approving and Canceling Loans
+
+```bash
+ignite scaffold message approve-loan id:uint
+```
+
+```bash
+ignite scaffold message cancel-loan id:uint
+```
+
+- Repaying and Liquidating Loans
+
+```bash
+ignite scaffold message repay-loan id:uint
+```
+
+```bash
+ignite scaffold message liquidate-loan id:uint
+```
+
+## Additional Features
+
+- **Extend the BankKeeper:**
+
+Ignite takes care of adding the `bank` keeper, but you still need to tell the loan module which bank methods you will be using. You will be using three methods: `SendCoins`, `SendCoinsFromAccountToModule`, and `SendCoinsFromModuleToAccount`.
+Remove the `SpendableCoins` function from the `BankKeeper`.
+
+Add these to the `BankKeeper` interface.
+
+```go title="x/loan/types/expected_keepers.go"
+package types
+
+import (
+	"context"
+
+	sdk "github.com/cosmos/cosmos-sdk/types"
+)
+
+// AccountKeeper defines the expected interface for the Account module.
+type AccountKeeper interface {
+	GetAccount(context.Context, sdk.AccAddress) sdk.AccountI // only used for simulation
+	// Methods imported from account should be defined here
+}
+
+// BankKeeper defines the expected interface for the Bank module.
+type BankKeeper interface {
+	// SpendableCoins(ctx sdk.Context, addr sdk.AccAddress) sdk.Coins
+	SendCoins(ctx context.Context, fromAddr sdk.AccAddress, toAddr sdk.AccAddress, amt sdk.Coins) error
+	SendCoinsFromAccountToModule(ctx context.Context, senderAddr sdk.AccAddress, recipientModule string, amt sdk.Coins) error
+	SendCoinsFromModuleToAccount(ctx context.Context, senderModule string, recipientAddr sdk.AccAddress, amt sdk.Coins) error
+}
+
+// ParamSubspace defines the expected Subspace interface for parameters.
+type ParamSubspace interface { + Get(context.Context, []byte, interface{}) + Set(context.Context, []byte, interface{}) +} +``` + +- **Implement basic Validation to ensure proper loan requests:** + +When a loan is created, a certain message input validation is required. You want to throw error messages in case the end user tries impossible inputs. + +The `ValidateBasic` function plays a crucial role in maintaining the security and compliance of loan input parameters. By implementing comprehensive input validations, you enhance the security of your application. It's important to rigorously verify all user inputs to ensure they align with the established standards and rules of your platform. + +```go title="x/loan/types/message_request_loan.go" +import ( + "strconv" + + errorsmod "cosmossdk.io/errors" + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + +func (msg *MsgRequestLoan) ValidateBasic() error { + _, err := sdk.AccAddressFromBech32(msg.Creator) + if err != nil { + return errorsmod.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + } + amount, _ := sdk.ParseCoinsNormalized(msg.Amount) + if !amount.IsValid() { + return errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "amount is not a valid Coins object") + } + if amount.Empty() { + return errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "amount is empty") + } + fee, _ := sdk.ParseCoinsNormalized(msg.Fee) + if !fee.IsValid() { + return errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "fee is not a valid Coins object") + } + deadline, err := strconv.ParseInt(msg.Deadline, 10, 64) + if err != nil { + return errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "deadline is not an integer") + } + if deadline <= 0 { + return errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "deadline should be a positive integer") + } + collateral, _ := sdk.ParseCoinsNormalized(msg.Collateral) + if !collateral.IsValid() { + return errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "collateral is 
not a valid Coins object") + } + if collateral.Empty() { + return errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "collateral is empty") + } + return nil +} +``` + +## Using the Platform + +1. **As a Borrower:** + +Implement `RequestLoan` keeper method that will be called whenever a user requests a loan. `RequestLoan` creates a new loan; Set terms like amount, fee, collateral, and repayment deadline. The collateral from the borrower's account is sent to a module account, and adds the loan to the blockchain's store. + +Replace your scaffolded templates with the following code. + +```go title="x/loan/keeper/msg_server_request_loan.go" +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "loan/x/loan/types" +) + +func (k msgServer) RequestLoan(goCtx context.Context, msg *types.MsgRequestLoan) (*types.MsgRequestLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + var loan = types.Loan{ + Amount: msg.Amount, + Fee: msg.Fee, + Collateral: msg.Collateral, + Deadline: msg.Deadline, + State: "requested", + Borrower: msg.Creator, + } + borrower, err := sdk.AccAddressFromBech32(msg.Creator) + if err != nil { + panic(err) + } + collateral, err := sdk.ParseCoinsNormalized(loan.Collateral) + if err != nil { + panic(err) + } + sdkError := k.bankKeeper.SendCoinsFromAccountToModule(ctx, borrower, types.ModuleName, collateral) + if sdkError != nil { + return nil, sdkError + } + k.AppendLoan(ctx, loan) + return &types.MsgRequestLoanResponse{}, nil +} +``` + +As a borrower, you have the option to cancel a loan you have created if you no longer want to proceed with it. However, this action is only possible if the loan's current status is marked as "requested". 
+ +```go title="x/loan/keeper/msg_server_cancel_loan.go" +package keeper + +import ( + "context" + + errorsmod "cosmossdk.io/errors" + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "loan/x/loan/types" +) + +func (k msgServer) CancelLoan(goCtx context.Context, msg *types.MsgCancelLoan) (*types.MsgCancelLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + loan, found := k.GetLoan(ctx, msg.Id) + if !found { + return nil, errorsmod.Wrapf(sdkerrors.ErrKeyNotFound, "key %d doesn't exist", msg.Id) + } + if loan.Borrower != msg.Creator { + return nil, errorsmod.Wrap(sdkerrors.ErrUnauthorized, "Cannot cancel: not the borrower") + } + if loan.State != "requested" { + return nil, errorsmod.Wrapf(types.ErrWrongLoanState, "%v", loan.State) + } + borrower, _ := sdk.AccAddressFromBech32(loan.Borrower) + collateral, _ := sdk.ParseCoinsNormalized(loan.Collateral) + err := k.bankKeeper.SendCoinsFromModuleToAccount(ctx, types.ModuleName, borrower, collateral) + if err != nil { + return nil, err + } + loan.State = "cancelled" + k.SetLoan(ctx, loan) + return &types.MsgCancelLoanResponse{}, nil +} +``` + +2. **As a Lender:** + +Approve loan requests and liquidate loans if borrowers fail to repay. 
+ +```go title="x/loan/keeper/msg_server_approve_loan.go" +package keeper + +import ( + "context" + + errorsmod "cosmossdk.io/errors" + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "loan/x/loan/types" +) + +func (k msgServer) ApproveLoan(goCtx context.Context, msg *types.MsgApproveLoan) (*types.MsgApproveLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + loan, found := k.GetLoan(ctx, msg.Id) + if !found { + return nil, errorsmod.Wrapf(sdkerrors.ErrKeyNotFound, "key %d doesn't exist", msg.Id) + } + if loan.State != "requested" { + return nil, errorsmod.Wrapf(types.ErrWrongLoanState, "%v", loan.State) + } + lender, _ := sdk.AccAddressFromBech32(msg.Creator) + borrower, _ := sdk.AccAddressFromBech32(loan.Borrower) + amount, err := sdk.ParseCoinsNormalized(loan.Amount) + if err != nil { + return nil, errorsmod.Wrap(types.ErrWrongLoanState, "Cannot parse coins in loan amount") + } + err = k.bankKeeper.SendCoins(ctx, lender, borrower, amount) + if err != nil { + return nil, err + } + loan.Lender = msg.Creator + loan.State = "approved" + k.SetLoan(ctx, loan) + return &types.MsgApproveLoanResponse{}, nil +} +``` + +```go title="x/loan/keeper/msg_server_repay_loan.go" +package keeper + +import ( + "context" + + errorsmod "cosmossdk.io/errors" + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "loan/x/loan/types" +) + +func (k msgServer) RepayLoan(goCtx context.Context, msg *types.MsgRepayLoan) (*types.MsgRepayLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + loan, found := k.GetLoan(ctx, msg.Id) + if !found { + return nil, errorsmod.Wrapf(sdkerrors.ErrKeyNotFound, "key %d doesn't exist", msg.Id) + } + if loan.State != "approved" { + return nil, errorsmod.Wrapf(types.ErrWrongLoanState, "%v", loan.State) + } + lender, _ := sdk.AccAddressFromBech32(loan.Lender) + borrower, _ := sdk.AccAddressFromBech32(loan.Borrower) + if msg.Creator != 
loan.Borrower { + return nil, errorsmod.Wrap(sdkerrors.ErrUnauthorized, "Cannot repay: not the borrower") + } + amount, _ := sdk.ParseCoinsNormalized(loan.Amount) + fee, _ := sdk.ParseCoinsNormalized(loan.Fee) + collateral, _ := sdk.ParseCoinsNormalized(loan.Collateral) + err := k.bankKeeper.SendCoins(ctx, borrower, lender, amount) + if err != nil { + return nil, err + } + err = k.bankKeeper.SendCoins(ctx, borrower, lender, fee) + if err != nil { + return nil, err + } + err = k.bankKeeper.SendCoinsFromModuleToAccount(ctx, types.ModuleName, borrower, collateral) + if err != nil { + return nil, err + } + loan.State = "repayed" + k.SetLoan(ctx, loan) + return &types.MsgRepayLoanResponse{}, nil +} +``` + +```go title="x/loan/keeper/msg_server_liquidate_loan.go" +package keeper + +import ( + "context" + "strconv" + + errorsmod "cosmossdk.io/errors" + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "loan/x/loan/types" +) + +func (k msgServer) LiquidateLoan(goCtx context.Context, msg *types.MsgLiquidateLoan) (*types.MsgLiquidateLoanResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + loan, found := k.GetLoan(ctx, msg.Id) + if !found { + return nil, errorsmod.Wrapf(sdkerrors.ErrKeyNotFound, "key %d doesn't exist", msg.Id) + } + if loan.Lender != msg.Creator { + return nil, errorsmod.Wrap(sdkerrors.ErrUnauthorized, "Cannot liquidate: not the lender") + } + if loan.State != "approved" { + return nil, errorsmod.Wrapf(types.ErrWrongLoanState, "%v", loan.State) + } + lender, _ := sdk.AccAddressFromBech32(loan.Lender) + collateral, _ := sdk.ParseCoinsNormalized(loan.Collateral) + deadline, err := strconv.ParseInt(loan.Deadline, 10, 64) + if err != nil { + panic(err) + } + if ctx.BlockHeight() < deadline { + return nil, errorsmod.Wrap(types.ErrDeadline, "Cannot liquidate before deadline") + } + err = k.bankKeeper.SendCoinsFromModuleToAccount(ctx, types.ModuleName, lender, collateral) + if err != nil { + return nil, err + } 
+ loan.State = "liquidated" + k.SetLoan(ctx, loan) + return &types.MsgLiquidateLoanResponse{}, nil +} +``` + +```go title="x/loan/keeper/msg_update_params.go" +package keeper + +import ( + "context" + + errorsmod "cosmossdk.io/errors" + sdk "github.com/cosmos/cosmos-sdk/types" + + "loan/x/loan/types" +) + +func (k msgServer) UpdateParams(goCtx context.Context, req *types.MsgUpdateParams) (*types.MsgUpdateParamsResponse, error) { + if k.GetAuthority() != req.Authority { + return nil, errorsmod.Wrapf(types.ErrInvalidSigner, "invalid authority; expected %s, got %s", k.GetAuthority(), req.Authority) + } + + ctx := sdk.UnwrapSDKContext(goCtx) + if err := k.SetParams(ctx, req.Params); err != nil { + return nil, err + } + + return &types.MsgUpdateParamsResponse{}, nil +} +``` + +Add the custom errors `ErrWrongLoanState` and `ErrDeadline`: + +```go title="x/loan/types/errors.go" +package types + +import ( + sdkerrors "cosmossdk.io/errors" +) + +var ( + ErrInvalidSigner = sdkerrors.Register(ModuleName, 1100, "expected gov account as only signer for proposal message") + ErrWrongLoanState = sdkerrors.Register(ModuleName, 2, "wrong loan state") + ErrDeadline = sdkerrors.Register(ModuleName, 3, "deadline") +) +``` + +## Testing the Application + +- **Add Test Tokens:** + +Configure config.yml to add tokens (e.g., 10000foocoin) to test accounts. + +```bash title="config.yml" +version: 1 +accounts: + - name: alice + coins: + - 20000token + - 10000foocoin + - 200000000stake + - name: bob + coins: + - 10000token + - 100000000stake +client: + openapi: + path: docs/static/openapi.yml +faucet: + name: bob + coins: + - 5token + - 100000stake +validators: + - name: alice + bonded: 100000000stake +``` + +- **Start the Blockchain:** + +```bash +ignite chain serve +``` + +If everything works successful, you should see the `Blockchain is running` message in the Terminal. 
+ +- **Request a loan:** + +In a new terminal window, request a loan of `1000token` with `100token` as a fee and `1000foocoin` as a collateral from Alice's account. The deadline is set to `500` blocks: + +```bash +loand tx loan request-loan 1000token 100token 1000foocoin 500 --from alice --chain-id loan +``` + +- **Approve the loan:** + +```bash +loand tx loan approve-loan 0 --from bob --chain-id loan +``` + +- **Repay a loan:** + +```bash +loand tx loan repay-loan 0 --from alice --chain-id loan +``` + +- **Liquidate a loan:** + +```bash +loand tx loan request-loan 1000token 100token 1000foocoin 20 --from alice --chain-id loan -y +loand tx loan approve-loan 1 --from bob --chain-id loan -y +loand tx loan liquidate-loan 1 --from bob --chain-id loan -y +``` + +At any state in the process, use `q list loan` to see the active state of all loans. + +```bash +loand q loan list-loan +``` + +Query the blockchain for balances to confirm they have changed according to your transactions. + +```bash +loand q bank balances $(loand keys show alice -a) +``` + +## Conclusion + +This tutorial outlines the process of setting up a decentralized platform for digital asset loans using blockchain technology. By following these steps, you can create a DeFi platform that allows users to engage in secure and transparent lending and borrowing activities. diff --git a/docs/versioned_docs/version-v28/02-guide/06-ibc.md b/docs/versioned_docs/version-v28/02-guide/06-ibc.md new file mode 100644 index 0000000..f29bf3e --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/06-ibc.md @@ -0,0 +1,673 @@ +--- +sidebar_position: 7 +description: Build an understanding of how to create and send packets across blockchains and navigate between blockchains. +title: "Inter-Blockchain Communication: Basics" +--- + +# Inter-Blockchain Communication: Basics + +The Inter-Blockchain Communication protocol (IBC) is an important part of the +Cosmos SDK ecosystem. 
The Hello World tutorial is a time-honored tradition in +computer programming. This tutorial builds an understanding of how to create and +send packets across blockchain. This foundational knowledge helps you navigate +between blockchains with the Cosmos SDK. + +**You will learn how to** + +- Use IBC to create and send packets between blockchains. +- Navigate between blockchains using the Cosmos SDK and the Ignite CLI Relayer. +- Create a basic blog post and save the post on another blockchain. + +## What is IBC? + +The Inter-Blockchain Communication protocol (IBC) allows blockchains to talk to +each other. IBC handles transport across different sovereign blockchains. This +end-to-end, connection-oriented, stateful protocol provides reliable, ordered, +and authenticated communication between heterogeneous blockchains. + +The [IBC protocol in the Cosmos +SDK](https://ibc.cosmos.network/main/ibc/overview) is the standard for the +interaction between two blockchains. The IBCmodule interface defines how packets +and messages are constructed to be interpreted by the sending and the receiving +blockchain. + +The IBC relayer lets you connect between sets of IBC-enabled chains. This +tutorial teaches you how to create two blockchains and then start and use the +relayer with Ignite CLI to connect two blockchains. + +This tutorial covers essentials like modules, IBC packets, relayer, and the +lifecycle of packets routed through IBC. + +## Create a blockchain + +Create a blockchain app with a blog module to write posts on other blockchains +that contain the Hello World message. For this tutorial, you can write posts for +the Cosmos SDK universe that contain Hello Mars, Hello Cosmos, and Hello Earth +messages. + +For this simple example, create an app that contains a blog module that has a +post transaction with title and text. + +After you define the logic, run two blockchains that have this module installed. + +- The chains can send posts between each other using IBC. 
+ +- On the sending chain, save the `acknowledged` and `timed out` posts. + +After the transaction is acknowledged by the receiving chain, you know that the +post is saved on both blockchains. + +- The sending chain has the additional data `postID`. + +- Sent posts that are acknowledged and timed out contain the title and the + target chain of the post. These identifiers +- are visible on the parameter `chain`. The following chart shows the lifecycle + of a packet that travels through IBC. + +![The Lifecycle of an IBC packet](./images/packet_sendpost.png) + +## Build your blockchain app + +Use Ignite CLI to scaffold the blockchain app and the blog module. + +### Build a new blockchain + +To scaffold a new blockchain named `planet`: + +```bash +ignite scaffold chain planet --no-module +cd planet +``` + +A new directory named `planet` is created in your home directory. The `planet` +directory contains a working blockchain app. + +### Scaffold the blog module inside your blockchain + +Next, use Ignite CLI to scaffold a blog module with IBC capabilities. The blog +module contains the logic for creating blog posts and routing them through IBC +to the second blockchain. + +To scaffold a module named `blog`: + +```bash +ignite scaffold module blog --ibc +``` + +A new directory with the code for an IBC module is created in `planet/x/blog`. +Modules scaffolded with the `--ibc` flag include all the logic for the +scaffolded IBC module. + +### Generate CRUD actions for types + +Next, create the CRUD actions for the blog module types. + +Use the `ignite scaffold list` command to scaffold the boilerplate code for the +create, read, update, and delete (CRUD) actions. 
+ +These `ignite scaffold list` commands create CRUD code for the following +transactions: + +- Creating blog posts + +```bash +ignite scaffold list post title content creator --no-message --module blog +``` + +- Processing acknowledgments for sent posts + +```bash +ignite scaffold list sentPost postID:uint title chain creator --no-message --module blog +``` + +- Managing post timeouts + +```bash +ignite scaffold list timeoutPost title chain creator --no-message --module blog +``` + +The scaffolded code includes proto files for defining data structures, messages, +messages handlers, keepers for modifying the state, and CLI commands. + +### Ignite CLI Scaffold List Command Overview + +``` +ignite scaffold list [typeName] [field1] [field2] ... [flags] +``` + +The first argument of the `ignite scaffold list [typeName]` command specifies +the name of the type being created. For the blog app, you created `post`, +`sentPost`, and `timeoutPost` types. + +The next arguments define the fields that are associated with the type. For the +blog app, you created `title`, `content`, `postID`, and `chain` fields. + +The `--module` flag defines which module the new transaction type is added to. +This optional flag lets you manage multiple modules within your Ignite CLI app. +When the flag is not present, the type is scaffolded in the module that matches +the name of the repo. + +When a new type is scaffolded, the default behavior is to scaffold messages that +can be sent by users for CRUD operations. The `--no-message` flag disables this +feature. Disable the messages option for the app since you want the posts to be +created upon reception of IBC packets and not directly created from a user's +messages. + +### Scaffold a sendable and interpretable IBC packet + +You must generate code for a packet that contains the title and the content of +the blog post. + +The `ignite packet` command creates the logic for an IBC packet that can be sent +to another blockchain. 
+ +- The `title` and `content` are stored on the target chain. + +- The `postID` is acknowledged on the sending chain. + +To scaffold a sendable and interpretable IBC packet: + +```bash +ignite scaffold packet ibcPost title content --ack postID:uint --module blog +``` + +Notice the fields in the `ibcPost` packet match the fields in the `post` type +that you created earlier. + +- The `--ack` flag defines which identifier is returned to the sending + blockchain. + +- The `--module` flag specifies to create the packet in a particular IBC module. + +The `ignite packet` command also scaffolds the CLI command that is capable of +sending an IBC packet: + +```bash +planetd tx blog send-ibcPost [portID] [channelID] [title] [content] +``` + +## Modify the source code + +After you create the types and transactions, you must manually insert the logic +to manage updates in the database. Modify the source code to save the data as +specified earlier in this tutorial. + +### Add creator to the blog post packet + +Start with the proto file that defines the structure of the IBC packet. + +To identify the creator of the post in the receiving blockchain, add the +`creator` field inside the packet. This field was not specified directly in the +command because it would automatically become a parameter in the `SendIbcPost` +CLI command. + +```protobuf title="proto/planet/blog/packet.proto" +message IbcPostPacketData { + string title = 1; + string content = 2; + // highlight-next-line + string creator = 3; +} +``` + +To make sure the receiving chain has content on the creator of a blog post, add +the `msg.Creator` value to the IBC `packet`. + +- The content of the `sender` of the message is automatically included in + `SendIbcPost` message. +- The sender is verified as the signer of the message, so you can add the + `msg.Sender` as the creator to the new packet +- before it is sent over IBC. 
+ +```go title="x/blog/keeper/msg_server_ibc_post.go" +package keeper + +func (k msgServer) SendIbcPost(goCtx context.Context, msg *types.MsgSendIbcPost) (*types.MsgSendIbcPostResponse, error) { + // validate incoming message + if _, err := k.addressCodec.StringToBytes(msg.Creator); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidAddress, fmt.Sprintf("invalid address: %s", err)) + } + + if msg.Port == "" { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "invalid packet port") + } + + if msg.ChannelID == "" { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "invalid packet channel") + } + + if msg.TimeoutTimestamp == 0 { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "invalid packet timeout") + } + + // TODO: logic before transmitting the packet + + // Construct the packet + var packet types.IbcPostPacketData + + packet.Title = msg.Title + packet.Content = msg.Content + // highlight-next-line + packet.Creator = msg.Creator + + // Transmit the packet + ctx := sdk.UnwrapSDKContext(goCtx) + _, err := k.TransmitIbcPostPacket( + ctx, + packet, + msg.Port, + msg.ChannelID, + clienttypes.ZeroHeight(), + msg.TimeoutTimestamp, + ) + if err != nil { + return nil, err + } + + return &types.MsgSendIbcPostResponse{}, nil +} +``` + +### Receive the post + +The methods for primary transaction logic are in the `x/blog/keeper/ibc_post.go` +file. Use these methods to manage IBC packets: + +- `TransmitIbcPostPacket` is called manually to send the packet over IBC. This + method also defines the logic before the packet is sent over IBC to another + blockchain app. +- `OnRecvIbcPostPacket` hook is automatically called when a packet is received + on the chain. This method defines the packet reception logic. +- `OnAcknowledgementIbcPostPacket` hook is called when a sent packet is + acknowledged on the source chain. This method defines the logic when the + packet has been received. 
+- `OnTimeoutIbcPostPacket` hook is called when a sent packet times out. This + method defines the logic when the packet is not received on the target chain + +You must modify the source code to add the logic inside those functions so that +the data tables are modified accordingly. + +On reception of the post message, create a new post with the title and the +content on the receiving chain. + +To identify the blockchain app that a message is originating from and who +created the message, use an identifier in the following format: + +`<portID>-<channelID>-<creatorAddress>` + +Finally, the Ignite CLI-generated AppendPost function returns the ID of the new +appended post. You can return this value to the source chain through +acknowledgment. + +Append the type instance as `PostId` on receiving the packet: + +- The context `ctx` is an [immutable data + structure](https://docs.cosmos.network/main/core/context#go-context-package) + that has header data from the transaction. See [how the context is + initiated](https://github.com/cosmos/cosmos-sdk/blob/main/types/context.go#L71) +- The identifier format that you defined earlier +- The `title` is the Title of the blog post +- The `content` is the Content of the blog post + +Then modify the `OnRecvIbcPostPacket` keeper function with the following code: + +```go title="x/blog/keeper/ibc_post.go" +package keeper + +func (k Keeper) OnRecvIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData) (packetAck types.IbcPostPacketAck, err error) { + packetAck.PostId, err = k.PostSeq.Next(ctx) + if err != nil { + return packetAck, err + } + return packetAck, k.Post.Set(ctx, packetAck.PostId, types.Post{Title: data.Title, Content: data.Content}) +} +``` + +### Receive the post acknowledgement + +On the sending blockchain, store a `sentPost` so you know that the post has been +received on the target chain. + +Store the title and the target to identify the post. 
+ +When a packet is scaffolded, the default type for the received acknowledgment +data is a type that identifies if the packet treatment has failed. The +`Acknowledgement_Error` type is set if `OnRecvIbcPostPacket` returns an error +from the packet. + +```go title="x/blog/keeper/ibc_post.go" +package keeper + +func (k Keeper) OnAcknowledgementIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + // We will not treat acknowledgment error in this tutorial + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.IbcPostPacketAck + if err := types.ModuleCdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + seq, err := k.SentPostSeq.Next(ctx) + if err != nil { + return err + } + + return k.SentPost.Set(ctx, seq, + types.SentPost{ + PostId: packetAck.PostId, + Title: data.Title, + Chain: packet.DestinationPort + "-" + packet.DestinationChannel, + }, + ) + default: + return errors.New("the counter-party module does not implement the correct acknowledgment format") + } +} +``` + +### Store information about the timed-out packet + +Store posts that have not been received by target chains in `timeoutPost` +posts. This logic follows the same format as `sentPost`. 
+ +```go title="x/blog/keeper/ibc_post.go" +func (k Keeper) OnTimeoutIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData) error { + seq, err := k.TimeoutPostSeq.Next(ctx) + if err != nil { + return err + } + + return k.TimeoutPost.Set(ctx, seq, + types.TimeoutPost{ + Title: data.Title, + Chain: packet.DestinationPort + "-" + packet.DestinationChannel, + }, + ) +} +``` + +This last step completes the basic `blog` module setup. The blockchain is now +ready! + +## Use the IBC modules + +You can now spin up the blockchain and send a blog post from one blockchain app +to the other. Multiple terminal windows are required to complete these next +steps. + +### Test the IBC modules + +To test the IBC module, start two blockchain networks on the same machine. Both +blockchains use the same source code. Each blockchain has a unique chain ID. + +One blockchain is named `earth` and the other blockchain is named `mars`. + +The `earth.yml` and `mars.yml` files are required in the project directory: + +```yaml title="earth.yml" +version: 1 +build: + proto: + path: proto + third_party_paths: + - third_party/proto + - proto_vendor +accounts: +- name: alice + coins: + - 1000token + - 100000000stake +- name: bob + coins: + - 500token + - 100000000stake +faucet: + name: bob + coins: + - 5token + - 100000stake + host: 0.0.0.0:4500 +genesis: + chain_id: earth +validators: +- name: alice + bonded: 100000000stake + home: $HOME/.earth +``` + +```yaml title="mars.yml" +version: 1 +build: + proto: + path: proto + third_party_paths: + - third_party/proto + - proto_vendor +accounts: +- name: alice + coins: + - 1000token + - 1000000000stake +- name: bob + coins: + - 500token + - 100000000stake +faucet: + name: bob + coins: + - 5token + - 100000stake + host: :4501 +genesis: + chain_id: mars +validators: +- name: alice + bonded: 100000000stake + app: + api: + address: :1318 + grpc: + address: :9092 + grpc-web: + address: :9093 + config: + p2p: + laddr: :26658 + 
rpc: + laddr: :26659 + pprof_laddr: :6061 + home: $HOME/.mars +``` + +Open a terminal window and run the following command to start the `earth` +blockchain: + +```bash +ignite chain serve -c earth.yml +``` + +Open a different terminal window and run the following command to start the +`mars` blockchain: + +```bash +ignite chain serve -c mars.yml +``` + +### Remove Existing Relayer and Ignite CLI Configurations + +If you previously used the relayer, follow these steps to remove exiting relayer +and Ignite CLI configurations: + +- Stop your blockchains and delete previous configuration files: + + ```bash + rm -rf ~/.ignite/relayer + ``` + +If existing relayer configurations do not exist, the command returns `no matches +found` and no action is taken. + +### Configure and start the relayer + +First, add the Hermes relayer app. + +```bash +ignite app install -g github.com/ignite/apps/hermes +``` + +and after configure the relayer. + +```bash +ignite relayer hermes configure \ +"earth" "http://localhost:26657" "http://localhost:9090" \ +"mars" "http://localhost:26659" "http://localhost:9092" \ +--chain-a-faucet "http://0.0.0.0:4500" \ +--chain-b-faucet "http://0.0.0.0:4501" \ +--chain-a-port-id "blog" \ +--chain-b-port-id "blog" \ +--channel-version "blog-1" +``` + +When prompted, press Enter to accept the default values for `Chain A Account` and +`Chain B Account`. + +The output looks like: + +``` +Hermes config created at /Users/danilopantani/.ignite/relayer/hermes/earth_mars +? Chain earth doesn't have a default Hermes key. Type your mnemonic to continue or type enter to generate a new one: (optional) +New mnemonic generated: danger plate flavor twist chimney myself sketch assist copy expand core tattoo ignore ensure quote mean forum carbon enroll gadget immense grab early maze +Chain earth key created +Chain earth relayer wallet: cosmos1jk6wmyl880j6t9vw6umy9v8ex0yhrfwgx0vv2d +New balance from faucet: 100000stake,5token +? 
Chain mars doesn't have a default Hermes key. Type your mnemonic to continue or type enter to generate a new one: (optional) +New mnemonic generated: invest box icon session lens demise purse link boss dwarf give minimum jazz eye vocal seven sunset coach express want ask version anger ranch +Chain mars key created +Chain mars relayer wallet: cosmos1x9kt37c0sutanaqwy9gxpvq5990yt0qnpqntmp +New balance from faucet: 100000stake,5token +Client '07-tendermint-0' created (earth -> mars) +Client 07-tendermint-0' created (mars -> earth) +Connection 'earth (connection-0) <-> mars (connection-0)' created +Channel 'earth (channel-0) <-> mars (channel-0)' created +``` + +Now start the relayer: + +```bash +ignite relayer hermes start "earth" "mars" +``` + +### Send packets + +You can now send packets and verify the received posts: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Hello" "Hello Mars, I'm Alice from Earth" --from alice --chain-id earth --home ~/.earth +``` + +To verify that the post has been received on Mars: + +```bash +planetd q blog list-post --node tcp://localhost:26659 +``` + +The packet has been received: + +```yaml +Post: + - content: Hello Mars, I'm Alice from Earth + creator: blog-channel-0-cosmos1aew8dk9cs3uzzgeldatgzvm5ca2k4m98xhy20x + id: "0" + title: Hello +pagination: + next_key: null + total: "1" +``` + +To check if the packet has been acknowledged on Earth: + +```bash +planetd q blog list-sent-post +``` + +Output: + +```yaml +SentPost: + - chain: blog-channel-0 + creator: cosmos1aew8dk9cs3uzzgeldatgzvm5ca2k4m98xhy20x + id: "0" + postID: "0" + title: Hello +pagination: + next_key: null + total: "1" +``` + +To test timeout, set the timeout time of a packet to 1 nanosecond, verify that +the packet is timed out, and check the timed-out posts: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Sorry" "Sorry Mars, you will never see this post" --from alice --chain-id earth --home ~/.earth --packet-timeout-timestamp 1 +``` + +Check the 
timed-out posts: + +```bash +planetd q blog list-timeout-post +``` + +Results: + +```yaml +TimeoutPost: + - chain: blog-channel-0 + creator: cosmos1fhpcsxn0g8uask73xpcgwxlfxtuunn3ey5ptjv + id: "0" + title: Sorry +pagination: + next_key: null + total: "2" +``` + +You can also send a post from Mars: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Hello" "Hello Earth, I'm Alice from Mars" --from alice --chain-id mars --home ~/.mars --node tcp://localhost:26659 +``` + +List post on Earth: + +```bash +planetd q blog list-post +``` + +Results: + +```yaml +Post: + - content: Hello Earth, I'm Alice from Mars + creator: blog-channel-0-cosmos1xtpx43l826348s59au24p22pxg6q248638q2tf + id: "0" + title: Hello +pagination: + next_key: null + total: "1" +``` + +## Congratulations 🎉 + +By completing this tutorial, you've learned to use the Inter-Blockchain +Communication protocol (IBC). + +Here's what you accomplished in this tutorial: + +- Built two Hello blockchain apps as IBC modules +- Modified the generated code to add CRUD action logic +- Configured and used the Ignite CLI relayer to connect two blockchains with + each other +- Transferred IBC packets from one blockchain to another diff --git a/docs/versioned_docs/version-v28/02-guide/06-tokenfactory/01-tokenfactory.md b/docs/versioned_docs/version-v28/02-guide/06-tokenfactory/01-tokenfactory.md new file mode 100644 index 0000000..52aff9a --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/06-tokenfactory/01-tokenfactory.md @@ -0,0 +1,622 @@ +# Token Factory + +## Introduction to Building a Token Factory Module with Ignite CLI + +In this tutorial, we will guide you through the process of building a token factory module using the Ignite CLI. This module is a powerful tool for creating native denominations (denoms) on your blockchain, providing you with the capability to issue and manage digital assets natively within your network. 
+ +Digital assets, characterized by their uniqueness and scarcity, are fundamental to the value proposition of blockchain technology. A well-known example is the ERC20 standard on Ethereum, which has gained widespread popularity. By learning to create and manage native denoms on your blockchain, you will gain hands-on experience with one of blockchain's key functionalities. + +**You will learn how to:** + +* Develop a module from scratch. +* Implement a CRUD (Create, Read, Update, Delete) operation while specifically removing the delete functionality to safeguard the integrity of initialized denoms. +* Integrate logic for creating new denoms. +* Engage with various components such as the client, types, keeper, expected keeper, and handlers to effectively implement the Token Factory module. + +**Note:** The code provided in this tutorial is tailored for educational purposes. It is not designed for deployment in production environments. + +## Understanding the Module Design + +The Token Factory module empowers you to create and manage native denoms on your blockchain. In the Cosmos ecosystem and with Ignite CLI, a denom represents the name of a token that is universally usable. To learn more, see [Denom](02-denoms.md). + +## What is a Denom? + +Denoms are essentially identifiers for tokens on a blockchain, synonymous with terms like 'coin' or 'token'. For an in-depth understanding, refer to the Cosmos SDK's [ADR 024: Coin Metadata](https://docs.cosmos.network/main/build/architecture/adr-024-coin-metadata#context). + +A denom in this module always has an owner. An owner is allowed to issue new tokens, change the denoms name, and transfer the ownership to a different account. Learn more about [denoms](02-denoms.md). + +In our Token Factory module: + +1. Ownership and Control: Each denom is assigned an owner, who has the authority to issue new tokens, rename the denom, and transfer ownership. + +2. Properties of a Denom: + + - denom: The unique name of the denom. 
+ - description: A brief about the denom. + - ticker: The symbolic representation. + - precision: Determines the number of decimal places for the denom. + - url: Provides additional information. + - maxSupply & supply: Define the total and current circulating supply. + - canChangeMaxSupply: A boolean indicating if maxSupply can be altered post-issuance. + - owner: The account holding ownership rights. + +3. Proto Definition: + +```proto +message Denom { + string denom = 1; + string description = 2; + string ticker = 3; + int32 precision = 4; + string url = 5; + int32 maxSupply = 6; + int32 supply = 7; + bool canChangeMaxSupply = 8; + string owner = 9; +} +``` + +4. Core Functionalities: + +- Issuing new tokens. +- Transferring ownership of tokens. +- Keeping a ledger of all tokens. + +## Chapter 2: Getting Started with Your Token Factory Module + +Welcome to the next step in your journey of building a token factory module. In this chapter, we'll walk you through setting up your blockchain and beginning the development of your token factory module. + +### Setting up your blockchain + +First, we'll scaffold a new blockchain specifically for your token factory. We use the --no-module flag to ensure that we add the token factory module with the required dependencies later. Run the following command in your terminal: + +```bash +ignite scaffold chain tokenfactory --no-module +``` + +This command establishes a new Cosmos SDK blockchain named `tokenfactory` and places it in a directory of the same name. Inside this directory, you'll find a fully functional blockchain ready for further customization. + +Now, navigate into your newly created blockchain directory: + +```bash +cd tokenfactory +``` + +### Scaffold Your Token Factory Module + +Next, we'll scaffold a new module for your token factory. 
This module will depend on the Cosmos SDK's [bank](https://docs.cosmos.network/main/build/modules/bank#abstract) and [auth](https://docs.cosmos.network/main/build/modules/auth#abstract) modules, which provide essential functionalities like account access and token management. Use the following command: + +```bash +ignite scaffold module tokenfactory --dep account,bank +``` + +The successful execution of this command will be confirmed with a message indicating that the `tokenfactory` module has been created. + +### Defining Denom Data Structure + +To manage denoms within your token factory, define their structure using an Ignite map. This will store the data as key-value pairs. Run this command: + +```bash +ignite scaffold map Denom description:string ticker:string precision:int url:string maxSupply:int supply:int canChangeMaxSupply:bool --signer owner --index denom --module tokenfactory +``` + +Review the `proto/tokenfactory/tokenfactory/denom.proto` file to see the scaffolding results, which include modifications to various files indicating successful creation of the denom structure. + +### Git Commit + +After scaffolding your denom map, it's a good practice to save your progress. Use the following commands to make your first Git commit: + +```bash +git add . +git commit -m "Add tokenfactory module and denom map" +``` + +This saves a snapshot of your project, allowing you to revert back if needed. + +## Removing Delete Functionality + +In a blockchain context, once a denom is created, it's crucial to ensure it remains immutable and cannot be deleted. This immutability is key to maintaining the integrity and trust in the blockchain. Therefore, we'll remove the delete functionality from the scaffolded CRUD operations. Follow these steps: + +**Proto Adjustments** + +In `proto/tokenfactory/tokenfactory/tx.proto`, remove the `DeleteDenom` RPC method and the associated message types. 
+ +**Client Updates** + +Navigate to the client in `x/tokenfactory/client` and make these changes: + +- Remove `TestDeleteDenom()` from `tx_denom_test.go`. +- Eliminate `CmdDeleteDenom()` from `tx_denom.go`. +- In `tx.go`, delete the line referencing the delete command. + +**Keeper Modifications** + +In `denom_test.go`, remove `TestDenomRemove()`. +Delete `RemoveDenom()` from `denom.go`. +Exclude `TestDenomMsgServerDelete()` and `DeleteDenom()` functions from `msg_server_denom_test.go` and `msg_server_denom.go`, respectively. + +**Types Directory Changes** + +- Update `codec.go` to remove references to `MsgDeleteDenom`. +- Remove `TestMsgDeleteDenom_ValidateBasic()` from `messages_denom_test.go`. +- Eliminate all references to `MsgDeleteDenom()` in `messages_denom.go`. + +After making these changes, commit your updates: + +```bash +git add . +git commit -m "Remove the delete denom functionality" +``` + +This concludes the second chapter, setting a solid foundation for your token factory module. In the next chapter, we'll delve into implementing the application logic that will bring your token factory to life. + +## Chapter 3: Implementing Core Functionality in Your Token Factory + +Having disabled the deletion of denoms, we now turn our attention to the heart of the token factory module: defining the structure of new denoms and implementing their creation and update logic. + +**Proto Definition Updates** + +Start by defining the structure of a new token denom in `proto/tokenfactory/tokenfactory/tx.proto`. + +For `MsgCreateDenom`: + +- Remove `int32 supply = 8;` and adjust the field order so `canChangeMaxSupply` becomes the 8th field. 
+ +Resulting `MsgCreateDenom` message: + +```proto +message MsgCreateDenom { + string owner = 1; + string denom = 2; + string description = 3; + string ticker = 4; + int32 precision = 5; + string url = 6; + int32 maxSupply = 7; + bool canChangeMaxSupply = 8; +} +``` + +For `MsgUpdateDenom`: + +- Omit `string ticker = 4;`, `int32 precision = 5;`, and `int32 supply = 8;`, and reorder the remaining fields. + +Resulting `MsgUpdateDenom` message: + +```proto +message MsgUpdateDenom { + string owner = 1; + string denom = 2; + string description = 3; + string url = 4; + int32 maxSupply = 5; + bool canChangeMaxSupply = 6; +} +``` + +### Client Logic + +In the `x/tokenfactory/client/cli/tx_denom.go` file, update the client application logic. + +**For `CmdCreateDenom`:** + +- Adjust the number of arguments from 8 to 7, removing references to the supply argument, and update the usage descriptions. + +**For `CmdUpdateDenom()`:** + +- Reduce the number of arguments to 5, excluding `supply`, `precision`, and `ticker`, and modify the usage descriptions accordingly. + +Also, update the tests in `x/tokenfactory/client/cli/tx_denom_test.go` to reflect these changes. + +### Types Updates + +When creating new denoms, they initially have no supply. The supply is determined only when tokens are minted. + +In `x/tokenfactory/types/messages_denom.go`: + +- Remove the `supply` parameter from `NewMsgCreateDenom`. +- Update `NewMsgUpdateDenom` to exclude unchangeable parameters like `ticker`, `precision`, and `supply`. + +Implement basic input validation in `x/tokenfactory/types/messages_denom.go`: + +- Ensure the ticker length is between 3 and 10 characters. 
+```go +func (msg *MsgCreateDenom) ValidateBasic() error { + _, err := sdk.AccAddressFromBech32(msg.Owner) + if err != nil { + return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid owner address (%s)", err) + } + + tickerLength := len(msg.Ticker) + if tickerLength < 3 { + return sdkerrors.Wrapf(sdkerrors.ErrInvalidRequest, "Ticker length must be at least 3 chars long") + } + if tickerLength > 10 { + return sdkerrors.Wrapf(sdkerrors.ErrInvalidRequest, "Ticker length must be 10 chars long maximum") + } + if msg.MaxSupply == 0 { + return sdkerrors.Wrapf(sdkerrors.ErrInvalidRequest, "Max Supply must be greater than 0") + } + + return nil +} +``` + +- Set `maxSupply` to be greater than 0. + +```go +func (msg *MsgUpdateDenom) ValidateBasic() error { + _, err := sdk.AccAddressFromBech32(msg.Owner) + if err != nil { + return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid owner address (%s)", err) + } + if msg.MaxSupply == 0 { + return sdkerrors.Wrapf(sdkerrors.ErrInvalidRequest, "Max Supply must be greater than 0") + } + return nil +} +``` + +### Keeper Logic + +The keeper is where you define the business logic for manipulating the database and writing to the key-value store. + +**In `x/tokenfactory/keeper/msg_server_denom.go`:** + +- Update `CreateDenom()` to include logic for creating unique denoms. Modify the error message to point to existing denoms. Set `Supply` to `0`. +- Modify `UpdateDenom()` to verify ownership and manage max supply changes. 
+ +```go +func (k msgServer) UpdateDenom(goCtx context.Context, msg *types.MsgUpdateDenom) (*types.MsgUpdateDenomResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Check if the value exists + valFound, isFound := k.GetDenom( + ctx, + msg.Denom, + ) + if !isFound { + return nil, sdkerrors.Wrap(sdkerrors.ErrKeyNotFound, "Denom to update not found") + } + + // Checks if the msg owner is the same as the current owner + if msg.Owner != valFound.Owner { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "incorrect owner") + } + + if !valFound.CanChangeMaxSupply && valFound.MaxSupply != msg.MaxSupply { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "cannot change maxsupply") + } + if !valFound.CanChangeMaxSupply && msg.CanChangeMaxSupply { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "Cannot revert change maxsupply flag") + } + var denom = types.Denom{ + Owner: msg.Owner, + Denom: msg.Denom, + Description: msg.Description, + Ticker: valFound.Ticker, + Precision: valFound.Precision, + Url: msg.Url, + MaxSupply: msg.MaxSupply, + Supply: valFound.Supply, + CanChangeMaxSupply: msg.CanChangeMaxSupply, + } + + k.SetDenom(ctx, denom) + + return &types.MsgUpdateDenomResponse{}, nil +} +``` + +### Expected Keepers + +`x/tokenfactory/types/expected_keepers.go` is where you define interactions with other modules. Since your module relies on the `auth` and `bank` modules, specify which of their functions your module can access. + +Replace the existing code in `expected_keepers.go` with the updated definitions that interface with `auth` and `bank` modules. 
+ +```go +package types + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" +) + +type AccountKeeper interface { + GetAccount(ctx sdk.Context, addr sdk.AccAddress) authtypes.AccountI + GetModuleAddress(name string) sdk.AccAddress + GetModuleAccount(ctx sdk.Context, moduleName string) authtypes.ModuleAccountI +} + +type BankKeeper interface { + SendCoins(ctx sdk.Context, fromAddr sdk.AccAddress, toAddr sdk.AccAddress, amt sdk.Coins) error + MintCoins(ctx sdk.Context, moduleName string, amt sdk.Coins) error + SpendableCoins(ctx sdk.Context, addr sdk.AccAddress) sdk.Coins +} +``` + +### Committing Your Changes + +Regular commits are vital for tracking progress and ensuring a stable rollback point if needed. After implementing these changes, use the following commands to commit: + +```bash +git add . +git commit -m "Add token factory create and update logic" +``` + +To review your progress, use `git log` to see the list of commits, illustrating the journey from initialization to the current state of your module. + + +## Chapter 4: Expanding Functionality with New Messages + +In this chapter, we focus on enhancing the token factory module by adding two critical messages: `MintAndSendTokens` and `UpdateOwner`. These functionalities are key to managing tokens within your blockchain. + +### Scaffolding New Messages + +**MintAndSendTokens:** + +This message allows the creation (minting) of new tokens and their allocation to a specified recipient. The necessary inputs are the denom, the amount to mint, and the recipient's address. + +Scaffold this message with: + +```bash +ignite scaffold message MintAndSendTokens denom:string amount:int recipient:string --module tokenfactory --signer owner +``` + +**UpdateOwner:** + +This message facilitates the transfer of ownership of a denom. It requires the denom name and the new owner's address. 
+ +Scaffold this message with: + +```bash +ignite scaffold message UpdateOwner denom:string newOwner:string --module tokenfactory --signer owner +``` + +### Implementing Logic for New Messages + +**In the `MintAndSendTokens` Functionality:** + +Located in `x/tokenfactory/keeper/msg_server_mint_and_send_tokens.go`, this function encompasses the logic for minting new tokens. Key steps include: + +- Verifying the existence and ownership of the denom. +- Ensuring minting does not exceed the maximum supply. +- Minting the specified amount and sending it to the recipient. + +**In the `UpdateOwner` Functionality:** + +Found in `x/tokenfactory/keeper/msg_server_update_owner.go`, this function allows transferring ownership of a denom. It involves: + +- Checking if the denom exists. +- Ensuring that the request comes from the current owner. +- Updating the owner field in the denom's record. + +### Keeper Logic + +- For `MintAndSendTokens`, add logic to mint new tokens as per the request parameters. This includes checking for maximum supply limits and transferring the minted tokens to the specified recipient. 
+ +```go +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "tokenfactory/x/tokenfactory/types" +) + +func (k msgServer) MintAndSendTokens(goCtx context.Context, msg *types.MsgMintAndSendTokens) (*types.MsgMintAndSendTokensResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Check if the value exists + valFound, isFound := k.GetDenom( + ctx, + msg.Denom, + ) + if !isFound { + return nil, sdkerrors.Wrap(sdkerrors.ErrKeyNotFound, "denom does not exist") + } + + // Checks if the msg owner is the same as the current owner + if msg.Owner != valFound.Owner { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "incorrect owner") + } + + if valFound.Supply+msg.Amount > valFound.MaxSupply { + return nil, sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "Cannot mint more than Max Supply") + } + moduleAcct := k.accountKeeper.GetModuleAddress(types.ModuleName) + + recipientAddress, err := sdk.AccAddressFromBech32(msg.Recipient) + if err != nil { + return nil, err + } + + var mintCoins sdk.Coins + + mintCoins = mintCoins.Add(sdk.NewCoin(msg.Denom, sdk.NewInt(int64(msg.Amount)))) + if err := k.bankKeeper.MintCoins(ctx, types.ModuleName, mintCoins); err != nil { + return nil, err + } + if err := k.bankKeeper.SendCoins(ctx, moduleAcct, recipientAddress, mintCoins); err != nil { + return nil, err + } + + var denom = types.Denom{ + Owner: valFound.Owner, + Denom: valFound.Denom, + Description: valFound.Description, + MaxSupply: valFound.MaxSupply, + Supply: valFound.Supply + msg.Amount, + Precision: valFound.Precision, + Ticker: valFound.Ticker, + Url: valFound.Url, + CanChangeMaxSupply: valFound.CanChangeMaxSupply, + } + + k.SetDenom( + ctx, + denom, + ) + return &types.MsgMintAndSendTokensResponse{}, nil +} +``` + +- For `UpdateOwner`, implement the logic to update the owner of a denom, ensuring that only the current owner can initiate this change. 
+ +```go +package keeper + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "tokenfactory/x/tokenfactory/types" +) + +func (k msgServer) UpdateOwner(goCtx context.Context, msg *types.MsgUpdateOwner) (*types.MsgUpdateOwnerResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Check if the value exists + valFound, isFound := k.GetDenom( + ctx, + msg.Denom, + ) + if !isFound { + return nil, sdkerrors.Wrap(sdkerrors.ErrKeyNotFound, "denom does not exist") + } + + // Checks if the msg owner is the same as the current owner + if msg.Owner != valFound.Owner { + return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "incorrect owner") + } + + var denom = types.Denom{ + Owner: msg.NewOwner, + Denom: msg.Denom, + Description: valFound.Description, + MaxSupply: valFound.MaxSupply, + Supply: valFound.Supply, + Precision: valFound.Precision, + Ticker: valFound.Ticker, + Url: valFound.Url, + CanChangeMaxSupply: valFound.CanChangeMaxSupply, + } + + k.SetDenom( + ctx, + denom, + ) + + return &types.MsgUpdateOwnerResponse{}, nil +} +``` + +### Committing Your Changes + +After implementing these new functionalities, it's crucial to save your progress. Use the following commands: + +```bash +git add . +git commit -m "Add minting and sending functionality" +``` + +This commit not only tracks your latest changes but also acts as a checkpoint to which you can revert if needed. + +## Chapter 5: Walkthrough and Manual Testing of the Token Factory Module + +Congratulations on reaching the final stage! It's time to put your token factory module to the test. This walkthrough will guide you through building, starting your chain, and testing the functionalities you've implemented. + +### Building and Starting the Chain + +First, build and initiate your blockchain: + +```bash +ignite chain serve +``` + +Keep this terminal running as you proceed with the tests. + +### Testing Functionalities + +**1. 
Creating a New Denom:** + +- In a new terminal, create a denom named uignite with the command: + +```bash +tokenfactoryd tx tokenfactory create-denom uignite "My denom" IGNITE 6 "some/url" 1000000000 true --from alice +``` + +- Confirm the transaction in your blockchain. + +**2. Querying the Denom:** + +Check the list of denoms to see your new creation: + +```bash +tokenfactoryd query tokenfactory list-denom +``` + +**3. Updating the Denom:** + +- Modify the uignite denom: + +```bash +tokenfactoryd tx tokenfactory update-denom uignite "Ignite" "newurl" 2000000000 false --from alice +``` + +- Query the denoms again to observe the changes: +```bash +tokenfactoryd query tokenfactory list-denom +``` + +**4. Minting and Sending Tokens:** + +- Mint uignite tokens and send them to a recipient: +```bash +tokenfactoryd tx tokenfactory mint-and-send-tokens uignite 1200 cosmos16x46rxvtkmgph6jnkqs80tzlzk6wpy6ftrgh6t --from alice +``` + +- Check the recipient’s balance: +```bash +tokenfactoryd query bank balances cosmos16x46rxvtkmgph6jnkqs80tzlzk6wpy6ftrgh6t +``` + +- Verify the updated supply in denom list: +```bash +tokenfactoryd query tokenfactory list-denom +``` + +**5. Transferring Ownership:** + +- Transfer the ownership of uignite: +```bash +tokenfactoryd tx tokenfactory update-owner uignite cosmos16x46rxvtkmgph6jnkqs80tzlzk6wpy6ftrgh6t --from alice +``` + +- Confirm the ownership change: +```bash +tokenfactoryd query tokenfactory list-denom +``` + +**6. Confirming Minting Restrictions:** + +- Test minting with alice to ensure restrictions apply: + +```bash +tokenfactoryd tx tokenfactory mint-and-send-tokens uignite 1200 cosmos16x46rxvtkmgph6jnkqs80tzlzk6wpy6ftrgh6t --from alice +``` + +## Congratulations! + +You've successfully built and tested a token factory module. This advanced tutorial has equipped you with the skills to: + +- Integrate other modules and utilize their functionalities. +- Customize CRUD operations to fit your blockchain's needs. 
+- Scaffold modules and messages effectively. + +## Looking Ahead: IBC Functionality + +As you progress, the next learning adventure involves exploring IBC (Inter-Blockchain Communication). If you're up for a challenge, consider adding IBC functionality to your token factory module. This will not only enhance your module's capabilities but also deepen your understanding of the Cosmos ecosystem. diff --git a/docs/versioned_docs/version-v28/02-guide/06-tokenfactory/02-denoms.md b/docs/versioned_docs/version-v28/02-guide/06-tokenfactory/02-denoms.md new file mode 100644 index 0000000..938228c --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/06-tokenfactory/02-denoms.md @@ -0,0 +1,32 @@ +# Understanding Denoms in Cosmos SDK and Ignite + +## What is a Denom? + +**Denom** stands for `denomination` and represents the name of a token within the Cosmos SDK and Ignite. In the Cosmos ecosystem, denoms play a crucial role in identifying and managing tokens. + +In Ignite, the configuration of your blockchain, including the specification of denoms, is set in the `config.yml` file within your blockchain directory. This file allows the definition of various denoms before initializing your blockchain. + +Common examples of denoms include formats like `token` or `stake`. + +## Usage of Denoms + +In the Cosmos SDK, assets are represented as a `Coins` type, which combines an amount with a denom. The amount is flexible, allowing for a wide range of values. Accounts in the Cosmos SDK, including both basic and module accounts, maintain balances comprised of these `Coins`. + +The `x/bank` module is pivotal in the Cosmos SDK as it tracks all account balances and the total supply of tokens in the application. + +### Key Points on Denoms and Balances: + +- **Fixed Denomination Unit:** The Cosmos SDK treats the amount of a balance as a single, fixed unit of denomination, regardless of the denom itself. 
+- **Client and App Flexibility:** While clients and apps built on Cosmos SDK chains can define arbitrary denomination units, all transactions and operations in the Cosmos SDK ultimately use these fixed units. +- **Example:** On the Cosmos Hub (Gaia), the common assumption is 1 ATOM = 10^6 uatom, and operations are based on these units of 10^6. + +## Denoms and IBC (Inter-Blockchain Communication) + +One of the primary uses of IBC is the transfer of tokens between blockchains. This process involves creating a token `voucher` on the target blockchain upon receiving tokens from a source chain. + +### Characteristics of IBC Voucher Tokens: + +- **Naming Convention:** IBC voucher tokens are denoted with a naming syntax that starts with `ibc/`. This convention helps in identifying and managing IBC tokens on a blockchain. +- **Native vs. Voucher Tokens:** With IBC, a native token on one blockchain can be referenced as a `voucher` token on another. These tokens are differentiated by their `denom` names. + +For a comprehensive understanding of IBC denoms and their application, refer to [Understand IBC Denoms with Gaia](https://tutorials.cosmos.network/tutorials/6-ibc-dev/), which provides detailed insights into the format and utilization of voucher tokens in the IBC context. 
diff --git a/docs/versioned_docs/version-v28/02-guide/06-tokenfactory/_category_.json b/docs/versioned_docs/version-v28/02-guide/06-tokenfactory/_category_.json new file mode 100644 index 0000000..fc9d4ae --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/06-tokenfactory/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Advanced Module: Tokenfactory", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/02-guide/07-interchange/00-introduction.md b/docs/versioned_docs/version-v28/02-guide/07-interchange/00-introduction.md new file mode 100644 index 0000000..8b7bc6c --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/07-interchange/00-introduction.md @@ -0,0 +1,49 @@ +--- +sidebar_position: 0 +slug: /guide/interchange +--- + +# Introduction + +The Interchain Exchange is a module to create buy and sell orders between blockchains. + +In this tutorial, you learn how to create a Cosmos SDK module that can create order pairs, buy orders, and sell orders. +You create order books and buy and sell orders across blockchains, which in turn enables you to swap token from one +blockchain to another. + +**Note:** The code in this tutorial is written specifically for this tutorial and is intended only for educational +purposes. This tutorial code is not intended to be used in production. + +If you want to see the end result, see the example implementation in +the [interchange repo](https://github.com/tendermint/interchange). + +**You will learn how to:** + +- Create a blockchain with Ignite CLI +- Create a Cosmos SDK IBC module +- Create an order book that hosts buy and sell orders with a module +- Send IBC packets from one blockchain to another +- Deal with timeouts and acknowledgements of IBC packets + +## How the Interchange Exchange Module Works + +To build an exchange that works with two or more blockchains, follow the steps in this tutorial to create a Cosmos SDK +module called `dex`. 
+ +The new `dex` module allows you to open an exchange order book for a pair of token: a token from one blockchain and a token +on another blockchain. The blockchains are required to have the `dex` module available. + +Token can be bought or sold with limit orders on a simple order book. In this tutorial, there is no notion of a +liquidity pool or automated market maker (AMM). + +The market is unidirectional: + +- The token sold on the source chain cannot be bought back as it is +- The token bought from the target chain cannot be sold back using the same pair. + +If a token on a source chain is sold, it can only be bought back by creating a new pair on the order book. +This workflow is due to the nature of the Inter-Blockchain Communication protocol (IBC) which creates a `voucher` +token on the target blockchain. There is a difference of a native blockchain token and a `voucher` token that is minted +on another blockchain. You must create a second order book pair in order to receive the native token back. + +In the next chapter, you learn details about the design of the interblockchain exchange. diff --git a/docs/versioned_docs/version-v28/02-guide/07-interchange/01-design.md b/docs/versioned_docs/version-v28/02-guide/07-interchange/01-design.md new file mode 100644 index 0000000..448a0f5 --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/07-interchange/01-design.md @@ -0,0 +1,108 @@ +--- +sidebar_position: 1 +description: Learn about the interchain exchange module design. +--- + +# App Design + +In this chapter, you learn how the interchain exchange module is designed. The module has order books, buy orders, and +sell orders. + +- First, create an order book for a pair of token. +- After an order book exists, you can create buy and sell orders for this pair of token. + +The module uses the Inter-Blockchain Communication +protocol [IBC](https://github.com/cosmos/ibc/blob/old/ibc/2_IBC_ARCHITECTURE.md). 
+By using IBC, the module can create order books so that multiple blockchains can interact and exchange their token. + +You create an order book pair with a token from one blockchain and another token from another blockchain. In this +tutorial, call the module you create the `dex` module. + +> When a user exchanges a token with the `dex` module, a `voucher` of that token is received on the other blockchain. +> This voucher is similar to how an `ibc-transfer` is constructed. Since a blockchain module does not have the rights +> to mint new token of a blockchain into existence, the token on the target chain is locked up, and the buyer receives +> a `voucher` of that token. + +This process can be reversed when the `voucher` gets burned to unlock the original token. This exchange process is +explained in more detail throughout the tutorial. + +## Assumption of the Design + +An order book can be created for the exchange of any tokens between any pair of chains. + +- Both blockchains require the `dex` module to be installed and running. +- There can only be one order book for a pair of token at the same time. + +<!-- There is no condition to check for open channels between two chains. --> + +A specific chain cannot mint new coins of its native token. + +<!-- The module is trustless, there is no condition to check when opening a channel between two chains. +Any pair of tokens can be exchanged between any pair of chains. --> + +This module is inspired by the [`ibc transfer`](https://github.com/cosmos/ibc-go/tree/main/modules/apps/transfer) +module on the Cosmos SDK. The `dex` module you create in this tutorial has similarities, like the `voucher` creation. 
+ +However, the new `dex` module you are creating is more complex because it supports creation of: + +- Several types of packets to send +- Several types of acknowledgments to treat +- More complex logic on how to treat a packet on receipt, on timeout, and more + +## Interchain Exchange Overview + +Assume you have two blockchains: Venus and Mars. + +- The native token on Venus is `venuscoin`. +- The native token on Mars is `marscoin`. + +When a token is exchanged from Mars to Venus: + +- The Venus blockchain has an IBC `voucher` token with a denom that looks like `ibc/B5CB286...A7B21307F`. +- The long string of characters after `ibc/` is a denom trace hash of a token that was transferred using IBC. + +Using the blockchain's API you can get a denom trace from that hash. The denom trace consists of a `base_denom` and a +`path`. In our example: + +- The `base_denom` is `marscoin`. +- The `path` contains pairs of ports and channels through which the token has been transferred. + +For a single-hop transfer, the `path` is identified by `transfer/channel-0`. + +Learn more about token paths +in [ICS 20 Fungible Token Transfer](https://github.com/cosmos/ibc/tree/main/spec/app/ics-020-fungible-token-transfer). + +**Note:** This token `ibc/Venus/marscoin` cannot be sold back using the same order book. If you want to "reverse" the +exchange and receive the Mars token back, you must create and use a new order book for the `ibc/Venus/marscoin` to +`marscoin` transfer. + +## The Design of the Order Books + +As a typical exchange, a new pair implies the creation of an order book with orders to sell `marscoin` or orders to buy +`venuscoin`. Here, you have two chains and this data structure must be split between Mars and Venus. + +- Users from chain Mars sell `marscoin`. +- Users from chain Venus buy `marscoin`. + +Therefore, we represent: + +- All orders to sell `marscoin` on chain Mars. +- All orders to buy `marscoin` on chain Venus. 
+ +In this example, blockchain Mars holds the sell orders and blockchain Venus holds the buy orders. + +## Exchanging Tokens Back + +Like `ibc-transfer`, each blockchain keeps a trace of the token voucher that was created on the other blockchain. + +If blockchain Mars sells `marscoin` to chain Venus and `ibc/Venus/marscoin` is minted on Venus then, if +`ibc/Venus/marscoin` is sold back to Mars, the token is unlocked and the token that is received is `marscoin`. + +## Features + +The features supported by the interchain exchange module are: + +- Create an exchange order book for a token pair between two chains +- Send sell orders on source chain +- Send buy orders on target chain +- Cancel sell or buy orders diff --git a/docs/versioned_docs/version-v28/02-guide/07-interchange/02-init.md b/docs/versioned_docs/version-v28/02-guide/07-interchange/02-init.md new file mode 100644 index 0000000..5261268 --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/07-interchange/02-init.md @@ -0,0 +1,219 @@ +--- +sidebar_position: 2 +description: Create the blockchain for the interchain exchange app. +--- + +# App Init + +## Initialize the Blockchain + +In this chapter, you create the basic blockchain module for the interchain exchange app. You scaffold the blockchain, +the module, the transaction, the IBC packets, and messages. In later chapters, you integrate more code into each of the +transaction handlers. + +## Create the Blockchain + +Scaffold a new blockchain called `interchange`: + +```bash +ignite scaffold chain interchange --no-module +``` + +A new directory named `interchange` is created. + +Change into this directory where you can scaffold modules, types, and maps: + +```bash +cd interchange +``` + +The `interchange` directory contains a working blockchain app. + +A local GitHub repository has been created for you with the initial scaffold. + +Next, create a new IBC module. 
+ +## Create the dex Module + +Scaffold a module inside your blockchain named `dex` with IBC capabilities. + +The dex module contains the logic to create and maintain order books and route them through IBC to the second +blockchain. + +```bash +ignite scaffold module dex --ibc --ordering unordered --dep bank +``` + +## Create CRUD logic for Buy and Sell Order Books + +Scaffold two types with create, read, update, and delete (CRUD) actions. + +Run the following Ignite CLI `type` commands to create `sellOrderBook` and `buyOrderBook` types: + +```bash +ignite scaffold map sell-order-book amountDenom priceDenom --no-message --module dex +ignite scaffold map buy-order-book amountDenom priceDenom --no-message --module dex +``` + +The values are: + +- `amountDenom`: the token to be sold and in which quantity +- `priceDenom`: the token selling price + +The `--no-message` flag specifies to skip the message creation. Custom messages will be created in the next steps. + +The `--module dex` flag specifies to scaffold the type in the `dex` module. + +## Create the IBC Packets + +Create three packets for IBC: + +- An order book pair `createPair` +- A sell order `sellOrder` +- A buy order `buyOrder` + +```bash +ignite scaffold packet create-pair sourceDenom targetDenom --module dex +ignite scaffold packet sell-order amountDenom amount:int priceDenom price:int --ack remainingAmount:int,gain:int --module dex +ignite scaffold packet buy-order amountDenom amount:int priceDenom price:int --ack remainingAmount:int,purchase:int --module dex +``` + +The optional `--ack` flag defines field names and types of the acknowledgment returned after the packet has been +received by the target chain. The value of the `--ack` flag is a comma-separated list of names (no spaces). Append +optional types after a colon (`:`). + +## Cancel messages + +Cancelling orders is done locally in the network, there is no packet to send. 
+ +Use the `message` command to create a message to cancel a sell or buy order: + +```bash +ignite scaffold message cancel-sell-order port channel amountDenom priceDenom orderID:int --desc "Cancel a sell order" --module dex +ignite scaffold message cancel-buy-order port channel amountDenom priceDenom orderID:int --desc "Cancel a buy order" --module dex +``` + +Use the optional `--desc` flag to define a description of the CLI command that is used to broadcast a transaction with +the message. + +## Trace the Denom + +The token denoms must have the same behavior as described in the `ibc-transfer` module: + +- An external token received from a chain has a unique `denom`, referred to as `voucher`. +- When a token is sent to a blockchain and then sent back and received, the chain can resolve the voucher and convert + it back to the original token denomination. + +`Voucher` tokens are represented as hashes, therefore you must store which original denomination is related to a +voucher. +You can do this with an indexed type. + +For a `voucher` you store, define the source port ID, source channel ID, and the original denom: + +```bash +ignite scaffold map denom-trace port channel origin --no-message --module dex +``` + +## Create the Configuration for Two Blockchains + +Add two config files `mars.yml` and `venus.yml` to test two blockchain networks with specific token for each. + +Add the config files in the `interchange` folder. + +The native denoms for Mars are `marscoin`, and for Venus `venuscoin`. 
Create the `mars.yml` file with the following content:
diff --git a/docs/versioned_docs/version-v28/02-guide/07-interchange/03-walkthrough.md b/docs/versioned_docs/version-v28/02-guide/07-interchange/03-walkthrough.md new file mode 100644 index 0000000..198e222 --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/07-interchange/03-walkthrough.md @@ -0,0 +1,687 @@ +--- +sidebar_position: 3 +description: Walkthrough of commands to use the interchain exchange module. +--- + +# Use the Interchain Exchange + +In this chapter, you will learn about the exchange and how it will function once +it is implemented. This will give you a better understanding of what you will be +building in the coming chapters. + +To achieve this, we will perform the following tasks: + +* Start two local blockchains +* Set up an IBC relayer between the two chains +* Create an exchange order book for a token pair on the two chains +* Submit sell orders on the Mars chain +* Submit buy orders on the Venus chain +* Cancel sell or buy orders + +Starting the two local blockchains and setting up the IBC relayer will allow us +to create an exchange order book between the two chains. This order book will +allow us to submit sell and buy orders, as well as cancel any orders that we no +longer want to maintain. + +It is important to note that the commands in this chapter will only work +properly if you have completed all the following chapters in this tutorial. By +the end of this chapter, you should have a good understanding of how the +exchange will operate. + +## Start blockchain nodes + +To start using the interchain exchange, you will need to start two separate +blockchains. This can be done by running the `ignite chain serve` command, +followed by the `-c` flag and the path to the configuration file for each +blockchain. 
For example, to start the `mars` blockchain, you would run: + +``` +ignite chain serve -c mars.yml +``` + +To start the `venus` blockchain, you would run a similar command, but with the +path to the `venus.yml` configuration file: + +``` +ignite chain serve -c venus.yml +``` + +Once both blockchains are running, you can proceed with configuring the relayer +to enable interchain exchange between the two chains. + +## Relayer + +Next, let's set up an IBC relayer between two chains. If you have used a relayer +in the past, reset the relayer configuration directory: + +``` +rm -rf ~/.ignite/relayer +``` + +Now you can use the `ignite relayer configure` command. This command allows you +to specify the source and target chains, along with their respective RPC +endpoints, faucet URLs, port numbers, versions, gas prices, and gas limits. + +``` +ignite relayer configure -a --source-rpc "http://0.0.0.0:26657" --source-faucet "http://0.0.0.0:4500" --source-port "dex" --source-version "dex-1" --source-gasprice "0.0000025stake" --source-prefix "cosmos" --source-gaslimit 300000 --target-rpc "http://0.0.0.0:26659" --target-faucet "http://0.0.0.0:4501" --target-port "dex" --target-version "dex-1" --target-gasprice "0.0000025stake" --target-prefix "cosmos" --target-gaslimit 300000 +``` + +To create a connection between the two chains, you can use the ignite relayer +connect command. This command will establish a connection between the source and +target chains, allowing you to transfer data and assets between them. + +``` +ignite relayer connect +``` + +Now that we have two separate blockchain networks up and running, and a relayer +connection established to facilitate communication between them, we are ready to +begin using the interchain exchange binary to interact with these networks. This +will allow us to create order books and buy/sell orders, enabling us to trade +assets between the two chains. 
+ +## Order Book + +To create an order book for a pair of tokens, you can use the following command: + +``` +interchanged tx dex send-create-pair dex channel-0 marscoin venuscoin --from alice --chain-id mars --home ~/.mars +``` + +This command will create an order book for the pair of tokens `marscoin` and +`venuscoin`. The command will be executed by the user `alice` on the Mars +blockchain. The `--home` parameter specifies the location of the configuration +directory for the Mars blockchain. + +Creating an order book affects state on the Mars blockchain to which the +transaction was broadcast and the Venus blockchain. + +On the Mars blockchain, the `send-create-pair` command creates an empty sell +order book. + +``` +interchanged q dex list-sell-order-book +``` + +```yml +sellOrderBook: +- amountDenom: marscoin + book: + idCount: 0 + orders: [] + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +On the Venus blockchain, the same `send-createPair` command creates a buy order +book: + +``` +interchanged q dex list-buy-order-book --node tcp://localhost:26659 +``` + +```yml +buyOrderBook: +- amountDenom: marscoin + book: + idCount: 0 + orders: [] + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +In the `create-pair` command on the Mars blockchain, an IBC packet is sent to +the Venus chain. This packet contains information that is used to create a buy +order book on the Venus chain. + +When the Venus chain receives the IBC packet, it processes the information +contained in the packet and creates a buy order book. The Venus chain then sends +an acknowledgement back to the Mars chain to confirm that the buy order book has +been successfully created. + +Upon receiving the acknowledgement from the Venus chain, the Mars chain creates +a sell order book. This sell order book is associated with the buy order book on +the Venus chain, allowing users to trade assets between the two chains. 
+ +## Sell Order + +After creating an order book, the next step is to create a sell order. This can +be done using the `send-sell-order` command, which is used to broadcast a +transaction with a message that locks a specified amount of tokens and creates a +sell order on the Mars blockchain. + +``` +interchanged tx dex send-sell-order dex channel-0 marscoin 10 venuscoin 15 --from alice --chain-id mars --home ~/.mars +``` + +In the example provided, the `send-sell-order` command is used to create a sell +order for 10 `marscoin` token and 15 `venuscoin` token. This sell order will be +added to the order book on the Mars blockchain. + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +```yml +balances: +- amount: "990" # decreased from 1000 + denom: marscoin +- amount: "1000" + denom: token +``` + +``` +interchanged q dex list-sell-order-book +``` + +```yml +sellOrderBook: +- amountDenom: marscoin + book: + idCount: 1 + orders: # a new sell order is created + - amount: 10 + creator: cosmos14ntyzr6d2dx4ppds9tvenx53fn0xl5jcakrtm4 + id: 0 + price: 15 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +## Buy order + +After creating a sell order, the next step in the trading process is typically +to create a buy order. This can be done using the `send-buy-order` command, +which is used to lock a specified amount of tokens and create a buy order on the +Venus blockchain + +``` +interchanged tx dex send-buy-order dex channel-0 marscoin 10 venuscoin 5 --from alice --chain-id venus --home ~/.venus --node tcp://localhost:26659 +``` + +In the example provided, the `send-buy-order` command is used to create a buy +order for 10 `marscoin` token and 5 `venuscoin` token. This buy order will be +added to the order book on the Venus blockchain. 
+ +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +```yml +balances: +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "950" # decreased from 1000 + denom: venuscoin +``` + +``` +interchanged q dex list-buy-order-book --node tcp://localhost:26659 +``` + +```yml +buyOrderBook: +- amountDenom: marscoin + book: + idCount: 1 + orders: # a new buy order is created + - amount: 10 + creator: cosmos1mrrttwtdcp47pl4hq6sar3mwqpmtc7pcl9e6ss + id: 0 + price: 5 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +## Perform an Exchange with a Sell Order + +You currently have two open orders for `marscoin`: + +* A sell order on the Mars chain, where you are offering to sell 10 `marscoin` + for 15 `venuscoin`. +* A buy order on the Venus chain, where you are willing to buy 5 `marscoin` for + 5 `venuscoin`. + +To perform an exchange, you can send a sell order to the Mars chain using the +following command: + +``` +interchanged tx dex send-sell-order dex channel-0 marscoin 5 venuscoin 3 --from alice --home ~/.mars +``` + +This sell order, offering to sell 5 `marscoin` for 3 `venuscoin`, will be filled +on the Venus chain by the existing buy order. This will result in the amount of +the buy order on the Venus chain being reduced by 5 `marscoin`. + +``` +interchanged q dex list-buy-order-book --node tcp://localhost:26659 +``` + +```yml +buyOrderBook: +- amountDenom: marscoin + book: + idCount: 1 + orders: + - amount: 5 # decreased from 10 + creator: cosmos1mrrttwtdcp47pl4hq6sar3mwqpmtc7pcl9e6ss + id: 0 + price: 5 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +The sender of the filled sell order traded 5 `marscoin` for 25 `venuscoin` +tokens. This means that the amount of the sell order (5 `marscoin`) was +multiplied by the price of the buy order (5 `venuscoin`) to determine the value +of the exchange. 
In this case, the value of the exchange was 25 `venuscoin` +vouchers. + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +```yml +balances: +- amount: "25" # increased from 0 + denom: ibc/BB38C24E9877 +- amount: "985" # decreased from 990 + denom: marscoin +- amount: "1000" + denom: token +``` + +The counterparty, or the sender of the buy `marscoin` order, will receive 5 +`marscoin` as a result of the exchange. + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +```yml +balances: +- amount: "5" # increased from 0 + denom: ibc/745B473BFE24 # marscoin voucher +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "950" + denom: venuscoin +``` + +The `venuscoin` balance has remained unchanged because the appropriate amount of +`venuscoin` (50) was already locked at the time the buy order was created in the +previous step. + + +## Perform an Exchange with a Buy Order + +To perform an exchange with a buy order, send a transaction to the decentralized +exchange to buy 5 `marscoin` for 15 `venuscoin`. This is done by running the +following command: + +``` +interchanged tx dex send-buy-order dex channel-0 marscoin 5 venuscoin 15 --from alice --home ~/.venus --node tcp://localhost:26659 +``` + +This buy order will be immediately filled on the Mars chain, and the creator of +the sell order will receive 75 `venuscoin` vouchers as payment. + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +```yml +balances: +- amount: "100" # increased from 25 + denom: ibc/BB38C24E9877 # venuscoin voucher +- amount: "985" + denom: marscoin +- amount: "1000" + denom: token +``` + +The amount of the sell order will be decreased by the amount of the filled buy +order, so in this case it will be decreased by 5 `marscoin`. 
+ +``` +interchanged q dex list-sell-order-book +``` + +```yml +sellOrderBook: +- amountDenom: marscoin + book: + idCount: 1 + orders: + - amount: 5 # decreased from 10 + creator: cosmos14ntyzr6d2dx4ppds9tvenx53fn0xl5jcakrtm4 + id: 0 + price: 15 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +The creator of the buy order receives 5 marscoin vouchers for 75 venuscoin +(5marscoin * 15venuscoin): + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +```yml +balances: +- amount: "10" # increased from 5 + denom: ibc/745B473BFE24 # marscoin vouchers +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "875" # decreased from 950 + denom: venuscoin +``` + +## Complete Exchange with a Partially Filled Sell Order + +To complete the exchange with a partially filled sell order, send a transaction +to the decentralized exchange to sell 10 `marscoin` for 3 `venuscoin`. This is +done by running the following command: + +``` +interchanged tx dex send-sell-order dex channel-0 marscoin 10 venuscoin 3 --from alice --home ~/.mars +``` + +In this scenario, the sell amount is 10 `marscoin`, but there is an existing buy +order for only 5 `marscoin`. The buy order will be filled completely and removed +from the order book. The author of the previously created buy order will receive +10 `marscoin` vouchers from the exchange. 
To check the balances, the author of the buy order can run the following command:
This is +done by running the following command: + +``` +interchanged tx dex send-buy-order dex channel-0 marscoin 10 venuscoin 5 --from alice --home ~/.venus --node tcp://localhost:26659 +``` + +In this scenario, the buy order is only partially filled for 5 `marscoin`. There +is an existing sell order for 5 `marscoin` (with a price of 3 `venuscoin`) on +the Mars chain, which is completely filled and removed from the order book. The +author of the closed sell order will receive 15 `venuscoin` vouchers as payment, +which is the product of 5 `marscoin` and 3 `venuscoin`. + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +```yml +balances: +- amount: "140" # increased from 125 + denom: ibc/BB38C24E9877 # venuscoin vouchers +- amount: "975" + denom: marscoin +- amount: "1000" + denom: token +``` + +``` +interchanged q dex list-sell-order-book +``` + +```yml +sellOrderBook: +- amountDenom: marscoin + book: + idCount: 2 + orders: + - amount: 5 # order hasn't changed + creator: cosmos14ntyzr6d2dx4ppds9tvenx53fn0xl5jcakrtm4 + id: 0 + price: 15 + # a sell order for 5 marscoin has been closed + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +In this scenario, the author of the buy order will receive 5 `marscoin` vouchers +as payment, which locks up 50 `venuscoin` of their token. The remaining 5 +`marscoin` that is not filled by the sell order will create a new buy order on +the Venus chain. This means that the author of the buy order is still interested +in purchasing 5 `marscoin`, and is willing to pay the specified price for it. +The new buy order will remain on the order book until it is filled by another +sell order, or it is cancelled by the buyer. 
+ +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +```yml +balances: +- amount: "20" # increased from 15 + denom: ibc/745B473BFE24 # marscoin vouchers +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "825" # decreased from 875 + denom: venuscoin +``` + +``` +interchanged q dex list-buy-order-book --node tcp://localhost:26659 +``` + +```yml +buyOrderBook: +- amountDenom: marscoin + book: + idCount: 2 + orders: + - amount: 5 # new buy order is created + creator: cosmos1mrrttwtdcp47pl4hq6sar3mwqpmtc7pcl9e6ss + id: 1 + price: 5 + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +## Cancel an Order + +After the exchanges described, there are still two open orders: a sell order on +the Mars chain (5 `marscoin` for 15 `venuscoin`), and a buy order on the Venus +chain (5 `marscoin` for 5 `venuscoin`). + +To cancel an order on a blockchain, you can use the `cancel-sell-order` or +`cancel-buy-order` command, depending on the type of order you want to cancel. +The command takes several arguments, including the `channel-id` of the IBC +connection, the `amount-denom` and `price-denom` of the order, and the +`order-id` of the order you want to cancel. + +To cancel a sell order on the Mars chain, you would run the following command: + +``` +interchanged tx dex cancel-sell-order dex channel-0 marscoin venuscoin 0 --from alice --home ~/.mars +``` + +This will cancel the sell order and remove it from the order book. The balance +of Alice's `marscoin` will be increased by the amount of the cancelled sell +order. + +To check Alice's balances, including her updated `marscoin` balance, run the +following command: + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.mars) +``` + +This will return a list of Alice's balances, including her updated `marscoin` +balance. 
+ +```yml +balances: +- amount: "140" + denom: ibc/BB38C24E9877 +- amount: "980" # increased from 975 + denom: marscoin +- amount: "1000" + denom: token +``` + +After the sell order on the Mars chain has been cancelled, the sell order book +on that blockchain will be empty. This means that there are no longer any active +sell orders on the Mars chain, and anyone interested in purchasing `marscoin` +will need to create a new buy order. The sell order book will remain empty until +a new sell order is created and added to it. + +``` +interchanged q dex list-sell-order-book +``` + +```yml +sellOrderBook: +- amountDenom: marscoin + book: + idCount: 2 + orders: [] + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +To cancel a buy order on the `Venus` chain, you can run the following command: + +``` +interchanged tx dex cancel-buy-order dex channel-0 marscoin venuscoin 1 --from alice --home ~/.venus --node tcp://localhost:26659 +``` + +This will cancel the buy order and remove it from the order book. The balance of +Alice's `venuscoin` will be increased by the amount of the cancelled buy order. + +To check Alice's balances, including her updated `venuscoin` balance, you can +run the following command: + +``` +interchanged q bank balances $(interchanged keys show -a alice --home ~/.venus) --node tcp://localhost:26659 +``` + +The amount of `venuscoin` is increased: + +```yml +balances: +- amount: "20" + denom: ibc/745B473BFE24 +- amount: "900000000" + denom: stake +- amount: "1000" + denom: token +- amount: "850" # increased from 825 + denom: venuscoin +``` + +This will return a list of Alice's balances, including her updated `venuscoin` +balance. + +After canceling a buy order, the buy order book on the Venus blockchain will be +empty. This means that there are no longer any active buy orders on the chain, +and anyone interested in selling `marscoin` will need to create a new sell +order. 
The buy order book will remain empty until a new buy order is created and +added to it. + +``` +interchanged q dex list-buy-order-book --node tcp://localhost:26659 +``` + +```yml +buyOrderBook: +- amountDenom: marscoin + book: + idCount: 2 + orders: [] + index: dex-channel-0-marscoin-venuscoin + priceDenom: venuscoin +``` + +In this walkthrough, we demonstrated how to set up an interchain exchange for +trading tokens between two different blockchain networks. This involved creating +an exchange order book for a specific token pair and establishing a fixed +exchange rate between the two. + +Once the exchange was set up, users could send sell orders on the Mars chain and +buy orders on the Venus chain. This allowed them to offer their tokens for sale +or purchase tokens from the exchange. In addition, users could also cancel their +orders if needed. \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/02-guide/07-interchange/04-creating-order-books.md b/docs/versioned_docs/version-v28/02-guide/07-interchange/04-creating-order-books.md new file mode 100644 index 0000000..ffe5a2b --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/07-interchange/04-creating-order-books.md @@ -0,0 +1,480 @@ +--- +sidebar_position: 4 +description: Implement logic to create order books. +--- + +# Implement the Order Books + +In this chapter, you implement the logic to create order books. + +In the Cosmos SDK, the state is stored in a key-value store. Each order book is stored under a unique key that is +composed of four values: + +- Port ID +- Channel ID +- Source denom +- Target denom + +For example, an order book for marscoin and venuscoin could be stored under `dex-channel-4-marscoin-venuscoin`. + +First, define a function that returns an order book store key: + +```go +// x/dex/types/keys.go +package types + +import "fmt" + +// ... 
+func OrderBookIndex(portID string, channelID string, sourceDenom string, targetDenom string) string { + return fmt.Sprintf("%s-%s-%s-%s", portID, channelID, sourceDenom, targetDenom) +} +``` + +The `send-create-pair` command is used to create order books. This command: + +- Creates and broadcasts a transaction with a message of type `SendCreatePair`. +- The message gets routed to the `dex` module. +- Finally, a `SendCreatePair` keeper method is called. + +You need the `send-create-pair` command to do the following: + +- When processing `SendCreatePair` message on the source chain: + - Check that an order book with the given pair of denoms does not yet exist. + - Transmit an IBC packet with information about port, channel, source denoms, and target denoms. +- After the packet is received on the target chain: + - Check that an order book with the given pair of denoms does not yet exist on the target chain. + - Create a new order book for buy orders. + - Transmit an IBC acknowledgement back to the source chain. +- After the acknowledgement is received on the source chain: + - Create a new order book for sell orders. + +## Message Handling in SendCreatePair + +The `SendCreatePair` function was created during the IBC packet scaffolding. The function creates an IBC packet, +populates it with source and target denoms, and transmits this packet over IBC. + +Now, add the logic to check for an existing order book for a particular pair of denoms: + +```go +// x/dex/keeper/msg_server_create_pair.go + +package keeper + +import ( + "errors" + // ... 
+) + +func (k msgServer) SendCreatePair(goCtx context.Context, msg *types.MsgSendCreatePair) (*types.MsgSendCreatePairResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Get an order book index + pairIndex := types.OrderBookIndex(msg.Port, msg.ChannelID, msg.SourceDenom, msg.TargetDenom) + + // If an order book is found, return an error + _, found := k.GetSellOrderBook(ctx, pairIndex) + if found { + return &types.MsgSendCreatePairResponse{}, errors.New("the pair already exist") + } + + // Construct the packet + var packet types.CreatePairPacketData + + packet.SourceDenom = msg.SourceDenom + packet.TargetDenom = msg.TargetDenom + + // Transmit the packet + _, err := k.TransmitCreatePairPacket( + ctx, + packet, + msg.Port, + msg.ChannelID, + clienttypes.ZeroHeight(), + msg.TimeoutTimestamp, + ) + if err != nil { + return nil, err + } + + return &types.MsgSendCreatePairResponse{}, nil +} +``` + +## Lifecycle of an IBC Packet + +During a successful transmission, an IBC packet goes through these stages: + +1. Message processing before packet transmission on the source chain +2. Reception of a packet on the target chain +3. Acknowledgment of a packet on the source chain +4. Timeout of a packet on the source chain + +In the following section, implement the packet reception logic in the `OnRecvCreatePairPacket` function and the packet +acknowledgement logic in the `OnAcknowledgementCreatePairPacket` function. + +Leave the Timeout function empty. + +## Receive an IBC packet + +The protocol buffer definition defines the data that an order book contains. + +Add the `OrderBook` and `Order` messages to the `order.proto` file. + +First, add the proto buffer files to build the Go code files. You can modify these files for the purpose of your app. 
+ +Create a new `order.proto` file in the `proto/interchange/dex` directory and add the content: + +```protobuf +// proto/interchange/dex/order.proto + +syntax = "proto3"; + +package interchange.dex; + +option go_package = "interchange/x/dex/types"; + +message OrderBook { + int32 idCount = 1; + repeated Order orders = 2; +} + +message Order { + int32 id = 1; + string creator = 2; + int32 amount = 3; + int32 price = 4; +} +``` + +Modify the `buy_order_book.proto` file to have the fields for creating a buy order on the order book. +Don't forget to add the import as well. + +**Tip:** Don't forget to add the import as well. + +```protobuf +// proto/interchange/dex/buy_order_book.proto + +// ... + +import "interchange/dex/order.proto"; + +message BuyOrderBook { + // ... + OrderBook book = 4; +} +``` + +Modify the `sell_order_book.proto` file to add the order book into the buy order book. + +The proto definition for the `SellOrderBook` looks like: + +```protobuf +// proto/interchange/dex/sell_order_book.proto + +// ... +import "interchange/dex/order.proto"; + +message SellOrderBook { + // ... + OrderBook book = 4; +} +``` + +Now, use Ignite CLI to build the proto files for the `send-create-pair` command: + +```bash +ignite generate proto-go --yes +``` + +Start enhancing the functions for the IBC packets. + +Create a new file `x/dex/types/order_book.go`. 
+ +Add the new order book function to the corresponding Go file: + +```go +// x/dex/types/order_book.go + +package types + +func NewOrderBook() OrderBook { + return OrderBook{ + IdCount: 0, + } +} +``` + +To create a new buy order book type, define `NewBuyOrderBook` in a new file `x/dex/types/buy_order_book.go` : + +```go +// x/dex/types/buy_order_book.go + +package types + +func NewBuyOrderBook(AmountDenom string, PriceDenom string) BuyOrderBook { + book := NewOrderBook() + return BuyOrderBook{ + AmountDenom: AmountDenom, + PriceDenom: PriceDenom, + Book: &book, + } +} +``` + +When an IBC packet is received on the target chain, the module must check whether a book already exists. If not, then +create a buy order book for the specified denoms. + +```go +// x/dex/keeper/create_pair.go + +package keeper + +// ... + +func (k Keeper) OnRecvCreatePairPacket(ctx sdk.Context, packet channeltypes.Packet, data types.CreatePairPacketData) (packetAck types.CreatePairPacketAck, err error) { + // ... + + // Get an order book index + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.SourceDenom, data.TargetDenom) + + // If an order book is found, return an error + _, found := k.GetBuyOrderBook(ctx, pairIndex) + if found { + return packetAck, errors.New("the pair already exist") + } + + // Create a new buy order book for source and target denoms + book := types.NewBuyOrderBook(data.SourceDenom, data.TargetDenom) + + // Assign order book index + book.Index = pairIndex + + // Save the order book to the store + k.SetBuyOrderBook(ctx, book) + return packetAck, nil +} +``` + +## Receive an IBC Acknowledgement + +When an IBC acknowledgement is received on the source chain, the module must check whether a book already exists. If +not, +create a sell order book for the specified denoms. + +Create a new file `x/dex/types/sell_order_book.go`. +Insert the `NewSellOrderBook` function which creates a new sell order book. 
+ +```go +// x/dex/types/sell_order_book.go + +package types + +func NewSellOrderBook(AmountDenom string, PriceDenom string) SellOrderBook { + book := NewOrderBook() + return SellOrderBook{ + AmountDenom: AmountDenom, + PriceDenom: PriceDenom, + Book: &book, + } +} +``` + +Modify the Acknowledgement function in the `x/dex/keeper/create_pair.go` file: + +```go +// x/dex/keeper/create_pair.go + +package keeper + +// ... + +func (k Keeper) OnAcknowledgementCreatePairPacket(ctx sdk.Context, packet channeltypes.Packet, data types.CreatePairPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.CreatePairPacketAck + if err := types.ModuleCdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + // Set the sell order book + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.SourceDenom, data.TargetDenom) + book := types.NewSellOrderBook(data.SourceDenom, data.TargetDenom) + book.Index = pairIndex + k.SetSellOrderBook(ctx, book) + + return nil + default: + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("invalid acknowledgment format") + } +} +``` + +In this section, you implemented the logic behind the new `send-create-pair` command: + +- When an IBC packet is received on the target chain, `send-create-pair` command creates a buy order book. +- When an IBC acknowledgement is received on the source chain, the `send-create-pair` command creates a sell order book. 
+ +### Implement the appendOrder Function to Add Orders to the Order Book + +```go +// x/dex/types/order_book.go + +package types + +import ( + "errors" + "sort" +) + +func NewOrderBook() OrderBook { + return OrderBook{ + IdCount: 0, + } +} + +const ( + MaxAmount = int32(100000) + MaxPrice = int32(100000) +) + +type Ordering int + +const ( + Increasing Ordering = iota + Decreasing +) + +var ( + ErrMaxAmount = errors.New("max amount reached") + ErrMaxPrice = errors.New("max price reached") + ErrZeroAmount = errors.New("amount is zero") + ErrZeroPrice = errors.New("price is zero") + ErrOrderNotFound = errors.New("order not found") +) +``` + +The `AppendOrder` function initializes and appends a new order to an order book from the order information: + +```go +// x/dex/types/order_book.go + +func (book *OrderBook) appendOrder(creator string, amount int32, price int32, ordering Ordering) (int32, error) { + if err := checkAmountAndPrice(amount, price); err != nil { + return 0, err + } + + // Initialize the order + var order Order + order.Id = book.GetNextOrderID() + order.Creator = creator + order.Amount = amount + order.Price = price + + // Increment ID tracker + book.IncrementNextOrderID() + + // Insert the order + book.insertOrder(order, ordering) + return order.Id, nil +} +``` + +#### Implement the checkAmountAndPrice Function For an Order + +The `checkAmountAndPrice` function checks for the correct amount or price: + +```go +// x/dex/types/order_book.go + +func checkAmountAndPrice(amount int32, price int32) error { + if amount == int32(0) { + return ErrZeroAmount + } + if amount > MaxAmount { + return ErrMaxAmount + } + + if price == int32(0) { + return ErrZeroPrice + } + if price > MaxPrice { + return ErrMaxPrice + } + + return nil +} +``` + +#### Implement the GetNextOrderID Function + +The `GetNextOrderID` function gets the ID of the next order to append: + +```go +// x/dex/types/order_book.go + +func (book OrderBook) GetNextOrderID() int32 { + return book.IdCount 
+} +``` + +#### Implement the IncrementNextOrderID Function + +The `IncrementNextOrderID` function updates the ID count for orders: + +```go +// x/dex/types/order_book.go + +func (book *OrderBook) IncrementNextOrderID() { + // Even numbers to have different ID than buy orders + book.IdCount++ +} +``` + +#### Implement the insertOrder Function + +The `insertOrder` function inserts the order in the book with the provided order: + +```go +// x/dex/types/order_book.go + +func (book *OrderBook) insertOrder(order Order, ordering Ordering) { + if len(book.Orders) > 0 { + var i int + + // get the index of the new order depending on the provided ordering + if ordering == Increasing { + i = sort.Search(len(book.Orders), func(i int) bool { return book.Orders[i].Price > order.Price }) + } else { + i = sort.Search(len(book.Orders), func(i int) bool { return book.Orders[i].Price < order.Price }) + } + + // insert order + orders := append(book.Orders, &order) + copy(orders[i+1:], orders[i:]) + orders[i] = &order + book.Orders = orders + } else { + book.Orders = append(book.Orders, &order) + } +} +``` + +This completes the order book setup. + +Now is a good time to save the state of your implementation. +Because your project is in a local repository, you can use git. Saving your current state lets you jump back and forth +in case you introduce errors or need a break. + +```bash +git add . +git commit -m "Create Order Books" +``` + +In the next chapter, you learn how to deal with vouchers by minting and burning vouchers and locking and unlocking +native blockchain token in your app. 
diff --git a/docs/versioned_docs/version-v28/02-guide/07-interchange/05-mint-and-burn-voucher.md b/docs/versioned_docs/version-v28/02-guide/07-interchange/05-mint-and-burn-voucher.md new file mode 100644 index 0000000..5b29c3c --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/07-interchange/05-mint-and-burn-voucher.md @@ -0,0 +1,351 @@ +--- +order: 5 +description: Mint vouchers and lock and unlock native token from a blockchain. +--- + +# Mint and Burn Vouchers + +In this chapter, you learn about vouchers. The `dex` module implementation mints vouchers and locks and unlocks native +token from a blockchain. + +There is a lot to learn from this `dex` module implementation: + +- You work with the `bank` keeper and use several methods it offers. +- You interact with another module and use the module account to lock tokens. + +This implementation can teach you how to use various interactions with module accounts or minting, locking or burning +tokens. + +## Create the SafeBurn Function to Burn Vouchers or Lock Tokens + +The `SafeBurn` function burns tokens if they are IBC vouchers (have an `ibc/` prefix) and locks tokens if they are +native to the chain. 
+ +Create a new `x/dex/keeper/mint.go` file: + +```go +// x/dex/keeper/mint.go + +package keeper + +import ( + "fmt" + "strings" + + sdkmath "cosmossdk.io/math" + sdk "github.com/cosmos/cosmos-sdk/types" + ibctransfertypes "github.com/cosmos/ibc-go/v6/modules/apps/transfer/types" + + "interchange/x/dex/types" +) + +// isIBCToken checks if the token came from the IBC module +// Each IBC token starts with an ibc/ denom, the check is rather simple +func isIBCToken(denom string) bool { + return strings.HasPrefix(denom, "ibc/") +} + +func (k Keeper) SafeBurn(ctx sdk.Context, port string, channel string, sender sdk.AccAddress, denom string, amount int32) error { + if isIBCToken(denom) { + // Burn the tokens + if err := k.BurnTokens(ctx, sender, sdk.NewCoin(denom, sdkmath.NewInt(int64(amount)))); err != nil { + return err + } + } else { + // Lock the tokens + if err := k.LockTokens(ctx, port, channel, sender, sdk.NewCoin(denom, sdkmath.NewInt(int64(amount)))); err != nil { + return err + } + } + + return nil +} +``` + +If the token comes from another blockchain as an IBC token, the burning method actually burns those IBC tokens on one +chain and unlocks them on the other chain. The native token are locked away. + +Now, implement the `BurnTokens` keeper method as used in the previous function. The `bankKeeper` has a useful function +for this: + +```go +// x/dex/keeper/mint.go + +package keeper + +// ... + +func (k Keeper) BurnTokens(ctx sdk.Context, sender sdk.AccAddress, tokens sdk.Coin) error { + // transfer the coins to the module account and burn them + if err := k.bankKeeper.SendCoinsFromAccountToModule(ctx, sender, types.ModuleName, sdk.NewCoins(tokens)); err != nil { + return err + } + + if err := k.bankKeeper.BurnCoins( + ctx, types.ModuleName, sdk.NewCoins(tokens), + ); err != nil { + // NOTE: should not happen as the module account was + // retrieved on the step above and it has enough balance + // to burn. 
+ panic(fmt.Sprintf("cannot burn coins after a successful send to a module account: %v", err)) + } + + return nil +} +``` + +Implement the `LockTokens` keeper method. + +To lock token from a native chain, you can send the native token to the Escrow Address: + +```go +// x/dex/keeper/mint.go + +package keeper + +// ... + +func (k Keeper) LockTokens(ctx sdk.Context, sourcePort string, sourceChannel string, sender sdk.AccAddress, tokens sdk.Coin) error { + // create the escrow address for the tokens + escrowAddress := ibctransfertypes.GetEscrowAddress(sourcePort, sourceChannel) + + // escrow source tokens. It fails if balance insufficient + if err := k.bankKeeper.SendCoins( + ctx, sender, escrowAddress, sdk.NewCoins(tokens), + ); err != nil { + return err + } + + return nil +} +``` + +`BurnTokens` and `LockTokens` use `SendCoinsFromAccountToModule`, `BurnCoins`, and `SendCoins` keeper methods of the +`bank` module. + +To start using these function from the `dex` module, first add them to the `BankKeeper` interface in the +`x/dex/types/expected_keepers.go` file. + +```go +// x/dex/types/expected_keepers.go + +package types + +import sdk "github.com/cosmos/cosmos-sdk/types" + +// BankKeeper defines the expected bank keeper +type BankKeeper interface { + //... + SendCoinsFromAccountToModule(ctx sdk.Context, senderAddr sdk.AccAddress, recipientModule string, amt sdk.Coins) error + BurnCoins(ctx sdk.Context, moduleName string, amt sdk.Coins) error + SendCoins(ctx sdk.Context, fromAddr sdk.AccAddress, toAddr sdk.AccAddress, amt sdk.Coins) error +} +``` + +## SaveVoucherDenom + +The `SaveVoucherDenom` function saves the voucher denom to be able to convert it back later. 
+ +Create a new `x/dex/keeper/denom.go` file: + +```go +// x/dex/keeper/denom.go + +package keeper + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + ibctransfertypes "github.com/cosmos/ibc-go/v6/modules/apps/transfer/types" + + "interchange/x/dex/types" +) + +func (k Keeper) SaveVoucherDenom(ctx sdk.Context, port string, channel string, denom string) { + voucher := VoucherDenom(port, channel, denom) + + // Store the origin denom + _, saved := k.GetDenomTrace(ctx, voucher) + if !saved { + k.SetDenomTrace(ctx, types.DenomTrace{ + Index: voucher, + Port: port, + Channel: channel, + Origin: denom, + }) + } +} +``` + +Finally, the last function to implement is the `VoucherDenom` function that returns the voucher of the denom from the +port ID and channel ID: + +```go +// x/dex/keeper/denom.go + +package keeper + +// ... + +func VoucherDenom(port string, channel string, denom string) string { + // since SendPacket did not prefix the denomination, we must prefix denomination here + sourcePrefix := ibctransfertypes.GetDenomPrefix(port, channel) + + // NOTE: sourcePrefix contains the trailing "/" + prefixedDenom := sourcePrefix + denom + + // construct the denomination trace from the full raw denomination + denomTrace := ibctransfertypes.ParseDenomTrace(prefixedDenom) + voucher := denomTrace.IBCDenom() + return voucher[:16] +} +``` + +### Implement an OriginalDenom Function + +The `OriginalDenom` function returns back the original denom of the voucher. + +False is returned if the port ID and channel ID provided are not the origins of the voucher: + +```go +// x/dex/keeper/denom.go + +package keeper + +// ... 
+ +func (k Keeper) OriginalDenom(ctx sdk.Context, port string, channel string, voucher string) (string, bool) { + trace, exist := k.GetDenomTrace(ctx, voucher) + if exist { + // Check if original port and channel + if trace.Port == port && trace.Channel == channel { + return trace.Origin, true + } + } + + // Not the original chain + return "", false +} +``` + +### Implement a SafeMint Function + +If a token is an IBC token (has an `ibc/` prefix), the `SafeMint` function mints IBC token with `MintTokens`. +Otherwise, it unlocks native token with `UnlockTokens`. + +Go back to the `x/dex/keeper/mint.go` file and add the following code: + +```go +// x/dex/keeper/mint.go + +package keeper + +// ... + +func (k Keeper) SafeMint(ctx sdk.Context, port string, channel string, receiver sdk.AccAddress, denom string, amount int32) error { + if isIBCToken(denom) { + // Mint IBC tokens + if err := k.MintTokens(ctx, receiver, sdk.NewCoin(denom, sdkmath.NewInt(int64(amount)))); err != nil { + return err + } + } else { + // Unlock native tokens + if err := k.UnlockTokens( + ctx, + port, + channel, + receiver, + sdk.NewCoin(denom, sdkmath.NewInt(int64(amount))), + ); err != nil { + return err + } + } + + return nil +} +``` + +#### Implement a `MintTokens` Function + +You can use the `bankKeeper` function again to MintCoins. These token will then be sent to the receiver account: + +```go +// x/dex/keeper/mint.go + +package keeper + +// ... 
+ +func (k Keeper) MintTokens(ctx sdk.Context, receiver sdk.AccAddress, tokens sdk.Coin) error { + // mint new tokens if the source of the transfer is the same chain + if err := k.bankKeeper.MintCoins( + ctx, types.ModuleName, sdk.NewCoins(tokens), + ); err != nil { + return err + } + + // send to receiver + if err := k.bankKeeper.SendCoinsFromModuleToAccount( + ctx, types.ModuleName, receiver, sdk.NewCoins(tokens), + ); err != nil { + panic(fmt.Sprintf("unable to send coins from module to account despite previously minting coins to module account: %v", err)) + } + + return nil +} +``` + +Finally, add the function to unlock token after they are sent back to the native blockchain: + +```go +// x/dex/keeper/mint.go + +package keeper + +// ... + +func (k Keeper) UnlockTokens(ctx sdk.Context, sourcePort string, sourceChannel string, receiver sdk.AccAddress, tokens sdk.Coin) error { + // create the escrow address for the tokens + escrowAddress := ibctransfertypes.GetEscrowAddress(sourcePort, sourceChannel) + + // escrow source tokens. It fails if balance insufficient + if err := k.bankKeeper.SendCoins( + ctx, escrowAddress, receiver, sdk.NewCoins(tokens), + ); err != nil { + return err + } + + return nil +} +``` + +The `MintTokens` function uses two keeper methods from the `bank` module: `MintCoins` and `SendCoinsFromModuleToAccount` +. +To import these methods, add their signatures to the `BankKeeper` interface in the `x/dex/types/expected_keepers.go` +file: + +```go +// x/dex/types/expected_keepers.go + +package types + +// ... + +type BankKeeper interface { + // ... + MintCoins(ctx sdk.Context, moduleName string, amt sdk.Coins) error + SendCoinsFromModuleToAccount(ctx sdk.Context, senderModule string, recipientAddr sdk.AccAddress, amt sdk.Coins) error +} +``` + +## Summary + +You finished the mint and burn voucher logic. + +It is a good time to make another git commit to save the state of your work: + +```bash +git add . 
+git commit -m "Add Mint and Burn Voucher" +``` + +In the next chapter, you look into creating sell orders. diff --git a/docs/versioned_docs/version-v28/02-guide/07-interchange/06-creating-sell-orders.md b/docs/versioned_docs/version-v28/02-guide/07-interchange/06-creating-sell-orders.md new file mode 100644 index 0000000..46f527d --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/07-interchange/06-creating-sell-orders.md @@ -0,0 +1,402 @@ +--- +sidebar_position: 6 +description: Implement logic to create sell orders. +--- + +# Create Sell Orders + +In this chapter, you implement the logic for creating sell orders. + +The packet proto file for a sell order is already generated. Add the seller information: + +```protobuf +// proto/dex/packet.proto + +message SellOrderPacketData { + // ... + string seller = 5; +} +``` + +Now, use Ignite CLI to build the proto files for the `send-sell-order` command. You used this command in a previous +chapter. + +```bash +ignite generate proto-go --yes +``` + +## Message Handling in SendSellOrder + +Sell orders are created using the `send-sell-order` command. This command creates a transaction with a `SendSellOrder` +message that triggers the `SendSellOrder` keeper method. + +The `SendSellOrder` command: + +* Checks that an order book for a specified denom pair exists. +* Safely burns or locks token. + * If the token is an IBC token, burn the token. + * If the token is a native token, lock the token. +* Saves the voucher that is received on the target chain to later resolve a denom. +* Transmits an IBC packet to the target chain. 
+ +```go +// x/dex/keeper/msg_server_sell_order.go + +package keeper + +import ( + "context" + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + clienttypes "github.com/cosmos/ibc-go/v6/modules/core/02-client/types" + + "interchange/x/dex/types" +) + +func (k msgServer) SendSellOrder(goCtx context.Context, msg *types.MsgSendSellOrder) (*types.MsgSendSellOrderResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // If an order book doesn't exist, throw an error + pairIndex := types.OrderBookIndex(msg.Port, msg.ChannelID, msg.AmountDenom, msg.PriceDenom) + _, found := k.GetSellOrderBook(ctx, pairIndex) + if !found { + return &types.MsgSendSellOrderResponse{}, errors.New("the pair doesn't exist") + } + + // Get sender's address + sender, err := sdk.AccAddressFromBech32(msg.Creator) + if err != nil { + return &types.MsgSendSellOrderResponse{}, err + } + + // Use SafeBurn to ensure no new native tokens are minted + if err := k.SafeBurn(ctx, msg.Port, msg.ChannelID, sender, msg.AmountDenom, msg.Amount); err != nil { + return &types.MsgSendSellOrderResponse{}, err + } + + // Save the voucher received on the other chain, to have the ability to resolve it into the original denom + k.SaveVoucherDenom(ctx, msg.Port, msg.ChannelID, msg.AmountDenom) + + var packet types.SellOrderPacketData + packet.Seller = msg.Creator + packet.AmountDenom = msg.AmountDenom + packet.Amount = msg.Amount + packet.PriceDenom = msg.PriceDenom + packet.Price = msg.Price + + // Transmit the packet + err = k.TransmitSellOrderPacket(ctx, packet, msg.Port, msg.ChannelID, clienttypes.ZeroHeight(), msg.TimeoutTimestamp) + if err != nil { + return nil, err + } + + return &types.MsgSendSellOrderResponse{}, nil +} +``` + +## On Receiving a Sell Order + +When a "sell order" packet is received on the target chain, you want the module to: + +* Update the sell order book +* Distribute sold token to the buyer +* Send the sell order to chain A after the fill attempt + +```go +// 
x/dex/keeper/sell_order.go

package keeper

// ...

func (k Keeper) OnRecvSellOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.SellOrderPacketData) (packetAck types.SellOrderPacketAck, err error) {
	if err := data.ValidateBasic(); err != nil {
		return packetAck, err
	}

	pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.AmountDenom, data.PriceDenom)
	book, found := k.GetBuyOrderBook(ctx, pairIndex)
	if !found {
		return packetAck, errors.New("the pair doesn't exist")
	}

	// Fill sell order
	remaining, liquidated, gain, _ := book.FillSellOrder(types.Order{
		Amount: data.Amount,
		Price:  data.Price,
	})

	// Return remaining amount and gains
	packetAck.RemainingAmount = remaining.Amount
	packetAck.Gain = gain

	// Before distributing sales, we resolve the denom
	// First we check if the denom received comes from this chain originally
	finalAmountDenom, saved := k.OriginalDenom(ctx, packet.DestinationPort, packet.DestinationChannel, data.AmountDenom)
	if !saved {
		// If it was not from this chain we use voucher as denom
		finalAmountDenom = VoucherDenom(packet.SourcePort, packet.SourceChannel, data.AmountDenom)
	}

	// Dispatch liquidated buy orders
	for _, liquidation := range liquidated {
		liquidation := liquidation
		addr, err := sdk.AccAddressFromBech32(liquidation.Creator)
		if err != nil {
			return packetAck, err
		}

		if err := k.SafeMint(ctx, packet.DestinationPort, packet.DestinationChannel, addr, finalAmountDenom, liquidation.Amount); err != nil {
			return packetAck, err
		}
	}

	// Save the new order book
	k.SetBuyOrderBook(ctx, book)

	return packetAck, nil
}
```

### Implement the FillSellOrder Function

The `FillSellOrder` function tries to fill the sell order with the order book and returns all the side effects:

```go
// x/dex/types/buy_order_book.go

package types

// ... 
+

func (b *BuyOrderBook) FillSellOrder(order Order) (
	remainingSellOrder Order,
	liquidated []Order,
	gain int32,
	filled bool,
) {
	var liquidatedList []Order
	totalGain := int32(0)
	remainingSellOrder = order

	// Liquidate as long as there is match
	for {
		var match bool
		var liquidation Order
		remainingSellOrder, liquidation, gain, match, filled = b.LiquidateFromSellOrder(
			remainingSellOrder,
		)
		if !match {
			break
		}

		// Update gains
		totalGain += gain

		// Update liquidated
		liquidatedList = append(liquidatedList, liquidation)

		if filled {
			break
		}
	}

	return remainingSellOrder, liquidatedList, totalGain, filled
}
```

### Implement The LiquidateFromSellOrder Function

The `LiquidateFromSellOrder` function liquidates the first buy order of the book from the sell order. If no match is
found, return false for match:

```go
// x/dex/types/buy_order_book.go

package types

// ...

func (b *BuyOrderBook) LiquidateFromSellOrder(order Order) (
	remainingSellOrder Order,
	liquidatedBuyOrder Order,
	gain int32,
	match bool,
	filled bool,
) {
	remainingSellOrder = order

	// No match if no order
	orderCount := len(b.Book.Orders)
	if orderCount == 0 {
		return order, liquidatedBuyOrder, gain, false, false
	}

	// Check if match
	highestBid := b.Book.Orders[orderCount-1]
	if order.Price > highestBid.Price {
		return order, liquidatedBuyOrder, gain, false, false
	}

	liquidatedBuyOrder = *highestBid

	// Check if sell order can be entirely filled
	if highestBid.Amount >= order.Amount {
		remainingSellOrder.Amount = 0
		liquidatedBuyOrder.Amount = order.Amount
		gain = order.Amount * highestBid.Price

		// Remove the highest bid if it has been entirely liquidated
		highestBid.Amount -= order.Amount
		if highestBid.Amount == 0 {
			b.Book.Orders = b.Book.Orders[:orderCount-1]
		} else {
			b.Book.Orders[orderCount-1] = highestBid
		}

		return remainingSellOrder, liquidatedBuyOrder, gain, true, 
true + } + + // Not entirely filled + gain = highestBid.Amount * highestBid.Price + b.Book.Orders = b.Book.Orders[:orderCount-1] + remainingSellOrder.Amount -= highestBid.Amount + + return remainingSellOrder, liquidatedBuyOrder, gain, true, false +} +``` + +### Implement the OnAcknowledgement Function for Sell Order Packets + +After an IBC packet is processed on the target chain, an acknowledgement is returned to the source chain and processed +by the `OnAcknowledgementSellOrderPacket` function. + +The dex module on the source chain: + +* Stores the remaining sell order in the sell order book. +* Distributes sold tokens to the buyers. +* Distributes the price of the amount sold to the seller. +* On error, mints the burned tokens. + +```go +// x/dex/keeper/sell_order.go + +package keeper + +// ... + +func (k Keeper) OnAcknowledgementSellOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.SellOrderPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + // In case of error we mint back the native token + receiver, err := sdk.AccAddressFromBech32(data.Seller) + if err != nil { + return err + } + + if err := k.SafeMint(ctx, packet.SourcePort, packet.SourceChannel, receiver, data.AmountDenom, data.Amount); err != nil { + return err + } + + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.SellOrderPacketAck + if err := types.ModuleCdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + // Get the sell order book + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.AmountDenom, data.PriceDenom) + book, found := k.GetSellOrderBook(ctx, pairIndex) + if !found { + panic("sell order book must exist") + } + + // Append the 
remaining amount of the order + if packetAck.RemainingAmount > 0 { + _, err := book.AppendOrder(data.Seller, packetAck.RemainingAmount, data.Price) + if err != nil { + return err + } + + // Save the new order book + k.SetSellOrderBook(ctx, book) + } + + // Mint the gains + if packetAck.Gain > 0 { + receiver, err := sdk.AccAddressFromBech32(data.Seller) + if err != nil { + return err + } + + finalPriceDenom, saved := k.OriginalDenom(ctx, packet.SourcePort, packet.SourceChannel, data.PriceDenom) + if !saved { + // If it was not from this chain we use voucher as denom + finalPriceDenom = VoucherDenom(packet.DestinationPort, packet.DestinationChannel, data.PriceDenom) + } + + if err := k.SafeMint(ctx, packet.SourcePort, packet.SourceChannel, receiver, finalPriceDenom, packetAck.Gain); err != nil { + return err + } + } + + return nil + default: + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("invalid acknowledgment format") + } +} +``` + +```go +// x/dex/types/sell_order_book.go + +package types + +// ... + +func (s *SellOrderBook) AppendOrder(creator string, amount int32, price int32) (int32, error) { + return s.Book.appendOrder(creator, amount, price, Decreasing) +} +``` + +### Add the OnTimeout of a Sell Order Packet Function + +If a timeout occurs, mint back the native token: + +```go +// x/dex/keeper/sell_order.go + +package keeper + +// ... + +func (k Keeper) OnTimeoutSellOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.SellOrderPacketData) error { + // In case of error we mint back the native token + receiver, err := sdk.AccAddressFromBech32(data.Seller) + if err != nil { + return err + } + + if err := k.SafeMint(ctx, packet.SourcePort, packet.SourceChannel, receiver, data.AmountDenom, data.Amount); err != nil { + return err + } + + return nil +} +``` + +## Summary + +Great, you have completed the sell order logic. 
+ +It is a good time to make another git commit again to save the state of your work: + +```bash +git add . +git commit -m "Add Sell Orders" +``` diff --git a/docs/versioned_docs/version-v28/02-guide/07-interchange/07-creating-buy-orders.md b/docs/versioned_docs/version-v28/02-guide/07-interchange/07-creating-buy-orders.md new file mode 100644 index 0000000..e1ed165 --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/07-interchange/07-creating-buy-orders.md @@ -0,0 +1,440 @@ +--- +sidebar_position: 7 +description: Implement the buy order logic. +--- + +# Creating Buy Orders + +In this chapter, you implement the creation of buy orders. The logic is very similar to the sell order logic you +implemented in the previous chapter. + +## Modify the Proto Definition + +Add the buyer to the proto file definition: + +```protobuf +// proto/interchange/dex/packet.proto + +message BuyOrderPacketData { + // ... + string buyer = 5; +} +``` + +Now, use Ignite CLI to build the proto files for the `send-buy-order` command. You used this command in previous +chapters. 
+ +```bash +ignite generate proto-go --yes +``` + +## IBC Message Handling in SendBuyOrder + +* Check if the pair exists on the order book +* If the token is an IBC token, burn the tokens +* If the token is a native token, lock the tokens +* Save the voucher received on the target chain to later resolve a denom + +```go +// x/dex/keeper/msg_server_buy_order.go + +package keeper + +import ( + "context" + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "interchange/x/dex/types" +) + +func (k msgServer) SendBuyOrder(goCtx context.Context, msg *types.MsgSendBuyOrder) (*types.MsgSendBuyOrderResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Cannot send a order if the pair doesn't exist + pairIndex := types.OrderBookIndex(msg.Port, msg.ChannelID, msg.AmountDenom, msg.PriceDenom) + _, found := k.GetBuyOrderBook(ctx, pairIndex) + if !found { + return &types.MsgSendBuyOrderResponse{}, errors.New("the pair doesn't exist") + } + + // Lock the token to send + sender, err := sdk.AccAddressFromBech32(msg.Creator) + if err != nil { + return &types.MsgSendBuyOrderResponse{}, err + } + + // Use SafeBurn to ensure no new native tokens are minted + if err := k.SafeBurn(ctx, msg.Port, msg.ChannelID, sender, msg.PriceDenom, msg.Amount*msg.Price); err != nil { + return &types.MsgSendBuyOrderResponse{}, err + } + + // Save the voucher received on the other chain, to have the ability to resolve it into the original denom + k.SaveVoucherDenom(ctx, msg.Port, msg.ChannelID, msg.PriceDenom) + + // Construct the packet + var packet types.BuyOrderPacketData + + packet.Buyer = msg.Creator + packet.AmountDenom = msg.AmountDenom + packet.Amount = msg.Amount + packet.PriceDenom = msg.PriceDenom + packet.Price = msg.Price + + // Transmit the packet + err = k.TransmitBuyOrderPacket( + ctx, + packet, + msg.Port, + msg.ChannelID, + clienttypes.ZeroHeight(), + msg.TimeoutTimestamp, + ) + if err != nil { + return nil, err + } + + // Transmit an IBC packet... 
+ return &types.MsgSendBuyOrderResponse{}, nil +} +``` + +## On Receiving a Buy Order + +* Update the buy order book +* Distribute sold token to the buyer +* Send the sell order to chain A after the fill attempt + +```go +// x/dex/keeper/buy_order.go + +package keeper + +// ... + +func (k Keeper) OnRecvBuyOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.BuyOrderPacketData) (packetAck types.BuyOrderPacketAck, err error) { + // validate packet data upon receiving + if err := data.ValidateBasic(); err != nil { + return packetAck, err + } + + // Check if the sell order book exists + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.AmountDenom, data.PriceDenom) + book, found := k.GetSellOrderBook(ctx, pairIndex) + if !found { + return packetAck, errors.New("the pair doesn't exist") + } + + // Fill buy order + remaining, liquidated, purchase, _ := book.FillBuyOrder(types.Order{ + Amount: data.Amount, + Price: data.Price, + }) + + // Return remaining amount and gains + packetAck.RemainingAmount = remaining.Amount + packetAck.Purchase = purchase + + // Before distributing gains, we resolve the denom + // First we check if the denom received comes from this chain originally + finalPriceDenom, saved := k.OriginalDenom(ctx, packet.DestinationPort, packet.DestinationChannel, data.PriceDenom) + if !saved { + // If it was not from this chain we use voucher as denom + finalPriceDenom = VoucherDenom(packet.SourcePort, packet.SourceChannel, data.PriceDenom) + } + + // Dispatch liquidated buy order + for _, liquidation := range liquidated { + liquidation := liquidation + addr, err := sdk.AccAddressFromBech32(liquidation.Creator) + if err != nil { + return packetAck, err + } + + if err := k.SafeMint( + ctx, + packet.DestinationPort, + packet.DestinationChannel, + addr, + finalPriceDenom, + liquidation.Amount*liquidation.Price, + ); err != nil { + return packetAck, err + } + } + + // Save the new order book + k.SetSellOrderBook(ctx, 
book)

	return packetAck, nil
}
```

### Implement a FillBuyOrder Function

The `FillBuyOrder` function tries to fill the buy order with the order book and returns all the side effects:

```go
// x/dex/types/sell_order_book.go

package types

// ...

func (s *SellOrderBook) FillBuyOrder(order Order) (
	remainingBuyOrder Order,
	liquidated []Order,
	purchase int32,
	filled bool,
) {
	var liquidatedList []Order
	totalPurchase := int32(0)
	remainingBuyOrder = order

	// Liquidate as long as there is match
	for {
		var match bool
		var liquidation Order
		remainingBuyOrder, liquidation, purchase, match, filled = s.LiquidateFromBuyOrder(
			remainingBuyOrder,
		)
		if !match {
			break
		}

		// Update gains
		totalPurchase += purchase

		// Update liquidated
		liquidatedList = append(liquidatedList, liquidation)

		if filled {
			break
		}
	}

	return remainingBuyOrder, liquidatedList, totalPurchase, filled
}
```

### Implement a LiquidateFromBuyOrder Function

The `LiquidateFromBuyOrder` function liquidates the first sell order of the book from the buy order. If no match is
found, return false for match:

```go
// x/dex/types/sell_order_book.go

package types

// ... 
+ +func (s *SellOrderBook) LiquidateFromBuyOrder(order Order) ( + remainingBuyOrder Order, + liquidatedSellOrder Order, + purchase int32, + match bool, + filled bool, +) { + remainingBuyOrder = order + + // No match if no order + orderCount := len(s.Book.Orders) + if orderCount == 0 { + return order, liquidatedSellOrder, purchase, false, false + } + + // Check if match + lowestAsk := s.Book.Orders[orderCount-1] + if order.Price < lowestAsk.Price { + return order, liquidatedSellOrder, purchase, false, false + } + + liquidatedSellOrder = *lowestAsk + + // Check if buy order can be entirely filled + if lowestAsk.Amount >= order.Amount { + remainingBuyOrder.Amount = 0 + liquidatedSellOrder.Amount = order.Amount + purchase = order.Amount + + // Remove lowest ask if it has been entirely liquidated + lowestAsk.Amount -= order.Amount + if lowestAsk.Amount == 0 { + s.Book.Orders = s.Book.Orders[:orderCount-1] + } else { + s.Book.Orders[orderCount-1] = lowestAsk + } + + return remainingBuyOrder, liquidatedSellOrder, purchase, true, true + } + + // Not entirely filled + purchase = lowestAsk.Amount + s.Book.Orders = s.Book.Orders[:orderCount-1] + remainingBuyOrder.Amount -= lowestAsk.Amount + + return remainingBuyOrder, liquidatedSellOrder, purchase, true, false +} +``` + +## Receiving a Buy Order Acknowledgment + +After a buy order acknowledgement is received, chain `Mars`: + +* Stores the remaining sell order in the sell order book. +* Distributes sold `marscoin` to the buyers. +* Distributes to the seller the price of the amount sold. +* On error, mints back the burned tokens. + +```go +// x/dex/keeper/buy_order.go + +package keeper + +// ... 
+ +func (k Keeper) OnAcknowledgementBuyOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.BuyOrderPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + // In case of error we mint back the native token + receiver, err := sdk.AccAddressFromBech32(data.Buyer) + if err != nil { + return err + } + + if err := k.SafeMint( + ctx, + packet.SourcePort, + packet.SourceChannel, + receiver, + data.PriceDenom, + data.Amount*data.Price, + ); err != nil { + return err + } + + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.BuyOrderPacketAck + + if err := types.ModuleCdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + // Get the sell order book + pairIndex := types.OrderBookIndex(packet.SourcePort, packet.SourceChannel, data.AmountDenom, data.PriceDenom) + book, found := k.GetBuyOrderBook(ctx, pairIndex) + if !found { + panic("buy order book must exist") + } + + // Append the remaining amount of the order + if packetAck.RemainingAmount > 0 { + _, err := book.AppendOrder( + data.Buyer, + packetAck.RemainingAmount, + data.Price, + ) + if err != nil { + return err + } + + // Save the new order book + k.SetBuyOrderBook(ctx, book) + } + + // Mint the purchase + if packetAck.Purchase > 0 { + receiver, err := sdk.AccAddressFromBech32(data.Buyer) + if err != nil { + return err + } + + finalAmountDenom, saved := k.OriginalDenom(ctx, packet.SourcePort, packet.SourceChannel, data.AmountDenom) + if !saved { + // If it was not from this chain we use voucher as denom + finalAmountDenom = VoucherDenom(packet.DestinationPort, packet.DestinationChannel, data.AmountDenom) + } + + if err := k.SafeMint( + ctx, + packet.SourcePort, + packet.SourceChannel, + receiver, + 
finalAmountDenom, + packetAck.Purchase, + ); err != nil { + return err + } + } + + return nil + default: + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("invalid acknowledgment format") + } +} +``` + +`AppendOrder` appends an order in the buy order book. +Add the following function to the `x/dex/types/buy_order_book.go` file in the `types` directory. + +```go +// x/dex/types/buy_order_book.go + +package types + +// ... + +func (b *BuyOrderBook) AppendOrder(creator string, amount int32, price int32) (int32, error) { + return b.Book.appendOrder(creator, amount, price, Increasing) +} +``` + +## OnTimeout of a Buy Order Packet + +If a timeout occurs, mint back the native token: + +```go +// x/dex/keeper/buy_order.go + +package keeper + +// ... + +func (k Keeper) OnTimeoutBuyOrderPacket(ctx sdk.Context, packet channeltypes.Packet, data types.BuyOrderPacketData) error { + // In case of error we mint back the native token + receiver, err := sdk.AccAddressFromBech32(data.Buyer) + if err != nil { + return err + } + + if err := k.SafeMint( + ctx, + packet.SourcePort, + packet.SourceChannel, + receiver, + data.PriceDenom, + data.Amount*data.Price, + ); err != nil { + return err + } + + return nil +} +``` + +## Summary + +Congratulations, you implemented the buy order logic. + +Again, it's a good time to save your current state to your local GitHub repository: + +```bash +git add . +git commit -m "Add Buy Orders" +``` diff --git a/docs/versioned_docs/version-v28/02-guide/07-interchange/08-cancelling-orders.md b/docs/versioned_docs/version-v28/02-guide/07-interchange/08-cancelling-orders.md new file mode 100644 index 0000000..f6c44ee --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/07-interchange/08-cancelling-orders.md @@ -0,0 +1,200 @@ +--- +sidebar_position: 8 +description: Enable cancelling of buy and sell orders. +--- + +# Cancelling Orders + +You have implemented order books, buy and sell orders. 
In this chapter, you enable cancelling of buy and sell orders. + +## Cancel a Sell Order + +To cancel a sell order, you have to get the ID of the specific sell order. Then you can use the function +`RemoveOrderFromID` to remove the specific order from the order book and update the keeper accordingly. + +Move to the keeper directory and edit the `x/dex/keeper/msg_server_cancel_sell_order.go` file: + +```go +// x/dex/keeper/msg_server_cancel_sell_order.go + +package keeper + +import ( + "context" + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "interchange/x/dex/types" +) + +func (k msgServer) CancelSellOrder(goCtx context.Context, msg *types.MsgCancelSellOrder) (*types.MsgCancelSellOrderResponse, error) { + ctx := sdk.UnwrapSDKContext(goCtx) + + // Retrieve the book + pairIndex := types.OrderBookIndex(msg.Port, msg.Channel, msg.AmountDenom, msg.PriceDenom) + s, found := k.GetSellOrderBook(ctx, pairIndex) + if !found { + return &types.MsgCancelSellOrderResponse{}, errors.New("the pair doesn't exist") + } + + // Check order creator + order, err := s.Book.GetOrderFromID(msg.OrderID) + if err != nil { + return &types.MsgCancelSellOrderResponse{}, err + } + + if order.Creator != msg.Creator { + return &types.MsgCancelSellOrderResponse{}, errors.New("canceller must be creator") + } + + // Remove order + if err := s.Book.RemoveOrderFromID(msg.OrderID); err != nil { + return &types.MsgCancelSellOrderResponse{}, err + } + + k.SetSellOrderBook(ctx, s) + + // Refund seller with remaining amount + seller, err := sdk.AccAddressFromBech32(order.Creator) + if err != nil { + return &types.MsgCancelSellOrderResponse{}, err + } + + if err := k.SafeMint(ctx, msg.Port, msg.Channel, seller, msg.AmountDenom, order.Amount); err != nil { + return &types.MsgCancelSellOrderResponse{}, err + } + + return &types.MsgCancelSellOrderResponse{}, nil +} +``` + +### Implement the GetOrderFromID Function + +The `GetOrderFromID` function gets an order from the book from its ID. 

Add this function to the `x/dex/types/order_book.go` file in the `types` directory:

```go
// x/dex/types/order_book.go

func (book OrderBook) GetOrderFromID(id int32) (Order, error) {
	for _, order := range book.Orders {
		if order.Id == id {
			return *order, nil
		}
	}

	return Order{}, ErrOrderNotFound
}
```

### Implement the RemoveOrderFromID Function

The `RemoveOrderFromID` function removes an order from the book and keeps it ordered:

```go
// x/dex/types/order_book.go

package types

// ...

func (book *OrderBook) RemoveOrderFromID(id int32) error {
	for i, order := range book.Orders {
		if order.Id == id {
			book.Orders = append(book.Orders[:i], book.Orders[i+1:]...)
			return nil
		}
	}

	return ErrOrderNotFound
}
```

## Cancel a Buy Order

To cancel a buy order, you have to get the ID of the specific buy order. Then you can use the function
`RemoveOrderFromID` to remove the specific order from the order book and update the keeper accordingly:

```go
// x/dex/keeper/msg_server_cancel_buy_order.go

package keeper

import (
	"context"
	"errors"

	sdk "github.com/cosmos/cosmos-sdk/types"

	"interchange/x/dex/types"
)

func (k msgServer) CancelBuyOrder(goCtx context.Context, msg *types.MsgCancelBuyOrder) (*types.MsgCancelBuyOrderResponse, error) {
	ctx := sdk.UnwrapSDKContext(goCtx)

	// Retrieve the book
	pairIndex := types.OrderBookIndex(msg.Port, msg.Channel, msg.AmountDenom, msg.PriceDenom)
	b, found := k.GetBuyOrderBook(ctx, pairIndex)
	if !found {
		return &types.MsgCancelBuyOrderResponse{}, errors.New("the pair doesn't exist")
	}

	// Check order creator
	order, err := b.Book.GetOrderFromID(msg.OrderID)
	if err != nil {
		return &types.MsgCancelBuyOrderResponse{}, err
	}

	if order.Creator != msg.Creator {
		return &types.MsgCancelBuyOrderResponse{}, errors.New("canceller must be creator")
	}

	// Remove order
	if err := b.Book.RemoveOrderFromID(msg.OrderID); err != nil {
return &types.MsgCancelBuyOrderResponse{}, err + } + + k.SetBuyOrderBook(ctx, b) + + // Refund buyer with remaining price amount + buyer, err := sdk.AccAddressFromBech32(order.Creator) + if err != nil { + return &types.MsgCancelBuyOrderResponse{}, err + } + + if err := k.SafeMint( + ctx, + msg.Port, + msg.Channel, + buyer, + msg.PriceDenom, + order.Amount*order.Price, + ); err != nil { + return &types.MsgCancelBuyOrderResponse{}, err + } + + return &types.MsgCancelBuyOrderResponse{}, nil +} +``` + +## Summary + +You have completed implementing the functions that are required for the `dex` module. In this chapter, you have +implemented the design for cancelling specific buy or sell orders. + +To test if your Ignite CLI blockchain builds correctly, use the `chain build` command: + +```bash +ignite chain build +``` + +Again, it is a good time (a great time!) to add your state to the local GitHub repository: + +```bash +git add . +git commit -m "Add Cancelling Orders" +``` + +Finally, it's now time to write test files. diff --git a/docs/versioned_docs/version-v28/02-guide/07-interchange/09-tests.md b/docs/versioned_docs/version-v28/02-guide/07-interchange/09-tests.md new file mode 100644 index 0000000..8d3d933 --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/07-interchange/09-tests.md @@ -0,0 +1,729 @@ +--- +sidebar_position: 9 +description: Add test files. +--- + +# Write Test Files + +To test your application, add the test files to your code. + +After you add the test files, change into the `interchange` directory with your terminal, then run: + +```bash +go test -timeout 30s ./x/dex/types +``` + +## Order Book Tests + +Create a new `x/dex/types/order_book_test.go` file in the `types` directory. 
+ +Add the following testsuite: + +```go +// x/dex/types/order_book_test.go + +package types_test + +import ( + "math/rand" + "testing" + + "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/stretchr/testify/require" + + "interchange/x/dex/types" +) + +func GenString(n int) string { + alpha := []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ") + + buf := make([]rune, n) + for i := range buf { + buf[i] = alpha[rand.Intn(len(alpha))] + } + + return string(buf) +} + +func GenAddress() string { + pk := ed25519.GenPrivKey().PubKey() + addr := pk.Address() + return sdk.AccAddress(addr).String() +} + +func GenAmount() int32 { + return int32(rand.Intn(int(types.MaxAmount)) + 1) +} + +func GenPrice() int32 { + return int32(rand.Intn(int(types.MaxPrice)) + 1) +} + +func GenPair() (string, string) { + return GenString(10), GenString(10) +} + +func GenOrder() (string, int32, int32) { + return GenLocalAccount(), GenAmount(), GenPrice() +} + +func GenLocalAccount() string { + return GenAddress() +} + +func MockAccount(str string) string { + return str +} + +func OrderListToOrderBook(list []types.Order) types.OrderBook { + listCopy := make([]*types.Order, len(list)) + for i, order := range list { + order := order + listCopy[i] = &order + } + + return types.OrderBook{ + IdCount: 0, + Orders: listCopy, + } +} + +func TestRemoveOrderFromID(t *testing.T) { + inputList := []types.Order{ + {Id: 3, Creator: MockAccount("3"), Amount: 2, Price: 10}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + + book := OrderListToOrderBook(inputList) + expectedList := []types.Order{ + {Id: 3, Creator: MockAccount("3"), Amount: 2, Price: 10}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + expectedBook := 
OrderListToOrderBook(expectedList) + err := book.RemoveOrderFromID(2) + require.NoError(t, err) + require.Equal(t, expectedBook, book) + + book = OrderListToOrderBook(inputList) + expectedList = []types.Order{ + {Id: 3, Creator: MockAccount("3"), Amount: 2, Price: 10}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + } + expectedBook = OrderListToOrderBook(expectedList) + err = book.RemoveOrderFromID(0) + require.NoError(t, err) + require.Equal(t, expectedBook, book) + + book = OrderListToOrderBook(inputList) + expectedList = []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + expectedBook = OrderListToOrderBook(expectedList) + err = book.RemoveOrderFromID(3) + require.NoError(t, err) + require.Equal(t, expectedBook, book) + + book = OrderListToOrderBook(inputList) + err = book.RemoveOrderFromID(4) + require.ErrorIs(t, err, types.ErrOrderNotFound) +} +``` + +## Buy Order Tests + +Create a new `x/dex/types/buy_order_book_test.go` file in the `types` directory to add the tests for the Buy Order Book: + +```go +// x/dex/types/buy_order_book_test.go + +package types_test + +import ( + "sort" + "testing" + + "github.com/stretchr/testify/require" + + "interchange/x/dex/types" +) + +func OrderListToBuyOrderBook(list []types.Order) types.BuyOrderBook { + listCopy := make([]*types.Order, len(list)) + for i, order := range list { + order := order + listCopy[i] = &order + } + + book := types.BuyOrderBook{ + AmountDenom: "foo", + PriceDenom: "bar", + Book: &types.OrderBook{ + IdCount: 0, + Orders: listCopy, + }, + } + return book +} + +func TestAppendOrder(t *testing.T) { + buyBook := types.NewBuyOrderBook(GenPair()) + + // Prevent zero amount + seller, amount, price := GenOrder() + _, err := buyBook.AppendOrder(seller, 0, price) + require.ErrorIs(t, 
err, types.ErrZeroAmount) + + // Prevent big amount + _, err = buyBook.AppendOrder(seller, types.MaxAmount+1, price) + require.ErrorIs(t, err, types.ErrMaxAmount) + + // Prevent zero price + _, err = buyBook.AppendOrder(seller, amount, 0) + require.ErrorIs(t, err, types.ErrZeroPrice) + + // Prevent big price + _, err = buyBook.AppendOrder(seller, amount, types.MaxPrice+1) + require.ErrorIs(t, err, types.ErrMaxPrice) + + // Can append buy orders + for i := 0; i < 20; i++ { + // Append a new order + creator, amount, price := GenOrder() + newOrder := types.Order{ + Id: buyBook.Book.IdCount, + Creator: creator, + Amount: amount, + Price: price, + } + orderID, err := buyBook.AppendOrder(creator, amount, price) + + // Checks + require.NoError(t, err) + require.Contains(t, buyBook.Book.Orders, &newOrder) + require.Equal(t, newOrder.Id, orderID) + } + + require.Len(t, buyBook.Book.Orders, 20) + require.True(t, sort.SliceIsSorted(buyBook.Book.Orders, func(i, j int) bool { + return buyBook.Book.Orders[i].Price < buyBook.Book.Orders[j].Price + })) +} + +type liquidateSellRes struct { + Book []types.Order + Remaining types.Order + Liquidated types.Order + Gain int32 + Match bool + Filled bool +} + +func simulateLiquidateFromSellOrder( + t *testing.T, + inputList []types.Order, + inputOrder types.Order, + expected liquidateSellRes, +) { + book := OrderListToBuyOrderBook(inputList) + expectedBook := OrderListToBuyOrderBook(expected.Book) + + require.True(t, sort.SliceIsSorted(book.Book.Orders, func(i, j int) bool { + return book.Book.Orders[i].Price < book.Book.Orders[j].Price + })) + require.True(t, sort.SliceIsSorted(expectedBook.Book.Orders, func(i, j int) bool { + return expectedBook.Book.Orders[i].Price < expectedBook.Book.Orders[j].Price + })) + + remaining, liquidated, gain, match, filled := book.LiquidateFromSellOrder(inputOrder) + + require.Equal(t, expectedBook, book) + require.Equal(t, expected.Remaining, remaining) + require.Equal(t, expected.Liquidated, liquidated) 
+ require.Equal(t, expected.Gain, gain) + require.Equal(t, expected.Match, match) + require.Equal(t, expected.Filled, filled) +} + +func TestLiquidateFromSellOrder(t *testing.T) { + // No match for empty book + inputOrder := types.Order{Id: 10, Creator: MockAccount("1"), Amount: 100, Price: 30} + book := OrderListToBuyOrderBook([]types.Order{}) + _, _, _, match, _ := book.LiquidateFromSellOrder(inputOrder) + require.False(t, match) + + // Buy book + inputBook := []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + + // Test no match if highest bid too low (25 < 30) + book = OrderListToBuyOrderBook(inputBook) + _, _, _, match, _ = book.LiquidateFromSellOrder(inputOrder) + require.False(t, match) + + // Entirely filled (30 < 50) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 22} + expected := liquidateSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 20, Price: 25}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 22}, + Liquidated: types.Order{Id: 0, Creator: MockAccount("0"), Amount: 30, Price: 25}, + Gain: int32(30 * 25), + Match: true, + Filled: true, + } + simulateLiquidateFromSellOrder(t, inputBook, inputOrder, expected) + + // Entirely filled and liquidated ( 50 = 50) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 50, Price: 15} + expected = liquidateSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 15}, + Liquidated: types.Order{Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + 
Gain: int32(50 * 25), + Match: true, + Filled: true, + } + simulateLiquidateFromSellOrder(t, inputBook, inputOrder, expected) + + // Not filled and entirely liquidated (60 > 50) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 10} + expected = liquidateSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 10, Price: 10}, + Liquidated: types.Order{Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + Gain: int32(50 * 25), + Match: true, + Filled: false, + } + simulateLiquidateFromSellOrder(t, inputBook, inputOrder, expected) +} + +type fillSellRes struct { + Book []types.Order + Remaining types.Order + Liquidated []types.Order + Gain int32 + Filled bool +} + +func simulateFillSellOrder( + t *testing.T, + inputList []types.Order, + inputOrder types.Order, + expected fillSellRes, +) { + book := OrderListToBuyOrderBook(inputList) + expectedBook := OrderListToBuyOrderBook(expected.Book) + + require.True(t, sort.SliceIsSorted(book.Book.Orders, func(i, j int) bool { + return book.Book.Orders[i].Price < book.Book.Orders[j].Price + })) + require.True(t, sort.SliceIsSorted(expectedBook.Book.Orders, func(i, j int) bool { + return expectedBook.Book.Orders[i].Price < expectedBook.Book.Orders[j].Price + })) + + remaining, liquidated, gain, filled := book.FillSellOrder(inputOrder) + + require.Equal(t, expectedBook, book) + require.Equal(t, expected.Remaining, remaining) + require.Equal(t, expected.Liquidated, liquidated) + require.Equal(t, expected.Gain, gain) + require.Equal(t, expected.Filled, filled) +} + +func TestFillSellOrder(t *testing.T) { + var inputBook []types.Order + + // Empty book + inputOrder := types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 30} + expected := fillSellRes{ + Book: []types.Order{}, + Remaining: inputOrder, + Liquidated: 
[]types.Order(nil), + Gain: int32(0), + Filled: false, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) + + // No match + inputBook = []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + } + expected = fillSellRes{ + Book: inputBook, + Remaining: inputOrder, + Liquidated: []types.Order(nil), + Gain: int32(0), + Filled: false, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) + + // First order liquidated, not filled + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 22} + expected = fillSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 10, Price: 22}, + Liquidated: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + }, + Gain: int32(50 * 25), + Filled: false, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) + + // Filled with two order + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 18} + expected = fillSellRes{ + Book: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 190, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 18}, + Liquidated: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 10, Price: 20}, + }, + Gain: int32(50*25 + 10*20), + Filled: true, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) + + // Not filled, buy order book liquidated + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 300, Price: 10} + expected = fillSellRes{ + Book: []types.Order{}, + Remaining: types.Order{Id: 10, 
Creator: MockAccount("1"), Amount: 20, Price: 10}, + Liquidated: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + }, + Gain: int32(50*25 + 200*20 + 30*15), + Filled: false, + } + simulateFillSellOrder(t, inputBook, inputOrder, expected) +} +``` + +## Sell Order Tests + +Create a new testsuite for Sell Orders in a new file `x/dex/types/sell_order_book_test.go`: + +```go +// x/dex/types/sell_order_book_test.go + +package types_test + +import ( + "sort" + "testing" + + "github.com/stretchr/testify/require" + + "interchange/x/dex/types" +) + +func OrderListToSellOrderBook(list []types.Order) types.SellOrderBook { + listCopy := make([]*types.Order, len(list)) + for i, order := range list { + order := order + listCopy[i] = &order + } + + book := types.SellOrderBook{ + AmountDenom: "foo", + PriceDenom: "bar", + Book: &types.OrderBook{ + IdCount: 0, + Orders: listCopy, + }, + } + return book +} + +func TestSellOrderBook_AppendOrder(t *testing.T) { + sellBook := types.NewSellOrderBook(GenPair()) + + // Prevent zero amount + seller, amount, price := GenOrder() + _, err := sellBook.AppendOrder(seller, 0, price) + require.ErrorIs(t, err, types.ErrZeroAmount) + + // Prevent big amount + _, err = sellBook.AppendOrder(seller, types.MaxAmount+1, price) + require.ErrorIs(t, err, types.ErrMaxAmount) + + // Prevent zero price + _, err = sellBook.AppendOrder(seller, amount, 0) + require.ErrorIs(t, err, types.ErrZeroPrice) + + // Prevent big price + _, err = sellBook.AppendOrder(seller, amount, types.MaxPrice+1) + require.ErrorIs(t, err, types.ErrMaxPrice) + + // Can append sell orders + for i := 0; i < 20; i++ { + // Append a new order + creator, amount, price := GenOrder() + newOrder := types.Order{ + Id: sellBook.Book.IdCount, + Creator: creator, + Amount: amount, + Price: price, + } + orderID, err := sellBook.AppendOrder(creator, 
amount, price) + + // Checks + require.NoError(t, err) + require.Contains(t, sellBook.Book.Orders, &newOrder) + require.Equal(t, newOrder.Id, orderID) + } + require.Len(t, sellBook.Book.Orders, 20) + require.True(t, sort.SliceIsSorted(sellBook.Book.Orders, func(i, j int) bool { + return sellBook.Book.Orders[i].Price > sellBook.Book.Orders[j].Price + })) +} + +type liquidateBuyRes struct { + Book []types.Order + Remaining types.Order + Liquidated types.Order + Purchase int32 + Match bool + Filled bool +} + +func simulateLiquidateFromBuyOrder( + t *testing.T, + inputList []types.Order, + inputOrder types.Order, + expected liquidateBuyRes, +) { + book := OrderListToSellOrderBook(inputList) + expectedBook := OrderListToSellOrderBook(expected.Book) + require.True(t, sort.SliceIsSorted(book.Book.Orders, func(i, j int) bool { + return book.Book.Orders[i].Price > book.Book.Orders[j].Price + })) + require.True(t, sort.SliceIsSorted(expectedBook.Book.Orders, func(i, j int) bool { + return expectedBook.Book.Orders[i].Price > expectedBook.Book.Orders[j].Price + })) + + remaining, liquidated, purchase, match, filled := book.LiquidateFromBuyOrder(inputOrder) + + require.Equal(t, expectedBook, book) + require.Equal(t, expected.Remaining, remaining) + require.Equal(t, expected.Liquidated, liquidated) + require.Equal(t, expected.Purchase, purchase) + require.Equal(t, expected.Match, match) + require.Equal(t, expected.Filled, filled) +} + +func TestLiquidateFromBuyOrder(t *testing.T) { + // No match for empty book + inputOrder := types.Order{Id: 10, Creator: MockAccount("1"), Amount: 100, Price: 10} + book := OrderListToSellOrderBook([]types.Order{}) + _, _, _, match, _ := book.LiquidateFromBuyOrder(inputOrder) + require.False(t, match) + + // Sell book + inputBook := []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + } + + // Test no 
match if lowest ask too high (25 < 30) + book = OrderListToSellOrderBook(inputBook) + _, _, _, match, _ = book.LiquidateFromBuyOrder(inputOrder) + require.False(t, match) + + // Entirely filled (30 > 15) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 20, Price: 30} + expected := liquidateBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 2, Creator: MockAccount("2"), Amount: 10, Price: 15}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 30}, + Liquidated: types.Order{Id: 2, Creator: MockAccount("2"), Amount: 20, Price: 15}, + Purchase: int32(20), + Match: true, + Filled: true, + } + simulateLiquidateFromBuyOrder(t, inputBook, inputOrder, expected) + + // Entirely filled (30 = 30) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 30} + expected = liquidateBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 30}, + Liquidated: types.Order{Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + Purchase: int32(30), + Match: true, + Filled: true, + } + simulateLiquidateFromBuyOrder(t, inputBook, inputOrder, expected) + + // Not filled and entirely liquidated (60 > 30) + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 30} + expected = liquidateBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 30}, + Liquidated: types.Order{Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + Purchase: int32(30), + Match: true, + Filled: false, + } + simulateLiquidateFromBuyOrder(t, 
inputBook, inputOrder, expected) +} + +type fillBuyRes struct { + Book []types.Order + Remaining types.Order + Liquidated []types.Order + Purchase int32 + Filled bool +} + +func simulateFillBuyOrder( + t *testing.T, + inputList []types.Order, + inputOrder types.Order, + expected fillBuyRes, +) { + book := OrderListToSellOrderBook(inputList) + expectedBook := OrderListToSellOrderBook(expected.Book) + + require.True(t, sort.SliceIsSorted(book.Book.Orders, func(i, j int) bool { + return book.Book.Orders[i].Price > book.Book.Orders[j].Price + })) + require.True(t, sort.SliceIsSorted(expectedBook.Book.Orders, func(i, j int) bool { + return expectedBook.Book.Orders[i].Price > expectedBook.Book.Orders[j].Price + })) + + remaining, liquidated, purchase, filled := book.FillBuyOrder(inputOrder) + + require.Equal(t, expectedBook, book) + require.Equal(t, expected.Remaining, remaining) + require.Equal(t, expected.Liquidated, liquidated) + require.Equal(t, expected.Purchase, purchase) + require.Equal(t, expected.Filled, filled) +} + +func TestFillBuyOrder(t *testing.T) { + var inputBook []types.Order + + // Empty book + inputOrder := types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 10} + expected := fillBuyRes{ + Book: []types.Order{}, + Remaining: inputOrder, + Liquidated: []types.Order(nil), + Purchase: int32(0), + Filled: false, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) + + // No match + inputBook = []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + } + expected = fillBuyRes{ + Book: inputBook, + Remaining: inputOrder, + Liquidated: []types.Order(nil), + Purchase: int32(0), + Filled: false, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) + + // First order liquidated, not filled + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 18} + expected = 
fillBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 30, Price: 18}, + Liquidated: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + }, + Purchase: int32(30), + Filled: false, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) + + // Filled with two order + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 60, Price: 22} + expected = fillBuyRes{ + Book: []types.Order{ + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + {Id: 1, Creator: MockAccount("1"), Amount: 170, Price: 20}, + }, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 0, Price: 22}, + Liquidated: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 30, Price: 20}, + }, + Purchase: int32(30 + 30), + Filled: true, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) + + // Not filled, sell order book liquidated + inputOrder = types.Order{Id: 10, Creator: MockAccount("1"), Amount: 300, Price: 30} + expected = fillBuyRes{ + Book: []types.Order{}, + Remaining: types.Order{Id: 10, Creator: MockAccount("1"), Amount: 20, Price: 30}, + Liquidated: []types.Order{ + {Id: 2, Creator: MockAccount("2"), Amount: 30, Price: 15}, + {Id: 1, Creator: MockAccount("1"), Amount: 200, Price: 20}, + {Id: 0, Creator: MockAccount("0"), Amount: 50, Price: 25}, + }, + Purchase: int32(30 + 200 + 50), + Filled: false, + } + simulateFillBuyOrder(t, inputBook, inputOrder, expected) +} +``` + +## Successful Test Output + +When the tests are successful, your output is: + +``` +ok interchange/x/dex/types 0.550s +``` diff --git a/docs/versioned_docs/version-v28/02-guide/07-interchange/_category_.json b/docs/versioned_docs/version-v28/02-guide/07-interchange/_category_.json new file mode 100644 index 
0000000..f427e86 --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/07-interchange/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Advanced Module: Interchange", + "position": 8, + "link": null + } \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/02-guide/08-debug.md b/docs/versioned_docs/version-v28/02-guide/08-debug.md new file mode 100644 index 0000000..081b44f --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/08-debug.md @@ -0,0 +1,209 @@ +--- +description: Debugging your Cosmos SDK blockchain +--- + +# Debugging a chain + +Ignite chain debug command can help you find issues during development. It uses +[Delve](https://github.com/go-delve/delve) debugger which enables you to +interact with your blockchain app by controlling the execution of the process, +evaluating variables, and providing information of thread / goroutine state, CPU +register state and more. + +## Debug Command + +The debug command requires that the blockchain app binary is build with +debugging support by removing optimizations and inlining. A debug binary is +built by default by the `ignite chain serve` command or can optionally be +created using the `--debug` flag when running `ignite chain init` or `ignite +chain build` sub-commands. + +To start a debugging session in the terminal run: + +``` +ignite chain debug +``` + +The command runs your blockchain app in the background, attaches to it and +launches a terminal debugger shell: + +``` +Type 'help' for list of commands. +(dlv) +``` + +At this point the blockchain app blocks execution, so you can set one or more +breakpoints before continuing execution. 
+ +Use the +[break](https://github.com/go-delve/delve/blob/master/Documentation/cli/README.md#break) +(alias `b`) command to set any number of breakpoints using, for example the +`<filename>:<line>` notation: + +``` +(dlv) break x/hello/client/cli/query_say_hello.go:14 +``` + +This command adds a breakpoint to the `x/hello/client/cli/query_say_hello.go` +file at line 14. + +Once all breakpoints are set resume blockchain execution using the +[continue](https://github.com/go-delve/delve/blob/master/Documentation/cli/README.md#continue) +(alias `c`) command: + +``` +(dlv) continue +``` + +The debugger will launch the shell and stop blockchain execution again when a +breakpoint is triggered. + +Within the debugger shell use the `quit` (alias `q`) or `exit` commands to stop +the blockchain app and exit the debugger. + +## Debug Server + +A debug server can optionally be started in cases where the default terminal +client is not desirable. When the server starts it first runs the blockchain +app, attaches to it and finally waits for a client connection. The default +server address is *tcp://127.0.0.1:30500* and it accepts both JSON-RPC or DAP +client connections. + +To start a debug server use the following flag: + +``` +ignite chain debug --server +``` + +To start a debug server with a custom address use the following flags: + +``` +ignite chain debug --server --server-address 127.0.0.1:30500 +``` + +The debug server stops automatically when the client connection is closed. + +## Debugging Clients + +### Gdlv: Multiplatform Delve UI + +[Gdlv](https://github.com/aarzilli/gdlv) is a graphical frontend to Delve for +Linux, Windows and macOS. + +Using it as debugging client is straightforward as it doesn't require any +configuration. 
Once the debug server is running and listening for client +requests connect to it by running: + +``` +gdlv connect 127.0.0.1:30500 +``` + +Setting breakpoints and continuing execution is done in the same way as Delve, +by using the `break` and `continue` commands. + +### Visual Studio Code + +Using [Visual Studio Code](https://code.visualstudio.com/) as debugging client +requires an initial configuration to allow it to connect to the debug server. + +Make sure that the [Go](https://code.visualstudio.com/docs/languages/go) +extension is installed. + +VS Code debugging is configured using the `launch.json` file which is usually +located inside the `.vscode` folder in your workspace. + +You can use the following launch configuration to set up VS Code as debugging +client: + +```json title=launch.json +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Connect to Debug Server", + "type": "go", + "request": "attach", + "mode": "remote", + "remotePath": "${workspaceFolder}", + "port": 30500, + "host": "127.0.0.1" + } + ] +} +``` + +Alternatively it's possible to create a custom `launch.json` file from the "Run +and Debug" panel. When prompted choose the Go debugger option labeled "Go: +Connect to Server" and enter the debug host address and then the port number. + +## Example: Debugging a Blockchain App + +In this short example we will be using Ignite CLI to create a new blockchain and +a query to be able to trigger a debugging breakpoint when the query is called. 
+ +Create a new blockchain: + +``` +ignite scaffold chain hello +``` + +Scaffold a new query in the `hello` directory: + +``` +ignite scaffold query say-hello name --response name +``` + +The next step initializes the blockchain's data directory and compiles a debug +binary: + +``` +ignite chain init --debug +``` + +Once the initialization finishes launch the debugger shell: + +``` +ignite chain debug +``` + +Within the debugger shell create a breakpoint that will be triggered when the +`SayHello` function is called and then continue execution: + +``` +(dlv) break x/hello/keeper/query_say_hello.go:12 +(dlv) continue +``` + +From a different terminal use the `hellod` binary to call the query: + +``` +hellod query hello say-hello bob +``` + +A debugger shell will be launched when the breakpoint is triggered: + +``` + 7: "google.golang.org/grpc/codes" + 8: "google.golang.org/grpc/status" + 9: "hello/x/hello/types" + 10: ) + 11: +=> 12: func (k Keeper) SayHello(goCtx context.Context, req *types.QuerySayHelloRequest) (*types.QuerySayHelloResponse, error) { + 13: if req == nil { + 14: return nil, status.Error(codes.InvalidArgument, "invalid request") + 15: } + 16: + 17: ctx := sdk.UnwrapSDKContext(goCtx) +``` + +From then on you can use Delve commands like `next` (alias `n`) or `print` +(alias `p`) to control execution and print values. For example, to print the +*name* argument value use the `print` command followed by "req.Name": + +``` +(dlv) print req.Name +"bob" +``` + +Finally, use `quit` (alias `q`) to stop the blockchain app and finish the +debugging session. diff --git a/docs/versioned_docs/version-v28/02-guide/09-docker.md b/docs/versioned_docs/version-v28/02-guide/09-docker.md new file mode 100644 index 0000000..753e47b --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/09-docker.md @@ -0,0 +1,142 @@ +--- +description: Run Ignite CLI using a Docker container. 
+--- + +# Running inside a Docker container + +You can run Ignite CLI inside a Docker container without installing the Ignite +CLI binary directly on your machine. + +Running Ignite CLI in Docker can be useful for various reasons; isolating your +test environment, running Ignite CLI on an unsupported operating system, or +experimenting with a different version of Ignite CLI without installing it. + +Docker containers are like virtual machines because they provide an isolated +environment to the programs that run inside them. In this case, you can run Ignite +CLI in an isolated environment. + +Experimentation and file system impact are limited to the Docker instance. The +host machine is not impacted by changes to the container. + +## Prerequisites + +Docker must be installed. See [Get Started with +Docker](https://www.docker.com/get-started). + +## Ignite CLI Commands in Docker + +After you scaffold and start a chain in your Docker container, all Ignite CLI +commands are available. Just type the commands after `docker run -ti +ignitehq/cli`. For example: + +```bash +docker run -ti ignitehq/cli -h +docker run -ti ignitehq/cli scaffold chain planet +docker run -ti ignitehq/cli chain serve +``` + +## Scaffolding a chain + +When Docker is installed, you can build a blockchain with a single command. + +Ignite CLI, and the chains you serve with Ignite CLI, persist some files. When +using the CLI binary directly, those files are located in `$HOME/.ignite` and +`$HOME/.cache`, but in the context of Docker it's better to use a directory +different from `$HOME`, so we use `$HOME/sdh`. This folder should be created +manually prior to the docker commands below, or else Docker creates it with the +root user. 
+ +```bash +mkdir $HOME/sdh +``` + +To scaffold a blockchain `planet` in the `/apps` directory in the container, run +this command in a terminal window: + +```bash +docker run -ti -v $HOME/sdh:/home/tendermint -v $PWD:/apps ignitehq/cli:0.25.2 scaffold chain planet +``` + +Be patient, this command takes a minute or two to run because it does everything +for you: + +- Creates a container that runs from the `ignitehq/cli:0.25.2` image. +- Executes the Ignite CLI binary inside the image. +- `-v $HOME/sdh:/home/tendermint` maps the `$HOME/sdh` directory in your local + computer (the host machine) to the home directory `/home/tendermint` inside + the container. +- `-v $PWD:/apps` maps the current directory in the terminal window on the host + machine to the `/apps` directory in the container. You can optionally specify + an absolute path instead of `$PWD`. + + Using `-w` and `-v` together provides file persistence on the host machine. + The application source code on the Docker container is mirrored to the file + system of the host machine. + + **Note:** The directory name for the `-w` and `-v` flags can be a name other + than `/app`, but the same directory must be specified for both flags. If you + omit `-w` and `-v`, the changes are made in the container only and are lost + when that container is shut down. + +## Starting a blockchain + +To start the blockchain node in the Docker container you just created, run this +command: + +```bash +docker run -ti -v $HOME/sdh:/home/tendermint -v $PWD:/apps -p 1317:1317 -p 26657:26657 ignitehq/cli:0.25.2 chain serve -p planet +``` + +This command does the following: + +- `-v $HOME/sdh:/home/tendermint` maps the `$HOME/sdh` directory in your local + computer (the host machine) to the home directory `/home/tendermint` inside + the container. +- `-v $PWD:/apps` persists the scaffolded app in the container to the host + machine at current working directory. 
+- `serve -p planet` specifies to use the `planet` directory that contains the + source code of the blockchain. +- `-p 1317:1317` maps the API server port (cosmos-sdk) to the host machine to + forward port 1317 listening inside the container to port 1317 on the host + machine. +- `-p 26657:26657` maps RPC server port 26657 (tendermint) on the host machine + to port 26657 in Docker. +- After the blockchain is started, open `http://localhost:26657` to see the + Tendermint API. +- The `-v` flag specifies for the container to access the application's source + code from the host machine, so it can build and run it. + +## Versioning + +You can specify which version of Ignite CLI to install and run in your Docker +container. + +### Latest version + +- By default, `ignite/cli` resolves to `ignite/cli:latest`. +- The `latest` image tag is always the latest stable [Ignite CLI + release](https://github.com/ignite/cli/releases). + +For example, if latest release is +[v0.25.2](https://github.com/ignite/cli/releases/tag/v0.25.2), the `latest` tag +points to the `0.25.2` tag. + +### Specific version + +You can specify to use a specific version of Ignite CLI. All available tags are +in the [ignite/cli +image](https://hub.docker.com/r/ignitehq/cli/tags?page=1&ordering=last_updated) on +Docker Hub. + +For example: + +- Use `ignitehq/cli:0.25.2` (without the `v` prefix) to use version `0.25.2`. +- Use `ignitehq/cli` to use the latest version. +- Use `ignitehq/cli:main` to use the `main` branch, so you can experiment with + the upcoming version. + +To get the latest image, run `docker pull`. + +```bash +docker pull ignitehq/cli:main +``` diff --git a/docs/versioned_docs/version-v28/02-guide/10-simapp.md b/docs/versioned_docs/version-v28/02-guide/10-simapp.md new file mode 100644 index 0000000..2285744 --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/10-simapp.md @@ -0,0 +1,164 @@ +--- +sidebar_position: 10 +description: Test different scenarios for your chain. 
+--- + +# Chain simulation + +The Ignite CLI chain simulator helps you run your chain with +randomized inputs so you can perform fuzz testing and benchmark tests for your +chain, simulating the messages, blocks, and accounts. You can scaffold a +template to perform simulation testing in each module along with boilerplate +simulation methods for each scaffolded message. + +## Module simulation + +Every new module that is scaffolded with Ignite CLI implements the Cosmos SDK +[Module +Simulation](https://docs.cosmos.network/main/building-modules/simulator). + +- Each new message creates a file with the simulation methods required for the + tests. +- Scaffolding a `CRUD` type like a `list` or `map` creates a simulation file + with `create`, `update`, and `delete` simulation methods in the + `x/<module>/simulation` folder and registers these methods in + `x/<module>/module_simulation.go`. +- Scaffolding a single message creates an empty simulation method to be + implemented by the user. + +We recommend that you keep the simulation methods up to date with each new +modification to the message keeper methods. + +Every simulation is weighted because the sender of the operation is assigned +randomly. The weight defines how often the simulation calls the message. + +For better randomization, you can define a random seed. The simulation with the +same random seed is deterministic with the same output. + +## Scaffold a simulation + +To create a new chain: + +``` +ignite scaffold chain mars +``` + +Review the empty `x/mars/simulation` folder and the +`x/mars/module_simulation.go` file to see that a simulation is not registered. + +Now, scaffold a new message: + +``` +ignite scaffold list user address balance:uint state +``` + +A new file `x/mars/simulation/user.go` is created and is registered with the +weight in the `x/mars/module_simulation.go` file. + +Be sure to define the proper simulation weight with a minimum weight of 0 and a +maximum weight of 100. 
+ +For this example, change the `defaultWeightMsgDeleteUser` to 30 and the +`defaultWeightMsgUpdateUser` to 50. + +Run the `BenchmarkSimulation` method in `app/simulation_test.go` to run +simulation tests for all modules: + +``` +ignite chain simulate +``` + +You can also define flags that are provided by the simulation. Flags are defined +by the method `simapp.GetSimulatorFlags()`: + +``` +ignite chain simulate -v --numBlocks 200 --blockSize 50 --seed 33 +``` + +Wait for the entire simulation to finish and check the result of the messages. + +The default `go test` command works to run the simulation: + +``` +go test -v -benchmem -run=^$ -bench ^BenchmarkSimulation -cpuprofile cpu.out ./app -Commit=true +``` + +### Skip message + +Use logic to avoid sending a message without returning an error. Return only +`simtypes.NoOpMsg(...)` from the simulation message handler. + +## Params + +Scaffolding a module with params automatically adds the module to the +`module_simulation.go` file: + +``` +ignite s module earth --params channel:string,minLaunch:uint,maxLaunch:int +``` + +After the parameters are scaffolded, change the +`x/<module>/module_simulation.go` file to set the random parameters into the +`RandomizedParams` method. The simulation will change the params randomly +each time the function is called. + +## Invariants + +Simulating a chain can help you prevent [chain invariants +errors](https://docs.cosmos.network/main/building-modules/invariants). An +invariant is a function called by the chain to check if something broke, +invalidating the chain data. To create a new invariant and check the chain +integrity, you must create a method to validate the invariants and register all +invariants. 
+ + +For example, in `x/earth/keeper/invariants.go`: + +```go title="x/earth/keeper/invariants.go" +package keeper + +import ( + "fmt" + + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/tendermint/spn/x/launch/types" +) + +const zeroLaunchTimestampRoute = "zero-launch-timestamp" + +// RegisterInvariants registers all module invariants +func RegisterInvariants(ir sdk.InvariantRegistry, k Keeper) { + ir.RegisterRoute(types.ModuleName, zeroLaunchTimestampRoute, + ZeroLaunchTimestampInvariant(k)) +} + +// ZeroLaunchTimestampInvariant invariant that checks if the +// `LaunchTimestamp is zero +func ZeroLaunchTimestampInvariant(k Keeper) sdk.Invariant { + return func(ctx sdk.Context) (string, bool) { + all := k.GetAllChain(ctx) + for _, chain := range all { + if chain.LaunchTimestamp == 0 { + return sdk.FormatInvariant( + types.ModuleName, zeroLaunchTimestampRoute, + "LaunchTimestamp is not set while LaunchTriggered is set", + ), true + } + } + return "", false + } +} +``` + +Now, register the keeper invariants into the `x/earth/module.go` file: + +```go +package earth + +// ... + +// RegisterInvariants registers the capability module's invariants. 
+func (am AppModule) RegisterInvariants(ir sdk.InvariantRegistry) { + keeper.RegisterInvariants(ir, am.keeper) +} +``` diff --git a/docs/versioned_docs/version-v28/02-guide/_category_.json b/docs/versioned_docs/version-v28/02-guide/_category_.json new file mode 100644 index 0000000..3c599cc --- /dev/null +++ b/docs/versioned_docs/version-v28/02-guide/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Develop a chain", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/02-guide/images/api.png b/docs/versioned_docs/version-v28/02-guide/images/api.png new file mode 100644 index 0000000..081df8f Binary files /dev/null and b/docs/versioned_docs/version-v28/02-guide/images/api.png differ diff --git a/docs/versioned_docs/version-v28/02-guide/images/packet_sendpost.png b/docs/versioned_docs/version-v28/02-guide/images/packet_sendpost.png new file mode 100644 index 0000000..0bb080c Binary files /dev/null and b/docs/versioned_docs/version-v28/02-guide/images/packet_sendpost.png differ diff --git a/docs/versioned_docs/version-v28/03-CLI-Commands/01-cli-commands.md b/docs/versioned_docs/version-v28/03-CLI-Commands/01-cli-commands.md new file mode 100644 index 0000000..715bf25 --- /dev/null +++ b/docs/versioned_docs/version-v28/03-CLI-Commands/01-cli-commands.md @@ -0,0 +1,3784 @@ +--- +description: Ignite CLI docs. +--- + +# CLI commands + +Documentation for Ignite CLI. +## ignite + +Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + +**Synopsis** + +Ignite CLI is a tool for creating sovereign blockchains built with Cosmos SDK, the world's +most popular modular blockchain framework. Ignite CLI offers everything you need to scaffold, +test, build, and launch your blockchain. 
+ +To get started, create a blockchain: + + ignite scaffold chain example + + +**Options** + +``` + -h, --help help for ignite +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts +* [ignite app](#ignite-app) - Create and manage Ignite Apps +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node +* [ignite completion](#ignite-completion) - Generates shell completion script. +* [ignite docs](#ignite-docs) - Show Ignite CLI docs +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite node](#ignite-node) - Make requests to a live blockchain node +* [ignite relayer](#ignite-relayer) - Connect blockchains with an IBC relayer +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more +* [ignite version](#ignite-version) - Print the current build information +* [ignite testnet](#ignite-testnet) - Start a local testnet + + +## ignite account + +Create, delete, and show Ignite accounts + +**Synopsis** + +Commands for managing Ignite accounts. An Ignite account is a private/public +keypair stored in a keyring. Currently Ignite accounts are used when interacting +with Ignite relayer commands and when using "ignite network" commands. + +Note: Ignite account commands are not for managing your chain's keys and accounts. Use +your chain's binary to manage accounts from "config.yml". For example, if your +blockchain is called "mychain", use "mychaind keys" to manage keys for the +chain. 
+ + +**Options** + +``` + -h, --help help for account + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite account create](#ignite-account-create) - Create a new account +* [ignite account delete](#ignite-account-delete) - Delete an account by name +* [ignite account export](#ignite-account-export) - Export an account as a private key +* [ignite account import](#ignite-account-import) - Import an account by using a mnemonic or a private key +* [ignite account list](#ignite-account-list) - Show a list of all accounts +* [ignite account show](#ignite-account-show) - Show detailed information about a particular account + + +## ignite account create + +Create a new account + +``` +ignite account create [name] [flags] +``` + +**Options** + +``` + -h, --help help for create +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account delete + +Delete an account by name + +``` +ignite account delete [name] [flags] +``` + +**Options** + +``` + -h, --help help for delete +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account export + +Export an account as a private key + +``` +ignite account export [name] 
[flags] +``` + +**Options** + +``` + -h, --help help for export + --non-interactive do not enter into interactive mode + --passphrase string passphrase to encrypt the exported key + --path string path to export private key. default: ./key_[name] +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account import + +Import an account by using a mnemonic or a private key + +``` +ignite account import [name] [flags] +``` + +**Options** + +``` + -h, --help help for import + --non-interactive do not enter into interactive mode + --passphrase string passphrase to decrypt the imported key (ignored when secret is a mnemonic) + --secret string Your mnemonic or path to your private key (use interactive mode instead to securely pass your mnemonic) +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account list + +Show a list of all accounts + +``` +ignite account list [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + -h, --help help for list +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account show + +Show detailed 
information about a particular account + +``` +ignite account show [name] [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite app + +Create and manage Ignite Apps + +**Options** + +``` + -h, --help help for app +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite app describe](#ignite-app-describe) - Print information about installed apps +* [ignite app install](#ignite-app-install) - Install app +* [ignite app list](#ignite-app-list) - List installed apps +* [ignite app scaffold](#ignite-app-scaffold) - Scaffold a new Ignite App +* [ignite app uninstall](#ignite-app-uninstall) - Uninstall app +* [ignite app update](#ignite-app-update) - Update app + + +## ignite app describe + +Print information about installed apps + +**Synopsis** + +Print information about an installed Ignite App commands and hooks. + +``` +ignite app describe [path] [flags] +``` + +**Examples** + +``` +ignite app describe github.com/org/my-app/ +``` + +**Options** + +``` + -h, --help help for describe +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app install + +Install app + +**Synopsis** + +Installs an Ignite App. + +Respects key value pairs declared after the app path to be added to the generated configuration definition. + +``` +ignite app install [path] [key=value]... 
[flags] +``` + +**Examples** + +``` +ignite app install github.com/org/my-app/ foo=bar baz=qux +``` + +**Options** + +``` + -g, --global use global plugins configuration ($HOME/.ignite/apps/igniteapps.yml) + -h, --help help for install +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app list + +List installed apps + +**Synopsis** + +Prints status and information of all installed Ignite Apps. + +``` +ignite app list [flags] +``` + +**Options** + +``` + -h, --help help for list +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app scaffold + +Scaffold a new Ignite App + +**Synopsis** + +Scaffolds a new Ignite App in the current directory. + +A git repository will be created with the given module name, unless the current directory is already a git repository. + +``` +ignite app scaffold [name] [flags] +``` + +**Examples** + +``` +ignite app scaffold github.com/org/my-app/ +``` + +**Options** + +``` + -h, --help help for scaffold +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app uninstall + +Uninstall app + +**Synopsis** + +Uninstalls an Ignite App specified by path. + +``` +ignite app uninstall [path] [flags] +``` + +**Examples** + +``` +ignite app uninstall github.com/org/my-app/ +``` + +**Options** + +``` + -g, --global use global plugins configuration ($HOME/.ignite/apps/igniteapps.yml) + -h, --help help for uninstall +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app update + +Update app + +**Synopsis** + +Updates an Ignite App specified by path. + +If no path is specified all declared apps are updated. 
+ +``` +ignite app update [path] [flags] +``` + +**Examples** + +``` +ignite app update github.com/org/my-app/ +``` + +**Options** + +``` + -h, --help help for update +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite chain + +Build, init and start a blockchain node + +**Synopsis** + +Commands in this namespace let you to build, initialize, and start your +blockchain node locally for development purposes. + +To run these commands you should be inside the project's directory so that +Ignite can find the source code. To ensure that you are, run "ls", you should +see the following files in the output: "go.mod", "x", "proto", "app", etc. + +By default the "build" command will identify the "main" package of the project, +install dependencies if necessary, set build flags, compile the project into a +binary and install the binary. The "build" command is useful if you just want +the compiled binary, for example, to initialize and start the chain manually. It +can also be used to release your chain's binaries automatically as part of +continuous integration workflow. + +The "init" command will build the chain's binary and use it to initialize a +local validator node. By default the validator node will be initialized in your +$HOME directory in a hidden directory that matches the name of your project. +This directory is called a data directory and contains a chain's genesis file +and a validator key. This command is useful if you want to quickly build and +initialize the data directory and use the chain's binary to manually start the +blockchain. The "init" command is meant only for development purposes, not +production. + +The "serve" command builds, initializes, and starts your blockchain locally with +a single validator node for development purposes. "serve" also watches the +source code directory for file changes and intelligently +re-builds/initializes/starts the chain, essentially providing "code-reloading". 
+The "serve" command is meant only for development purposes, not production. + +To distinguish between production and development consider the following. + +In production, blockchains often run the same software on many validator nodes +that are run by different people and entities. To launch a blockchain in +production, the validator entities coordinate the launch process to start their +nodes simultaneously. + +During development, a blockchain can be started locally on a single validator +node. This convenient process lets you restart a chain quickly and iterate +faster. Starting a chain on a single node in development is similar to starting +a traditional web application on a local server. + +The "faucet" command lets you send tokens to an address from the "faucet" +account defined in "config.yml". Alternatively, you can use the chain's binary +to send token from any other account that exists on chain. + +The "simulate" command helps you start a simulation testing process for your +chain. 
+ + +**Options** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -h, --help help for chain + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite chain build](#ignite-chain-build) - Build a node binary +* [ignite chain debug](#ignite-chain-debug) - Launch a debugger for a blockchain app +* [ignite chain faucet](#ignite-chain-faucet) - Send coins to an account +* [ignite chain init](#ignite-chain-init) - Initialize your chain +* [ignite chain lint](#ignite-chain-lint) - Lint codebase using golangci-lint +* [ignite chain serve](#ignite-chain-serve) - Start a blockchain node in development +* [ignite chain simulate](#ignite-chain-simulate) - Run simulation testing for the blockchain + + +## ignite chain build + +Build a node binary + +**Synopsis** + + +The build command compiles the source code of the project into a binary and +installs the binary in the $(go env GOPATH)/bin directory. + +You can customize the output directory for the binary using a flag: + + ignite chain build --output dist + +To compile the binary Ignite first compiles protocol buffer (proto) files into +Go source code. Proto files contain required type and services definitions. If +you're using another program to compile proto files, you can use a flag to tell +Ignite to skip the proto compilation step: + + ignite chain build --skip-proto + +Afterwards, Ignite install dependencies specified in the go.mod file. By default +Ignite doesn't check that dependencies of the main module stored in the module +cache have not been modified since they were downloaded. To enforce dependency +checking (essentially, running "go mod verify") use a flag: + + ignite chain build --check-dependencies + +Next, Ignite identifies the "main" package of the project. 
By default the "main" +package is located in "cmd/{app}d" directory, where "{app}" is the name of the +scaffolded project and "d" stands for daemon. If your project contains more +than one "main" package, specify the path to the one that Ignite should compile +in config.yml: + + build: + main: custom/path/to/main + +By default the binary name will match the top-level module name (specified in +go.mod) with a suffix "d". This can be customized in config.yml: + + build: + binary: mychaind + +You can also specify custom linker flags: + + build: + ldflags: + - "-X main.Version=development" + - "-X main.Date=01/05/2022T19:54" + +To build binaries for a release, use the --release flag. The binaries for one or +more specified release targets are built in a "release/" directory in the +project's source directory. Specify the release targets with GOOS:GOARCH build +tags. If the optional --release.targets is not specified, a binary is created +for your current environment. + + ignite chain build --release -t linux:amd64 -t darwin:amd64 -t darwin:arm64 + + +``` +ignite chain build [flags] +``` + +**Options** + +``` + --build.tags strings parameters to build the chain binary + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --debug build a debug binary + -h, --help help for build + -o, --output string binary output path + -p, --path string path of the app (default ".") + --release build for a release + --release.prefix string tarball prefix for each release target. Available only with --release flag + -t, --release.targets strings release targets. 
Available only with --release flag + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain debug + +Launch a debugger for a blockchain app + +**Synopsis** + +The debug command starts a debug server and launches a debugger. + +Ignite uses the Delve debugger by default. Delve enables you to interact with +your program by controlling the execution of the process, evaluating variables, +and providing information of thread / goroutine state, CPU register state and +more. + +A debug server can optionally be started in cases where default terminal client +is not desirable. When the server starts it first runs the blockchain app, +attaches to it and finally waits for a client connection. It accepts both +JSON-RPC or DAP client connections. + +To start a debug server use the following flag: + + ignite chain debug --server + +To start a debug server with a custom address use the following flags: + + ignite chain debug --server --server-address 127.0.0.1:30500 + +The debug server stops automatically when the client connection is closed. 
+ + +``` +ignite chain debug [flags] +``` + +**Options** + +``` + -h, --help help for debug + -p, --path string path of the app (default ".") + --server start a debug server + --server-address string debug server address (default "127.0.0.1:30500") +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain faucet + +Send coins to an account + +``` +ignite chain faucet [address] [coin<,...>] [flags] +``` + +**Options** + +``` + -h, --help help for faucet + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain init + +Initialize your chain + +**Synopsis** + +The init command compiles and installs the binary (like "ignite chain build") +and uses that binary to initialize the blockchain's data directory for one +validator. To learn how the build process works, refer to "ignite chain build +--help". + +By default, the data directory will be initialized in $HOME/.mychain, where +"mychain" is the name of the project. To set a custom data directory use the +--home flag or set the value in config.yml: + + validators: + - name: alice + bonded: '100000000stake' + home: "~/.customdir" + +The data directory contains three files in the "config" directory: app.toml, +config.toml, client.toml. These files let you customize the behavior of your +blockchain node and the client executable. 
When a chain is re-initialized the +data directory can be reset. To make some values in these files persistent, set +them in config.yml: + + validators: + - name: alice + bonded: '100000000stake' + app: + minimum-gas-prices: "0.025stake" + config: + consensus: + timeout_commit: "5s" + timeout_propose: "5s" + client: + output: "json" + +The configuration above changes the minimum gas price of the validator (by +default the gas price is set to 0 to allow "free" transactions), sets the block +time to 5s, and changes the output format to JSON. To see what kind of values +this configuration accepts see the generated TOML files in the data directory. + +As part of the initialization process Ignite creates on-chain accounts with +token balances. By default, config.yml has two accounts in the top-level +"accounts" property. You can add more accounts and change their token balances. +Refer to config.yml guide to see which values you can set. + +One of these accounts is a validator account and the amount of self-delegated +tokens can be set in the top-level "validator" property. + +One of the most important components of an initialized chain is the genesis +file, the 0th block of the chain. The genesis file is stored in the data +directory "config" subdirectory and contains the initial state of the chain, +including consensus and module parameters. You can customize the values of the +genesis in config.yml: + + genesis: + app_state: + staking: + params: + bond_denom: "foo" + +The example above changes the staking token to "foo". If you change the staking +denom, make sure the validator account has the right tokens. + +The init command is meant to be used ONLY FOR DEVELOPMENT PURPOSES. Under the +hood it runs commands like "appd init", "appd add-genesis-account", "appd +gentx", and "appd collect-gentx". For production, you may want to run these +commands manually to ensure a production-level node initialization. 
+ + +``` +ignite chain init [flags] +``` + +**Options** + +``` + --build.tags strings parameters to build the chain binary + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --debug build a debug binary + -h, --help help for init + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + --skip-proto skip file generation from proto +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain lint + +Lint codebase using golangci-lint + +**Synopsis** + +The lint command runs the golangci-lint tool to lint the codebase. + +``` +ignite chain lint [flags] +``` + +**Options** + +``` + -h, --help help for lint +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain serve + +Start a blockchain node in development + +**Synopsis** + +The serve command compiles and installs the binary (like "ignite chain build"), +uses that binary to initialize the blockchain's data directory for one validator +(like "ignite chain init"), and starts the node locally for development purposes +with automatic code reloading. + +Automatic code reloading means Ignite starts watching the project directory. +Whenever a file change is detected, Ignite automatically rebuilds, reinitializes +and restarts the node. + +Whenever possible Ignite will try to keep the current state of the chain by +exporting and importing the genesis file. 
+ +To force Ignite to start from a clean slate even if a genesis file exists, use +the following flag: + + ignite chain serve --reset-once + +To force Ignite to reset the state every time the source code is modified, use +the following flag: + + ignite chain serve --force-reset + +With Ignite it's possible to start more than one blockchain from the same source +code using different config files. This is handy if you're building +inter-blockchain functionality and, for example, want to try sending packets +from one blockchain to another. To start a node using a specific config file: + + ignite chain serve --config mars.yml + +The serve command is meant to be used ONLY FOR DEVELOPMENT PURPOSES. Under the +hood, it runs "appd start", where "appd" is the name of your chain's binary. For +production, you may want to run "appd start" manually. + + +``` +ignite chain serve [flags] +``` + +**Options** + +``` + --build.tags strings parameters to build the chain binary + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + -f, --force-reset force reset of the app state on start and every source change + --generate-clients generate code for the configured clients on reset or source code change + -h, --help help for serve + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + --quit-on-fail quit program if the app fails to start + -r, --reset-once reset the app state once on init + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain simulate + +Run simulation testing for the blockchain + 
+**Synopsis** + +Run simulation testing for the blockchain. It sends many randomized-input messages of each module to a simulated node and checks if invariants break + +``` +ignite chain simulate [flags] +``` + +**Options** + +``` + --blockSize int operations per block (default 30) + --exportParamsHeight int height to which export the randomly generated params + --exportParamsPath string custom file path to save the exported params JSON + --exportStatePath string custom file path to save the exported app state JSON + --exportStatsPath string custom file path to save the exported simulation statistics JSON + --genesis string custom simulation genesis file; cannot be used with params file + --genesisTime int override genesis UNIX time instead of using a random UNIX time + -h, --help help for simulate + --initialBlockHeight int initial block to start the simulation (default 1) + --lean lean simulation log output + --numBlocks int number of new blocks to simulate from the initial block height (default 200) + --params string custom simulation params file which overrides any random params; cannot be used with genesis + --period uint run slow invariants only once every period assertions + --printAllInvariants print all invariants if a broken invariant is found + --seed int simulation random seed (default 42) + --simulateEveryOperation run slow invariants every operation + -v, --verbose verbose log output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite completion + +Generates shell completion script. 
+ +``` +ignite completion [bash|zsh|fish|powershell] [flags] +``` + +**Options** + +``` + -h, --help help for completion +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + + +## ignite docs + +Show Ignite CLI docs + +``` +ignite docs [flags] +``` + +**Options** + +``` + -h, --help help for docs +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + + +## ignite generate + +Generate clients, API docs from source code + +**Synopsis** + +Generate clients, API docs from source code. + +Such as compiling protocol buffer files into Go or implement particular +functionality, for example, generating an OpenAPI spec. + +Produced source code can be regenerated by running a command again and is not +meant to be edited by hand. + + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -h, --help help for generate + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite generate composables](#ignite-generate-composables) - TypeScript frontend client and Vue 3 composables +* [ignite generate hooks](#ignite-generate-hooks) - TypeScript frontend client and React hooks +* [ignite generate openapi](#ignite-generate-openapi) - OpenAPI spec for your chain +* [ignite generate proto-go](#ignite-generate-proto-go) - Compile protocol buffer files to Go source code required by Cosmos SDK +* [ignite generate ts-client](#ignite-generate-ts-client) - TypeScript frontend client + + +## ignite generate composables + +TypeScript frontend client and Vue 3 composables + +``` +ignite generate composables [flags] +``` + +**Options** + +``` + -h, --help help for composables + -o, --output string 
Vue 3 composables output path + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate hooks + +TypeScript frontend client and React hooks + +``` +ignite generate hooks [flags] +``` + +**Options** + +``` + -h, --help help for hooks + -o, --output string React hooks output path + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate openapi + +OpenAPI spec for your chain + +``` +ignite generate openapi [flags] +``` + +**Options** + +``` + -h, --help help for openapi + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate proto-go + +Compile protocol buffer files to Go source code required by Cosmos SDK + +``` +ignite generate proto-go [flags] +``` + +**Options** + +``` + -h, --help help for proto-go + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + 
--enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate ts-client + +TypeScript frontend client + +**Synopsis** + +Generate a framework agnostic TypeScript client for your blockchain project. + +By default the TypeScript client is generated in the "ts-client/" directory. You +can customize the output directory in config.yml: + + client: + typescript: + path: new-path + +Output can also be customized by using a flag: + + ignite generate ts-client --output new-path + +TypeScript client code can be automatically regenerated on reset or source code +changes when the blockchain is started with a flag: + + ignite chain serve --generate-clients + + +``` +ignite generate ts-client [flags] +``` + +**Options** + +``` + -h, --help help for ts-client + -o, --output string TypeScript client output path + --use-cache use build cache to speed-up generation + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite network + +Launch a blockchain in production + +**Synopsis** + + +Ignite Network commands allow to coordinate the launch of sovereign Cosmos blockchains. + +To launch a Cosmos blockchain you need someone to be a coordinator and others to +be validators. These are just roles, anyone can be a coordinator or a validator. +A coordinator publishes information about a chain to be launched on the Ignite +blockchain, approves validator requests and coordinates the launch. 
Validators +send requests to join a chain and start their nodes when a blockchain is ready +for launch. + +To publish the information about your chain as a coordinator run the following +command (the URL should point to a repository with a Cosmos SDK chain): + + ignite network chain publish github.com/ignite/example + +This command will return a launch identifier you will be using in the following +commands. Let's say this identifier is 42. + +Next, ask validators to initialize their nodes and request to join the network +as validators. For a testnet you can use the default values suggested by the +CLI. + + ignite network chain init 42 + + ignite network chain join 42 --amount 95000000stake + +As a coordinator list all validator requests: + + ignite network request list 42 + +Approve validator requests: + + ignite network request approve 42 1,2 + +Once you've approved all validators you need in the validator set, announce that +the chain is ready for launch: + + ignite network chain launch 42 + +Validators can now prepare their nodes for launch: + + ignite network chain prepare 42 + +The output of this command will show a command that a validator would use to +launch their node, for example “exampled --home ~/.example”. After enough +validators launch their nodes, a blockchain will be live. 
+ + +**Options** + +``` + -h, --help help for network + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch +* [ignite network coordinator](#ignite-network-coordinator) - Show and update a coordinator profile +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests +* [ignite network tool](#ignite-network-tool) - Commands to run subsidiary tools +* [ignite network validator](#ignite-network-validator) - Show and update a validator profile +* [ignite network version](#ignite-network-version) - Version of the plugin + + +## ignite network chain + +Publish a chain, join as a validator and prepare node for launch + +**Synopsis** + +The "chain" namespace features the most commonly used commands for launching +blockchains with Ignite. + +As a coordinator you "publish" your blockchain to Ignite. When enough validators +are approved for the genesis and no changes are excepted to be made to the +genesis, a coordinator announces that the chain is ready for launch with the +"launch" command. In the case of an unsuccessful launch, the coordinator can revert it +using the "revert-launch" command. + +As a validator, you "init" your node and apply to become a validator for a +blockchain with the "join" command. After the launch of the chain is announced, +validators can generate the finalized genesis and download the list of peers with the +"prepare" command. + +The "install" command can be used to download, compile the source code and +install the chain's binary locally. 
The binary can be used, for example, to +initialize a validator node or to interact with the chain after it has been +launched. + +All chains published to Ignite can be listed by using the "list" command. + + +**Options** + +``` + -h, --help help for chain +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network chain init](#ignite-network-chain-init) - Initialize a chain from a published chain ID +* [ignite network chain install](#ignite-network-chain-install) - Install chain binary for a launch +* [ignite network chain join](#ignite-network-chain-join) - Request to join a network as a validator +* [ignite network chain launch](#ignite-network-chain-launch) - Trigger the launch of a chain +* [ignite network chain list](#ignite-network-chain-list) - List published chains +* [ignite network chain prepare](#ignite-network-chain-prepare) - Prepare the chain for launch +* [ignite network chain publish](#ignite-network-chain-publish) - Publish a new chain to start a new network +* [ignite network chain revert-launch](#ignite-network-chain-revert-launch) - Revert launch of a network as a coordinator +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain init + +Initialize a chain from a published chain ID + +**Synopsis** + +Ignite network chain init is a command used by validators to initialize a +validator node for a blockchain from the information stored on the Ignite chain. + + ignite network chain init 42 + +This command fetches the information about a chain with launch ID 42. 
The source +code of the chain is cloned in a temporary directory, and the node's binary is +compiled from the source. The binary is then used to initialize the node. By +default, Ignite uses "~/spn/[launch-id]/" as the home directory for the blockchain. + +An important part of initializing a validator node is creation of the gentx (a +transaction that adds a validator at the genesis of the chain). + +The "init" command will prompt for values like self-delegation and commission. +These values will be used in the validator's gentx. You can use flags to provide +the values in non-interactive mode. + +Use the "--home" flag to choose a different path for the home directory of the +blockchain: + + ignite network chain init 42 --home ~/mychain + +The end result of the "init" command is a validator home directory with a +genesis validator transaction (gentx) file. + +``` +ignite network chain init [launch-id] [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for init + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --validator-account string account for the chain validator (default "default") + --validator-details string details about the validator + --validator-gas-price string validator gas price + --validator-identity string validator identity signature (ex. 
UPort or Keybase) + --validator-moniker string custom validator moniker + --validator-security-contact string validator security contact email + --validator-self-delegation string validator minimum self delegation + --validator-website string associate a website with the validator + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain install + +Install chain binary for a launch + +``` +ignite network chain install [launch-id] [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for install +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain join + +Request to join a network as a validator + +**Synopsis** + +The "join" command is used by validators to send a request to join a blockchain. +The required argument is a launch ID of a blockchain. 
The "join" command expects +that the validator has already setup a home directory for the blockchain and has +a gentx either by running "ignite network chain init" or initializing the data +directory manually with the chain's binary. + +By default the "join" command just sends the request to join as a validator. +However, often a validator also needs to request an genesis account with a token +balance to afford self-delegation. + +The following command will send a request to join blockchain with launch ID 42 +as a validator and request to be added as an account with a token balance of +95000000 STAKE. + + ignite network chain join 42 --amount 95000000stake + +A request to join as a validator contains a gentx file. Ignite looks for gentx +in a home directory used by "ignite network chain init" by default. To use a +different directory, use the "--home" flag or pass a gentx file directly with +the "--gentx" flag. + +To join a chain as a validator, you must provide the IP address of your node so +that other validators can connect to it. The join command will ask you for the +IP address and will attempt to automatically detect and fill in the value. If +you want to manually specify the IP address, you can use the "--peer-address" +flag: + + ignite network chain join 42 --peer-address 0.0.0.0 + +Since "join" broadcasts a transaction to the Ignite blockchain, you will need an +account on the Ignite blockchain. During the testnet phase, however, Ignite +automatically requests tokens from a faucet. 
+ + +``` +ignite network chain join [launch-id] [flags] +``` + +**Options** + +``` + --amount string amount of coins for account request (ignored if coordinator has fixed the account balances or if --no-acount flag is set) + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --from string account name to use for sending transactions to SPN (default "default") + --gentx string path to a gentx json file + -h, --help help for join + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --no-account prevent sending a request for a genesis account + --peer-address string peer's address + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain launch + +Trigger the launch of a chain + +**Synopsis** + +The launch command communicates to the world that the chain is ready to be +launched. + +Only the coordinator of the chain can execute the launch command. + + ignite network chain launch 42 + +After the launch command is executed no changes to the genesis are accepted. For +example, validators will no longer be able to successfully execute the "ignite +network chain join" command to apply as a validator. + +The launch command sets the date and time after which the chain will start. By +default, the current time is set. 
To give validators more time to prepare for +the launch, set the time with the "--launch-time" flag: + + ignite network chain launch 42 --launch-time 2023-01-01T00:00:00Z + +After the launch command is executed, validators can generate the finalized +genesis and prepare their nodes for the launch. For example, validators can run +"ignite network chain prepare" to generate the genesis and populate the peer +list. + +If you want to change the launch time or open up the genesis file for changes +you can use "ignite network chain revert-launch" to make it possible, for +example, to accept new validators and add accounts. + + +``` +ignite network chain launch [launch-id] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for launch + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --launch-time string timestamp the chain is effectively launched (example "2022-01-01T00:00:00Z") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain list + +List published chains + +``` +ignite network chain list [flags] +``` + +**Options** + +``` + --advanced show advanced information about the chains + -h, --help help for list + --limit uint limit of results per page (default 100) + --page uint page for chain list result (default 1) +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN 
network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain prepare + +Prepare the chain for launch + +**Synopsis** + +The prepare command prepares a validator node for the chain launch by generating +the final genesis and adding IP addresses of peers to the validator's +configuration file. + + ignite network chain prepare 42 + +By default, Ignite uses "$HOME/spn/LAUNCH_ID" as the data directory. If you used +a different data directory when initializing the node, use the "--home" flag and +set the correct path to the data directory. + +Ignite generates the genesis file in "config/genesis.json" and adds peer IPs by +modifying "config/config.toml". + +The prepare command should be executed after the coordinator has triggered the +chain launch and finalized the genesis with "ignite network chain launch". You +can force Ignite to run the prepare command without checking if the launch has +been triggered with the "--force" flag (this is not recommended). + +After the prepare command is executed the node is ready to be started. 
+ + +``` +ignite network chain prepare [launch-id] [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + -f, --force force the prepare command to run even if the chain is not launched + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for prepare + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain publish + +Publish a new chain to start a new network + +**Synopsis** + +To begin the process of launching a blockchain with Ignite, a coordinator needs +to publish the information about a blockchain. The only required bit of +information is the URL of the source code of the blockchain. + +The following command publishes the information about an example blockchain: + + ignite network chain publish github.com/ignite/example + +This command fetches the source code of the blockchain, compiles the binary, +verifies that a blockchain can be started with the binary, and publishes the +information about the blockchain to Ignite. Currently, only public repositories +are supported. The command returns an integer number that acts as an identifier +of the chain on Ignite. 
+ +By publishing a blockchain on Ignite you become the "coordinator" of this +blockchain. A coordinator is an account that has the authority to approve and +reject validator requests, set parameters of the blockchain and trigger the +launch of the chain. + +The default Git branch is used when publishing a chain. If you want to use a +specific branch, tag or a commit hash, use "--branch", "--tag", or "--hash" +flags respectively. + +The repository name is used as the default chain ID. Ignite does not ensure that +chain IDs are unique, but they have to have a valid format: [string]-[integer]. +To set a custom chain ID use the "--chain-id" flag. + + ignite network chain publish github.com/ignite/example --chain-id foo-1 + +Once the chain is published users can request accounts with coin balances to be +added to the chain's genesis. By default, users are free to request any number +of tokens. If you want all users requesting tokens to get the same amount, use +the "--account-balance" flag with a list of coins. 
+ + ignite network chain publish github.com/ignite/example --account-balance 2000foocoin + + +``` +ignite network chain publish [source-url] [flags] +``` + +**Options** + +``` + --account-balance string balance for each approved genesis account for the chain + --amount string amount of coins for account request + --branch string Git branch to use for the repo + --chain-id string chain ID to use for this network + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + --genesis-config string name of an Ignite config file in the repo for custom Genesis + --genesis-url string URL to a custom Genesis + --hash string Git hash to use for the repo + -h, --help help for publish + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --mainnet initialize a mainnet project + --metadata string add chain metadata + --no-check skip verifying chain's integrity + --project uint project ID to use for this network + --reward.coins string reward coins + --reward.height int last reward height + --shares string add shares for the project + --tag string Git tag to use for the repo + --total-supply string add a total of the mainnet of a project + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and 
prepare node for launch + + +## ignite network chain revert-launch + +Revert launch of a network as a coordinator + +**Synopsis** + +The revert launch command reverts the previously scheduled launch of a chain. + +Only the coordinator of the chain can execute the launch command. + + ignite network chain revert-launch 42 + +After the revert launch command is executed, changes to the genesis of the chain +are allowed again. For example, validators will be able to request to join the +chain. Revert launch also resets the launch time. + + +``` +ignite network chain revert-launch [launch-id] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for revert-launch + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain show + +Show details of a chain + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch +* [ignite network chain show 
accounts](#ignite-network-chain-show-accounts) - Show all vesting and genesis accounts of the chain +* [ignite network chain show genesis](#ignite-network-chain-show-genesis) - Show the chain genesis file +* [ignite network chain show info](#ignite-network-chain-show-info) - Show info details of the chain +* [ignite network chain show peers](#ignite-network-chain-show-peers) - Show peers list of the chain +* [ignite network chain show validators](#ignite-network-chain-show-validators) - Show all validators of the chain + + +## ignite network chain show accounts + +Show all vesting and genesis accounts of the chain + +``` +ignite network chain show accounts [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for accounts + --prefix string account address prefix (default "spn") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show genesis + +Show the chain genesis file + +``` +ignite network chain show genesis [launch-id] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for genesis + --out string path to output Genesis file (default "./genesis.json") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show info + 
+Show info details of the chain + +``` +ignite network chain show info [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for info +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show peers + +Show peers list of the chain + +``` +ignite network chain show peers [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for peers + --out string path to output peers list (default "./peers.txt") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show validators + +Show all validators of the chain + +``` +ignite network chain show validators [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for validators + --prefix string account address prefix (default "spn") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network coordinator + +Show and update a coordinator 
profile + +**Options** + +``` + -h, --help help for coordinator +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network coordinator set](#ignite-network-coordinator-set) - Set an information in a coordinator profile +* [ignite network coordinator show](#ignite-network-coordinator-show) - Show a coordinator profile + + +## ignite network coordinator set + +Set an information in a coordinator profile + +**Synopsis** + +Coordinators on Ignite can set a profile containing a description for the coordinator. +The coordinator set command allows to set information for the coordinator. +The following information can be set: +- details: general information about the coordinator. +- identity: a piece of information to verify the identity of the coordinator with a system like Keybase or Veramo. +- website: website of the coordinator. 
+
+
+```
+ignite network coordinator set details|identity|website [value] [flags]
+```
+
+**Options**
+
+```
+      --from string              account name to use for sending transactions to SPN (default "default")
+  -h, --help                     help for set
+      --home string              home directory used for blockchains
+      --keyring-backend string   keyring backend to store your account keys (default "test")
+      --keyring-dir string       accounts keyring directory (default "/home/runner/.ignite/accounts")
+```
+
+**Options inherited from parent commands**
+
+```
+      --local                       Use local SPN network
+      --nightly                     Use nightly SPN network
+      --spn-faucet-address string   SPN faucet address (default "https://faucet.devnet.ignite.com:443")
+      --spn-node-address string     SPN node address (default "https://rpc.devnet.ignite.com:443")
+```
+
+**SEE ALSO**
+
+* [ignite network coordinator](#ignite-network-coordinator) - Show and update a coordinator profile
+
+
+## ignite network coordinator show
+
+Show a coordinator profile
+
+```
+ignite network coordinator show [address] [flags]
+```
+
+**Options**
+
+```
+  -h, --help   help for show
+```
+
+**Options inherited from parent commands**
+
+```
+      --local                       Use local SPN network
+      --nightly                     Use nightly SPN network
+      --spn-faucet-address string   SPN faucet address (default "https://faucet.devnet.ignite.com:443")
+      --spn-node-address string     SPN node address (default "https://rpc.devnet.ignite.com:443")
+```
+
+**SEE ALSO**
+
+* [ignite network coordinator](#ignite-network-coordinator) - Show and update a coordinator profile
+
+
+## ignite network request
+
+Create, show, reject and approve requests
+
+**Synopsis**
+
+The "request" namespace contains commands for creating, showing, approving, and
+rejecting requests.
+
+A request is a mechanism in Ignite that allows changes to be made to the genesis
+file like adding accounts with token balances and validators. Anyone can submit
+a request, but only the coordinator of a chain can approve or reject a request. 
+ +Each request has a status: + +* Pending: waiting for the approval of the coordinator +* Approved: approved by the coordinator, its content has been applied to the + launch information +* Rejected: rejected by the coordinator or the request creator + + +**Options** + +``` + -h, --help help for request +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network request add-account](#ignite-network-request-add-account) - Send request to add account +* [ignite network request approve](#ignite-network-request-approve) - Approve requests +* [ignite network request change-param](#ignite-network-request-change-param) - Send request to change a module param +* [ignite network request list](#ignite-network-request-list) - List all requests for a chain +* [ignite network request reject](#ignite-network-request-reject) - Reject requests +* [ignite network request remove-account](#ignite-network-request-remove-account) - Send request to remove a genesis account +* [ignite network request remove-validator](#ignite-network-request-remove-validator) - Send request to remove a validator +* [ignite network request show](#ignite-network-request-show) - Show detailed information about a request +* [ignite network request verify](#ignite-network-request-verify) - Verify the request and simulate the chain genesis from them + + +## ignite network request add-account + +Send request to add account + +**Synopsis** + +The "add account" command creates a new request to add an account with a given +address and a specified coin balance to the genesis of the chain. 
+ +The request automatically fails to be applied if a genesis account or a vesting +account with an identical address is already specified in the launch +information. + +If a coordinator has specified that all genesis accounts on a chain should have +the same balance (useful for testnets, for example), the "add account" expects +only an address as an argument. Attempt to provide a token balance will result +in an error. + + +``` +ignite network request add-account [launch-id] [address] [coins] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for add-account + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request approve + +Approve requests + +**Synopsis** + +The "approve" command is used by a chain's coordinator to approve requests. +Multiple requests can be approved using a comma-separated list and/or using a +dash syntax. + + ignite network request approve 42 1,2,3-6,7,8 + +The command above approves requests with IDs from 1 to 8 included on a chain +with a launch ID 42. + +When requests are approved Ignite applies the requested changes and simulates +initializing and launching the chain locally. If the chain starts successfully, +requests are considered to be "verified" and are approved. 
If one or more +requested changes stop the chain from launching locally, the verification +process fails and the approval of all requests is canceled. To skip the +verification process use the "--no-verification" flag. + +Note that Ignite will try to approve requests in the same order as request IDs +are submitted to the "approve" command. + +``` +ignite network request approve [launch-id] [number<,...>] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for approve + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --no-verification approve the requests without verifying them +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request change-param + +Send request to change a module param + +``` +ignite network request change-param [launch-id] [module-name] [param-name] [value (json, string, number)] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for change-param + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options 
inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request list + +List all requests for a chain + +``` +ignite network request list [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for list + --prefix string account address prefix (default "spn") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request reject + +Reject requests + +**Synopsis** + +The "reject" command is used by a chain's coordinator to reject requests. + + ignite network request reject 42 1,2,3-6,7,8 + +The syntax of the "reject" command is similar to that of the "approve" command. 
+ + +``` +ignite network request reject [launch-id] [number<,...>] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for reject + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request remove-account + +Send request to remove a genesis account + +``` +ignite network request remove-account [launch-id] [address] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for remove-account + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request remove-validator + +Send request to remove a 
validator + +``` +ignite network request remove-validator [launch-id] [address] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for remove-validator + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request show + +Show detailed information about a request + +``` +ignite network request show [launch-id] [request-id] [flags] +``` + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request verify + +Verify the request and simulate the chain genesis from them + +**Synopsis** + +The "verify" command applies selected requests to the genesis of a chain locally +to verify that approving these requests will result in a valid genesis that +allows a chain to launch without issues. This command does not approve requests, +only checks them. 
+ + +``` +ignite network request verify [launch-id] [number<,...>] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for verify + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network tool + +Commands to run subsidiary tools + +**Options** + +``` + -h, --help help for tool +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network tool proxy-tunnel](#ignite-network-tool-proxy-tunnel) - Setup a proxy tunnel via HTTP + + +## ignite network tool proxy-tunnel + +Setup a proxy tunnel via HTTP + +**Synopsis** + +Starts an HTTP proxy server and HTTP proxy clients for each node that +needs HTTP tunneling. + +HTTP tunneling is activated **ONLY** if SPN_CONFIG_FILE has "tunneled_peers" +field inside with a list of tunneled peers/nodes. 
+
+If you're using SPN as coordinator and do not want to allow the HTTP tunneling
+feature at all, you can prevent the "spn.yml" file from being generated by not
+approving validator requests that have HTTP tunneling enabled instead of plain
+TCP connections.
+
+```
+ignite network tool proxy-tunnel SPN_CONFIG_FILE [flags]
+```
+
+**Options**
+
+```
+  -h, --help   help for proxy-tunnel
+```
+
+**Options inherited from parent commands**
+
+```
+      --local                       Use local SPN network
+      --nightly                     Use nightly SPN network
+      --spn-faucet-address string   SPN faucet address (default "https://faucet.devnet.ignite.com:443")
+      --spn-node-address string     SPN node address (default "https://rpc.devnet.ignite.com:443")
+```
+
+**SEE ALSO**
+
+* [ignite network tool](#ignite-network-tool) - Commands to run subsidiary tools
+
+
+## ignite network validator
+
+Show and update a validator profile
+
+**Options**
+
+```
+  -h, --help   help for validator
+```
+
+**Options inherited from parent commands**
+
+```
+      --local                       Use local SPN network
+      --nightly                     Use nightly SPN network
+      --spn-faucet-address string   SPN faucet address (default "https://faucet.devnet.ignite.com:443")
+      --spn-node-address string     SPN node address (default "https://rpc.devnet.ignite.com:443")
+```
+
+**SEE ALSO**
+
+* [ignite network](#ignite-network) - Launch a blockchain in production
+* [ignite network validator set](#ignite-network-validator-set) - Set an information in a validator profile
+* [ignite network validator show](#ignite-network-validator-show) - Show a validator profile
+
+
+## ignite network validator set
+
+Set an information in a validator profile
+
+**Synopsis**
+
+Validators on Ignite can set a profile containing a description for the validator.
+The validator set command allows you to set information for the validator.
+The following information can be set:
+- details: general information about the validator.
+- identity: a piece of information to verify the identity of the validator with a system like Keybase or Veramo. 
+- website: website of the validator. +- security: security contact for the validator. + + +``` +ignite network validator set details|identity|website|security [value] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for set + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network validator](#ignite-network-validator) - Show and update a validator profile + + +## ignite network validator show + +Show a validator profile + +``` +ignite network validator show [address] [flags] +``` + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network validator](#ignite-network-validator) - Show and update a validator profile + + +## ignite network version + +Version of the plugin + +**Synopsis** + +The version of the plugin to use to interact with a chain might be specified by the coordinator. 
+ + +``` +ignite network version [flags] +``` + +**Options** + +``` + -h, --help help for version +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production + + +## ignite node + +Make requests to a live blockchain node + +**Options** + +``` + -h, --help help for node + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.directory:443/cosmoshub") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite node query](#ignite-node-query) - Querying subcommands +* [ignite node tx](#ignite-node-tx) - Transactions subcommands + + +## ignite node query + +Querying subcommands + +**Options** + +``` + -h, --help help for query +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.directory:443/cosmoshub") +``` + +**SEE ALSO** + +* [ignite node](#ignite-node) - Make requests to a live blockchain node +* [ignite node query bank](#ignite-node-query-bank) - Querying commands for the bank module +* [ignite node query tx](#ignite-node-query-tx) - Query for transaction by hash + + +## ignite node query bank + +Querying commands for the bank module + +**Options** + +``` + -h, --help help for bank +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.directory:443/cosmoshub") +``` + +**SEE ALSO** + +* [ignite node query](#ignite-node-query) - Querying subcommands +* [ignite node query bank 
balances](#ignite-node-query-bank-balances) - Query for account balances by account name or address + + +## ignite node query bank balances + +Query for account balances by account name or address + +``` +ignite node query bank balances [from_account_or_address] [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + --count-total count total number of records in all balances to query for + -h, --help help for balances + --home string directory where the blockchain node is initialized + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --limit uint pagination limit of all balances to query for (default 100) + --offset uint pagination offset of all balances to query for + --page uint pagination page of all balances to query for. This sets offset to a multiple of limit (default 1) + --page-key string pagination page-key of all balances to query for + --reverse results are sorted in descending order +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.directory:443/cosmoshub") +``` + +**SEE ALSO** + +* [ignite node query bank](#ignite-node-query-bank) - Querying commands for the bank module + + +## ignite node query tx + +Query for transaction by hash + +``` +ignite node query tx [hash] [flags] +``` + +**Options** + +``` + -h, --help help for tx +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.directory:443/cosmoshub") +``` + +**SEE ALSO** + +* [ignite node query](#ignite-node-query) - Querying subcommands + + +## ignite node tx + +Transactions subcommands + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + --fees string fees to pay along 
with transaction; eg: 10uatom + --gas string gas limit to set per-transaction; set to "auto" to calculate sufficient gas automatically (default "auto") + --gas-adjustment float gas adjustment to set per-transaction + --gas-prices string gas prices in decimal format to determine the transaction fee (e.g. 0.1uatom) + --generate-only build an unsigned transaction and write it to STDOUT + -h, --help help for tx + --home string directory where the blockchain node is initialized + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.directory:443/cosmoshub") +``` + +**SEE ALSO** + +* [ignite node](#ignite-node) - Make requests to a live blockchain node +* [ignite node tx bank](#ignite-node-tx-bank) - Bank transaction subcommands + + +## ignite node tx bank + +Bank transaction subcommands + +**Options** + +``` + -h, --help help for bank +``` + +**Options inherited from parent commands** + +``` + --address-prefix string account address prefix (default "cosmos") + --fees string fees to pay along with transaction; eg: 10uatom + --gas string gas limit to set per-transaction; set to "auto" to calculate sufficient gas automatically (default "auto") + --gas-adjustment float gas adjustment to set per-transaction + --gas-prices string gas prices in decimal format to determine the transaction fee (e.g. 
0.1uatom) + --generate-only build an unsigned transaction and write it to STDOUT + --home string directory where the blockchain node is initialized + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.directory:443/cosmoshub") +``` + +**SEE ALSO** + +* [ignite node tx](#ignite-node-tx) - Transactions subcommands +* [ignite node tx bank send](#ignite-node-tx-bank-send) - Send funds from one account to another. + + +## ignite node tx bank send + +Send funds from one account to another. + +``` +ignite node tx bank send [from_account_or_address] [to_account_or_address] [amount] [flags] +``` + +**Options** + +``` + -h, --help help for send +``` + +**Options inherited from parent commands** + +``` + --address-prefix string account address prefix (default "cosmos") + --fees string fees to pay along with transaction; eg: 10uatom + --gas string gas limit to set per-transaction; set to "auto" to calculate sufficient gas automatically (default "auto") + --gas-adjustment float gas adjustment to set per-transaction + --gas-prices string gas prices in decimal format to determine the transaction fee (e.g. 
0.1uatom) + --generate-only build an unsigned transaction and write it to STDOUT + --home string directory where the blockchain node is initialized + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.directory:443/cosmoshub") +``` + +**SEE ALSO** + +* [ignite node tx bank](#ignite-node-tx-bank) - Bank transaction subcommands + + +## ignite relayer + +Connect blockchains with an IBC relayer + +``` +ignite relayer [flags] +``` + +**Options** + +``` + -h, --help help for relayer +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + + +## ignite scaffold + +Create a new blockchain, module, message, query, and more + +**Synopsis** + +Scaffolding is a quick way to generate code for major pieces of your +application. + +For details on each scaffolding target (chain, module, message, etc.) run the +corresponding command with a "--help" flag, for example, "ignite scaffold chain +--help". + +The Ignite team strongly recommends committing the code to a version control +system before running scaffolding commands. This will make it easier to see the +changes to the source code as well as undo the command if you've decided to roll +back the changes. + +This blockchain you create with the chain scaffolding command uses the modular +Cosmos SDK framework and imports many standard modules for functionality like +proof of stake, token transfer, inter-blockchain connectivity, governance, and +more. Custom functionality is implemented in modules located by convention in +the "x/" directory. By default, your blockchain comes with an empty custom +module. Use the module scaffolding command to create an additional module. 
+
+An empty custom module doesn't do much, it's basically a container for logic
+that is responsible for processing transactions and changing the application
+state. Cosmos SDK blockchains work by processing user-submitted signed
+transactions, which contain one or more messages. A message contains data that
+describes a state transition. A module can be responsible for handling any
+number of messages.
+
+A message scaffolding command will generate the code for handling a new type of
+Cosmos SDK message. Message fields describe the state transition that the
+message is intended to produce if processed without errors.
+
+Scaffolding messages is useful to create individual "actions" that your module
+can perform. Sometimes, however, you want your blockchain to have the
+functionality to create, read, update and delete (CRUD) instances of a
+particular type. Depending on how you want to store the data there are three
+commands that scaffold CRUD functionality for a type: list, map, and single.
+These commands create four messages (one for each CRUD action), and the logic to
+add, delete, and fetch the data from the store. If you want to scaffold only the
+logic, for example, you've decided to scaffold messages separately, you can do
+that as well with the "--no-message" flag.
+
+Reading data from a blockchain happens with the help of queries. Similar to how
+you can scaffold messages to write data, you can scaffold queries to read the
+data back from your blockchain application.
+
+You can also scaffold a type, which just produces a new protocol buffer file
+with a proto message description. Note that proto messages produce (and
+correspond with) Go types whereas Cosmos SDK messages correspond to proto "rpc"
+in the "Msg" service.
+
+If you're building an application with custom IBC logic, you might need to
+scaffold IBC packets. An IBC packet represents the data sent from one blockchain
+to another. 
You can only scaffold IBC packets in IBC-enabled modules scaffolded +with an "--ibc" flag. Note that the default module is not IBC-enabled. + + +**Options** + +``` + -h, --help help for scaffold +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite scaffold chain](#ignite-scaffold-chain) - New Cosmos SDK blockchain +* [ignite scaffold configs](#ignite-scaffold-configs) - Configs for a custom Cosmos SDK module +* [ignite scaffold list](#ignite-scaffold-list) - CRUD for data stored as an array +* [ignite scaffold map](#ignite-scaffold-map) - CRUD for data stored as key-value pairs +* [ignite scaffold message](#ignite-scaffold-message) - Message to perform state transition on the blockchain +* [ignite scaffold module](#ignite-scaffold-module) - Custom Cosmos SDK module +* [ignite scaffold packet](#ignite-scaffold-packet) - Message for sending an IBC packet +* [ignite scaffold params](#ignite-scaffold-params) - Parameters for a custom Cosmos SDK module +* [ignite scaffold query](#ignite-scaffold-query) - Query for fetching data from a blockchain +* [ignite scaffold react](#ignite-scaffold-react) - React web app template +* [ignite scaffold single](#ignite-scaffold-single) - CRUD for data stored in a single location +* [ignite scaffold type](#ignite-scaffold-type) - Type definition +* [ignite scaffold vue](#ignite-scaffold-vue) - Vue 3 web app template + + +## ignite scaffold chain + +New Cosmos SDK blockchain + +**Synopsis** + +Create a new application-specific Cosmos SDK blockchain. + +For example, the following command will create a blockchain called "hello" in +the "hello/" directory: + + ignite scaffold chain hello + +A project name can be a simple name or a URL. The name will be used as the Go +module path for the project. 
Examples of project names: + + ignite scaffold chain foo + ignite scaffold chain foo/bar + ignite scaffold chain example.org/foo + ignite scaffold chain github.com/username/foo + +A new directory with source code files will be created in the current directory. +To use a different path use the "--path" flag. + +Most of the logic of your blockchain is written in custom modules. Each module +effectively encapsulates an independent piece of functionality. Following the +Cosmos SDK convention, custom modules are stored inside the "x/" directory. By +default, Ignite creates a module with a name that matches the name of the +project. To create a blockchain without a default module use the "--no-module" +flag. Additional modules can be added after a project is created with "ignite +scaffold module" command. + +Account addresses on Cosmos SDK-based blockchains have string prefixes. For +example, the Cosmos Hub blockchain uses the default "cosmos" prefix, so that +addresses look like this: "cosmos12fjzdtqfrrve7zyg9sv8j25azw2ua6tvu07ypf". To +use a custom address prefix use the "--address-prefix" flag. For example: + + ignite scaffold chain foo --address-prefix bar + +By default when compiling a blockchain's source code Ignite creates a cache to +speed up the build process. To clear the cache when building a blockchain use +the "--clear-cache" flag. It is very unlikely you will ever need to use this +flag. + +The blockchain is using the Cosmos SDK modular blockchain framework. 
Learn more
+about Cosmos SDK on https://docs.cosmos.network
+
+
+```
+ignite scaffold chain [name] [flags]
+```
+
+**Options**
+
+```
+      --address-prefix string    account address prefix (default "cosmos")
+      --clear-cache              clear the build cache (advanced)
+      --consumer                 scaffold an ICS consumer chain
+  -h, --help                     help for chain
+      --minimal                  create a minimal blockchain (with the minimum required Cosmos SDK modules)
+      --module-configs strings   add module configs
+      --no-module                create a project without a default module
+      --params strings           add default module parameters
+  -p, --path string              create a project in a specific path
+      --proto-dir string         chain proto directory (default "proto")
+      --skip-git                 skip Git repository initialization
+      --skip-proto               skip proto generation
+```
+
+**SEE ALSO**
+
+* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more
+
+
+## ignite scaffold configs
+
+Configs for a custom Cosmos SDK module
+
+**Synopsis**
+
+Scaffold a new config for a Cosmos SDK module.
+
+A Cosmos SDK module can have configurations. An example of a config is "address prefix" of the
+"auth" module. A config can be scaffolded into a module using the "--module-configs" into
+the scaffold module command or using the "scaffold configs" command. By default
+configs are of type "string", but you can specify a type for each config. For example:
+
+  ignite scaffold configs foo baz:uint bar:bool
+
+Refer to Cosmos SDK documentation to learn more about modules, dependencies and
+configs.
+
+
+```
+ignite scaffold configs [configs]... 
[flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for configs + --module string module to add the query into (default: app's main module) + -p, --path string path of the app (default ".") + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold list + +CRUD for data stored as an array + +**Synopsis** + +The "list" scaffolding command is used to generate files that implement the +logic for storing and interacting with data stored as a list in the blockchain +state. + +The command accepts a NAME argument that will be used as the name of a new type +of data. It also accepts a list of FIELDs that describe the type. + +The interaction with the data follows the create, read, updated, and delete +(CRUD) pattern. For each type three Cosmos SDK messages are defined for writing +data to the blockchain: MsgCreate{Name}, MsgUpdate{Name}, MsgDelete{Name}. For +reading data two queries are defined: {Name} and {Name}All. The type, messages, +and queries are defined in the "proto/" directory as protocol buffer messages. +Messages and queries are mounted in the "Msg" and "Query" services respectively. + +When messages are handled, the appropriate keeper methods are called. By +convention, the methods are defined in +"x/{moduleName}/keeper/msg_server_{name}.go". Helpful methods for getting, +setting, removing, and appending are defined in the same "keeper" package in +"{name}.go". + +The "list" command essentially allows you to define a new type of data and +provides the logic to create, read, update, and delete instances of the type. 
+For example, let's review a command that generates the code to handle a list of +posts and each post has "title" and "body" fields: + + ignite scaffold list post title body + +This provides you with a "Post" type, MsgCreatePost, MsgUpdatePost, +MsgDeletePost and two queries: Post and PostAll. The compiled CLI, let's say the +binary is "blogd" and the module is "blog", has commands to query the chain (see +"blogd q blog") and broadcast transactions with the messages above (see "blogd +tx blog"). + +The code generated with the list command is meant to be edited and tailored to +your application needs. Consider the code to be a "skeleton" for the actual +business logic you will implement next. + +By default, all fields are assumed to be strings. If you want a field of a +different type, you can specify it after a colon ":". The following types are +supported: string, bool, int, uint, coin, array.string, array.int, array.uint, +array.coin. An example of using field types: + + ignite scaffold list pool amount:coin tags:array.string height:int + +For detailed type information use ignite scaffold type --help + +"Index" indicates whether the type can be used as an index in +"ignite scaffold map". + +Ignite also supports custom types: + + ignite scaffold list product-details name desc + ignite scaffold list product price:coin details:ProductDetails + +In the example above the "ProductDetails" type was defined first, and then used +as a custom type for the "details" field. Ignite doesn't support arrays of +custom types yet. + +Your chain will accept custom types in JSON-notation: + + exampled tx example create-product 100coin '{"name": "x", "desc": "y"}' --from alice + +By default the code will be scaffolded in the module that matches your project's +name. 
If you have several modules in your project, you might want to specify a +different module: + + ignite scaffold list post title body --module blog + +By default, each message comes with a "creator" field that represents the +address of the transaction signer. You can customize the name of this field with +a flag: + + ignite scaffold list post title body --signer author + +It's possible to scaffold just the getter/setter logic without the CRUD +messages. This is useful when you want the methods to handle a type, but would +like to scaffold messages manually. Use a flag to skip message scaffolding: + + ignite scaffold list post title body --no-message + +The "creator" field is not generated if a list is scaffolded with the +"--no-message" flag. + + +``` +ignite scaffold list NAME [field]... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for list + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold map + +CRUD for data stored as key-value pairs + +**Synopsis** + +The "map" scaffolding command is used to generate files that implement the logic +for storing and interacting with data stored as key-value pairs (or a +dictionary) in the blockchain state. + +The "map" command is very similar to "ignite scaffold list" with the main +difference in how values are indexed. With "list" values are indexed by an +incrementing integer, whereas "map" values are indexed by a user-provided value +(or multiple values). 
+ +Let's use the same blog post example: + + ignite scaffold map post title body:string + +This command scaffolds a "Post" type and CRUD functionality to create, read, +updated, and delete posts. However, when creating a new post with your chain's +binary (or by submitting a transaction through the chain's API) you will be +required to provide an "index": + + blogd tx blog create-post [index] [title] [body] + blogd tx blog create-post hello "My first post" "This is the body" + +This command will create a post and store it in the blockchain's state under the +"hello" index. You will be able to fetch back the value of the post by querying +for the "hello" key. + + blogd q blog show-post hello + +By default, the index is called "index", to customize the index, use the "--index" flag. + +Since the behavior of "list" and "map" scaffolding is very similar, you can use +the "--no-message", "--module", "--signer" flags as well as the colon syntax for +custom types. + +For detailed type information use ignite scaffold type --help + + +``` +ignite scaffold map NAME [field]... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for map + --index string field that index the value (default "index") + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold message + +Message to perform state transition on the blockchain + +**Synopsis** + +Message scaffolding is useful for quickly adding functionality to your +blockchain to handle specific Cosmos SDK messages. 
+
+Messages are objects whose end goal is to trigger state transitions on the
+blockchain. A message is a container for fields of data that affect how the
+blockchain's state will change. You can think of messages as "actions" that a
+user can perform.
+
+For example, the bank module has a "Send" message for token transfers between
+accounts. The send message has three fields: from address (sender), to address
+(recipient), and a token amount. When this message is successfully processed,
+the token amount will be deducted from the sender's account and added to the
+recipient's account.
+
+Ignite's message scaffolding lets you create new types of messages and add them
+to your chain. For example:
+
+  ignite scaffold message add-pool amount:coins denom active:bool --module dex
+
+The command above will create a new message MsgAddPool with three fields: amount
+(in tokens), denom (a string), and active (a boolean). The message will be added
+to the "dex" module.
+
+For detailed type information use ignite scaffold type --help
+
+By default, the message is defined as a proto message in the
+"proto/{app}/{module}/tx.proto" and registered in the "Msg" service. A CLI command to
+create and broadcast a transaction with MsgAddPool is created in the module's
+"cli" package. Additionally, Ignite scaffolds a message constructor and the code
+to satisfy the sdk.Msg interface and register the message in the module.
+
+Most importantly in the "keeper" package Ignite scaffolds an "AddPool" function.
+Inside this function, you can implement message handling logic.
+
+When successfully processed a message can return data. Use the --response flag to
+specify response fields and their types. For example
+
+  ignite scaffold message create-post title body --response id:int,title
+
+The command above will scaffold MsgCreatePost which returns both an ID (an
+integer) and a title (a string). 
+ +Message scaffolding follows the rules as "ignite scaffold list/map/single" and +supports fields with standard and custom types. See "ignite scaffold list —help" +for details. + + +``` +ignite scaffold message [name] [field1:type1] [field2:type2] ... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -d, --desc string description of the command + -h, --help help for message + --module string module to add the message into. Default: app's main module + --no-simulation disable CRUD simulation scaffolding + -p, --path string path of the app (default ".") + -r, --response strings response fields + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold module + +Custom Cosmos SDK module + +**Synopsis** + +Scaffold a new Cosmos SDK module. + +Cosmos SDK is a modular framework and each independent piece of functionality is +implemented in a separate module. By default your blockchain imports a set of +standard Cosmos SDK modules. To implement custom functionality of your +blockchain, scaffold a module and implement the logic of your application. + +This command does the following: + +* Creates a directory with module's protocol buffer files in "proto/" +* Creates a directory with module's boilerplate Go code in "x/" +* Imports the newly created module by modifying "app/app.go" +* Creates a file in "testutil/keeper/" that contains logic to create a keeper + for testing purposes + +This command will proceed with module scaffolding even if "app/app.go" doesn't +have the required default placeholders. If the placeholders are missing, you +will need to modify "app/app.go" manually to import the module. If you want the +command to fail if it can't import the module, use the "--require-registration" +flag. 
+
+To scaffold an IBC-enabled module use the "--ibc" flag. An IBC-enabled module is
+like a regular module with the addition of IBC-specific logic and placeholders
+to scaffold IBC packets with "ignite scaffold packet".
+
+A module can depend on one or more other modules and import their keeper
+methods. To scaffold a module with a dependency use the "--dep" flag.
+
+For example, your new custom module "foo" might have functionality that requires
+sending tokens between accounts. The method for sending tokens is defined in
+the "bank" module's keeper. You can scaffold a "foo" module with the dependency
+on "bank" with the following command:
+
+  ignite scaffold module foo --dep bank
+
+You can then define which methods you want to import from the "bank" keeper in
+"expected_keepers.go".
+
+You can also scaffold a module with a list of dependencies that can include both
+standard and custom modules (provided they exist):
+
+  ignite scaffold module bar --dep foo,mint,account,FeeGrant
+
+Note: the "--dep" flag doesn't install third-party modules into your
+application, it just generates extra code that specifies which existing modules
+your new custom module depends on.
+
+A Cosmos SDK module can have parameters (or "params"). Params are values that
+can be set at the genesis of the blockchain and can be modified while the
+blockchain is running. An example of a param is "Inflation rate change" of the
+"mint" module. A module can be scaffolded with params using the "--params" flag
+that accepts a list of param names. By default params are of type "string", but
+you can specify a type for each param. For example:
+
+  ignite scaffold module foo --params baz:uint,bar:bool
+
+Refer to Cosmos SDK documentation to learn more about modules, dependencies and
+params. 
+ + +``` +ignite scaffold module [name] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --dep strings add a dependency on another module + -h, --help help for module + --ibc add IBC functionality + --module-configs strings add module configs + --ordering string channel ordering of the IBC module [none|ordered|unordered] (default "none") + --params strings add module parameters + -p, --path string path of the app (default ".") + --require-registration fail if module can't be registered + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold packet + +Message for sending an IBC packet + +**Synopsis** + +Scaffold an IBC packet in a specific IBC-enabled Cosmos SDK module + +``` +ignite scaffold packet [packetName] [field1] [field2] ... --module [moduleName] [flags] +``` + +**Options** + +``` + --ack strings custom acknowledgment type (field1,field2,...) + --clear-cache clear the build cache (advanced) + -h, --help help for packet + --module string IBC Module to add the packet into + --no-message disable send message scaffolding + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold params + +Parameters for a custom Cosmos SDK module + +**Synopsis** + +Scaffold a new parameter for a Cosmos SDK module. + +A Cosmos SDK module can have parameters (or "params"). Params are values that +can be set at the genesis of the blockchain and can be modified while the +blockchain is running. An example of a param is "Inflation rate change" of the +"mint" module. 
A params can be scaffolded into a module using the "--params" into +the scaffold module command or using the "scaffold params" command. By default +params are of type "string", but you can specify a type for each param. For example: + + ignite scaffold params foo baz:uint bar:bool + +Refer to Cosmos SDK documentation to learn more about modules, dependencies and +params. + + +``` +ignite scaffold params [param]... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for params + --module string module to add the query into. Default: app's main module + -p, --path string path of the app (default ".") + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold query + +Query for fetching data from a blockchain + +**Synopsis** + +Query for fetching data from a blockchain. + +For detailed type information use ignite scaffold type --help. + +``` +ignite scaffold query [name] [field1:type1] [field2:type2] ... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -d, --desc string description of the CLI to broadcast a tx with the message + -h, --help help for query + --module string module to add the query into. 
Default: app's main module + --paginated define if the request can be paginated + -p, --path string path of the app (default ".") + -r, --response strings response fields + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold react + +React web app template + +``` +ignite scaffold react [flags] +``` + +**Options** + +``` + -h, --help help for react + -p, --path string path to scaffold content of the React app (default "./react") + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold single + +CRUD for data stored in a single location + +**Synopsis** + +CRUD for data stored in a single location. + +For detailed type information use ignite scaffold type --help. + +``` +ignite scaffold single NAME [field:type]... 
[flags] +``` + +**Examples** + +``` + ignite scaffold single todo-single title:string done:bool +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for single + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold type + +Type definition + +**Synopsis** + +Type information + +Currently supports: + +| Type | Alias | Index | Code Type | Description | +|--------------|---------|-------|-----------|---------------------------------| +| string | - | yes | string | Text type | +| array.string | strings | no | []string | List of text type | +| bool | - | yes | bool | Boolean type | +| int | - | yes | int64 | Integer type | +| array.int | ints | no | []int64 | List of integers types | +| uint | - | yes | uint64 | Unsigned integer type | +| array.uint | uints | no | []uint64 | List of unsigned integers types | +| coin | - | no | sdk.Coin | Cosmos SDK coin type | +| array.coin | coins | no | sdk.Coins | List of Cosmos SDK coin types | + +Field Usage: + - fieldName + - fieldName:fieldType + +If no :fieldType, default (string) is used + + + +``` +ignite scaffold type NAME [field:type] ... 
[flags] +``` + +**Examples** + +``` + ignite scaffold type todo-item priority:int desc:string tags:array.string done:bool +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for type + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold vue + +Vue 3 web app template + +``` +ignite scaffold vue [flags] +``` + +**Options** + +``` + -h, --help help for vue + -p, --path string path to scaffold content of the Vue.js app (default "./vue") + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite version + +Print the current build information + +``` +ignite version [flags] +``` + +**Options** + +``` + -h, --help help for version +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + + +## ignite testnet + +Start a testnet local + +**Synopsis** + +The commands in this namespace allow you to start your local testnet for development purposes. + + +The "in-place" command is used to create and start a testnet from current local net state(including mainnet). +After using this command in the repo containing the config.yml file, the network will start. +We can create a testnet from the local network state and mint additional coins for the desired accounts from the config.yml file. 
+ +During development, in-place allows you to quickly reboot the chain from a multi-node network state to a node you have full control over. + +The "multi-node" initialization and start command is used to set up and launch a multi-node network, allowing you to enable, disable, and providing full interaction capabilities with the chain. The stake amount for each validator is defined in the config.yml file. + +**SEE ALSO** + +* [ignite testnet in-place](#ignite-testnet-in-place) - Create and start a testnet from current local net state +* [ignite testnet multi-node](#ignite-testnet-multi-node) - Initialize and provide multi-node on/off functionality + + +## ignite testnet in-place + +Create and start a testnet from current local net state + +**Synopsis** + +The "in-place" command is used to create and start a testnet from current local net state(including mainnet). + +We can create a testnet from the local network state and mint additional coins for the desired accounts from the config.yml file. + +During development, in-place allows you to quickly reboot the chain from a multi-node network state to a node you have full control over. + +By default, the data directory will be initialized in $HOME/.mychain, where "mychain" is the name of the project. 
To set a custom data directory use the --home flag or set the value in config.yml:
+
+    validators:
+    - name: alice
+      bonded: '100000000stake'
+      home: "~/.customdir"
+
+To receive minted coins, just add an account to the config.yml file:
+
+    accounts:
+    - name: charlie
+      coins:
+      - 20000token
+      - 200000000stake
+
+
+```
+ignite testnet in-place [flags]
+```
+
+**Options**
+
+```
+  -h, --help          help for in-place
+  -p, --path string   path of the app (default ".")
+```
+
+**Options inherited from parent commands**
+
+```
+  -c, --config string   path to Ignite config file (default: ./config.yml)
+```
+
+## ignite testnet multi-node
+
+Initialize and start multiple nodes
+
+**Synopsis**
+
+The "multi-node" command allows developers to easily set up, initialize, and manage multiple nodes for a testnet environment. This command provides full flexibility in enabling or disabling each node as desired, making it a powerful tool for simulating a multi-node blockchain network during development.
+
+By using the config.yml file, you can define validators with custom bonded amounts, giving you control over how each node participates in the network:
+
+```
+ validators:
+   - name: alice
+     bonded: 100000000stake
+   - name: validator1
+     bonded: 100000000stake
+   - name: validator2
+     bonded: 200000000stake
+   - name: validator3
+     bonded: 300000000stake
+
+```
+
+Each validator's bonded stake can be adjusted according to your testing needs, providing a realistic environment to simulate various scenarios.
+
+The multi-node command not only initializes these nodes but also gives you control over starting, stopping individual nodes. This level of control ensures you can test and iterate rapidly without needing to reinitialize the entire network each time a change is made. This makes it ideal for experimenting with validator behavior, network dynamics, and the impact of various configurations.
+
+All initialized nodes will be stored under the `.ignite/local-chains/<app>/testnet/` directory, which allows easy access and management.
+
+
+Usage
+
+```
+ignite testnet multi-node [flags]
+```
+
+**Options**
+
+```
+  -r, --reset-once          reset the app state once on init
+      --node-dir-prefix     dir prefix for node (default "validator")
+  -h, --help                help for multi-node
+  -p, --path string         path of the app (default ".")
+```
+
+**Options inherited from parent commands**
+
+```
+  -c, --config string   path to Ignite config file (default: ./config.yml)
+```
+
+**SEE ALSO**
+
+* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, start testnet and launch your blockchain
\ No newline at end of file
diff --git a/docs/versioned_docs/version-v28/03-clients/01-go-client.md b/docs/versioned_docs/version-v28/03-clients/01-go-client.md
new file mode 100644
index 0000000..d9ae05a
--- /dev/null
+++ b/docs/versioned_docs/version-v28/03-clients/01-go-client.md
@@ -0,0 +1,300 @@
+---
+description: Blockchain client in Go
+title: Go client
+---
+
+# A client in the Go programming language
+
+In this tutorial, we will show you how to create a standalone Go program that
+serves as a client for a blockchain. We will use the Ignite CLI to set up a
+standard blockchain. To communicate with the blockchain, we will utilize the
+`cosmosclient` package, which provides an easy-to-use interface for interacting
+with the blockchain. You will learn how to use the `cosmosclient` package to
+send transactions and query the blockchain. By the end of this tutorial, you
+will have a good understanding of how to build a client for a blockchain using
+Go and the `cosmosclient` package.
+
+## Create a blockchain
+
+To create a blockchain using the Ignite CLI, use the following command:
+
+```
+ignite scaffold chain blog
+```
+
+This will create a new Cosmos SDK blockchain called "blog".
+ +Once the blockchain has been created, you can generate code for a "blog" model +that will enable you to perform create, read, update, and delete (CRUD) +operations on blog posts. To do this, you can use the following command: + +``` +cd blog +ignite scaffold list post title body +``` + +This will generate the necessary code for the "blog" model, including functions +for creating, reading, updating, and deleting blog posts. With this code in +place, you can now use your blockchain to perform CRUD operations on blog posts. +You can use the generated code to create new blog posts, retrieve existing ones, +update their content, and delete them as needed. This will give you a fully +functional Cosmos SDK blockchain with the ability to manage blog posts. + +Start your blockchain node with the following command: + +``` +ignite chain serve +``` + +## Creating a blockchain client + +Create a new directory called `blogclient` on the same level as `blog` +directory. As the name suggests, `blogclient` will contain a standalone Go +program that acts as a client to your `blog` blockchain. + +```bash +mkdir blogclient +``` + +This command will create a new directory called `blogclient` in your current +location. If you type `ls` in your terminal window, you should see both the +`blog` and `blogclient` directories listed. + +To initialize a new Go package inside the `blogclient` directory, you can use +the following command: + +``` +cd blogclient +go mod init blogclient +``` + +This will create a `go.mod` file in the `blogclient` directory, which contains +information about the package and the Go version being used. 
+ +To import dependencies for your package, you can add the following code to the +`go.mod` file: + +```text title="blogclient/go.mod" +module blogclient + +go 1.20 + +require ( + blog v0.0.0-00010101000000-000000000000 + github.com/ignite/cli v0.25.2 +) + +replace blog => ../blog +replace github.com/gogo/protobuf => github.com/regen-network/protobuf v1.3.3-alpha.regen.1 +``` + +Your package will import two dependencies: + +* `blog`, which contains `types` of messages and a query client +* `ignite` for the `cosmosclient` package + +The `replace` directive uses the package from the local `blog` directory and is +specified as a relative path to the `blogclient` directory. + +Cosmos SDK uses a custom version of the `protobuf` package, so use the `replace` +directive to specify the correct dependency. + +Finally, install dependencies for your `blogclient`: + +```bash +go mod tidy +``` + +### Main logic of the client in `main.go` + +Create a `main.go` file inside the `blogclient` directory and add the following +code: + +```go title="blogclient/main.go" +package main + +import ( + "context" + "fmt" + "log" + + // Importing the general purpose Cosmos blockchain client + "github.com/ignite/cli/v28/ignite/pkg/cosmosclient" + + // Importing the types package of your blog blockchain + "blog/x/blog/types" +) + +func main() { + ctx := context.Background() + addressPrefix := "cosmos" + + // Create a Cosmos client instance + client, err := cosmosclient.New(ctx, cosmosclient.WithAddressPrefix(addressPrefix)) + if err != nil { + log.Fatal(err) + } + + // Account `alice` was initialized during `ignite chain serve` + accountName := "alice" + + // Get account from the keyring + account, err := client.Account(accountName) + if err != nil { + log.Fatal(err) + } + + addr, err := account.Address(addressPrefix) + if err != nil { + log.Fatal(err) + } + + // Define a message to create a post + msg := &types.MsgCreatePost{ + Creator: addr, + Title: "Hello!", + Body: "This is the first post", 
+ } + + // Broadcast a transaction from account `alice` with the message + // to create a post store response in txResp + txResp, err := client.BroadcastTx(ctx, account, msg) + if err != nil { + log.Fatal(err) + } + + // Print response from broadcasting a transaction + fmt.Print("MsgCreatePost:\n\n") + fmt.Println(txResp) + + // Instantiate a query client for your `blog` blockchain + queryClient := types.NewQueryClient(client.Context()) + + // Query the blockchain using the client's `PostAll` method + // to get all posts store all posts in queryResp + queryResp, err := queryClient.PostAll(ctx, &types.QueryAllPostRequest{}) + if err != nil { + log.Fatal(err) + } + + // Print response from querying all the posts + fmt.Print("\n\nAll posts:\n\n") + fmt.Println(queryResp) +} +``` + +The code above creates a standalone Go program that acts as a client to the +`blog` blockchain. It begins by importing the required packages, including the +general purpose Cosmos blockchain client and the `types` package of the `blog` +blockchain. + +In the `main` function, the code creates a Cosmos client instance and sets the +address prefix to "cosmos". It then retrieves an account named `"alice"` from +the keyring and gets the address of the account using the address prefix. + +Next, the code defines a message to create a blog post with the title "Hello!" +and body "This is the first post". It then broadcasts a transaction from the +account "alice" with the message to create the post, and stores the response in +the variable `txResp`. + +The code then instantiates a query client for the blog blockchain and uses it to +query the blockchain to retrieve all the posts. It stores the response in the +variable `queryResp` and prints it to the console. + +Finally, the code prints the response from broadcasting the transaction to the +console. This allows the user to see the results of creating and querying a blog +post on the `blog` blockchain using the client. 
+ +To find out more about the `cosmosclient` package, you can refer to the Go +package documentation for +[`cosmosclient`](https://pkg.go.dev/github.com/ignite/cli/ignite/pkg/cosmosclient). +This documentation provides information on how to use the `Client` type with +`Options` and `KeyringBackend`. + +## Run the blockchain and the client + +Make sure your blog blockchain is still running with `ignite chain serve`. + +Run the blockchain client: + +```bash +go run main.go +``` + +If the command is successful, the results of running the command will be printed +to the terminal. The output may include some warnings, which can be ignored. + +```yml +MsgCreatePost: + +code: 0 +codespace: "" +data: 12220A202F626C6F672E626C6F672E4D7367437265617465506F7374526573706F6E7365 +events: +- attributes: + - index: true + key: ZmVl + value: null + - index: true + key: ZmVlX3BheWVy + value: Y29zbW9zMWR6ZW13NzZ3enQ3cDBnajd3MzQyN2E0eHg3MjRkejAzd3hnOGhk + type: tx +- attributes: + - index: true + key: YWNjX3NlcQ== + value: Y29zbW9zMWR6ZW13NzZ3enQ3cDBnajd3MzQyN2E0eHg3MjRkejAzd3hnOGhkLzE= + type: tx +- attributes: + - index: true + key: c2lnbmF0dXJl + value: UWZncUJCUFQvaWxWVzJwNUJNTngzcDlvRzVpSXp0elhXdE9yMHcwVE00OEtlSkRqR0FEdU9VNjJiY1ZRNVkxTHdEbXNuYUlsTmc3VE9uMnJ2ZWRHSlE9PQ== + type: tx +- attributes: + - index: true + key: YWN0aW9u + value: L2Jsb2cuYmxvZy5Nc2dDcmVhdGVQb3N0 + type: message +gas_used: "52085" +gas_wanted: "300000" +height: "20" +info: "" +logs: +- events: + - attributes: + - key: action + value: /blog.blog.MsgCreatePost + type: message + log: "" + msg_index: 0 +raw_log: '[{"msg_index":0,"events":[{"type":"message","attributes":[{"key":"action","value":"/blog.blog.MsgCreatePost"}]}]}]' +timestamp: "" +tx: null +txhash: 4F53B75C18254F96EF159821DDD665E965DBB576A5AC2B94CE863EB62E33156A + +All posts: + +Post:<title:"Hello!" 
body:"This is the first post" creator:"cosmos1dzemw76wzt7p0gj7w3427a4xx724dz03wxg8hd" > pagination:<total:1 > +``` + +As you can see the client has successfully broadcasted a transaction and queried +the chain for blog posts. + +Please note, that some values in the output on your terminal (like transaction +hash and block height) might be different from the output above. + +You can confirm the new post with using the `blogd q blog list-post` command: + +```yaml +Post: +- body: This is the first post + creator: cosmos1dzemw76wzt7p0gj7w3427a4xx724dz03wxg8hd + id: "0" + title: Hello! +pagination: + next_key: null + total: "0" +``` + +Great job! You have successfully completed the process of creating a Go client +for your Cosmos SDK blockchain, submitting a transaction, and querying the +chain. \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/03-clients/02-typescript.md b/docs/versioned_docs/version-v28/03-clients/02-typescript.md new file mode 100644 index 0000000..9d41a23 --- /dev/null +++ b/docs/versioned_docs/version-v28/03-clients/02-typescript.md @@ -0,0 +1,430 @@ +--- +description: Information about the generated TypeScript client code. +--- + +# TypeScript frontend + +Ignite offers powerful functionality for generating client-side code for your +blockchain. Think of this as a one-click client SDK generation tailored +specifically for your blockchain. + +See `ignite generate ts-client --help` learn more on how to use TypeScript code generation. + +## Starting a node + +Create a new blockchain with `ignite scaffold chain`. You can use an existing +blockchain project if you have one, instead. 
+ +``` +ignite scaffold chain example +``` + +For testing purposes add a new account to `config.yml` with a mnemonic: + +```yml title="config.yml" +accounts: + - name: frank + coins: ["1000token", "100000000stake"] + mnemonic: play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint +``` + +Run a command to generate TypeScript clients for both standard and custom Cosmos +SDK modules: + +``` +ignite generate ts-client --clear-cache +``` + +Run a command to start your blockchain node: + +``` +ignite chain serve -r +``` + +## Setting up a TypeScript frontend client + +The best way to get started building with the TypeScript client is by using +[Vite](https://vitejs.dev). Vite provides boilerplate code for +vanilla TS projects as well as React, Vue, Lit, Svelte and Preact frameworks. +You can find additional information at the [Vite Getting Started +guide](https://vitejs.dev/guide). + +You will also need to [polyfill](https://developer.mozilla.org/en-US/docs/Glossary/Polyfill) the client's dependencies. The following is an +example of setting up a vanilla TS project with the necessary polyfills: + +```bash +npm create vite@latest my-frontend-app -- --template vanilla-ts +cd my-frontend-app +npm install --save-dev @esbuild-plugins/node-globals-polyfill @rollup/plugin-node-resolve +``` + +You must then create the necessary `vite.config.ts` file. 
+ +```typescript title="my-frontend-app/vite.config.ts" +import { nodeResolve } from "@rollup/plugin-node-resolve"; +import { NodeGlobalsPolyfillPlugin } from "@esbuild-plugins/node-globals-polyfill"; +import { defineConfig } from "vite"; + +export default defineConfig({ + plugins: [nodeResolve()], + + optimizeDeps: { + esbuildOptions: { + define: { + global: "globalThis", + }, + plugins: [ + NodeGlobalsPolyfillPlugin({ + buffer: true, + }), + ], + }, + }, +}); +``` + +You are then ready to use the generated client code inside this project directly +or by publishing the client and installing it like any other `npm` package. + +After the chain starts, you will see Frank's address is +`cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7`. We'll be using Frank's account +for querying data and broadcasting transactions in the next section. + +## Querying + +The code generated in `ts-client` comes with a `package.json` file ready to +publish which you can modify to suit your needs. To use`ts-client` install the +required dependencies: + +``` +cd ts-client +npm install +``` + +The client is based on a modular architecture where you can configure a client +class to support the modules you need and instantiate it. + +By default, the generated client exports a client class that includes all the +Cosmos SDK, custom and 3rd party modules in use in your project. + +To instantiate the client you need to provide environment information (endpoints +and chain prefix). For querying that's all you need: + +```typescript title="my-frontend-app/src/main.ts" +import { Client } from "../../ts-client"; + +const client = new Client( + { + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos", + } +); +``` + +The example above uses `ts-client` from a local directory. If you have published +your `ts-client` on `npm` replace `../../ts-client` with a package name. 
+
+The resulting client instance contains namespaces for each module, each with a
+`query` and `tx` namespace containing the module's relevant querying and
+transacting methods with full type and auto-completion support.
+
+To query for a balance of an address:
+
+```typescript
+const balances = await client.CosmosBankV1Beta1.query.queryAllBalances(
+  'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7'
+);
+```
+
+## Broadcasting a transaction
+
+Add signing capabilities to the client by creating a wallet from a mnemonic
+(we're using Frank's mnemonic added to `config.yml` earlier) and passing it
+as an optional argument to `Client()`. The wallet implements the CosmJS
+`OfflineSigner` interface.
+
+```typescript title="my-frontend-app/src/main.ts"
+import { Client } from "../../ts-client";
+// highlight-start
+import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing";
+
+const mnemonic =
+  "play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint";
+const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic);
+// highlight-end
+
+const client = new Client(
+  {
+    apiURL: "http://localhost:1317",
+    rpcURL: "http://localhost:26657",
+    prefix: "cosmos",
+  },
+  // highlight-next-line
+  wallet
+);
+```
+
+Broadcasting a transaction:
+
+```typescript title="my-frontend-app/src/main.ts"
+const tx_result = await client.CosmosBankV1Beta1.tx.sendMsgSend({
+  value: {
+    amount: [
+      {
+        amount: '200',
+        denom: 'token',
+      },
+    ],
+    fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7',
+    toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc',
+  },
+  fee: {
+    amount: [{ amount: '0', denom: 'stake' }],
+    gas: '200000',
+  },
+  memo: '',
+})
+```
+
+## Broadcasting a transaction with a custom message
+
+If your chain already has custom messages defined, you can use those. If not,
+we'll be using Ignite's scaffolded code as an example.
Create a post with CRUD +messages: + +``` +ignite scaffold list post title body +``` + +After adding messages to your chain you may need to re-generate the TypeScript +client: + +``` +ignite generate ts-client --clear-cache +``` + +Broadcast a transaction containing the custom `MsgCreatePost`: + +```typescript title="my-frontend-app/src/main.ts" +import { Client } from "../../ts-client"; +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; + +const mnemonic = + "play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint"; +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic); + +const client = new Client( + { + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos", + }, + wallet +); +// highlight-start +const tx_result = await client.ExampleExample.tx.sendMsgCreatePost({ + value: { + title: 'foo', + body: 'bar', + creator: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + }, + fee: { + amount: [{ amount: '0', denom: 'stake' }], + gas: '200000', + }, + memo: '', +}) +// highlight-end +``` + +## Lightweight client + +If you prefer, you can construct a lighter client using only the modules you are +interested in by importing the generic client class and expanding it with the +modules you need: + +```typescript title="my-frontend-app/src/main.ts" +// highlight-start +import { IgniteClient } from '../../ts-client/client' +import { Module as CosmosBankV1Beta1 } from '../../ts-client/cosmos.bank.v1beta1' +import { Module as CosmosStakingV1Beta1 } from '../../ts-client/cosmos.staking.v1beta1' +// highlight-end +import { DirectSecp256k1HdWallet } from '@cosmjs/proto-signing' + +const mnemonic = + 'play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint' +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic) +// 
highlight-next-line +const Client = IgniteClient.plugin([CosmosBankV1Beta1, CosmosStakingV1Beta1]) + +const client = new Client( + { + apiURL: 'http://localhost:1317', + rpcURL: 'http://localhost:26657', + prefix: 'cosmos', + }, + wallet, +) +``` + +## Broadcasting a multi-message transaction + +You can also construct TX messages separately and send them in a single TX using +a global signing client like so: + +```typescript title="my-frontend-app/src/main.ts" +const msg1 = await client.CosmosBankV1Beta1.tx.msgSend({ + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc', + }, +}) + +const msg2 = await client.CosmosBankV1Beta1.tx.msgSend({ + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc', + }, +}) + +const tx_result = await client.signAndBroadcast( + [msg1, msg2], + { + amount: [{ amount: '0', denom: 'stake' }], + gas: '200000', + }, + '', +) +``` + +Finally, for additional ease-of-use, apart from the modular client mentioned +above, each generated module is usable on its own in a stripped-down way by +exposing a separate txClient and queryClient. 
+ +```typescript title="my-frontend-app/src/main.ts" +import { txClient } from '../../ts-client/cosmos.bank.v1beta1' +import { DirectSecp256k1HdWallet } from '@cosmjs/proto-signing' + +const mnemonic = + 'play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint' +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic) + +const client = txClient({ + signer: wallet, + prefix: 'cosmos', + addr: 'http://localhost:26657', +}) + +const tx_result = await client.sendMsgSend({ + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc', + }, + fee: { + amount: [{ amount: '0', denom: 'stake' }], + gas: '200000', + }, + memo: '', +}) +``` + +## Usage with Keplr + +Normally, Keplr provides a wallet object implementing the `OfflineSigner` +interface, so you can simply replace the `wallet` argument in client +instantiation with `window.keplr.getOfflineSigner(chainId)`. However, Keplr +requires information about your chain, like chain ID, denoms, fees, etc. +[`experimentalSuggestChain()`](https://docs.keplr.app/api/guide/suggest-chain) is +a method Keplr provides to pass this information to the Keplr extension. + +The generated client makes this easier by offering a `useKeplr()` method that +automatically discovers the chain information and sets it up for you. 
Thus, you +can instantiate the client without a wallet and then call `useKeplr()` to enable +transacting via Keplr like so: + +```typescript title="my-frontend-app/src/main.ts" +import { Client } from '../../ts-client'; + +const client = new Client({ + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos" + } +); +await client.useKeplr(); +``` + +`useKeplr()` optionally accepts an object argument that contains one or more of +the same keys as the `ChainInfo` type argument of `experimentalSuggestChain()` +allowing you to override the auto-discovered values. + +For example, the default chain name and token precision (which are not recorded +on-chain) are set to `<chainId> Network` and `0` while the ticker for the denom +is set to the denom name in uppercase. If you want to override these, you can do +something like: + +```typescript title="my-frontend-app/src/main.ts" +import { Client } from '../../ts-client'; + +const client = new Client({ + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos" + } +); +await client.useKeplr({ + chainName: 'My Great Chain', + stakeCurrency: { + coinDenom: 'TOKEN', + coinMinimalDenom: 'utoken', + coinDecimals: '6', + }, +}) +``` + +## Wallet switching + +The client also allows you to switch out the wallet for a different one on an +already instantiated client like so: + +```typescript +import { Client } from '../../ts-client'; +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; + +const mnemonic = + 'play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint' +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic); + +const client = new Client({ + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos" + } +); +await client.useKeplr(); + +// broadcast transactions using the Keplr wallet + +client.useSigner(wallet); + 
+// broadcast transactions using the CosmJS wallet +``` diff --git a/docs/versioned_docs/version-v28/03-clients/03-vue.md b/docs/versioned_docs/version-v28/03-clients/03-vue.md new file mode 100644 index 0000000..4518a9b --- /dev/null +++ b/docs/versioned_docs/version-v28/03-clients/03-vue.md @@ -0,0 +1,176 @@ +# Vue frontend + +Welcome to this tutorial on using Ignite to develop a web application for your +blockchain with Vue 3. Ignite is a tool that simplifies the process of building +a blockchain application by providing a set of templates and generators that can +be used to get up and running quickly. + +One of the features of Ignite is its support for [Vue 3](https://vuejs.org/), a +popular JavaScript framework for building user interfaces. In this tutorial, you +will learn how to use Ignite to create a new blockchain and scaffold a Vue +frontend template. This will give you a basic foundation for your web +application and make it easier to get started building out the rest of your +application. + +Once you have your blockchain and Vue template set up, the next step is to +generate an API client. This will allow you to easily interact with your +blockchain from your web application, enabling you to retrieve data and make +transactions. By the end of this tutorial, you will have a fully functional web +application that is connected to your own blockchain. + +Prerequisites: + +* [Node.js](https://nodejs.org/en/) +* [Keplr](https://www.keplr.app/) Chrome extension + +## Create a blockchain and a Vue app + +Create a new blockchain project: + +``` +ignite scaffold chain example +``` + +To create a Vue frontend template, go to the `example` directory and run the +following command: + +``` +ignite scaffold vue +``` + +This will create a new Vue project in the `vue` directory. This project can be +used with any blockchain, but it depends on an API client to interact with the +blockchain. 
To generate an API client, run the following command in the +`example` directory: + +``` +ignite generate composables +``` + +This command generates two directories: + +* `ts-client`: a framework-agnostic TypeScript client that can be used to + interact with your blockchain. You can learn more about how to use this client + in the [TypeScript client tutorial](/clients/typescript). +* `vue/src/composables`: a collection of Vue 3 + [composables](https://vuejs.org/guide/reusability/composables.html) that wrap + the TypeScript client and make it easier to interact with your blockchain from + your Vue application. + +## Set up Keplr and an account + +Open your browser with the Keplr wallet extension installed. Follow [the +instructions](https://keplr.crunch.help/en/getting-started/creating-a-new-keplr-account) +to create a new account or use an existing one. Make sure to save the mnemonic +phrase as you will need it in the next step. + +Do not use a mnemonic phrase that is associated with an account that holds +assets you care about. If you do, you risk losing those assets. It's a good +practice to create a new account for development purposes. + +Add the account you're using in Keplr to your blockchain's `config.yml` file: + +```yml +accounts: + - name: alice + coins: [20000token, 200000000stake] + - name: bob + coins: [10000token, 100000000stake] + # highlight-start + - name: frank + coins: [10000token, 100000000stake] + mnemonic: struggle since inmate safe logic kite tag web win stay security wonder + # highlight-end +``` + +Replace the `struggle since...` mnemonic with the one you saved in the previous +step. + +Adding an account with a mnemonic to the config file will tell Ignite CLI to add +the account to the blockchain when you start it. This is useful for development +purposes, but you should not do this in production. 
+ +## Start a blockchain and a Vue app + +In the `example` directory run the following command to start your blockchain: + +```bash +ignite chain serve +``` + +To start your Vue application, go to the `vue` directory and run the following +command in a separate terminal window: + +:::note +Make sure you have [pnpm](https://pnpm.io/) installed. +::: + +```bash +pnpm install && pnpm dev +``` + +It is recommended to run `pnpm install` before starting your app with `pnpm dev` to ensure that all dependencies are installed (including the ones that the API client has, see `vue/postinstall.js`). + +Open your browser and navigate to +[http://localhost:5173/](http://localhost:5173/). + +![Web app](/img/web-1.png) + +Press "Connect wallet", enter your password into Keplr and press "Approve" to +add your blockchain to Keplr. + +<img src="/img/web-4.png" width="300"/> + +Make sure to select the account you're using for development purposes and the +"Example Network" in Keplr's blockchain dropdown. You should see a list of +assets in your Vue app. + +![Web app](/img/web-5.png) + +Congratulations! You have successfully created a client-side Vue application and +connected it to your blockchain. You can modify the source code of your Vue +application to build out the rest of your project. + +## Setting the address prefix + +It is necessary to set the correct address prefix in order for the Vue app to +properly interact with a Cosmos chain. The address prefix is used to identify +the chain that the app is connected to, and must match the prefix used by the +chain. + +By default, Ignite creates a chain with the `cosmos` prefix. If you have +created your chain with `ignite scaffold chain ... --address-prefix foo` or +manually changed the prefix in the source code of the chain, you need to set the +prefix in the Vue app. + +There are two ways to set the address prefix in a Vue app. 
+ +### Using an environment variable + +You can set the `VITE_ADDRESS_PREFIX` environment variable to the correct +address prefix for your chain. This will override the default prefix used by the +app. + +To set the `VITE_ADDRESS_PREFIX` environment variable, you can use the following +command: + +```bash +export VITE_ADDRESS_PREFIX=your-prefix +``` + +Replace `your-prefix` with the actual address prefix for your chain. + +### Setting address prefix in the code + +Alternatively, you can manually set the correct address prefix by replacing the +fallback value of the `prefix` variable in the file `./vue/src/env.ts`. + +To do this, open the file `./vue/src/env.ts` and find the following line: + +```ts title="./vue/src/env.ts" +const prefix = process.env.VITE_ADDRESS_PREFIX || 'your-prefix'; +``` + +Replace `your-prefix` with the actual address prefix for your chain. + +Save the file and restart the Vue app to apply the changes. diff --git a/docs/versioned_docs/version-v28/03-clients/04-react.md b/docs/versioned_docs/version-v28/03-clients/04-react.md new file mode 100644 index 0000000..9d688d9 --- /dev/null +++ b/docs/versioned_docs/version-v28/03-clients/04-react.md @@ -0,0 +1,130 @@ +# React frontend + +Welcome to this tutorial on using Ignite to develop a web application for your +blockchain with React. Ignite is a tool that simplifies the process of building +a blockchain application by providing a set of templates and generators that can +be used to get up and running quickly. + +One of the features of Ignite is its support for [React](https://reactjs.org/), a +popular JavaScript framework for building user interfaces. In this tutorial, you +will learn how to use Ignite to create a new blockchain and scaffold a React +frontend template. This will give you a basic foundation for your web +application and make it easier to get started building out the rest of your +application. 
+ +Once you have your blockchain and React template set up, the next step is to +generate an API client. This will allow you to easily interact with your +blockchain from your web application, enabling you to retrieve data and make +transactions. By the end of this tutorial, you will have a fully functional web +application that is connected to your own blockchain. + +Prerequisites: + +* [Node.js](https://nodejs.org/en/) +* [Keplr](https://www.keplr.app/) Chrome extension + +## Create a blockchain and a React app + +Create a new blockchain project: + +``` +ignite scaffold chain example +``` + +To create a React frontend template, go to the `example` directory and run the +following command: + +``` +ignite scaffold react +``` + +This will create a new React project in the `react` directory. This project can be +used with any blockchain, but it depends on an API client to interact with the +blockchain. To generate an API client, run the following command in the +`example` directory: + +``` +ignite generate hooks +``` + +This command generates two directories: + +* `ts-client`: a framework-agnostic TypeScript client that can be used to + interact with your blockchain. You can learn more about how to use this client + in the [TypeScript client tutorial](/clients/typescript). +* `react/src/hooks`: a collection of + [React Hooks](https://reactjs.org/docs/hooks-intro.html) that wrap + the TypeScript client and make it easier to interact with your blockchain from + your React application. + +## Set up Keplr and an account + +Open your browser with the Keplr wallet extension installed. Follow [the +instructions](https://keplr.crunch.help/en/getting-started/creating-a-new-keplr-account) +to create a new account or use an existing one. Make sure to save the mnemonic +phrase as you will need it in the next step. + +Do not use a mnemonic phrase that is associated with an account that holds +assets you care about. If you do, you risk losing those assets. 
It's a good +practice to create a new account for development purposes. + +Add the account you're using in Keplr to your blockchain's `config.yml` file: + +```yml +accounts: + - name: alice + coins: [20000token, 200000000stake] + - name: bob + coins: [10000token, 100000000stake] + # highlight-start + - name: frank + coins: [10000token, 100000000stake] + mnemonic: struggle since inmate safe logic kite tag web win stay security wonder + # highlight-end +``` + +Replace the `struggle since...` mnemonic with the one you saved in the previous +step. + +Adding an account with a mnemonic to the config file will tell Ignite CLI to add +the account to the blockchain when you start it. This is useful for development +purposes, but you should not do this in production. + +## Start a blockchain and a React app + +In the `example` directory run the following command to start your blockchain: + +``` +ignite chain serve +``` + +To start your React application, go to the `react` directory and run the following +command in a separate terminal window: + +``` +npm install && npm run dev +``` + +It is recommended to run `npm install` before starting your app with `npm run +dev` to ensure that all dependencies are installed (including the ones that the +API client has, see `react/postinstall.js`). + +Open your browser and navigate to +[http://localhost:5173/](http://localhost:5173/). + +![Web app](/img/web-1.png) + +Press "Connect wallet", enter your password into Keplr and press "Approve" to +add your blockchain to Keplr. + +<img src="/img/web-4.png" width="300"/> + +Make sure to select the account you're using for development purposes and the +"Example Network" in Keplr's blockchain dropdown. You should see a list of +assets in your React app. + +![Web app](/img/web-5.png) + +Congratulations! You have successfully created a client-side React application and +connected it to your blockchain. 
You can modify the source code of your React +application to build out the rest of your project. \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/03-clients/_category_.json b/docs/versioned_docs/version-v28/03-clients/_category_.json new file mode 100644 index 0000000..036cfbe --- /dev/null +++ b/docs/versioned_docs/version-v28/03-clients/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Develop a client app", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/04-network/01-chain.md b/docs/versioned_docs/version-v28/04-network/01-chain.md new file mode 100644 index 0000000..9cec1e9 --- /dev/null +++ b/docs/versioned_docs/version-v28/04-network/01-chain.md @@ -0,0 +1,237 @@ +--- +sidebar_position: 1 +description: Ignite Chain. +--- + +# Ignite Chain + +## Introduction + +_Ignite is a blockchain to help launch Cosmos SDK-based blockchains._ + +Using Cosmos SDK and Ignite CLI, developers can quickly create a crypto application that is decentralized, economical for usage, and scalable. The Cosmos SDK framework allows developers to create sovereign application-specific blockchains that become part of the wider [Cosmos ecosystem](https://cosmos.network/ecosystem/apps). Blockchains created with Cosmos SDK use a Proof-of-Stake (PoS) consensus protocol that requires validators to secure the chain. + +Even though tools like Ignite CLI simplify the development of a Cosmos SDK blockchain, launching a new chain is a highly complex process. One of the major challenges of developing and launching your own sovereign blockchain is ensuring the security of the underlying consensus. Since Cosmos SDK chains are based on the PoS consensus, each blockchain requires initial coin allocations and validators before they can be launched, which presents developers with significant challenges, such as determining their chain's tokenomics or coordinating a robust validator set. 
+ +The initial coin allocations and validators are described in a JSON-formatted genesis file that is shared among all initial nodes in the network. This genesis file defines the initial state of the application. Based on PoS, secure chains require the initial allocation of coins to be well distributed so that no single validator holds more than 1/3 of all tokens and receives a disproportionate amount of voting power. + +Along with ensuring the security of the underlying consensus, another highly difficult task in launching a new blockchain is attracting a diverse set of validators for the genesis file. Many promising projects fail to capture the attention of a sufficient number of trustworthy validators to secure their chains due to a lack of resources or experience. + +The Ignite Chain has, therefore, been conceived to facilitate the launch of Cosmos SDK blockchains by helping developers to navigate the complexities of launching a blockchain and coordinate the genesis of a new chain. Using the decentralized nature of blockchain, Ignite's coordination features help blockchain builders connect with validators and investors, speeding up the time to market of their projects and chances of success. + +Commands to interact with Ignite Chain are integrated into Ignite CLI and allow launching chains from it. Integration with Ignite Chain allows the CLI to support the developer in the entire lifecycle of realizing a Cosmos project, from the development and experimentation of the blockchain to the launch of its mainnet. + +## What is Ignite Chain + +Ignite Chain is a secure platform that simplifies the launch of Cosmos SDK-based chains, lending vital resources and support at the coordination, preparation, and launch stages. Ignite provides the tools that blockchain projects need to overcome the complexities of launching their chain, from validator coordination and token issuance to fundraising and community building. 
+ +Ignite facilitates the launch of new chains with an overall launch process during three phases: + +- Coordination +- Preparation +- Launch + +To reduce friction at each phase, Ignite provides an immutable and universal database for validator coordination. + +In the future, Ignite will also offer: + +- Token issuance: Ignite allows the issuance of tokens (called vouchers) that represent a share + allocation of a future mainnet network +- A fundraising platform for selling vouchers +- A permissionless framework to reward validator activities on a launched testnet network + +## Validator coordination + +To launch a chain in the Cosmos ecosystem, the validators must start nodes that connect to each other to create the new blockchain network. A node must be started from a file called the genesis file. The genesis file must be identical on all validator nodes before the new chain can be started. + +![genesis](./assets/genesis.png) + +The JSON-formatted genesis file contains information on the initial state of the chain, including coin allocations, the list of validators, various parameters for the chain like the maximum number of validators actively signing blocks, and the specific launch time. Because each validator has the same genesis file, the blockchain network starts automatically when the genesis time is reached. + +![launch](./assets/launch.png) + +### Ignite as a coordination source of truth + +Ignite Chain acts as a source of truth for new chains to coordinate a validator set and for validators to generate the genesis for a chain launch. The blockchain doesn’t directly store the final genesis file in its own ledger but rather stores information that allows generating the genesis file in a deterministic manner. + +The information stored on Ignite that supports deterministic generation of the genesis file for a specific chain launch is referred to as the _launch information_. 
When creating a new chain on Ignite, the coordinator provides the initial launch information. Then, through on-chain coordination, this launch information is updated by interacting with the blockchain by sending messages. When the chain is ready to be launched, the genesis file is generated by calling a genesis generation algorithm that uses the launch information. + +**GenesisGenerate(LaunchInformation) => genesis.json** + +The genesis generation algorithm is officially and formally specified. The official implementation of the genesis generation algorithm is developed in Go using Ignite CLI. However, any project is free to develop its own implementation of the algorithm as long as it complies with the specification of the algorithm. + +The genesis generation algorithm is not part of the on-chain protocol. In order to successfully launch a new chain, all validators must use the algorithm to generate their genesis using the launch information. The algorithm deterministically generates the genesis from the launch information that is stored on the Ignite chain. + +If any element of the launch information is censored, for example, removing an account balance, the launched chain reputation is negatively impacted and implies that the majority of validators agree on not using: + +- The tamper-proof launch information +- The official genesis generation algorithm + +Outside of the genesis generation, the genesis generation algorithm specification gives guidance on how to set up your network configuration. For example, the launch information can contain the addresses of the persistent peers of the blockchain network. + +![generation](./assets/generation.png) + +## Launch information + +Launch information can be created or updated in three different ways: + +1. Defined during chain creation but updatable by the coordinator after creation +2. Determined through coordination +3. 
Determined through specific on-chain logic not related to coordination + +### 1 - Launch information determined during chain creation: + +- `GenesisChainID`: The identifier for the network +- `SourceURL`: The URL of the git repository of the source code for building the blockchain + node binary +- `SourceHash`: The specific hash that identifies the release of the source code +- `InitialGenesis`: A multiformat structure that specifies the initial genesis for the chain + launch before running the genesis generation algorithm + +### 2 - Launch information determined through coordination: + +- `GenesisAccounts`: A list of genesis accounts for the chain, comprised of addresses with associated balances +- `VestingAccounts`: A list of genesis accounts with vesting options +- `GenesisValidators`: A list of the initial validators at chain launch +- `ParamChanges`: A list of module param changes in the genesis state + +### 3 - Launch information determined through on-chain logic: + +- `GenesisTime`: The timestamp for the network start, also referred to as LaunchTime + +### Initial genesis + +The launch information contains the initial genesis structure. This structure provides the information for generating the initial genesis before running the genesis generation algorithm and finalizing the genesis file. 
+ +The initial genesis structure can be: + +- `DefaultGenesis`: the default genesis file is generated by the chain binary init command +- `GenesisURL`: the initial genesis for a chain launch is an existing genesis file that is + fetched from a URL and then modified with the required algorithm - this initial genesis type should be used when the initial genesis state is extensive, + containing a lot of accounts for token distribution, containing records for an + airdrop +- `GenesisConfig`: the initial genesis for a chain launch is generated from an Ignite CLI + config that contains genesis accounts and module parameters - this initial genesis type should be used when the coordinator doesn’t have extensive state for the initial genesis but some module parameters must be customized. For example, the staking bond denom for the staking token + +## Coordination process + +The coordination process starts immediately after the chain is created and ends when the coordinator triggers the launch of the chain. + +The launch information is updated during the coordination process. + +During the coordination process, any entity can send requests to the network. A request is an object whose content specifies updates to the launch information. + +The chain coordinator approves or rejects the requests: + +- If a request is approved, the content is applied to the launch information +- If the request is rejected, no change is made to the launch information + +The request creator can also directly reject or cancel the request. + +Each chain contains a request pool that contains all requests. Each request has a status: + +- _PENDING_: Waiting for the approval of the coordinator +- _APPROVED_: Approved by the coordinator, its content has been applied to the launch + information +- _REJECTED_: Rejected by the coordinator or the request creator + +Approving or rejecting a request is irreversible. 
The only possible status transitions are: + +- _PENDING_ to _APPROVED_ +- _PENDING_ to _REJECTED_ + +To revert the effect on launch information from a request, a user must send the eventual opposite request (example: AddAccount → RemoveAccount). + +Since the coordinator is the sole approver for requests, each request created by the coordinator is immediately set to APPROVED and its content is applied to the launch information. + +![requests](./assets/requests.png) + +## Available requests + +Six types of requests can be sent to the Ignite chain: + +- `AddGenesisAccount` +- `AddVestingAccount` +- `AddGenesisValidator` +- `RemoveAccount` +- `RemoveValidator` +- `ChangeParam` + +**`AddGenesisAccount`** requests a new account for the chain genesis with a coin balance. This request content is composed of two fields: + +- Account address, must be unique in launch information +- Account balance + +The request automatically fails to be applied if a genesis account or a vesting account with an identical address is already specified in the launch information. + +**`AddVestingAccount`** requests a new account for the chain genesis with a coin balance and vesting options. This request content is composed of two fields: + +- Address of the account +- Vesting options of the account + +The currently supported vesting option is delayed vesting where the total balance of the account is specified and a number of tokens of the total balance of the account are vested only after an end time is reached. + +The request automatically fails to be applied if a genesis account or a vesting account with an identical address is already specified in the launch information. + +**`AddGenesisValidator`** requests a new genesis validator for the chain. A genesis validator in a Cosmos SDK blockchain represents an account with an existing balance in the genesis that self-delegates part of its balance during genesis initialization to become a bonded validator when the network starts. 
In most cases, the validator must first request an account with `AddGenesisAccount` before requesting to be a validator, unless they already have an account with a balance in the initial genesis of the chain. + +Self-delegation during genesis initialization is performed with a [Cosmos SDK module named genutils](https://pkg.go.dev/github.com/cosmos/cosmos-sdk/x/genutil). In the genesis, the _genutils_ module contains objects called gentx that represent transactions that were executed before the network launch. To be a validator when the network starts, a future validator must provide a gentx that contains the transaction for the self-delegation from their account. + +The request content is composed of five fields: + +- The gentx for the validator self-delegation +- The address of the validator +- The consensus public key of the validator node +- The self-delegation +- The peer information for the validator node + +The request automatically fails to be applied if a validator with the same address already exists in the launch information. + +**`RemoveAccount`** requests the removal of a genesis or vesting account from the launch information. The request content contains the address of the account to be removed. The request automatically fails to be applied if no genesis or vesting account with the specified address exists in the launch information. + +**`RemoveValidator`** requests the removal of a genesis validator from the launch information. The request content contains the address of the validator to be removed. The request automatically fails to be applied if no validator account with the specified address exists in the launch information. + +**`ChangeParam`** requests the modification of a module parameter in the genesis. Modules in a Cosmos SDK blockchain can have parameters that will configure the logic of the blockchain. The parameters can be changed through governance once the blockchain network is live. 
During the launch process, the initial parameters of the chain are set in the genesis. + +This request content is composed of three fields: + +- The name of the module +- The name of the parameter +- The value of the parameter represented as generic data + +### Request validity + +Some checks are verified on-chain when applying a request. For example, a genesis account can’t be added twice. However, some other validity properties can’t be checked on-chain. For example, because a gentx is represented through a generic byte array in the blockchain, an on-chain check is not possible to verify that the gentx is correctly signed or that the provided consensus public key that is stored on-chain corresponds to the consensus public key in the gentx. This gentx verification is the responsibility of the client interacting with the blockchain to ensure the requests have a valid format and allow for the start of the chain. Some validity checks are specified in the genesis generation algorithm. + +## Launch process + +The overall launch process of a chain through Ignite is composed of three phases: + +- Coordination phase +- Preparation phase +- Launch phase + +After the coordinator creates the chain on Ignite and provides the initial launch information, the launch process enters the coordination phase where users can send requests for the chain genesis. After the coordinator deems the chain as ready to be launched, they trigger the launch of the chain. During this operation, the coordinator provides the launch time, or genesis, time for the chain. + +Once the launch is triggered and before the launch time is reached, the chain launch process enters the preparation phase. During the preparation phase, requests can no longer be sent and the launch information of the chain is finalized. The validators run the genesis generation algorithm to get the final genesis of the chain and prepare their node. 
The remaining time must provide enough time for the validators to prepare their nodes. This launch time is set by the coordinator, although a specific range for the remaining time is imposed. + +Once the launch time is reached, the chain network is started and the chain launch process enters the launch phase. At this point, since the chain is live, no further action is required from the coordinator. However, under some circumstances, the chain might have failed to start. For example, a chain does not start if every validator in the genesis does not start their node. + +The coordinator has the ability to revert the chain launch. Reverting the chain launch sets the launch process back to the coordination phase where requests can be sent again to allow addressing the issue related to the launch failure. Reverting the launch has an effect only on Ignite. If the new chain is effectively launched, reverting the launch on Ignite has no effect on the chain liveness. Reverting the launch of the chain can be performed only by the coordinator after the launch time plus a delay called the revert delay. + +![process](./assets/process.png) + +## Genesis generation + +To ensure determinism, genesis generation rules must be rigorously specified depending on the launch information of the chain. 
+ +The general steps for the genesis generation are: + +- Building the blockchain node binary from source +- Generating the initial genesis +- Setting the chain ID +- Setting the genesis time +- Adding genesis accounts +- Adding genesis accounts with vesting options +- Adding gentxs for genesis validators +- Changing module params from param changes diff --git a/docs/versioned_docs/version-v28/04-network/02-introduction.md b/docs/versioned_docs/version-v28/04-network/02-introduction.md new file mode 100644 index 0000000..dc711fe --- /dev/null +++ b/docs/versioned_docs/version-v28/04-network/02-introduction.md @@ -0,0 +1,75 @@ +--- +sidebar_position: 2 +description: Introduction to Ignite Network commands. +--- + +# Ignite Network commands + +The `ignite network` commands allow to coordinate the launch of sovereign Cosmos blockchains by interacting with the +Ignite Chain. + +To launch a Cosmos blockchain you need someone to be a coordinator and others to be validators. These are just roles, +anyone can be a coordinator or a validator. + +- A coordinator publishes information about a chain to be launched on the Ignite blockchain, approves validator requests + and coordinates the launch. +- Validators send requests to join a chain and start their nodes when a blockchain is ready for launch. + +## Launching a chain on Ignite + +Launching with the CLI can be as simple as a few short commands with the CLI using `ignite network` command +namespace. + +> **NOTE:** `ignite n` can also be used as a shortcut for `ignite network`. + +To publish the information about your chain as a coordinator, run the following command (the URL should point to a +repository with a Cosmos SDK chain): + +``` +ignite network chain publish github.com/ignite/example +``` + +This command will return the launch identifier you will be using in the following +commands. Let's say this identifier is 42. +Next, ask validators to initialize their nodes and request to join the network. 
+For a testnet you can use the default values suggested by the +CLI. + +``` +ignite network chain init 42 +ignite network chain join 42 --amount 95000000stake +``` + +As a coordinator, list all validator requests: + +``` +ignite network request list 42 +``` + +Approve validator requests: + +``` +ignite network request approve 42 1,2 +``` + +Once you've approved all validators you need in the validator set, announce that +the chain is ready for launch: + +``` +ignite network chain launch 42 +``` + +Validators can now prepare their nodes for launch: + +``` +ignite network chain prepare 42 +``` + +The output of this command will show a command that a validator would use to +launch their node, for example `exampled --home ~/.example`. After enough +validators launch their nodes, a blockchain will be live. + +--- + +The next two sections provide more information on the process of coordinating a chain launch from a coordinator and +participating in a chain launch as a validator. diff --git a/docs/versioned_docs/version-v28/04-network/03-coordinator.md b/docs/versioned_docs/version-v28/04-network/03-coordinator.md new file mode 100644 index 0000000..76d5997 --- /dev/null +++ b/docs/versioned_docs/version-v28/04-network/03-coordinator.md @@ -0,0 +1,146 @@ +--- +sidebar_position: 3 +description: Ignite Network commands for coordinators. +--- + +# Coordinator Guide + +Coordinators organize and launch new chains on Ignite Chain. + +--- + +## Publish a chain + +The first step in the process of a chain launch is for the coordinator to publish the intention of launching a chain. +The `publish` command publishes the intention of launching a chain on Ignite from a project git repository. 
+ +```shell +ignite n chain publish https://github.com/ignite/example +``` + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Chain's binary built +✔ Blockchain initialized +✔ Genesis initialized +✔ Network published +⋆ Launch ID: 3 +``` + +`LaunchID` identifies the published blockchain on Ignite blockchain. + +### Specify a initial genesis + +During coordination, new genesis accounts and genesis validators are added into the chain genesis. +The initial genesis where these accounts are added is by default the default genesis generated by the chain binary. + +The coordinator can specify a custom initial genesis for the chain launch with the `--genesis` flag. This custom initial +genesis can contain additional default genesis accounts and custom params for the chain modules. + +A URL must be provided for the `--genesis-url` flag. This can either directly point to a JSON genesis file or a tarball +containing a genesis file. + +```shell +ignite n chain publish https://github.com/ignite/example --genesis-url https://raw.githubusercontent.com/ignite/example/master/genesis/gen.json +``` + +## Approve validator requests + +When coordinating for a chain launch, validators send requests. These represent requests to be part of the genesis as a +validator for the chain. + +The coordinator can list these requests: + +``` +ignite n request list 3 +``` + +> **NOTE:** here "3" is specifying the `LaunchID`. 
+ +**Output** + +``` +Id Status Type Content +1 APPROVED Add Genesis Account spn1daefnhnupn85e8vv0yc5epmnkcr5epkqncn2le, 100000000stake +2 APPROVED Add Genesis Validator e3d3ca59d8214206839985712282967aaeddfb01@84.118.211.157:26656, spn1daefnhnupn85e8vv0yc5epmnkcr5epkqncn2le, 95000000stake +3 PENDING Add Genesis Account spn1daefnhnupn85e8vv0yc5epmnkcr5epkqncn2le, 95000000stake +4 PENDING Add Genesis Validator b10f3857133907a14dca5541a14df9e8e3389875@84.118.211.157:26656, spn1daefnhnupn85e8vv0yc5epmnkcr5epkqncn2le, 95000000stake +``` + +The coordinator can either approve or reject these requests. + +To approve the requests: + +``` +ignite n request approve 3 3,4 +``` + +> **NOTE:** when selecting a list of requests, both syntaxes can be used: `1,2,3,4` and `1-3,4`. + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Requests format verified +✔ Blockchain initialized +✔ Genesis initialized +✔ Genesis built +✔ The network can be started +✔ Request(s) #3, #4 verified +✔ Request(s) #3, #4 approved +``` + +Ignite CLI automatically verifies that the requests can be applied for the genesis, the approved requests don't generate +an invalid genesis. + +To reject the requests: + +``` +ignite n request reject 3 3,4 +``` + +**Output** + +``` +✔ Request(s) #3, #4 rejected +``` + +--- + +## Initiate the launch of a chain + +When enough validators are approved for the genesis and the coordinator deems the chain ready to be launched, the +coordinator can initiate the launch of the chain. + +This action will finalize the genesis of chain, meaning that no new requests can be approved for the chain. + +This action also sets the launch time (or genesis time) for the chain, the time when the blockchain network will go +live. + +``` +ignite n chain launch 3 +``` + +**Output** + +``` +✔ Chain 3 will be launched on 2022-10-01 09:00:00.000000 +0200 CEST +``` + +This example output shows the launch time of the chain on the network. 
+ +### Set a custom launch time + +By default, the launch time will be set to the earliest date possible. In practice, the validators should have time to +prepare their node for the network launch. If a validator fails to be online, they can get jailed for inactivity in the +validator set. + +The coordinator can specify a custom time with the `--launch-time` flag. + +``` +ignite n chain launch --launch-time 2022-01-01T00:00:00Z +``` diff --git a/docs/versioned_docs/version-v28/04-network/04-validator.md b/docs/versioned_docs/version-v28/04-network/04-validator.md new file mode 100644 index 0000000..d1e6b33 --- /dev/null +++ b/docs/versioned_docs/version-v28/04-network/04-validator.md @@ -0,0 +1,161 @@ +--- +sidebar_position: 4 +description: Ignite Network commands for validators. +--- + +# Validator Guide + +Validators join as genesis validators for chain launches on Ignite Chain. + +--- + +## List all published chains + +Validators can list and explore published chains to be launched on Ignite. + +``` +ignite n chain list +``` + +**Output** + +``` +Launch Id Chain Id Source Phase + +3 example-1 https://github.com/ignite/example coordinating +2 spn-10 https://github.com/tendermint/spn launched +1 example-20 https://github.com/tendermint/spn launching +``` + +- `Launch ID` is the unique identifier of the chain on Ignite. This is the ID used to interact with the chain launch. +- `Chain ID` represents the identifier of the chain network once it will be launched. It should be a unique identifier in + practice but doesn't need to be unique on Ignite. +- `Source` is the repository URL of the project. +- `Phase` is the current phase of the chain launch. 
A chain can have 3 different phases: + - `coordinating`: means the chain is open to receive requests from validators + - `launching`: means the chain no longer receives requests but it hasn't been launched yet + - `launched`: means the chain network has been launched + +--- + +## Request network participation + +When the chain is in the coordination phase, validators can request to be a genesis validator for the chain. +Ignite CLI supports an automatic workflow that can setup a node for the validator and a workflow for advanced users with +a specific setup for their node. + +### Simple Flow + +`ignite` can handle validator setup automatically. Initialize the node and generate a gentx file with default values: + +``` +ignite n chain init 3 +``` + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Blockchain initialized +✔ Genesis initialized +? Staking amount 95000000stake +? Commission rate 0.10 +? Commission max rate 0.20 +? Commission max change rate 0.01 +⋆ Gentx generated: /Users/lucas/spn/3/config/gentx/gentx.json +``` + +Now, create and broadcast a request to join a chain as a validator: + +``` +ignite n chain join 3 --amount 100000000stake +``` + +The join command accepts a `--amount` flag with a comma-separated list of tokens. If the flag is provided, the +command will broadcast a request to add the validator’s address as an account to the genesis with the specific amount. + +**Output** + +``` +? Peer's address 192.168.0.1:26656 +✔ Source code fetched +✔ Blockchain set up +✔ Account added to the network by the coordinator! +✔ Validator added to the network by the coordinator! +``` + +--- + +### Advanced Flow + +Using a more advanced setup (e.g. 
custom `gentx`), validators must provide an additional flag to their command +to point to the custom file: + +``` +ignite n chain join 3 --amount 100000000stake --gentx ~/chain/config/gentx/gentx.json +``` + +--- + +## Launch the network + +### Simple Flow + +Generate the final genesis and config of the node: + +``` +ignite n chain prepare 3 +``` + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Chain's binary built +✔ Genesis initialized +✔ Genesis built +✔ Chain is prepared for launch +``` + +Next, start the node: + +``` +exampled start --home ~/spn/3 +``` + +--- + +### Advanced Flow + +Fetch the final genesis for the chain: + +``` +ignite n chain show genesis 3 +``` + +**Output** + +``` +✔ Source code fetched +✔ Blockchain set up +✔ Blockchain initialized +✔ Genesis initialized +✔ Genesis built +⋆ Genesis generated: ./genesis.json +``` + +Next, fetch the persistent peer list: + +``` +ignite n chain show peers 3 +``` + +**Output** + +``` +⋆ Peer list generated: ./peers.txt +``` + +The fetched genesis file and peer list can be used for a manual node setup. diff --git a/docs/versioned_docs/version-v28/04-network/05-coordination.md b/docs/versioned_docs/version-v28/04-network/05-coordination.md new file mode 100644 index 0000000..eb26b71 --- /dev/null +++ b/docs/versioned_docs/version-v28/04-network/05-coordination.md @@ -0,0 +1,72 @@ +--- +sidebar_position: 5 +description: Other commands for coordination. +--- + +# Other commands for coordination + +Ignite CLI offers various other commands to coordinate chain launches that can be used by coordinators, validators, or other participants. + +The requests follow the same logic as the request for validator participation; they must be approved by the chain coordinator to be effective in the genesis. + +--- + +## Request a genesis account + +Any participant can request a genesis account with an associated balance for the chain. 
+The participant must provide an address with a comma-separated list of token balances. + +Any prefix can be used for the Bech32 address, it is automatically converted into `spn` on the Ignite Chain. + +``` +ignite n request add-account 3 spn1pe5h2gelhu8aukmrnj0clmec56aspxzuxcy99y 1000stake +``` + +**Output** + +``` +Source code fetched +Blockchain set up +⋆ Request 10 to add account to the network has been submitted! +``` +--- + +## Request to remove a genesis account + +Any participant can request to remove a genesis account from the chain genesis. +It might be the case if, for example, a user suggests an account balance that is so high it could harm the network. +The participant must provide the address of the account. + +Any prefix can be used for the Bech32 address, it is automatically converted into `spn` on the Ignite Chain. + +``` +ignite n request remove-account 3 spn1pe5h2gelhu8aukmrnj0clmec56aspxzuxcy99y +``` + +**Output** + +``` +Request 11 to remove account from the network has been submitted! +``` +--- + +## Request to remove a genesis validator + +Any participant can request to remove a genesis validator (gentx) from the chain genesis. +It might be the case if, for example, a chain failed to launch because of some validators, and they must be removed from genesis. +The participant must provide the address of the validator account (same format as genesis account). + +Any prefix can be used for the Bech32 address, it is automatically converted into `spn` on the Ignite Chain. + +The request removes only the gentx from the genesis but not the associated account balance. + +``` +ignite n request remove-validator 429 spn1pe5h2gelhu8aukmrnj0clmec56aspxzuxcy99y +``` + +**Output** + +``` +Request 12 to remove validator from the network has been submitted! 
+``` +--- \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/04-network/_category_.json b/docs/versioned_docs/version-v28/04-network/_category_.json new file mode 100644 index 0000000..c45c6eb --- /dev/null +++ b/docs/versioned_docs/version-v28/04-network/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Launch a chain", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/04-network/assets/generation.png b/docs/versioned_docs/version-v28/04-network/assets/generation.png new file mode 100644 index 0000000..c10cac2 Binary files /dev/null and b/docs/versioned_docs/version-v28/04-network/assets/generation.png differ diff --git a/docs/versioned_docs/version-v28/04-network/assets/genesis.png b/docs/versioned_docs/version-v28/04-network/assets/genesis.png new file mode 100644 index 0000000..b640db4 Binary files /dev/null and b/docs/versioned_docs/version-v28/04-network/assets/genesis.png differ diff --git a/docs/versioned_docs/version-v28/04-network/assets/launch.png b/docs/versioned_docs/version-v28/04-network/assets/launch.png new file mode 100644 index 0000000..4ea50b2 Binary files /dev/null and b/docs/versioned_docs/version-v28/04-network/assets/launch.png differ diff --git a/docs/versioned_docs/version-v28/04-network/assets/process.png b/docs/versioned_docs/version-v28/04-network/assets/process.png new file mode 100644 index 0000000..a5c21d2 Binary files /dev/null and b/docs/versioned_docs/version-v28/04-network/assets/process.png differ diff --git a/docs/versioned_docs/version-v28/04-network/assets/requests.png b/docs/versioned_docs/version-v28/04-network/assets/requests.png new file mode 100644 index 0000000..d097e68 Binary files /dev/null and b/docs/versioned_docs/version-v28/04-network/assets/requests.png differ diff --git a/docs/versioned_docs/version-v28/05-contributing/01-docs.md b/docs/versioned_docs/version-v28/05-contributing/01-docs.md new file mode 100644 index 0000000..4c05ed2 --- /dev/null +++ 
b/docs/versioned_docs/version-v28/05-contributing/01-docs.md @@ -0,0 +1,105 @@ +--- +sidebar_position: 1 +slug: /contributing +--- + +# Improving documentation + +Thank you for visiting our repository and considering making contributions. We +appreciate your interest in helping us to create and maintain awesome tutorials +and documentation. + +## Using this repo + +Review existing [Ignite CLI issues](https://github.com/ignite/cli/issues) to see +if your question has already been asked and answered. + +- To provide feedback, file an issue and provide generous details to help us + understand how we can make it better. +- To provide a fix, make a direct contribution. If you're not a member or + maintainer, fork the repo and then submit a pull request (PR) from your forked + repo to the `main` branch. +- Start by creating a draft pull request. Create your draft PR early, even if + your work is just beginning or incomplete. Your draft PR indicates to the + community that you're working on something and provides a space for + conversations early in the development process. Merging is blocked for `Draft` + PRs, so they provide a safe place to experiment and invite comments. + +## Reviewing technical content PRs + +Some of the best content contributions come during the PR review cycles. Follow +best practices for technical content PR reviews just like you do for code +reviews. + +- For in-line suggestions, use the [GitHub suggesting + feature](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/commenting-on-a-pull-request) + . +- The PR owner can merge in your suggested commits one at a time or in batch + (preferred). +- When you are providing a more granular extensive review that results in more + than 20 in-line suggestions, go ahead and check out the branch and make the + changes yourself. + +## Writing and contributing + +We welcome contributions to the docs and tutorials. 
+ +Our technical content follows the [Google developer documentation style +guide](https://developers.google.com/style). Highlights to help you get started: + +- [Highlights](https://developers.google.com/style/highlights) +- [Word list](https://developers.google.com/style/word-list) +- [Style and tone](https://developers.google.com/style/tone) +- [Writing for a global + audience](https://developers.google.com/style/translation) +- [Cross-references](https://developers.google.com/style/cross-references) +- [Present tense](https://developers.google.com/style/tense) + +The Google guidelines include more material than is listed here and are used as +a guide that enables easy decision-making about proposed content changes. + +Other useful resources: + +- [Google Technical Writing Courses](https://developers.google.com/tech-writing) +- [GitHub Guides Mastering + Markdown](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax) + +## Where can I find the tutorials and docs? + +Technical content includes knowledge base articles and interactive tutorials. + +- The Ignite CLI Developer Tutorials content is in the `docs/guide` folder. +- The Knowledge Base content is in the `docs/kb` folder. +- Upgrade information is in the `docs/migration` folder. + +Note: The CLI docs are auto-generated and do not support doc updates. + +Locations and folders for other content can vary. Explore the self-describing +folders for the content that you are interested in. Some articles and tutorials +reside in a single Markdown file while sub-folders might be present for other +tutorials. + +As always, work-in-progress content might be happening in other locations and +repos. + +## Who works on the tutorials? + +The Ignite product team developers are focused on building Ignite CLI and +improving the developer experience. 
The Ignite Ecosystem Development team owns +the technical content and tutorials and manages developer onboarding. + +Meet the [people behind Ignite CLI and our +contributors](https://github.com/ignite/cli/graphs/contributors). + +## Viewing docs builds + +Use a preview to see what your changes will look like in production before the +updated pages are published. + +- While a PR is in draft mode, you can rely on using the preview feature in + Markdown. +- After the PR moves from **Draft** to **Ready for review**, the CI status + checks generate a deployment preview. This preview stays up to date as you + continue to work and commit new changes to the same branch. A `Docs Deploy + Preview / build_and_deploy (pull_request)` preview on a GitHub actions URL is + unique for that PR. diff --git a/docs/versioned_docs/version-v28/05-contributing/_category_.json b/docs/versioned_docs/version-v28/05-contributing/_category_.json new file mode 100644 index 0000000..094b1f3 --- /dev/null +++ b/docs/versioned_docs/version-v28/05-contributing/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Contribute to Ignite", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/06-migration/_category_.json b/docs/versioned_docs/version-v28/06-migration/_category_.json new file mode 100644 index 0000000..9460d57 --- /dev/null +++ b/docs/versioned_docs/version-v28/06-migration/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Migration", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/06-migration/readme.md b/docs/versioned_docs/version-v28/06-migration/readme.md new file mode 100644 index 0000000..978e90e --- /dev/null +++ b/docs/versioned_docs/version-v28/06-migration/readme.md @@ -0,0 +1,14 @@ +--- +sidebar_position: 0 +--- + +# Migration Guides + +Welcome to the section on upgrading to a newer version of Ignite CLI! 
If you're +looking to update to the latest version, you'll want to start by checking the +documentation to see if there are any special considerations or instructions you +need to follow. + +If there is no documentation for the latest version of Ignite CLI, it's +generally safe to assume that there were no breaking changes, and you can +proceed with using the latest version with your project. \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/06-migration/v0.18.md b/docs/versioned_docs/version-v28/06-migration/v0.18.md new file mode 100644 index 0000000..c516986 --- /dev/null +++ b/docs/versioned_docs/version-v28/06-migration/v0.18.md @@ -0,0 +1,458 @@ +--- +sidebar_position: 999 +title: v0.18.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.18, changes are required to use Ignite CLI v0.18. +--- + +# Upgrading a Blockchain to use Ignite CLI v0.18 + +Ignite CLI v0.18 comes with Cosmos SDK v0.44. This version of Cosmos SDK introduced changes that are not compatible with +chains that were scaffolded with Ignite CLI versions lower than v0.18. + +**Important:** After upgrading from Ignite CLI v0.17.3 to Ignite CLI v0.18, you must update the default blockchain +template to use blockchains that were scaffolded with earlier versions. + +These instructions are written for a blockchain that was scaffolded with the following command: + +``` +ignite scaffold chain github.com/username/mars +``` + +If you used a different module path, replace `username` and `mars` with the correct values for your blockchain. + +## Blockchain + +For each file listed, make the required changes to the source code of the blockchain template. 
+ +### go.mod + +``` +module github.com/username/mars + +go 1.16 + +require ( + github.com/cosmos/cosmos-sdk v0.44.0 + github.com/cosmos/ibc-go v1.2.0 + github.com/gogo/protobuf v1.3.3 + github.com/google/go-cmp v0.5.6 // indirect + github.com/gorilla/mux v1.8.0 + github.com/grpc-ecosystem/grpc-gateway v1.16.0 + github.com/spf13/cast v1.3.1 + github.com/spf13/cobra v1.1.3 + github.com/stretchr/testify v1.7.0 + github.com/tendermint/spm v0.1.6 + github.com/tendermint/tendermint v0.34.13 + github.com/tendermint/tm-db v0.6.4 + google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83 + google.golang.org/grpc v1.40.0 +) + +replace ( + github.com/99designs/keyring => github.com/cosmos/keyring v1.1.7-0.20210622111912-ef00f8ac3d76 + github.com/gogo/protobuf => github.com/regen-network/protobuf v1.3.3-alpha.regen.1 + google.golang.org/grpc => google.golang.org/grpc v1.33.2 +) +``` + +### app/app.go + +```go +package app + +import ( + //... + // Add the following packages: + "github.com/cosmos/cosmos-sdk/x/feegrant" + feegrantkeeper "github.com/cosmos/cosmos-sdk/x/feegrant/keeper" + feegrantmodule "github.com/cosmos/cosmos-sdk/x/feegrant/module" + + "github.com/cosmos/ibc-go/modules/apps/transfer" + ibctransferkeeper "github.com/cosmos/ibc-go/modules/apps/transfer/keeper" + ibctransfertypes "github.com/cosmos/ibc-go/modules/apps/transfer/types" + ibc "github.com/cosmos/ibc-go/modules/core" + ibcclient "github.com/cosmos/ibc-go/modules/core/02-client" + ibcporttypes "github.com/cosmos/ibc-go/modules/core/05-port/types" + ibchost "github.com/cosmos/ibc-go/modules/core/24-host" + ibckeeper "github.com/cosmos/ibc-go/modules/core/keeper" + // Remove the following packages: + // transfer "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer" + // ibctransferkeeper "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer/keeper" + // ibctransfertypes "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer/types" + // ibc "github.com/cosmos/cosmos-sdk/x/ibc/core" + // 
ibcclient "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client" + // porttypes "github.com/cosmos/cosmos-sdk/x/ibc/core/05-port/types" + // ibchost "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" + // ibckeeper "github.com/cosmos/cosmos-sdk/x/ibc/core/keeper" +) + +var ( + //... + ModuleBasics = module.NewBasicManager( + //... + slashing.AppModuleBasic{}, + // Add feegrantmodule.AppModuleBasic{}, + feegrantmodule.AppModuleBasic{}, // <-- + ibc.AppModuleBasic{}, + //... + ) + //... +) + +type App struct { + //... + // Replace codec.Marshaler with codec.Codec + appCodec codec.Codec // <-- + // Add FeeGrantKeeper + FeeGrantKeeper feegrantkeeper.Keeper // <-- +} + +func New( /*...*/ ) { + //bApp.SetAppVersion(version.Version) + bApp.SetVersion(version.Version) // <-- + + keys := sdk.NewKVStoreKeys( + //... + upgradetypes.StoreKey, + // Add feegrant.StoreKey + feegrant.StoreKey, // <-- + evidencetypes.StoreKey, + //... + ) + + app.FeeGrantKeeper = feegrantkeeper.NewKeeper(appCodec, keys[feegrant.StoreKey], app.AccountKeeper) // <-- + // Add app.BaseApp as the last argument to upgradekeeper.NewKeeper + app.UpgradeKeeper = upgradekeeper.NewKeeper(skipUpgradeHeights, keys[upgradetypes.StoreKey], appCodec, homePath, app.BaseApp) + + app.IBCKeeper = ibckeeper.NewKeeper( + // Add app.UpgradeKeeper + appCodec, keys[ibchost.StoreKey], app.GetSubspace(ibchost.ModuleName), app.StakingKeeper, app.UpgradeKeeper, scopedIBCKeeper, + ) + + govRouter.AddRoute(govtypes.RouterKey, govtypes.ProposalHandler). + //... + // Replace NewClientUpdateProposalHandler with NewClientProposalHandler + AddRoute(ibchost.RouterKey, ibcclient.NewClientProposalHandler(app.IBCKeeper.ClientKeeper)) + + // Replace porttypes with ibcporttypes + ibcRouter := ibcporttypes.NewRouter() + + app.mm.SetOrderBeginBlockers( + upgradetypes.ModuleName, + // Add capabilitytypes.ModuleName, + capabilitytypes.ModuleName, + minttypes.ModuleName, + //... 
+ // Add feegrant.ModuleName, + feegrant.ModuleName, + ) + + // Add app.appCodec as an argument to module.NewConfigurator: + app.mm.RegisterServices(module.NewConfigurator(app.appCodec, app.MsgServiceRouter(), app.GRPCQueryRouter())) + + // Replace: + // app.SetAnteHandler( + // ante.NewAnteHandler( + // app.AccountKeeper, app.BankKeeper, ante.DefaultSigVerificationGasConsumer, + // encodingConfig.TxConfig.SignModeHandler(), + // ), + // ) + + // With the following: + anteHandler, err := ante.NewAnteHandler( + ante.HandlerOptions{ + AccountKeeper: app.AccountKeeper, + BankKeeper: app.BankKeeper, + SignModeHandler: encodingConfig.TxConfig.SignModeHandler(), + FeegrantKeeper: app.FeeGrantKeeper, + SigGasConsumer: ante.DefaultSigVerificationGasConsumer, + }, + ) + if err != nil { + panic(err) + } + app.SetAnteHandler(anteHandler) + + // Remove the following: + // ctx := app.BaseApp.NewUncachedContext(true, tmproto.Header{}) + // app.CapabilityKeeper.InitializeAndSeal(ctx) +} + +func (app *App) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci.ResponseInitChain { + var genesisState GenesisState + if err := tmjson.Unmarshal(req.AppStateBytes, &genesisState); err != nil { + panic(err) + } + // Add the following: + app.UpgradeKeeper.SetModuleVersionMap(ctx, app.mm.GetVersionMap()) + return app.mm.InitGenesis(ctx, app.appCodec, genesisState) +} + +// Replace Marshaler with Codec +func (app *App) AppCodec() codec.Codec { + return app.appCodec +} + +// Replace BinaryMarshaler with BinaryCodec +func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey sdk.StoreKey) paramskeeper.Keeper { + //... +} +``` + +### app/genesis.go + +```go +// Replace codec.JSONMarshaler with codec.JSONCodec +func NewDefaultGenesisState(cdc codec.JSONCodec) GenesisState { + // ... 
+} +``` + +### testutil/keeper/mars.go + +Add the following code: + +```go +package keeper + +import ( + "testing" + + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/store" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/stretchr/testify/require" + "github.com/tendermint/tendermint/libs/log" + tmproto "github.com/tendermint/tendermint/proto/tendermint/types" + tmdb "github.com/tendermint/tm-db" + "github.com/username/mars/x/mars/keeper" + "github.com/username/mars/x/mars/types" +) + +func MarsKeeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + + db := tmdb.NewMemDB() + stateStore := store.NewCommitMultiStore(db) + stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + require.NoError(t, stateStore.LoadLatestVersion()) + + registry := codectypes.NewInterfaceRegistry() + k := keeper.NewKeeper( + codec.NewProtoCodec(registry), + storeKey, + memStoreKey, + ) + + ctx := sdk.NewContext(stateStore, tmproto.Header{}, false, log.NewNopLogger()) + return k, ctx +} +``` + +If `mars` is an IBC-enabled module, add the following code, instead: + +```go +package keeper + +import ( + "testing" + + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/store" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + capabilitykeeper "github.com/cosmos/cosmos-sdk/x/capability/keeper" + typesparams "github.com/cosmos/cosmos-sdk/x/params/types" + ibckeeper "github.com/cosmos/ibc-go/modules/core/keeper" + "github.com/stretchr/testify/require" + "github.com/tendermint/tendermint/libs/log" + tmproto 
"github.com/tendermint/tendermint/proto/tendermint/types" + tmdb "github.com/tendermint/tm-db" + "github.com/username/test/x/mars/keeper" + "github.com/username/test/x/mars/types" +) + +func MarsKeeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + logger := log.NewNopLogger() + + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + + db := tmdb.NewMemDB() + stateStore := store.NewCommitMultiStore(db) + stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + require.NoError(t, stateStore.LoadLatestVersion()) + + registry := codectypes.NewInterfaceRegistry() + appCodec := codec.NewProtoCodec(registry) + capabilityKeeper := capabilitykeeper.NewKeeper(appCodec, storeKey, memStoreKey) + + amino := codec.NewLegacyAmino() + ss := typesparams.NewSubspace(appCodec, + amino, + storeKey, + memStoreKey, + "MarsSubSpace", + ) + IBCKeeper := ibckeeper.NewKeeper( + appCodec, + storeKey, + ss, + nil, + nil, + capabilityKeeper.ScopeToModule("MarsIBCKeeper"), + ) + + k := keeper.NewKeeper( + codec.NewProtoCodec(registry), + storeKey, + memStoreKey, + IBCKeeper.ChannelKeeper, + &IBCKeeper.PortKeeper, + capabilityKeeper.ScopeToModule("MarsScopedKeeper"), + ) + + ctx := sdk.NewContext(stateStore, tmproto.Header{}, false, logger) + return k, ctx +} +``` + +### testutil/network/network.go + +```go +func DefaultConfig() network.Config { + // ... + return network.Config{ + // ... + // Add sdk.DefaultPowerReduction + AccountTokens: sdk.TokensFromConsensusPower(1000, sdk.DefaultPowerReduction), + StakingTokens: sdk.TokensFromConsensusPower(500, sdk.DefaultPowerReduction), + BondedTokens: sdk.TokensFromConsensusPower(100, sdk.DefaultPowerReduction), + // ... 
+ } +} +``` + +### testutil/sample/sample.go + +Add the following code: + +```go +package sample + +import ( + "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" + sdk "github.com/cosmos/cosmos-sdk/types" +) + +// AccAddress returns a sample account address +func AccAddress() string { + pk := ed25519.GenPrivKey().PubKey() + addr := pk.Address() + return sdk.AccAddress(addr).String() +} +``` + +### BandChain Support + +If your module includes integration with BandChain, added manually or scaffolded with `ignite scaffold band`, upgrade +the `github.com/bandprotocol/bandchain-packet` package to `v0.0.2` in `go.mod`. + +## Module + +### x/mars/keeper/keeper.go + +```go +package keeper + +// ... + +type ( + Keeper struct { + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec + //... + } +) + +func NewKeeper( + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec, + // ... +) *Keeper { + // ... +} +``` + +### x/mars/keeper/msg_server_test.go + +```go +package keeper_test + +import ( + //... 
+ // Add the following: + keepertest "github.com/username/mars/testutil/keeper" + "github.com/username/mars/x/mars/keeper" +) + +func setupMsgServer(t testing.TB) (types.MsgServer, context.Context) { + // Replace + // keeper, ctx := setupKeeper(t) + // return NewMsgServerImpl(*keeper), sdk.WrapSDKContext(ctx) + + // With the following: + k, ctx := keepertest.MarsKeeper(t) + return keeper.NewMsgServerImpl(*k), sdk.WrapSDKContext(ctx) +} +``` + +### x/mars/module.go + +```go +package mars + +type AppModuleBasic struct { + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec +} + +// Replace Marshaler with BinaryCodec +func NewAppModuleBasic(cdc codec.BinaryCodec) AppModuleBasic { + return AppModuleBasic{cdc: cdc} +} + +// Replace JSONMarshaler with JSONCodec +func (AppModuleBasic) DefaultGenesis(cdc codec.JSONCodec) json.RawMessage { + return cdc.MustMarshalJSON(types.DefaultGenesis()) +} + +// Replace JSONMarshaler with JSONCodec +func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncodingConfig, bz json.RawMessage) error { + //... +} + +// Replace codec.Marshaller with codec.Codec +func NewAppModule(cdc codec.Codec, keeper keeper.Keeper) AppModule { + //... +} + +// Replace JSONMarshaler with JSONCodec +func (am AppModule) InitGenesis(ctx sdk.Context, cdc codec.JSONCodec, gs json.RawMessage) []abci.ValidatorUpdate { + //... +} + +// Replace JSONMarshaler with JSONCodec +func (am AppModule) ExportGenesis(ctx sdk.Context, cdc codec.JSONCodec) json.RawMessage { + //... 
+} + +// Add the following +func (AppModule) ConsensusVersion() uint64 { return 2 } +``` diff --git a/docs/versioned_docs/version-v28/06-migration/v0.19.2.md b/docs/versioned_docs/version-v28/06-migration/v0.19.2.md new file mode 100644 index 0000000..0ebd0ec --- /dev/null +++ b/docs/versioned_docs/version-v28/06-migration/v0.19.2.md @@ -0,0 +1,26 @@ +--- +sidebar_position: 998 +title: v0.19.2 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.19.2, changes are required to use Ignite CLI v0.19.2. +--- + +# Upgrading a blockchain to use Ignite CLI v0.19.2 + +Ignite CLI v0.19.2 comes with IBC v2.0.2. + +With Ignite CLI v0.19.2, the contents of the deprecated Ignite CLI Modules `tendermint/spm` repo are moved to the +official Ignite CLI repo which introduces breaking changes. + +To migrate your chain that was scaffolded with Ignite CLI versions lower than v0.19.2: + +1. IBC upgrade: Use + the [IBC migration documents](https://github.com/cosmos/ibc-go/blob/v6.2.0/docs/migrations/v1-to-v2.md) + +2. In your chain's `go.mod` file, remove `tendermint/spm` and add the v0.19.2 version of `tendermint/starport`. 
If your + chain uses these packages, change the import paths as shown: + + - `github.com/tendermint/spm/ibckeeper` moved to `github.com/tendermint/starport/starport/pkg/cosmosibckeeper` + - `github.com/tendermint/spm/cosmoscmd` moved to `github.com/tendermint/starport/starport/pkg/cosmoscmd` + - `github.com/tendermint/spm/openapiconsole` moved to `github.com/tendermint/starport/starport/pkg/openapiconsole` + - `github.com/tendermint/spm/testutil/sample` moved + to `github.com/tendermint/starport/starport/pkg/cosmostestutil/sample` diff --git a/docs/versioned_docs/version-v28/06-migration/v0.20.0.md b/docs/versioned_docs/version-v28/06-migration/v0.20.0.md new file mode 100644 index 0000000..197dafc --- /dev/null +++ b/docs/versioned_docs/version-v28/06-migration/v0.20.0.md @@ -0,0 +1,12 @@ +--- +sidebar_position: 997 +title: v0.20.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.20.0, changes are required to use Ignite CLI v0.20.0. +--- + +# Upgrading a blockchain to use Ignite CLI v0.20.0 + +1. Upgrade your Cosmos SDK version to [v0.45.3](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.45.3). + +2. Update your `SetOrderBeginBlockers` and `SetOrderEndBlockers` in your `app/app.go` to explicitly add entries for all + the modules you use in your chain. diff --git a/docs/versioned_docs/version-v28/06-migration/v0.22.0.md b/docs/versioned_docs/version-v28/06-migration/v0.22.0.md new file mode 100644 index 0000000..e2d82e6 --- /dev/null +++ b/docs/versioned_docs/version-v28/06-migration/v0.22.0.md @@ -0,0 +1,36 @@ +--- +sidebar_position: 996 +title: v0.22.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.22.0, changes are required to use Ignite CLI v0.22.0. +--- + +# Upgrading a blockchain to use Ignite CLI v0.22.0 + +Ignite CLI v0.22.2 changed the GitHub username from "ignite-hq" to "ignite", which means the imports must be fixed to +reflect this change. + +1. 
In your `go.mod` file find the require line for Ignite CLI that starts with `github.com/ignite-hq/cli` and is + followed by a version. + It looks something like `github.com/ignite-hq/cli v0.22.0`, and replace it by `github.com/ignite/cli v0.22.2`. + +2. Make a bulk find and replace in the import statements for `github.com/ignite-hq/cli` to be replaced + by `github.com/ignite/cli`. + +3. Finally, run `go mod tidy` and ensure there's no mention of `ignite-hq/cli` in your `go.sum` file. + +This update includes an upgrade to the `ibc-go` packages. Please make the corresponding changes: + +1. Upgrade your IBC version to [v3](https://github.com/cosmos/ibc-go/releases/tag/v3.0.0). + + 1. Search for `github.com/cosmos/ibc-go/v2` in the import statements of your `.go` files and replace `v2` in the end + with `v3` + + 1. Open your `app.go`, + + - Update your transfer keeper by adding another `app.IBCKeeper.ChannelKeeper` as an argument + after `app.IBCKeeper.ChannelKeeper` + + - Define `var transferIBCModule = transfer.NewIBCModule(app.TransferKeeper)` in your `New()` func, and update + your existent IBC router to use it: `ibcRouter.AddRoute(ibctransfertypes.ModuleName, transferIBCModule)` + + 3. Open your `go.mod` and change the IBC line with `github.com/cosmos/ibc-go/v3 v3.0.0` diff --git a/docs/versioned_docs/version-v28/06-migration/v0.24.0.md b/docs/versioned_docs/version-v28/06-migration/v0.24.0.md new file mode 100644 index 0000000..8e31f89 --- /dev/null +++ b/docs/versioned_docs/version-v28/06-migration/v0.24.0.md @@ -0,0 +1,330 @@ +--- +sidebar_position: 995 +title: v0.24.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.24, changes are required to use Ignite CLI v0.24.0. +--- + +## Cosmos SDK v0.46 upgrade notes + +### Update dependencies + +Cosmos SDK v0.46 is compatible with the latest version of IBC Go v5. If you have a chain that is using an older version, +update the dependencies in your project. 
+ +Throughout the code you might see the following dependencies: + +```go +package pkg_name + +import ( + "github.com/cosmos/ibc-go/v3/..." +) +``` + +Where `v3` is the version of IBC Go and `...` are different IBC Go packages. + +To upgrade the version to `v5`, a global find-and-replace should work. Replace `cosmos/ibc-go/v3` (or whichever version +you're using) with `cosmos/ibc-go/v5` only in `*.go` files (to exclude unwanted changes to files like `go.sum`). + +### Module keeper + +Add an import: + +```go +// x/{moduleName}/keeper/keeper.go + +package keeper + +// ... + +import ( + //... + storetypes "github.com/cosmos/cosmos-sdk/store/types" +) +``` + +In the `Keeper` struct replace `sdk.StoreKey` with `storetypes.StoreKey`: + +```go +// x/{moduleName}/keeper/keeper.go + +package keeper + +// ... + +type ( + Keeper struct { + cdc codec.BinaryCodec + storeKey storetypes.StoreKey + memKey storetypes.StoreKey + paramstore paramtypes.Subspace + } +) +``` + +In the argument list of the `NewKeeper` function definition: + +```go +package keeper + +// ... + +// x/{moduleName}/keeper/keeper.go + +func NewKeeper( + //... + memKey storetypes.StoreKey, +) +``` + +Store type aliases have been removed from the Cosmos SDK `types` package and now have to be imported from `store/types`, +instead. + +In the `testutil/keeper/{moduleName}.go` replace `types.StoreKey` with `storetypes.StoreKey` and `types.MemStoreKey` +with `storetypes.MemStoreKey`. + +```go +// testutil/keeper/{moduleName}.go + +package keeper + +// ... + +func {moduleName}Keeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(storetypes.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(storetypes.MemStoreKey) + //... +} +``` + +### Testutil network package + +Add the `require` package for testing and `pruningtypes` and remove `storetypes`: + +```go +// testutil/network/network.go + +package network + +// ...
+ +import ( + pruningtypes "github.com/cosmos/cosmos-sdk/pruning/types" + "github.com/stretchr/testify/require" + // storetypes "github.com/cosmos/cosmos-sdk/store/types" <-- remove this line +) +``` + +In the `DefaultConfig` function replace `storetypes.NewPruningOptionsFromString` +with `pruningtypes.NewPruningOptionsFromString` + +```go +// testutil/network/network.go + +package network + +// ... + +func DefaultConfig() network.Config { + //... + return network.Config{ + AppConstructor: func(val network.Validator) servertypes.Application { + return app.New( + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.AppConfig.Pruning)), + //... + ) + }, + //... + } +} +``` + +The `New` function in the Cosmos SDK `testutil/network` package now +accepts [three arguments](https://github.com/cosmos/cosmos-sdk/blob/v0.46.0/testutil/network/network.go#L206) instead of +two. + +In the `New` function add `t.TempDir()` as the second argument to `network.New()` and test that no error is thrown +with `require.NoError(t, err)`: + +```go +// testutil/network/network.go + +package network + +// ... + +func New(t *testing.T, configs ...network.Config) *network.Network { + //... + net, err := network.New(t, t.TempDir(), cfg) + require.NoError(t, err) + //... +} +``` + +### Testutil keeper package + +In the `{moduleName}Keeper` function make the following replacements: + +- `storetypes.StoreKey` → `types.StoreKey` +- `storetypes.MemStoreKey` → `types.MemStoreKey` +- `sdk.StoreTypeIAVL` → `storetypes.StoreTypeIAVL` +- `sdk.StoreTypeMemory` → `storetypes.StoreTypeMemory` + +```go +// testutil/keeper/{moduleName}.go + +package keeper + +// ... + +func {moduleName}Keeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + //... 
+ stateStore.MountStoreWithDB(storeKey, storetypes.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, storetypes.StoreTypeMemory, nil) + //... +} +``` + +### IBC modules + +If you have IBC-enabled modules (for example, added with `ignite scaffold module ... --ibc` or created manually), make +the following changes to the source code. + +Cosmos SDK expects IBC modules +to [implement the `IBCModule` interface](https://ibc.cosmos.network/main/ibc/apps/ibcmodule/). Create an `IBCModule` +type that embeds the module's keeper and a method that returns a new `IBCModule`. Methods in this file will be defined +on this type. + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +type IBCModule struct { + keeper keeper.Keeper +} + +func NewIBCModule(k keeper.Keeper) IBCModule { + return IBCModule{ + keeper: k, + } +} +``` + +Replace receivers for all methods in this file from `(am AppModule)` to `(im IBCModule)`. Replace all instances of `am.` +with `im.` to fix the errors. + +`OnChanOpenInit` now returns two values: a `string` and an `error`: + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +func (im IBCModule) OnChanOpenInit( /*...*/ ) (string, error) +``` + +Ensure that all return statements (five, in the default template) in `OnChanOpenInit` return two values. For example: + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +func (im IBCModule) OnChanOpenInit( /*...*/ ) (string, error) { + //... + return "", errorsmod.Wrapf(porttypes.ErrInvalidPort, "invalid port: %s, expected %s", portID, boundPort) + //... +} +``` + +Error acknowledgments returned from Transfer `OnRecvPacket` now include a deterministic ABCI code and error message. +Remove the `.Error()` call: + +```go +// x/{moduleName}/module_ibc.go + +package module_name + +// ... + +func (im IBCModule) OnRecvPacket( /*...*/ ) { + //...
+ + if err := modulePacketData.Unmarshal(modulePacket.GetData()); err != nil { + // return channeltypes.NewErrorAcknowledgement(errorsmod.Wrapf(sdkerrors.ErrUnknownRequest, "cannot unmarshal packet data: %s", err.Error()).Error()) + return channeltypes.NewErrorAcknowledgement(errorsmod.Wrapf(sdkerrors.ErrUnknownRequest, "cannot unmarshal packet data: %s", err.Error())) + } + + // ... + + // Dispatch packet + switch packet := modulePacketData.Packet.(type) { + // ... + default: + // errMsg := fmt.Sprintf("unrecognized %s packet type: %T", types.ModuleName, packet) + // return channeltypes.NewErrorAcknowledgement(errMsg) + err := fmt.Errorf("unrecognized %s packet type: %T", types.ModuleName, packet) + return channeltypes.NewErrorAcknowledgement(err) + } +} +``` + +After switching to using both `AppModule` and `IBCModule`, modify the following line: + +```go +// x/{moduleName}/module.go + +package module_name + +// ... + +var ( + //... + _ porttypes.IBCModule = IBCModule{} // instead of "= AppModule{}" +) +``` + +### Main + +The `Execute` function in Cosmos SDK `server/cmd` package now +accepts [three arguments](https://github.com/cosmos/cosmos-sdk/blob/v0.46.0/server/cmd/execute.go#L20) instead of +two. + +```go +// cmd/{{projectName}}d/main.go + +package projectNamed + +// ... + +func main() { + //... + if err := svrcmd.Execute(rootCmd, "", app.DefaultNodeHome); err != nil { + os.Exit(1) + } +} +``` + +### Handler + +Cosmos SDK v0.46 no longer needs a `NewHandler` function that was used to handle messages and call appropriate keeper +methods based on message types. Feel free to remove the `x/{moduleName}/handler.go` file. + +Since there is no `NewHandler` now, modify the deprecated `Route` function to return `sdk.Route{}`: + +```go +// x/{moduleName}/module.go + +package module_name + +// ...
+ +func (am AppModule) Route() sdk.Route { return sdk.Route{} } +``` diff --git a/docs/versioned_docs/version-v28/06-migration/v0.25.0.md b/docs/versioned_docs/version-v28/06-migration/v0.25.0.md new file mode 100644 index 0000000..66ec75c --- /dev/null +++ b/docs/versioned_docs/version-v28/06-migration/v0.25.0.md @@ -0,0 +1,1187 @@ +--- +sidebar_position: 994 +title: v0.25.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.25.0, changes are required to use Ignite CLI v0.25.0. +--- + +## Protobuf directory migration + +`v0.25.0` changes the location of scaffolded `.proto` files. Previously, `.proto` files were located in `./proto/{moduleName}/`, +where `moduleName` is the same name as the Cosmos SDK module found in `./x/{moduleName}/`. This new version of `ignite` +modifies the scaffolded protobuf files so that they are now generated in `./proto/{appName}/{moduleName}`. + +The only change that needs to be made is to create an `{appName}` folder in the `proto` directory, and then place the +sub-directories within it.
An example below demonstrates this change: + +### Previous Directory Structure + +This example shows a chain that was generated using `ignite` with `v0.24.0` using the following command: + +```bash +ignite s chain github.com/cosmos/planet --no-module +ignite s module mars +``` + +```bash +├── app +├── cmd +├── docs +├── proto +│ ├── mars +├── x +│ ├── mars +├── README.md +├── config.yml +├── go.mod +├── go.sum +└── .gitignore +``` + +### `v0.25.0` Directory Structure + +This example shows a chain that was generated using `ignite` with `v0.25.0` using the following command: + +```bash +ignite s chain github.com/cosmos/planet --no-module +ignite s module mars +``` + +```bash +├── app +├── cmd +├── docs +├── proto +│ ├── planet +│ │ ├── mars +├── x +│ ├── mars +├── README.md +├── config.yml +├── go.mod +├── go.sum +└── .gitignore +``` + +The only difference is the additional directory `planet` which is the name of the application. The name of the app can +be verified by checking the package in the `go.mod` file. In this example, the package is `github.com/cosmos/planet` +where `planet` is the app name. + + --- + +## Removing `cosmoscmd` + +`v0.25.0` removes the `cosmoscmd` package from scaffolded chains. This package provided utility for creating +commands and starting up their application. The `cosmoscmd` package is now deprecated, and it is suggested that chains +implement this functionality in their codebase so they can be more easily upgraded and customized. + +The main functionality of `cosmoscmd` will be moved to the `app` package of your chain. Some imports in these +examples contain the sample string, `{ModulePath}`. Replace this string with the Go module path of your blockchain. +For example, if your blockchain module path is `github.com/planet/mars`, `{ModulePath}/app/params` would become +`github.com/planet/mars/app/params`.
+ +#### Migration in `app` package + +To begin, create a new file, `./app/params/encoding.go`, containing the following code: + +```go +package params + +import ( + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/codec/types" +) + +// EncodingConfig specifies the concrete encoding types to use for a given app. +// This is provided for compatibility between protobuf and amino implementations. +type EncodingConfig struct { + InterfaceRegistry types.InterfaceRegistry + Marshaler codec.Codec + TxConfig client.TxConfig + Amino *codec.LegacyAmino +} +``` + +Next, create a new file, `./app/encoding.go`, containing the following code: + +```go +package app + +import ( + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/std" + "github.com/cosmos/cosmos-sdk/x/auth/tx" + + "{ModulePath}/app/params" +) + +// makeEncodingConfig creates an EncodingConfig for an amino based test configuration. 
+func makeEncodingConfig() params.EncodingConfig { + amino := codec.NewLegacyAmino() + interfaceRegistry := types.NewInterfaceRegistry() + marshaler := codec.NewProtoCodec(interfaceRegistry) + txCfg := tx.NewTxConfig(marshaler, tx.DefaultSignModes) + + return params.EncodingConfig{ + InterfaceRegistry: interfaceRegistry, + Marshaler: marshaler, + TxConfig: txCfg, + Amino: amino, + } +} + +// MakeEncodingConfig creates an EncodingConfig for testing +func MakeEncodingConfig() params.EncodingConfig { + encodingConfig := makeEncodingConfig() + std.RegisterLegacyAminoCodec(encodingConfig.Amino) + std.RegisterInterfaces(encodingConfig.InterfaceRegistry) + ModuleBasics.RegisterLegacyAminoCodec(encodingConfig.Amino) + ModuleBasics.RegisterInterfaces(encodingConfig.InterfaceRegistry) + return encodingConfig +} +``` + +Next, modify `./app/simulation_test.go` so that it looks like the following: + +```go +package app_test + +import ( + "os" + "testing" + "time" + + "github.com/cosmos/cosmos-sdk/simapp" + simulationtypes "github.com/cosmos/cosmos-sdk/types/simulation" + "github.com/cosmos/cosmos-sdk/x/simulation" + "github.com/stretchr/testify/require" + abci "github.com/tendermint/tendermint/abci/types" + tmproto "github.com/tendermint/tendermint/proto/tendermint/types" + tmtypes "github.com/tendermint/tendermint/types" + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" + + // highlight-next-line + "{ModulePath}/app" +) + +// remove-start +type SimApp interface { + cosmoscmd.App + GetBaseApp() *baseapp.BaseApp + AppCodec() codec.Codec + SimulationManager() *module.SimulationManager + ModuleAccountAddrs() map[string]bool + Name() string + LegacyAmino() *codec.LegacyAmino + BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) + abci.ResponseBeginBlock + EndBlocker(ctx sdk.Context, req abci.RequestEndBlock) + abci.ResponseEndBlock + InitChainer(ctx sdk.Context, req abci.RequestInitChain) + abci.ResponseInitChain +} + +// remove-end + +// ... 
+ +// BenchmarkSimulation run the chain simulation +// Running using starport command: +// `starport chain simulate -v --numBlocks 200 --blockSize 50` +// Running as go benchmark test: +// `go test -benchmem -run=^$ -bench ^BenchmarkSimulation ./app -NumBlocks=200 -BlockSize 50 -Commit=true -Verbose=true -Enabled=true` +func BenchmarkSimulation(b *testing.B) { + + // ... + + // remove-next-line + encoding := cosmoscmd.MakeEncodingConfig(app.ModuleBasics) + // highlight-next-line + encoding := app.MakeEncodingConfig() + + app := app.New( + logger, + db, + nil, + true, + map[int64]bool{}, + app.DefaultNodeHome, + 0, + encoding, + simapp.EmptyAppOptions{}, + ) + + // remove-start + simApp, ok := app.(SimApp) + require.True(b, ok, "can't use simapp") + // remove-end + + // Run randomized simulations + _, simParams, simErr := simulation.SimulateFromSeed( + b, + os.Stdout, + // highlight-next-line + app.BaseApp, + // highlight-next-line + simapp.AppStateFn(app.AppCodec(), app.SimulationManager()), + simulationtypes.RandomAccounts, + // highlight-next-line + simapp.SimulationOperations(app, app.AppCodec(), config), + // highlight-next-line + app.ModuleAccountAddrs(), + config, + // highlight-next-line + app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + // highlight-next-line + err = simapp.CheckExportSimulation(app, config, simParams) + require.NoError(b, err) + require.NoError(b, simErr) + + // ... +} +``` + +The main changes here are that the `SimApp` interface has been removed and is being replaced with `app`. + +The final modification in the `app` package is in `app/app.go`: + +```go +package app + +import ( + // ... + + // this line is used by starport scaffolding # stargate/app/moduleImport + + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" + + // highlight-start + appparams "{ModulePath}/app/params" + "{ModulePath}/docs" + // highlight-end +) + +// ... 
+ +var ( + // remove-next-line + _ cosmoscmd.App = (*App)(nil) + _ servertypes.Application = (*App)(nil) + _ simapp.App = (*App)(nil) +) + +// ... + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + // highlight-next-line + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), + // highlight-next-line +) *App { + appCodec := encodingConfig.Marshaler + cdc := encodingConfig.Amino + interfaceRegistry := encodingConfig.InterfaceRegistry + + bApp := baseapp.NewBaseApp( + Name, + logger, + db, + encodingConfig.TxConfig.TxDecoder(), + baseAppOptions..., + ) + + // ... + +} + +// ... + +// Name returns the name of the App +func (app *App) Name() string { return app.BaseApp.Name() } + +// remove-start +// GetBaseApp returns the base app of the application +func (app App) GetBaseApp() *baseapp.BaseApp { return app.BaseApp } + +// remove-end + +// BeginBlocker application updates every begin block +func (app *App) BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) abci.ResponseBeginBlock { + return app.mm.BeginBlock(ctx, req) +} + +// ... +``` + +Again, here we are removing the use of `cosmoscmd` and replacing it with `app`. + +#### Migration in `cmd` package + +Some imports in these +examples contain the sample string, `{binaryNamePrefix}d`. Replace this string with the binary name of your blockchain. +For example, if your blockchain module path is `github.com/planet/mars`, `./cmd/{binaryNamePrefix}d/cmd/` would +become `./cmd/marsd/cmd/`.
+ +First, create the new file `./cmd/{binaryNamePrefix}d/cmd/config.go` with the following code: + +```go +package cmd + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + + "{ModulePath}/app" +) + +func initSDKConfig() { + // Set prefixes + accountPubKeyPrefix := app.AccountAddressPrefix + "pub" + validatorAddressPrefix := app.AccountAddressPrefix + "valoper" + validatorPubKeyPrefix := app.AccountAddressPrefix + "valoperpub" + consNodeAddressPrefix := app.AccountAddressPrefix + "valcons" + consNodePubKeyPrefix := app.AccountAddressPrefix + "valconspub" + + // Set and seal config + config := sdk.GetConfig() + config.SetBech32PrefixForAccount(app.AccountAddressPrefix, accountPubKeyPrefix) + config.SetBech32PrefixForValidator(validatorAddressPrefix, validatorPubKeyPrefix) + config.SetBech32PrefixForConsensusNode(consNodeAddressPrefix, consNodePubKeyPrefix) + config.Seal() +} +``` + +Next, create the new file `./cmd/{binaryNamePrefix}d/cmd/genaccounts.go` with the following code: + +```go +package cmd + +import ( + "bufio" + "encoding/json" + "errors" + "fmt" + + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/crypto/keyring" + "github.com/cosmos/cosmos-sdk/server" + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + authvesting "github.com/cosmos/cosmos-sdk/x/auth/vesting/types" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/cosmos/cosmos-sdk/x/genutil" + genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" + "github.com/spf13/cobra" +) + +const ( + flagVestingStart = "vesting-start-time" + flagVestingEnd = "vesting-end-time" + flagVestingAmt = "vesting-amount" +) + +// AddGenesisAccountCmd returns add-genesis-account cobra Command. 
+func AddGenesisAccountCmd(defaultNodeHome string) *cobra.Command { + cmd := &cobra.Command{ + Use: "add-genesis-account [address_or_key_name] [coin][,[coin]]", + Short: "Add a genesis account to genesis.json", + Long: `Add a genesis account to genesis.json. The provided account must specify +the account address or key name and a list of initial coins. If a key name is given, +the address will be looked up in the local Keybase. The list of initial tokens must +contain valid denominations. Accounts may optionally be supplied with vesting parameters. +`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + clientCtx := client.GetClientContextFromCmd(cmd) + cdc := clientCtx.Codec + + serverCtx := server.GetServerContextFromCmd(cmd) + config := serverCtx.Config + + config.SetRoot(clientCtx.HomeDir) + + coins, err := sdk.ParseCoinsNormalized(args[1]) + if err != nil { + return fmt.Errorf("failed to parse coins: %w", err) + } + + addr, err := sdk.AccAddressFromBech32(args[0]) + if err != nil { + inBuf := bufio.NewReader(cmd.InOrStdin()) + keyringBackend, err := cmd.Flags().GetString(flags.FlagKeyringBackend) + if err != nil { + return err + } + + // attempt to lookup address from Keybase if no address was provided + kb, err := keyring.New(sdk.KeyringServiceName(), keyringBackend, clientCtx.HomeDir, inBuf, cdc) + if err != nil { + return err + } + + info, err := kb.Key(args[0]) + if err != nil { + return fmt.Errorf("failed to get address from Keybase: %w", err) + } + + addr, err = info.GetAddress() + if err != nil { + return fmt.Errorf("failed to get address from Keybase: %w", err) + } + } + + vestingStart, err := cmd.Flags().GetInt64(flagVestingStart) + if err != nil { + return err + } + vestingEnd, err := cmd.Flags().GetInt64(flagVestingEnd) + if err != nil { + return err + } + vestingAmtStr, err := cmd.Flags().GetString(flagVestingAmt) + if err != nil { + return err + } + + vestingAmt, err := sdk.ParseCoinsNormalized(vestingAmtStr) + if 
err != nil { + return fmt.Errorf("failed to parse vesting amount: %w", err) + } + + // create concrete account type based on input parameters + var genAccount authtypes.GenesisAccount + + balances := banktypes.Balance{Address: addr.String(), Coins: coins.Sort()} + baseAccount := authtypes.NewBaseAccount(addr, nil, 0, 0) + + if !vestingAmt.IsZero() { + baseVestingAccount := authvesting.NewBaseVestingAccount(baseAccount, vestingAmt.Sort(), vestingEnd) + + if (balances.Coins.IsZero() && !baseVestingAccount.OriginalVesting.IsZero()) || + baseVestingAccount.OriginalVesting.IsAnyGT(balances.Coins) { + return errors.New("vesting amount cannot be greater than total amount") + } + + switch { + case vestingStart != 0 && vestingEnd != 0: + genAccount = authvesting.NewContinuousVestingAccountRaw(baseVestingAccount, vestingStart) + + case vestingEnd != 0: + genAccount = authvesting.NewDelayedVestingAccountRaw(baseVestingAccount) + + default: + return errors.New("invalid vesting parameters; must supply start and end time or end time") + } + } else { + genAccount = baseAccount + } + + if err := genAccount.Validate(); err != nil { + return fmt.Errorf("failed to validate new genesis account: %w", err) + } + + genFile := config.GenesisFile() + appState, genDoc, err := genutiltypes.GenesisStateFromGenFile(genFile) + if err != nil { + return fmt.Errorf("failed to unmarshal genesis state: %w", err) + } + + authGenState := authtypes.GetGenesisStateFromAppState(cdc, appState) + + accs, err := authtypes.UnpackAccounts(authGenState.Accounts) + if err != nil { + return fmt.Errorf("failed to get accounts from any: %w", err) + } + + if accs.Contains(addr) { + return fmt.Errorf("cannot add account at existing address %s", addr) + } + + // Add the new account to the set of genesis accounts and sanitize the + // accounts afterwards. 
+ accs = append(accs, genAccount) + accs = authtypes.SanitizeGenesisAccounts(accs) + + genAccs, err := authtypes.PackAccounts(accs) + if err != nil { + return fmt.Errorf("failed to convert accounts into any's: %w", err) + } + authGenState.Accounts = genAccs + + authGenStateBz, err := cdc.MarshalJSON(&authGenState) + if err != nil { + return fmt.Errorf("failed to marshal auth genesis state: %w", err) + } + + appState[authtypes.ModuleName] = authGenStateBz + + bankGenState := banktypes.GetGenesisStateFromAppState(cdc, appState) + bankGenState.Balances = append(bankGenState.Balances, balances) + bankGenState.Balances = banktypes.SanitizeGenesisBalances(bankGenState.Balances) + + bankGenStateBz, err := cdc.MarshalJSON(bankGenState) + if err != nil { + return fmt.Errorf("failed to marshal bank genesis state: %w", err) + } + + appState[banktypes.ModuleName] = bankGenStateBz + + appStateJSON, err := json.Marshal(appState) + if err != nil { + return fmt.Errorf("failed to marshal application genesis state: %w", err) + } + + genDoc.AppState = appStateJSON + return genutil.ExportGenesisFile(genDoc, genFile) + }, + } + + cmd.Flags().String(flags.FlagKeyringBackend, flags.DefaultKeyringBackend, "Select keyring's backend (os|file|kwallet|pass|test)") + cmd.Flags().String(flags.FlagHome, defaultNodeHome, "The application home directory") + cmd.Flags().String(flagVestingAmt, "", "amount of coins for vesting accounts") + cmd.Flags().Int64(flagVestingStart, 0, "schedule start time (unix epoch) for vesting accounts") + cmd.Flags().Int64(flagVestingEnd, 0, "schedule end time (unix epoch) for vesting accounts") + flags.AddQueryFlagsToCmd(cmd) + + return cmd +} +``` + +This command allows one to generate new accounts: `appd add-genesis-account`. 
+ +Next, create the new file `./cmd/{binaryNamePrefix}d/cmd/root.go` with the following code: + +```go +package cmd + +import ( + "errors" + "io" + "os" + "path/filepath" + "strings" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/config" + "github.com/cosmos/cosmos-sdk/client/debug" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/client/keys" + "github.com/cosmos/cosmos-sdk/client/rpc" + "github.com/cosmos/cosmos-sdk/server" + serverconfig "github.com/cosmos/cosmos-sdk/server/config" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + "github.com/cosmos/cosmos-sdk/snapshots" + snapshottypes "github.com/cosmos/cosmos-sdk/snapshots/types" + "github.com/cosmos/cosmos-sdk/store" + sdk "github.com/cosmos/cosmos-sdk/types" + authcmd "github.com/cosmos/cosmos-sdk/x/auth/client/cli" + "github.com/cosmos/cosmos-sdk/x/auth/types" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/cosmos/cosmos-sdk/x/crisis" + genutilcli "github.com/cosmos/cosmos-sdk/x/genutil/client/cli" + "github.com/ignite/cli/ignite/services/network" + "github.com/spf13/cast" + "github.com/spf13/cobra" + "github.com/spf13/pflag" + tmcfg "github.com/tendermint/tendermint/config" + tmcli "github.com/tendermint/tendermint/libs/cli" + "github.com/tendermint/tendermint/libs/log" + dbm "github.com/tendermint/tm-db" + // this line is used by starport scaffolding # root/moduleImport + + "{ModulePath}/app" + appparams "{ModulePath}/app/params" +) + +// NewRootCmd creates a new root command for a Cosmos SDK application +func NewRootCmd() (*cobra.Command, appparams.EncodingConfig) { + encodingConfig := app.MakeEncodingConfig() + initClientCtx := client.Context{}. + WithCodec(encodingConfig.Marshaler). + WithInterfaceRegistry(encodingConfig.InterfaceRegistry). + WithTxConfig(encodingConfig.TxConfig). + WithLegacyAmino(encodingConfig.Amino). + WithInput(os.Stdin). 
+ WithAccountRetriever(types.AccountRetriever{}). + WithHomeDir(app.DefaultNodeHome). + WithViper("") + + rootCmd := &cobra.Command{ + Use: app.Name + "d", + Short: "Stargate CosmosHub App", + PersistentPreRunE: func(cmd *cobra.Command, _ []string) error { + // set the default command outputs + cmd.SetOut(cmd.OutOrStdout()) + cmd.SetErr(cmd.ErrOrStderr()) + initClientCtx, err := client.ReadPersistentCommandFlags(initClientCtx, cmd.Flags()) + if err != nil { + return err + } + initClientCtx, err = config.ReadFromClientConfig(initClientCtx) + if err != nil { + return err + } + + if err := client.SetCmdClientContextHandler(initClientCtx, cmd); err != nil { + return err + } + + customAppTemplate, customAppConfig := initAppConfig() + customTMConfig := initTendermintConfig() + return server.InterceptConfigsPreRunHandler( + cmd, customAppTemplate, customAppConfig, customTMConfig, + ) + }, + } + + initRootCmd(rootCmd, encodingConfig) + overwriteFlagDefaults(rootCmd, map[string]string{ + flags.FlagChainID: strings.ReplaceAll(app.Name, "-", ""), + flags.FlagKeyringBackend: "test", + }) + + return rootCmd, encodingConfig +} + +// initTendermintConfig helps to override default Tendermint Config values. +// return tmcfg.DefaultConfig if no custom configuration is required for the application. 
+func initTendermintConfig() *tmcfg.Config { + cfg := tmcfg.DefaultConfig() + return cfg +} + +func initRootCmd( + rootCmd *cobra.Command, + encodingConfig appparams.EncodingConfig, +) { + // Set config + initSDKConfig() + + rootCmd.AddCommand( + genutilcli.InitCmd(app.ModuleBasics, app.DefaultNodeHome), + genutilcli.CollectGenTxsCmd(banktypes.GenesisBalancesIterator{}, app.DefaultNodeHome), + genutilcli.MigrateGenesisCmd(), + genutilcli.GenTxCmd( + app.ModuleBasics, + encodingConfig.TxConfig, + banktypes.GenesisBalancesIterator{}, + app.DefaultNodeHome, + ), + genutilcli.ValidateGenesisCmd(app.ModuleBasics), + AddGenesisAccountCmd(app.DefaultNodeHome), + tmcli.NewCompletionCmd(rootCmd, true), + debug.Cmd(), + config.Cmd(), + // this line is used by starport scaffolding # root/commands + ) + + a := appCreator{ + encodingConfig, + } + + // add server commands + server.AddCommands( + rootCmd, + app.DefaultNodeHome, + a.newApp, + a.appExport, + addModuleInitFlags, + ) + + // add keybase, auxiliary RPC, query, and tx child commands + rootCmd.AddCommand( + rpc.StatusCommand(), + queryCommand(), + txCommand(), + keys.Commands(app.DefaultNodeHome), + ) +} + +// queryCommand returns the sub-command to send queries to the app +func queryCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "query", + Aliases: []string{"q"}, + Short: "Querying subcommands", + DisableFlagParsing: true, + SuggestionsMinimumDistance: 2, + RunE: client.ValidateCmd, + } + + cmd.AddCommand( + authcmd.GetAccountCmd(), + rpc.ValidatorCommand(), + rpc.BlockCommand(), + authcmd.QueryTxsByEventsCmd(), + authcmd.QueryTxCmd(), + ) + + app.ModuleBasics.AddQueryCommands(cmd) + cmd.PersistentFlags().String(flags.FlagChainID, "", "The network chain ID") + + return cmd +} + +// txCommand returns the sub-command to send transactions to the app +func txCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "tx", + Short: "Transactions subcommands", + DisableFlagParsing: true, + 
SuggestionsMinimumDistance: 2, + RunE: client.ValidateCmd, + } + + cmd.AddCommand( + authcmd.GetSignCommand(), + authcmd.GetSignBatchCommand(), + authcmd.GetMultiSignCommand(), + authcmd.GetValidateSignaturesCommand(), + flags.LineBreak, + authcmd.GetBroadcastCommand(), + authcmd.GetEncodeCommand(), + authcmd.GetDecodeCommand(), + ) + + app.ModuleBasics.AddTxCommands(cmd) + cmd.PersistentFlags().String(flags.FlagChainID, "", "The network chain ID") + + return cmd +} + +func addModuleInitFlags(startCmd *cobra.Command) { + crisis.AddModuleInitFlags(startCmd) + // this line is used by starport scaffolding # root/arguments +} + +func overwriteFlagDefaults(c *cobra.Command, defaults map[string]string) { + set := func(s *pflag.FlagSet, key, val string) { + if f := s.Lookup(key); f != nil { + f.DefValue = val + f.Value.Set(val) + } + } + for key, val := range defaults { + set(c.Flags(), key, val) + set(c.PersistentFlags(), key, val) + } + for _, c := range c.Commands() { + overwriteFlagDefaults(c, defaults) + } +} + +type appCreator struct { + encodingConfig appparams.EncodingConfig +} + +// newApp creates a new Cosmos SDK app +func (a appCreator) newApp( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + appOpts servertypes.AppOptions, +) servertypes.Application { + var cache sdk.MultiStorePersistentCache + + if cast.ToBool(appOpts.Get(server.FlagInterBlockCache)) { + cache = store.NewCommitKVStoreCacheManager() + } + + skipUpgradeHeights := make(map[int64]bool) + for _, h := range cast.ToIntSlice(appOpts.Get(server.FlagUnsafeSkipUpgrades)) { + skipUpgradeHeights[int64(h)] = true + } + + pruningOpts, err := server.GetPruningOptionsFromFlags(appOpts) + if err != nil { + panic(err) + } + + snapshotDir := filepath.Join(cast.ToString(appOpts.Get(flags.FlagHome)), "data", "snapshots") + snapshotDB, err := dbm.NewDB("metadata", dbm.GoLevelDBBackend, snapshotDir) + if err != nil { + panic(err) + } + snapshotStore, err := snapshots.NewStore(snapshotDB, snapshotDir) + if 
err != nil { + panic(err) + } + + snapshotOptions := snapshottypes.NewSnapshotOptions( + cast.ToUint64(appOpts.Get(server.FlagStateSyncSnapshotInterval)), + cast.ToUint32(appOpts.Get(server.FlagStateSyncSnapshotKeepRecent)), + ) + + return app.New( + logger, + db, + traceStore, + true, + skipUpgradeHeights, + cast.ToString(appOpts.Get(flags.FlagHome)), + cast.ToUint(appOpts.Get(server.FlagInvCheckPeriod)), + a.encodingConfig, + appOpts, + baseapp.SetPruning(pruningOpts), + baseapp.SetMinGasPrices(cast.ToString(appOpts.Get(server.FlagMinGasPrices))), + baseapp.SetMinRetainBlocks(cast.ToUint64(appOpts.Get(server.FlagMinRetainBlocks))), + baseapp.SetHaltHeight(cast.ToUint64(appOpts.Get(server.FlagHaltHeight))), + baseapp.SetHaltTime(cast.ToUint64(appOpts.Get(server.FlagHaltTime))), + baseapp.SetInterBlockCache(cache), + baseapp.SetTrace(cast.ToBool(appOpts.Get(server.FlagTrace))), + baseapp.SetIndexEvents(cast.ToStringSlice(appOpts.Get(server.FlagIndexEvents))), + baseapp.SetSnapshot(snapshotStore, snapshotOptions), + ) +} + +// appExport creates a new simapp (optionally at a given height) +func (a appCreator) appExport( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + height int64, + forZeroHeight bool, + jailAllowedAddrs []string, + appOpts servertypes.AppOptions, +) (servertypes.ExportedApp, error) { + homePath, ok := appOpts.Get(flags.FlagHome).(string) + if !ok || homePath == "" { + return servertypes.ExportedApp{}, errors.New("application home not set") + } + + app := app.New( + logger, + db, + traceStore, + height == -1, // -1: no height provided + map[int64]bool{}, + homePath, + uint(1), + a.encodingConfig, + appOpts, + ) + + if height != -1 { + if err := app.LoadHeight(height); err != nil { + return servertypes.ExportedApp{}, err + } + } + + return app.ExportAppStateAndValidators(forZeroHeight, jailAllowedAddrs) +} + +// initAppConfig helps to override default appConfig template and configs. 
+// return "", nil if no custom configuration is required for the application. +func initAppConfig() (string, interface{}) { + // The following code snippet is just for reference. + + // WASMConfig defines configuration for the wasm module. + type WASMConfig struct { + // This is the maximum sdk gas (wasm and storage) that we allow for any x/wasm "smart" queries + QueryGasLimit uint64 `mapstructure:"query_gas_limit"` + + // Address defines the gRPC-web server to listen on + LruSize uint64 `mapstructure:"lru_size"` + } + + type CustomAppConfig struct { + serverconfig.Config + + WASM WASMConfig `mapstructure:"wasm"` + } + + // Optionally allow the chain developer to overwrite the SDK's default + // server config. + srvCfg := serverconfig.DefaultConfig() + // The SDK's default minimum gas price is set to "" (empty value) inside + // app.toml. If left empty by validators, the node will halt on startup. + // However, the chain developer can set a default app.toml value for their + // validators here. + // + // In summary: + // - if you leave srvCfg.MinGasPrices = "", all validators MUST tweak their + // own app.toml config, + // - if you set srvCfg.MinGasPrices non-empty, validators CAN tweak their + // own app.toml to override, or use this default value. + // + // In simapp, we set the min gas prices to 0. 
+ srvCfg.MinGasPrices = "0stake" + + customAppConfig := CustomAppConfig{ + Config: *srvCfg, + WASM: WASMConfig{ + LruSize: 1, + QueryGasLimit: 300000, + }, + } + + customAppTemplate := serverconfig.DefaultConfigTemplate + ` +[wasm] +# This is the maximum sdk gas (wasm and storage) that we allow for any x/wasm "smart" queries +query_gas_limit = 300000 +# This is the number of wasm vm instances we keep cached in memory for speed-up +# Warning: this is currently unstable and may lead to crashes, best to keep for 0 unless testing locally +lru_size = 0` + + return customAppTemplate, customAppConfig +} +``` + +Finally, modify `./cmd/{binaryNamePrefix}d/main.go` to include the new changes: + +```go +package main + +import ( + "os" + + "github.com/cosmos/cosmos-sdk/server" + svrcmd "github.com/cosmos/cosmos-sdk/server/cmd" + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" + + "{ModulePath}/app" + "{ModulePath}/cmd/{BinaryNamePrefix}d/cmd" +) + +func main() { + // highlight-start + rootCmd, _ := cmd.NewRootCmd() + if err := svrcmd.Execute(rootCmd, "", app.DefaultNodeHome); err != nil { + switch e := err.(type) { + case server.ErrorCode: + os.Exit(e.Code) + + default: + os.Exit(1) + } + } + // highlight-end +} +``` + +#### Migration in `testutil` package + +Modify `./testutil/network/network.go` to include the new changes: + + +```go +package network + +import ( + "fmt" + "testing" + "time" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/crypto/hd" + "github.com/cosmos/cosmos-sdk/crypto/keyring" + pruningtypes "github.com/cosmos/cosmos-sdk/pruning/types" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + "github.com/cosmos/cosmos-sdk/simapp" + "github.com/cosmos/cosmos-sdk/testutil/network" + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + "github.com/stretchr/testify/require" + tmrand "github.com/tendermint/tendermint/libs/rand" + tmdb "github.com/tendermint/tm-db" + + 
// highlight-next-line + "{ModulePath}/app" + + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" +) + +// ... + +// DefaultConfig will initialize config for the network with custom application, +// genesis and single validator. All other parameters are inherited from cosmos-sdk/testutil/network.DefaultConfig +func DefaultConfig() network.Config { + // highlight-next-line + encoding := app.MakeEncodingConfig() + // remove-next-line + encoding := cosmoscmd.MakeEncodingConfig(app.ModuleBasics) + return network.Config{ + Codec: encoding.Marshaler, + TxConfig: encoding.TxConfig, + LegacyAmino: encoding.Amino, + InterfaceRegistry: encoding.InterfaceRegistry, + AccountRetriever: authtypes.AccountRetriever{}, + AppConstructor: func(val network.Validator) servertypes.Application { + return app.New( + val.Ctx.Logger, tmdb.NewMemDB(), nil, true, map[int64]bool{}, val.Ctx.Config.RootDir, 0, + encoding, + simapp.EmptyAppOptions{}, + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.AppConfig.Pruning)), + baseapp.SetMinGasPrices(val.AppConfig.MinGasPrices), + ) + }, + GenesisState: app.ModuleBasics.DefaultGenesis(encoding.Marshaler), + TimeoutCommit: 2 * time.Second, + ChainID: "chain-" + tmrand.NewRand().Str(6), + NumValidators: 1, + BondDenom: sdk.DefaultBondDenom, + MinGasPrices: fmt.Sprintf("0.000006%s", sdk.DefaultBondDenom), + AccountTokens: sdk.TokensFromConsensusPower(1000, sdk.DefaultPowerReduction), + StakingTokens: sdk.TokensFromConsensusPower(500, sdk.DefaultPowerReduction), + BondedTokens: sdk.TokensFromConsensusPower(100, sdk.DefaultPowerReduction), + PruningStrategy: pruningtypes.PruningOptionNothing, + CleanupDir: true, + SigningAlgo: string(hd.Secp256k1Type), + KeyringOptions: []keyring.Option{}, + } +} +``` + + --- + +## Fix ICA controller keeper wiring + +Related issue: https://github.com/ignite/cli/issues/2867 + +Apply the following changes to `app/app.go` file : + +```go +package app + +import ( + + // highlight-start + 
icacontrollerkeeper "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts/controller/keeper" + icacontrollertypes "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts/controller/types" + // highlight-end + // ... +) + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + + // ... + + keys := sdk.NewKVStoreKeys( + authtypes.StoreKey, authz.ModuleName, banktypes.StoreKey, + stakingtypes.StoreKey, + minttypes.StoreKey, distrtypes.StoreKey, slashingtypes.StoreKey, + govtypes.StoreKey, + paramstypes.StoreKey, ibchost.StoreKey, upgradetypes.StoreKey, + feegrant.StoreKey, evidencetypes.StoreKey, + ibctransfertypes.StoreKey, icahosttypes.StoreKey, + capabilitytypes.StoreKey, group.StoreKey, + // highlight-next-line + icacontrollertypes.StoreKey, + yourchainmoduletypes.StoreKey, + // this line is used by starport scaffolding # stargate/app/storeKey + ) + + // ... + + // remove-next-line + icaModule := ica.NewAppModule(nil, &app.ICAHostKeeper) + // highlight-start + icaControllerKeeper := icacontrollerkeeper.NewKeeper( + appCodec, keys[icacontrollertypes.StoreKey], + app.GetSubspace(icacontrollertypes.SubModuleName), + app.IBCKeeper.ChannelKeeper, // may be replaced with middleware such as ics29 fee + app.IBCKeeper.ChannelKeeper, &app.IBCKeeper.PortKeeper, + scopedICAControllerKeeper, app.MsgServiceRouter(), + ) + icaModule := ica.NewAppModule(&icaControllerKeeper, &app.ICAHostKeeper) + // highlight-end + icaHostIBCModule := icahost.NewIBCModule(app.ICAHostKeeper) + + // ... +} + +// ... 
+ +// initParamsKeeper init params keeper and its subspaces +func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey storetypes.StoreKey) paramskeeper.Keeper { + paramsKeeper := paramskeeper.NewKeeper(appCodec, legacyAmino, key, tkey) + + paramsKeeper.Subspace(authtypes.ModuleName) + paramsKeeper.Subspace(banktypes.ModuleName) + paramsKeeper.Subspace(stakingtypes.ModuleName) + paramsKeeper.Subspace(minttypes.ModuleName) + paramsKeeper.Subspace(distrtypes.ModuleName) + paramsKeeper.Subspace(slashingtypes.ModuleName) + paramsKeeper.Subspace(govtypes.ModuleName).WithKeyTable(govv1.ParamKeyTable()) + paramsKeeper.Subspace(crisistypes.ModuleName) + paramsKeeper.Subspace(ibctransfertypes.ModuleName) + paramsKeeper.Subspace(ibchost.ModuleName) + // highlight-next-line + paramsKeeper.Subspace(icacontrollertypes.SubModuleName) + paramsKeeper.Subspace(icahosttypes.SubModuleName) + paramsKeeper.Subspace(mychainmoduletypes.ModuleName) + // this line is used by starport scaffolding # stargate/app/paramSubspace + + return paramsKeeper +} +``` + + --- + +## Fix capability keeper not sealed + +Related issue: https://github.com/ignite/cli/issues/1921 + +Apply the following change to `app/app.go` file : + +```go +package app + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + + // ... + + // this line is used by starport scaffolding # stargate/app/keeperDefinition + + // highlight-start + // Sealing prevents other modules from creating scoped sub-keepers + app.CapabilityKeeper.Seal() + // highlight-end + + // Create static IBC router, add transfer route, then set and seal it + + // ... 
+} +``` diff --git a/docs/versioned_docs/version-v28/06-migration/v0.25.1.md b/docs/versioned_docs/version-v28/06-migration/v0.25.1.md new file mode 100644 index 0000000..f3d1cc2 --- /dev/null +++ b/docs/versioned_docs/version-v28/06-migration/v0.25.1.md @@ -0,0 +1,67 @@ +--- +sidebar_position: 993 +title: v0.25.1 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.25.1. changes are required to use Ignite CLI v0.25.1. +--- + +## Drabonberry fix + +`v0.25.1` contains the Dragonberry fix, update your `go.mod` as : + +```sh +require ( + // remove-next-line + github.com/ignite/cli v0.24.0 + // highlight-next-line + github.com/ignite/cli v0.25.1 +) + +// highlight-next-line +replace github.com/confio/ics23/go => github.com/cosmos/cosmos-sdk/ics23/go v0.8.0 +``` + +Then run: + +``` +$ go mod tidy +``` + +As a result, you should see `cosmos-sdk` and `ibc-go` upgraded as well. + +Finally, apply the following change to `app/app.go`: + +```go +package app + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + + // ... + + app.ICAHostKeeper = icahostkeeper.NewKeeper( + appCodec, keys[icahosttypes.StoreKey], + app.GetSubspace(icahosttypes.SubModuleName), + app.IBCKeeper.ChannelKeeper, + // highlight-next-line + app.IBCKeeper.ChannelKeeper, + &app.IBCKeeper.PortKeeper, + app.AccountKeeper, + scopedICAHostKeeper, + app.MsgServiceRouter(), + ) + + // ... 
+ +} +``` diff --git a/docs/versioned_docs/version-v28/06-migration/v0.26.0.md b/docs/versioned_docs/version-v28/06-migration/v0.26.0.md new file mode 100644 index 0000000..96148ee --- /dev/null +++ b/docs/versioned_docs/version-v28/06-migration/v0.26.0.md @@ -0,0 +1,263 @@ +--- +sidebar_position: 992 +title: v0.26.0 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.26.0. changes are required to use Ignite CLI v0.26.0. +--- + +Ignite CLI `v0.26.0` is fully compatible with chains that are compatible with `v0.25.1`. Please follow the existing +migration guides if your chain is not upgraded to `v0.25.1` support. + +## Go Version + +Chains that are newly scaffolded with Ignite CLI `v0.26.0` now require `go 1.19` in their `go.mod` files. It is +recommended that chains scaffolded with an older version of Ignite CLI also bump their required `go` version and update +their tooling to the latest version. + +## ibc-go v6 + +Chains that are newly scaffolded with Ignite CLI `v0.26.0` now use `ibc-go/v6` for ibc functionality. It is not +necessary, but recommended to upgrade to the newest version of `ibc-go`. Most migrations can be done by following the +`ibc-go` [migration guide](https://github.com/cosmos/ibc-go/blob/v6.2.0/docs/migrations/v5-to-v6.md), but there are some +specific changes that will need to be followed for Ignite scaffolded chains. + +### Removing `cosmosibckeeper` + +Ignite CLI `v0.26.0` has deprecated [pkg/cosmosibckeeper](https://github.com/ignite/cli/tree/v0.26.0/ignite/pkg/cosmosibckeeper). +This package contained interfaces for ibc-related keepers. Newly scaffolded chains now include the interface files in their +`./x/{moduleName}/types` directory in a new `expected_ibc_keeper.go` file. 
To migrate, create the following file for +each module: + +```go title="x/{moduleName}/types/expected_ibc_keeper.go" +package types + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + clienttypes "github.com/cosmos/ibc-go/v6/modules/core/02-client/types" + channeltypes "github.com/cosmos/ibc-go/v6/modules/core/04-channel/types" +) + +// ChannelKeeper defines the expected IBC channel keeper. +type ChannelKeeper interface { + GetChannel(ctx sdk.Context, portID, channelID string) (channeltypes.Channel, bool) + GetNextSequenceSend(ctx sdk.Context, portID, channelID string) (uint64, bool) + SendPacket( + ctx sdk.Context, + channelCap *capabilitytypes.Capability, + sourcePort string, + sourceChannel string, + timeoutHeight clienttypes.Height, + timeoutTimestamp uint64, + data []byte, + ) (uint64, error) + ChanCloseInit(ctx sdk.Context, portID, channelID string, chanCap *capabilitytypes.Capability) error +} + +// PortKeeper defines the expected IBC port keeper. +type PortKeeper interface { + BindPort(ctx sdk.Context, portID string) *capabilitytypes.Capability +} + +// ScopedKeeper defines the expected IBC scoped keeper. 
+type ScopedKeeper interface { + GetCapability(ctx sdk.Context, name string) (*capabilitytypes.Capability, bool) + AuthenticateCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) bool + ClaimCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) error +} +``` + +Next, make the following updates to each `x/{moduleName}/keeper/keeper.go` file for each ibc-enabled +module in your project: + +```go title="x/{moduleName}/keeper/keeper.go" +package keeper + +import ( + "fmt" + + // remove-start + "blogibc/x/testibc/types" + "github.com/cosmos/cosmos-sdk/codec" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" + "github.com/ignite/cli/ignite/pkg/cosmosibckeeper" + "github.com/tendermint/tendermint/libs/log" + // remove-end + // highlight-start + "github.com/cosmos/cosmos-sdk/codec" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" + channeltypes "github.com/cosmos/ibc-go/v6/modules/core/04-channel/types" + host "github.com/cosmos/ibc-go/v6/modules/core/24-host" + "github.com/cosmos/ibc-go/v6/modules/core/exported" + "github.com/tendermint/tendermint/libs/log" + + "{appName}/x/{moduleName}/types" + // highlight-end +) + +type ( + Keeper struct { + // remove-line-next + *cosmosibckeeper.Keeper + cdc codec.BinaryCodec + storeKey storetypes.StoreKey + memKey storetypes.StoreKey + paramstore paramtypes.Subspace + + // highlight-start + channelKeeper types.ChannelKeeper + portKeeper types.PortKeeper + scopedKeeper exported.ScopedKeeper + // highlight-end + } +) + +func NewKeeper( + cdc codec.BinaryCodec, + storeKey, + memKey storetypes.StoreKey, + ps paramtypes.Subspace, + // highlight-start + 
channelKeeper types.ChannelKeeper, + portKeeper types.PortKeeper, + scopedKeeper types.ScopedKeeper, + // highlight-end +) *Keeper { + // set KeyTable if it has not already been set + if !ps.HasKeyTable() { + ps = ps.WithKeyTable(types.ParamKeyTable()) + } + + return &Keeper{ + // remove-start + Keeper: cosmosibckeeper.NewKeeper( + types.PortKey, + storeKey, + channelKeeper, + portKeeper, + scopedKeeper, + ), + // remove-end + cdc: cdc, + storeKey: storeKey, + memKey: memKey, + paramstore: ps, + // highlight-start + channelKeeper: channelKeeper, + portKeeper: portKeeper, + scopedKeeper: scopedKeeper, + // highlight-end + } +} + +// highlight-start +// ---------------------------------------------------------------------------- +// IBC Keeper Logic +// ---------------------------------------------------------------------------- + +// ChanCloseInit defines a wrapper function for the channel Keeper's function. +func (k Keeper) ChanCloseInit(ctx sdk.Context, portID, channelID string) error { + capName := host.ChannelCapabilityPath(portID, channelID) + chanCap, ok := k.scopedKeeper.GetCapability(ctx, capName) + if !ok { + return errorsmod.Wrapf(channeltypes.ErrChannelCapabilityNotFound, "could not retrieve channel capability at: %s", capName) + } + return k.channelKeeper.ChanCloseInit(ctx, portID, channelID, chanCap) +} + +// IsBound checks if the IBC app module is already bound to the desired port +func (k Keeper) IsBound(ctx sdk.Context, portID string) bool { + _, ok := k.scopedKeeper.GetCapability(ctx, host.PortPath(portID)) + return ok +} + +// BindPort defines a wrapper function for the port Keeper's function in +// order to expose it to module's InitGenesis function +func (k Keeper) BindPort(ctx sdk.Context, portID string) error { + cap := k.portKeeper.BindPort(ctx, portID) + return k.ClaimCapability(ctx, cap, host.PortPath(portID)) +} + +// GetPort returns the portID for the IBC app module. 
Used in ExportGenesis +func (k Keeper) GetPort(ctx sdk.Context) string { + store := ctx.KVStore(k.storeKey) + return string(store.Get(types.PortKey)) +} + +// SetPort sets the portID for the IBC app module. Used in InitGenesis +func (k Keeper) SetPort(ctx sdk.Context, portID string) { + store := ctx.KVStore(k.storeKey) + store.Set(types.PortKey, []byte(portID)) +} + +// AuthenticateCapability wraps the scopedKeeper's AuthenticateCapability function +func (k Keeper) AuthenticateCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) bool { + return k.scopedKeeper.AuthenticateCapability(ctx, cap, name) +} + +// ClaimCapability allows the IBC app module to claim a capability that core IBC +// passes to it +func (k Keeper) ClaimCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) error { + return k.scopedKeeper.ClaimCapability(ctx, cap, name) +} + +//highlight-end + +func (k Keeper) Logger(ctx sdk.Context) log.Logger { + return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) +} +``` + +### Remaining migration + +After all uses of `cosmosibckeeper` have been removed, you can follow any remaining steps in the`ibc-go`[migration guide](https://github.com/cosmos/ibc-go/blob/v6.2.0/docs/migrations/v5-to-v6.md). + +## Scaffolded Release Workflow + +The develop branch of the CLI has been deprecated. To continue using the release workflow that uses the CLI to +automatically build and release your chain's binaries, replace develop with main in the following lines: + +```yaml title=".github/workflows/release.yml" +... 
+ +jobs: + might_release: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Prepare Release Variables + id: vars + // highlight-next-line + uses: ignite/cli/actions/release/vars@main + - name: Issue Release Assets + // highlight-next-line + uses: ignite/cli/actions/cli@main + if: ${{ steps.vars.outputs.should_release == 'true' }} + with: + args: chain build --release --release.prefix ${{ steps.vars.outputs.tarball_prefix }} -t linux:amd64 -t darwin:amd64 -t darwin:arm64 + - name: Delete the "latest" Release + uses: dev-drprasad/delete-tag-and-release@v0.2.0 + if: ${{ steps.vars.outputs.is_release_type_latest == 'true' }} + with: + tag_name: ${{ steps.vars.outputs.tag_name }} + delete_release: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Publish the Release + uses: softprops/action-gh-release@v1 + if: ${{ steps.vars.outputs.should_release == 'true' }} + with: + tag_name: ${{ steps.vars.outputs.tag_name }} + files: release/* + prerelease: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} +``` diff --git a/docs/versioned_docs/version-v28/06-migration/v0.27.1.md b/docs/versioned_docs/version-v28/06-migration/v0.27.1.md new file mode 100644 index 0000000..5d6d1ea --- /dev/null +++ b/docs/versioned_docs/version-v28/06-migration/v0.27.1.md @@ -0,0 +1,1208 @@ +--- +sidebar_position: 991 +title: v0.27.1 +description: For chains that were scaffolded with Ignite CLI versions lower than v0.27.0. changes are required to use Ignite CLI v0.27.1. 
+--- + +## Cosmos SDK v0.47.3 upgrade notes + +### Imports + +To use the new cosmos SDK make sure you update `go.mod` dependencies: + +```text title="go.mod" +go 1.20 + +require ( + // remove-start + github.com/cosmos/cosmos-sdk v0.46.7 + github.com/tendermint/tendermint v0.34.24 + github.com/tendermint/tm-db v0.6.7 + github.com/cosmos/ibc-go/v7 v7.1.0 + github.com/gogo/protobuf v1.3.3 + github.com/regen-network/cosmos-proto v0.3.1 + // remove-end + // highlight-start + cosmossdk.io/api v0.3.1 + github.com/cosmos/cosmos-sdk v0.47.3 + github.com/cometbft/cometbft v0.37.1 + github.com/cometbft/cometbft-db v0.7.0 + github.com/cosmos/ibc-go/v6 v6.1.0 + github.com/cosmos/gogoproto v1.4.7 + // highlight-end + + // ... +) + +replace ( + // remove-start + github.com/confio/ics23/go => github.com/cosmos/cosmos-sdk/ics23/go v0.8.0 + github.com/gogo/protobuf => github.com/regen-network/protobuf v1.3.3-alpha.regen.1 + // remove-end + // highlight-next-line + github.com/syndtr/goleveldb => github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 +) +``` + +The Cosmos SDK has migrated to CometBFT as its default consensus engine which requires +changes in your app imports: + +1. Replace `github.com/tendermint/tendermint` by `github.com/cometbft/cometbft` +2. Replace `github.com/tendermint/tm-db` by `github.com/cometbft/cometbft-db` +3. Verify `github.com/tendermint/tendermint` is not an indirect or direct dependency + +The SDK has also migrated from `gogo/protobuf` to `cosmos/gogoproto`. This means you must +replace all `github.com/gogo/protobuf` imports with `github.com/cosmos/gogoproto`. This change +might introduce breaking changes to your proto layout. Follow the official +[Cosmos migration guide](https://docs.cosmos.network/main/migrations/upgrading#gogoproto-import-paths) +to make sure you are using the correct layout. + +You might need to replace the following imports: + +1. 
Replace `github.com/cosmos/cosmos-sdk/simapp` by `cosmossdk.io/simapp` + +### App changes + +Applications scaffolded with older version of Ignite CLI would require the following changes +to some of the app files: + +```text title="app/app.go" +import ( + //... + + // remove-next-line + tmjson "github.com/tendermint/tendermint/libs/json" + // highlight-next-line + "encoding/json" + + // highlight-start + autocliv1 "cosmossdk.io/api/cosmos/autocli/v1" + reflectionv1 "cosmossdk.io/api/cosmos/reflection/v1" + "github.com/cosmos/cosmos-sdk/runtime" + runtimeservices "github.com/cosmos/cosmos-sdk/runtime/services" + "github.com/cosmos/cosmos-sdk/x/consensus" + consensusparamkeeper "github.com/cosmos/cosmos-sdk/x/consensus/keeper" + consensusparamtypes "github.com/cosmos/cosmos-sdk/x/consensus/types" + // highlight-end +) + +func getGovProposalHandlers() []govclient.ProposalHandler { + // ... + govProposalHandlers = append(govProposalHandlers, + paramsclient.ProposalHandler, + // remove-next-line + distrclient.ProposalHandler, + upgradeclient.LegacyProposalHandler, + // ... + ) + + return govProposalHandlers +} + +var ( + // ... + + ModuleBasics = module.NewBasicManager( + auth.AppModuleBasic{}, + authzmodule.AppModuleBasic{}, + // remove-next-line + genutil.AppModuleBasic{}, + // highlight-next-line + genutil.NewAppModuleBasic(genutiltypes.DefaultMessageValidator), + bank.AppModuleBasic{}, + // ... + vesting.AppModuleBasic{}, + // highlight-next-line + consensus.AppModuleBasic{}, + //... + ) +) + +var ( + // highlight-next-line + _ runtime.AppI = (*App)(nil) + _ servertypes.Application = (*App)(nil) + // remove-next-line + _ simapp.App = (*App)(nil) +) + +type App struct { + *baseapp.BaseApp + + cdc *codec.LegacyAmino + appCodec codec.Codec + interfaceRegistry types.InterfaceRegistry + // highlight-next-line + txConfig client.TxConfig + + invCheckPeriod uint + + // ... 
+ // remove-start + StakingKeeper stakingkeeper.Keeper + CrisisKeeper crisiskeeper.Keeper + UpgradeKeeper upgradekeeper.Keeper + // remove-end + // highlight-start + StakingKeeper *stakingkeeper.Keeper + CrisisKeeper *crisiskeeper.Keeper + UpgradeKeeper *upgradekeeper.Keeper + // highlight-end + // ... + FeeGrantKeeper feegrantkeeper.Keeper + GroupKeeper groupkeeper.Keeper + // highlight-next-line + ConsensusParamsKeeper consensusparamkeeper.Keeper + + // ... +} + +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + appCodec := encodingConfig.Marshaler + cdc := encodingConfig.Amino + interfaceRegistry := encodingConfig.InterfaceRegistry + // highlight-next-line + txConfig := encodingConfig.TxConfig + + // ... + + bApp.SetCommitMultiStoreTracer(traceStore) + bApp.SetVersion(version.Version) + bApp.SetInterfaceRegistry(interfaceRegistry) + // highlight-next-line + bApp.SetTxEncoder(txConfig.TxEncoder()) + + keys := sdk.NewKVStoreKeys( + // ... + banktypes.StoreKey, + stakingtypes.StoreKey, + // highlight-next-line + crisistypes.StoreKey, + // ... + group.StoreKey, + icacontrollertypes.StoreKey, + // highlight-next-line + consensusparamtypes.StoreKey, + // ... + ) + + // ... + + app := &App{ + // ... + interfaceRegistry: interfaceRegistry, + // highlight-next-line + txConfig: txConfig, + invCheckPeriod: invCheckPeriod, + // ... + } + + // ... 
+ + // set the BaseApp's parameter store + // remove-next-line + bApp.SetParamStore(app.ParamsKeeper.Subspace(baseapp.Paramspace).WithKeyTable(paramstypes.ConsensusParamsKeyTable())) + // highlight-start + app.ConsensusParamsKeeper = consensusparamkeeper.NewKeeper(appCodec, keys[upgradetypes.StoreKey], authtypes.NewModuleAddress(govtypes.ModuleName).String()) + bApp.SetParamStore(&app.ConsensusParamsKeeper) + // highlight-end + + // ... + + app.AccountKeeper = authkeeper.NewAccountKeeper( + appCodec, + keys[authtypes.StoreKey], + // remove-next-line + app.GetSubspace(authtypes.ModuleName), + authtypes.ProtoBaseAccount, + maccPerms, + sdk.Bech32PrefixAccAddr, + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + app.BankKeeper = bankkeeper.NewBaseKeeper( + appCodec, + keys[banktypes.StoreKey], + app.AccountKeeper, + // remove-next-line + app.GetSubspace(banktypes.ModuleName), + app.BlockedModuleAccountAddrs(), + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + app.StakingKeeper = stakingkeeper.NewKeeper( + appCodec, + keys[stakingtypes.StoreKey], + app.AccountKeeper, + app.BankKeeper, + // remove-next-line + app.GetSubspace(stakingtypes.ModuleName), + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + app.MintKeeper = mintkeeper.NewKeeper( + appCodec, + keys[minttypes.StoreKey], + // remove-next-line + app.GetSubspace(minttypes.ModuleName), + // remove-next-line + &app.StakingKeeper, + // highlight-next-line + app.StakingKeeper, + app.AccountKeeper, + app.BankKeeper, + authtypes.FeeCollectorName, + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + app.DistrKeeper = distrkeeper.NewKeeper( + appCodec, + keys[distrtypes.StoreKey], + // remove-next-line + app.GetSubspace(distrtypes.ModuleName), + app.AccountKeeper, + app.BankKeeper, + // remove-next-line + &app.StakingKeeper, + // highlight-next-line + 
app.StakingKeeper, + authtypes.FeeCollectorName, + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + app.SlashingKeeper = slashingkeeper.NewKeeper( + appCodec, + // highlight-next-line + cdc, + keys[slashingtypes.StoreKey], + // remove-next-line + &app.StakingKeeper, + // highlight-next-line + app.StakingKeeper, + // remove-next-line + app.GetSubspace(slashingtypes.ModuleName), + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + app.CrisisKeeper = crisiskeeper.NewKeeper( + // remove-next-line + app.GetSubspace(crisistypes.ModuleName), + // highlight-start + appCodec, + keys[crisistypes.StoreKey], + // highlight-end + invCheckPeriod, + app.BankKeeper, + authtypes.FeeCollectorName, + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + // ... + + // Create evidence Keeper for to register the IBC light client misbehaviour evidence route + evidenceKeeper := evidencekeeper.NewKeeper( + appCodec, + keys[evidencetypes.StoreKey], + // remove-next-line + &app.StakingKeeper, + // highlight-next-line + app.StakingKeeper, + app.SlashingKeeper, + ) + // If evidence needs to be handled for the app, set routes in router here and seal + app.EvidenceKeeper = *evidenceKeeper + + // highlight-start + govConfig := govtypes.DefaultConfig() + govKeeper := govkeeper.NewKeeper( + appCodec, + keys[govtypes.StoreKey], + app.AccountKeeper, + app.BankKeeper, + app.StakingKeeper, + app.MsgServiceRouter(), + govConfig, + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + // highlight-end + + govRouter := govv1beta1.NewRouter() + govRouter. + AddRoute(govtypes.RouterKey, govv1beta1.ProposalHandler). + AddRoute(paramproposal.RouterKey, params.NewParamChangeProposalHandler(app.ParamsKeeper)). + // remove-next-line + AddRoute(distrtypes.RouterKey, distr.NewCommunityPoolSpendProposalHandler(app.DistrKeeper)). 
+ AddRoute(upgradetypes.RouterKey, upgrade.NewSoftwareUpgradeProposalHandler(app.UpgradeKeeper)). + AddRoute(ibcclienttypes.RouterKey, ibcclient.NewClientProposalHandler(app.IBCKeeper.ClientKeeper)) + // highlight-next-line + govKeeper.SetLegacyRouter(govRouter) + + // remove-start + govConfig := govtypes.DefaultConfig() + app.GovKeeper = govkeeper.NewKeeper( + appCodec, + keys[govtypes.StoreKey], + app.GetSubspace(govtypes.ModuleName), + app.AccountKeeper, + app.BankKeeper, + &app.StakingKeeper, + govRouter, + app.MsgServiceRouter(), + govConfig, + ) + // remove-end + // highlight-start + app.GovKeeper = *govKeeper.SetHooks( + govtypes.NewMultiGovHooks( + // register the governance hooks + ), + ) + // highlight-end + + // ... + + // remove-start + app.GovKeeper.SetHooks( + govtypes.NewMultiGovHooks( + // insert governance hooks receivers here + ), + ) + // remove-end + + // ... + + app.mm = module.NewManager( + genutil.NewAppModule( + app.AccountKeeper, + app.StakingKeeper, + app.BaseApp.DeliverTx, + encodingConfig.TxConfig, + ), + // remove-next-line + auth.NewAppModule(appCodec, app.AccountKeeper, nil), + // highlight-next-line + auth.NewAppModule(appCodec, app.AccountKeeper, authsims.RandomGenesisAccounts, app.GetSubspace(authtypes.ModuleName)), + authzmodule.NewAppModule(appCodec, app.AuthzKeeper, app.AccountKeeper, app.BankKeeper, app.interfaceRegistry), + vesting.NewAppModule(app.AccountKeeper, app.BankKeeper), + // remove-start + bank.NewAppModule(appCodec, app.BankKeeper, app.AccountKeeper), + capability.NewAppModule(appCodec, *app.CapabilityKeeper), + // remove-end + // highlight-start + bank.NewAppModule(appCodec, app.BankKeeper, app.AccountKeeper, app.GetSubspace(banktypes.ModuleName)), + capability.NewAppModule(appCodec, *app.CapabilityKeeper, false), + // highlight-end + feegrantmodule.NewAppModule(appCodec, app.AccountKeeper, app.BankKeeper, app.FeeGrantKeeper, app.interfaceRegistry), + groupmodule.NewAppModule(appCodec, app.GroupKeeper, 
app.AccountKeeper, app.BankKeeper, app.interfaceRegistry), + // remove-start + crisis.NewAppModule(&app.CrisisKeeper, skipGenesisInvariants), + gov.NewAppModule(appCodec, app.GovKeeper, app.AccountKeeper, app.BankKeeper), + mint.NewAppModule(appCodec, app.MintKeeper, app.AccountKeeper, minttypes.DefaultInflationCalculationFn), + slashing.NewAppModule(appCodec, app.SlashingKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper), + distr.NewAppModule(appCodec, app.DistrKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper), + staking.NewAppModule(appCodec, app.StakingKeeper, app.AccountKeeper, app.BankKeeper), + // remove-end + // highlight-start + crisis.NewAppModule(app.CrisisKeeper, skipGenesisInvariants, app.GetSubspace(crisistypes.ModuleName)), + gov.NewAppModule(appCodec, &app.GovKeeper, app.AccountKeeper, app.BankKeeper, app.GetSubspace(govtypes.ModuleName)), + mint.NewAppModule(appCodec, app.MintKeeper, app.AccountKeeper, nil, app.GetSubspace(minttypes.ModuleName)), + slashing.NewAppModule(appCodec, app.SlashingKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper, app.GetSubspace(slashingtypes.ModuleName)), + distr.NewAppModule(appCodec, app.DistrKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper, app.GetSubspace(distrtypes.ModuleName)), + staking.NewAppModule(appCodec, app.StakingKeeper, app.AccountKeeper, app.BankKeeper, app.GetSubspace(stakingtypes.ModuleName)), + // highlight-end + upgrade.NewAppModule(app.UpgradeKeeper), + evidence.NewAppModule(app.EvidenceKeeper), + // highlight-next-line + consensus.NewAppModule(appCodec, app.ConsensusParamsKeeper), + ibc.NewAppModule(app.IBCKeeper), + params.NewAppModule(app.ParamsKeeper), + transferModule, + icaModule, + // this line is used by starport scaffolding # stargate/app/appModule + + ) + + app.mm.SetOrderBeginBlockers( + // ... + paramstypes.ModuleName, + vestingtypes.ModuleName, + // highlight-next-line + consensusparamtypes.ModuleName, + // ... 
+ ) + + app.mm.SetOrderEndBlockers( + // ... + paramstypes.ModuleName, + upgradetypes.ModuleName, + vestingtypes.ModuleName, + // highlight-next-line + consensusparamtypes.ModuleName, + // ... + ) + + // remove-next-line + app.mm.SetOrderInitGenesis( + // highlight-next-line + genesisModuleOrder := []string{ + // ... + paramstypes.ModuleName, + upgradetypes.ModuleName, + vestingtypes.ModuleName, + // highlight-next-line + consensusparamtypes.ModuleName, + // ... + // remove-next-line + ) + // highlight-start + } + app.mm.SetOrderInitGenesis(genesisModuleOrder...) + app.mm.SetOrderExportGenesis(genesisModuleOrder...) + // highlight-end + + // remove-start + app.mm.RegisterInvariants(&app.CrisisKeeper) + app.mm.RegisterRoutes(app.Router(), app.QueryRouter(), encodingConfig.Amino) + // remove-end + // highlight-next-line + app.mm.RegisterInvariants(app.CrisisKeeper) + + app.configurator = module.NewConfigurator(app.appCodec, app.MsgServiceRouter(), app.GRPCQueryRouter()) + app.mm.RegisterServices(app.configurator) + + // highlight-start + autocliv1.RegisterQueryServer(app.GRPCQueryRouter(), runtimeservices.NewAutoCLIQueryService(app.mm.Modules)) + reflectionSvc, err := runtimeservices.NewReflectionService() + if err != nil { + panic(err) + } + reflectionv1.RegisterReflectionServiceServer(app.GRPCQueryRouter(), reflectionSvc) + // highlight-end + + // create the simulation manager and define the order of the modules for deterministic simulations + // remove-start + app.sm = module.NewSimulationManager( + // ... + ) + // remove-end + // highlight-start + overrideModules := map[string]module.AppModuleSimulation{ + authtypes.ModuleName: auth.NewAppModule(app.appCodec, app.AccountKeeper, authsims.RandomGenesisAccounts, app.GetSubspace(authtypes.ModuleName)), + } + app.sm = module.NewSimulationManagerFromAppModules(app.mm.Modules, overrideModules) + // highlight-end + app.sm.RegisterStoreDecoders() + + // ... 
+ + // remove-start + app.SetInitChainer(app.InitChainer) + app.SetBeginBlocker(app.BeginBlocker) + // remove-end + + // ... +} + +func (app *App) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci.ResponseInitChain { + var genesisState GenesisState + // remove-next-line + if err := tmjson.Unmarshal(req.AppStateBytes, &genesisState); err != nil { + // highlight-next-line + if err := json.Unmarshal(req.AppStateBytes, &genesisState); err != nil { + panic(err) + } + // ... +} + +// remove-start +// GetMaccPerms returns a copy of the module account permissions +func GetMaccPerms() map[string][]string { + dupMaccPerms := make(map[string][]string) + for k, v := range maccPerms { + dupMaccPerms[k] = v + } + return dupMaccPerms +} +// remove-end + +// highlight-start +// TxConfig returns App's TxConfig. +func (app *App) TxConfig() client.TxConfig { + return app.txConfig +} + +// Configurator get app configurator +func (app *App) Configurator() module.Configurator { + return app.configurator +} + +// ModuleManager returns the app ModuleManager +func (app *App) ModuleManager() *module.Manager { + return app.mm +} +// highlight-end +``` + +```text title="app/simulation_test.go" +import ( + // ... 
+ // remove-start + "cosmossdk.io/simapp" + tmtypes "github.com/tendermint/tendermint/types" + // remove-end + // highlight-start + "encoding/json" + "fmt" + "math/rand" + "runtime/debug" + "strings" + + dbm "github.com/cometbft/cometbft-db" + "github.com/cometbft/cometbft/libs/log" + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/server" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims" + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + authzkeeper "github.com/cosmos/cosmos-sdk/x/authz/keeper" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + distrtypes "github.com/cosmos/cosmos-sdk/x/distribution/types" + evidencetypes "github.com/cosmos/cosmos-sdk/x/evidence/types" + govtypes "github.com/cosmos/cosmos-sdk/x/gov/types" + minttypes "github.com/cosmos/cosmos-sdk/x/mint/types" + paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" + simcli "github.com/cosmos/cosmos-sdk/x/simulation/client/cli" + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + // highlight-end +) + +// highlight-start +type storeKeysPrefixes struct { + A storetypes.StoreKey + B storetypes.StoreKey + Prefixes [][]byte +} +// highlight-end + +// Get flags every time the simulator is run +func init() { + // remove-next-line + simapp.GetSimulatorFlags() + // highlight-next-line + simcli.GetSimulatorFlags() +} + +// remove-start +var defaultConsensusParams = &abci.ConsensusParams{ + Block: &abci.BlockParams{ + MaxBytes: 200000, + MaxGas: 2000000, + }, + Evidence: &tmproto.EvidenceParams{ + MaxAgeNumBlocks: 302400, + MaxAgeDuration: 504 * time.Hour, // 3 weeks is the max duration + MaxBytes: 10000, + }, + Validator: &tmproto.ValidatorParams{ + PubKeyTypes: []string{ + 
tmtypes.ABCIPubKeyTypeEd25519, + }, + }, +} +// remove-end +// highlight-start +func fauxMerkleModeOpt(bapp *baseapp.BaseApp) { + bapp.SetFauxMerkleMode() +} +// highlight-end + +func BenchmarkSimulation(b *testing.B) { + // remove-start + simapp.FlagEnabledValue = true + simapp.FlagCommitValue = true + + config, db, dir, logger, _, err := simapp.SetupSimulation("goleveldb-app-sim", "Simulation") + // remove-end + // highlight-start + simcli.FlagSeedValue = time.Now().Unix() + simcli.FlagVerboseValue = true + simcli.FlagCommitValue = true + simcli.FlagEnabledValue = true + + config := simcli.NewConfigFromFlags() + config.ChainID = "mars-simapp" + db, dir, logger, _, err := simtestutil.SetupSimulation( + config, + "leveldb-bApp-sim", + "Simulation", + simcli.FlagVerboseValue, + simcli.FlagEnabledValue, + ) + // highlight-end + + require.NoError(b, err, "simulation setup failed") + + b.Cleanup(func() { + // remove-start + db.Close() + err = os.RemoveAll(dir) + require.NoError(b, err) + // remove-end + // highlight-start + require.NoError(b, db.Close()) + require.NoError(b, os.RemoveAll(dir)) + // highlight-end + }) + + + // remove-next-line + encoding := app.MakeEncodingConfig() + // highlight-start + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = app.DefaultNodeHome + appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue + // highlight-end + + // remove-next-line + app := app.New( + // highlight-next-line + bApp := app.New( + logger, + db, + nil, + true, + map[int64]bool{}, + app.DefaultNodeHome, + 0, + // remove-start + encoding, + simapp.EmptyAppOptions{}, + // remove-end + // highlight-start + app.MakeEncodingConfig(), + appOptions, + baseapp.SetChainID(config.ChainID), + // highlight-end + ) + // highlight-next-line + require.Equal(b, app.Name, bApp.Name()) + + _, simParams, simErr := simulation.SimulateFromSeed( + b, + os.Stdout, + // remove-start + app.BaseApp, + simapp.AppStateFn(app.AppCodec(), 
app.SimulationManager()), + simulationtypes.RandomAccounts, + simapp.SimulationOperations(app, app.AppCodec(), config), + app.ModuleAccountAddrs(), + config, + app.AppCodec(), + // remove-end + // highlight-start + bApp.BaseApp, + simtestutil.AppStateFn( + bApp.AppCodec(), + bApp.SimulationManager(), + app.NewDefaultGenesisState(bApp.AppCodec()), + ), + simulationtypes.RandomAccounts, + simtestutil.SimulationOperations(bApp, bApp.AppCodec(), config), + bApp.ModuleAccountAddrs(), + config, + bApp.AppCodec(), + // highlight-end + ) + + // remove-next-line + err = simapp.CheckExportSimulation(app, config, simParams) + // highlight-next-line + err = simtestutil.CheckExportSimulation(bApp, config, simParams) + require.NoError(b, err) + require.NoError(b, simErr) + + if config.Commit { + // remove-next-line + simapp.PrintStats(db) + // highlight-next-line + simtestutil.PrintStats(db) + } +} +``` + +```text title="x/{{moduleName}}/module_simulation.go" +import ( + // ... + // remove-next-line + simappparams "cosmossdk.io/simapp/params" +) + +var ( + // ... + // remove-next-line + _ = simappparams.StakePerAccount + // highlight-next-line + _ = rand.Rand{} +) + +// remove-start +func (am AppModule) RandomizedParams(_ *rand.Rand) []simtypes.ParamChange { + // ... +} +// remove-end +// highlight-start +// ProposalMsgs returns msgs used for governance proposals for simulations. 
+func (am AppModule) ProposalMsgs(simState module.SimulationState) []simtypes.WeightedProposalMsg {
+	return []simtypes.WeightedProposalMsg{
+		// this line is used by starport scaffolding # simapp/module/OpMsg
+	}
+}
+// highlight-end
+```
+
+### Deprecations
+
+The app module might contain some legacy methods that are deprecated and can be removed:
+
+```text title="x/{{moduleName}}/module.go"
+// remove-start
+// Deprecated: use RegisterServices
+func (am AppModule) Route() sdk.Route { return sdk.Route{} }
+
+// Deprecated: use RegisterServices
+func (AppModule) QuerierRoute() string { return types.RouterKey }
+
+// Deprecated: use RegisterServices
+func (am AppModule) LegacyQuerierHandler(_ *codec.LegacyAmino) sdk.Querier {
+	return nil
+}
+// remove-end
+```
+
+### Other required changes
+
+Changes required to the network test util:
+
+```text title="testutil/network/network.go"
+import (
+	// ...
+
+	// remove-start
+	"github.com/cosmos/cosmos-sdk/simapp"
+	pruningtypes "github.com/cosmos/cosmos-sdk/pruning/types"
+	// remove-end
+	// highlight-start
+	pruningtypes "github.com/cosmos/cosmos-sdk/store/pruning/types"
+	simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims"
+	// highlight-end
+)
+
+func New(t *testing.T, configs ...Config) *Network {
+	// ...
+
+	net, err := network.New(t, t.TempDir(), cfg)
+	require.NoError(t, err)
+	// highlight-start
+	_, err = net.WaitForHeight(1)
+	require.NoError(t, err)
+	// highlight-end
+
+	// ...
+}
+
+func DefaultConfig() network.Config {
+	// remove-next-line
+	encoding := app.MakeEncodingConfig()
+	// highlight-start
+	var (
+		encoding = app.MakeEncodingConfig()
+		chainID  = "chain-" + tmrand.NewRand().Str(6)
+	)
+	// highlight-end
+
+	return network.Config{
+		// ...
+ // remove-next-line + AppConstructor: func(val network.Validator) servertypes.Application { + // highlight-next-line + AppConstructor: func(val network.ValidatorI) servertypes.Application { + return app.New( + // remove-next-line + val.Ctx.Logger, + // highlight-next-line + val.GetCtx().Logger, + tmdb.NewMemDB(), + nil, + true, + map[int64]bool{}, + // remove-next-line + val.Ctx.Config.RootDir, + // highlight-next-line + val.GetCtx().Config.RootDir, + 0, + encoding, + // remove-start + simapp.EmptyAppOptions{}, + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.AppConfig.Pruning)), + baseapp.SetMinGasPrices(val.AppConfig.MinGasPrices), + // remove-end + // highlight-start + simtestutil.EmptyAppOptions{}, + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.GetAppConfig().Pruning)), + baseapp.SetMinGasPrices(val.GetAppConfig().MinGasPrices), + baseapp.SetChainID(chainID), + // highlight-end + ) + }, + // ... + // remove-next-line + ChainID: "chain-" + tmrand.NewRand().Str(6), + // highlight-next-line + ChainID: chainID, + // ... + } +} +``` + +Update the collect genesis transactions command and add the new message validator argument: + +```text title="cmd/{{binaryNamePrefix}}d/cmd/root.go" +import ( + // ... + + // highlight-start + tmtypes "github.com/cometbft/cometbft/types" + "github.com/cosmos/cosmos-sdk/x/genutil" + genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" + // highlight-end +) + +func initRootCmd(rootCmd *cobra.Command, encodingConfig params.EncodingConfig) { + // ... + + // highlight-next-line + gentxModule := app.ModuleBasics[genutiltypes.ModuleName].(genutil.AppModuleBasic) + rootCmd.AddCommand( + // ... + // remove-next-line + genutilcli.CollectGenTxsCmd(banktypes.GenesisBalancesIterator{}, app.DefaultHome), + // highlight-next-line + genutilcli.CollectGenTxsCmd(banktypes.GenesisBalancesIterator{}, app.DefaultNodeHome, gentxModule.GenTxValidator), + // ... + ) + + // ... 
+} + +func (a appCreator) newApp( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + appOpts servertypes.AppOptions, +) servertypes.Application { + // ... + + pruningOpts, err := server.GetPruningOptionsFromFlags(appOpts) + if err != nil { + panic(err) + } + + // highlight-start + homeDir := cast.ToString(appOpts.Get(flags.FlagHome)) + chainID := cast.ToString(appOpts.Get(flags.FlagChainID)) + if chainID == "" { + // fallback to genesis chain-id + appGenesis, err := tmtypes.GenesisDocFromFile(filepath.Join(homeDir, "config", "genesis.json")) + if err != nil { + panic(err) + } + + chainID = appGenesis.ChainID + } + // highlight-end + + // ... + + return app.New( + // ... + baseapp.SetPruning(pruningOpts), + baseapp.SetMinGasPrices(cast.ToString(appOpts.Get(server.FlagMinGasPrices))), + // remove-next-line + baseapp.SetMinRetainBlocks(cast.ToUint64(appOpts.Get(server.FlagMinRetainBlocks))), + baseapp.SetHaltHeight(cast.ToUint64(appOpts.Get(server.FlagHaltHeight))), + baseapp.SetHaltTime(cast.ToUint64(appOpts.Get(server.FlagHaltTime))), + // highlight-next-line + baseapp.SetMinRetainBlocks(cast.ToUint64(appOpts.Get(server.FlagMinRetainBlocks))), + // ... + baseapp.SetIAVLDisableFastNode(cast.ToBool(appOpts.Get(server.FlagDisableIAVLFastNode))), + // highlight-next-line + baseapp.SetChainID(chainID), + ) +) + +func (a appCreator) appExport( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + height int64, + forZeroHeight bool, + jailAllowedAddrs []string, + appOpts servertypes.AppOptions, + // highlight-next-line + modulesToExport []string, +) (servertypes.ExportedApp, error) { + // ... 
+
+	// remove-next-line
+	return app.ExportAppStateAndValidators(forZeroHeight, jailAllowedAddrs)
+	// highlight-next-line
+	return app.ExportAppStateAndValidators(forZeroHeight, jailAllowedAddrs, modulesToExport)
+}
+```
+
+Add the new extra argument to `ExportAppStateAndValidators`:
+
+```text title="app/export.go"
+func (app *App) ExportAppStateAndValidators(
+	forZeroHeight bool,
+	jailAllowedAddrs []string,
+	// highlight-next-line
+	modulesToExport []string,
+) (servertypes.ExportedApp, error) {
+	// ...
+
+	// remove-next-line
+	genState := app.mm.ExportGenesis(ctx, app.appCodec)
+	// highlight-next-line
+	genState := app.mm.ExportGenesisForModules(ctx, app.appCodec, modulesToExport)
+	appState, err := json.MarshalIndent(genState, "", " ")
+	if err != nil {
+		return servertypes.ExportedApp{}, err
+	}
+
+	// ...
+}
+```
+
+### Migration
+
+You can also follow other Cosmos SDK migration steps in their [upgrade guide](https://docs.cosmos.network/main/migrations/upgrading#v047x).
+Especially the [parameter migration](https://docs.cosmos.network/main/migrations/upgrading#xconsensus) which
+is required if you want to run the updated version keeping your current app state.
+
+## Query commands
+
+Query commands context initialization should be changed to:
+
+```text title="x/{moduleName}/client/cli/query_{typeName}.go"
+RunE: func(cmd *cobra.Command, args []string) (err error) {
+	// remove-next-line
+	clientCtx := client.GetClientContextFromCmd(cmd)
+	// highlight-start
+	clientCtx, err := client.GetClientQueryContext(cmd)
+	if err != nil {
+		return err
+	}
+	// highlight-end
+
+	// ...
+}
+```
+
+
+## ibc-go v7
+
+Chains that are newly scaffolded with Ignite CLI `v0.27.1` now use `ibc-go/v7` for IBC functionality. It is
+required to upgrade to the newest version of `ibc-go`.
+
+Applications scaffolded with an older version of Ignite CLI require the following changes to the app file:
+
+```text title="app/app.go"
+import (
+	// ...
+ // remove-start + ica "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts" + icacontrollerkeeper "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/controller/keeper" + icacontrollertypes "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/controller/types" + icahost "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/host" + icahostkeeper "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/host/keeper" + icahosttypes "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/host/types" + icatypes "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/types" + "github.com/cosmos/ibc-go/v6/modules/apps/transfer" + ibctransferkeeper "github.com/cosmos/ibc-go/v6/modules/apps/transfer/keeper" + ibctransfertypes "github.com/cosmos/ibc-go/v6/modules/apps/transfer/types" + ibc "github.com/cosmos/ibc-go/v6/modules/core" + ibcclient "github.com/cosmos/ibc-go/v6/modules/core/02-client" + ibcclientclient "github.com/cosmos/ibc-go/v6/modules/core/02-client/client" + ibcclienttypes "github.com/cosmos/ibc-go/v6/modules/core/02-client/types" + ibcporttypes "github.com/cosmos/ibc-go/v6/modules/core/05-port/types" + ibchost "github.com/cosmos/ibc-go/v6/modules/core/24-host" + ibckeeper "github.com/cosmos/ibc-go/v6/modules/core/keeper" + // remove-end + // highlight-start + ica "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts" + icacontrollerkeeper "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/controller/keeper" + icacontrollertypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/controller/types" + icahost "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/host" + icahostkeeper "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/host/keeper" + icahosttypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/host/types" + icatypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/types" + 
"github.com/cosmos/ibc-go/v7/modules/apps/transfer" + ibctransferkeeper "github.com/cosmos/ibc-go/v7/modules/apps/transfer/keeper" + ibctransfertypes "github.com/cosmos/ibc-go/v7/modules/apps/transfer/types" + ibc "github.com/cosmos/ibc-go/v7/modules/core" + ibcclient "github.com/cosmos/ibc-go/v7/modules/core/02-client" + ibcclientclient "github.com/cosmos/ibc-go/v7/modules/core/02-client/client" + ibcclienttypes "github.com/cosmos/ibc-go/v7/modules/core/02-client/types" + ibcporttypes "github.com/cosmos/ibc-go/v7/modules/core/05-port/types" + ibcexported "github.com/cosmos/ibc-go/v7/modules/core/exported" + ibckeeper "github.com/cosmos/ibc-go/v7/modules/core/keeper" + solomachine "github.com/cosmos/ibc-go/v7/modules/light-clients/06-solomachine" + ibctm "github.com/cosmos/ibc-go/v7/modules/light-clients/07-tendermint" + // highlight-end +) + +var ( + // ... + + ModuleBasics = module.NewBasicManager( + // ... + groupmodule.AppModuleBasic{}, + ibc.AppModuleBasic{}, + // highlight-start + ibctm.AppModuleBasic{}, + solomachine.AppModuleBasic{}, + // highlight-end + upgrade.AppModuleBasic{}, + // ... + ) +) + +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + // ... + + keys := sdk.NewKVStoreKeys( + // ... + govtypes.StoreKey, + paramstypes.StoreKey, + // remove-next-line + ibchost.StoreKey, + // highlight-next-line + ibcexported.StoreKey, + // ... + ) + + // ... 
+ // grant capabilities for the ibc and ibc-transfer modules + // remove-next-line + scopedIBCKeeper := app.CapabilityKeeper.ScopeToModule(ibchost.ModuleName) + // highlight-next-line + scopedIBCKeeper := app.CapabilityKeeper.ScopeToModule(ibcexported.ModuleName) + scopedICAControllerKeeper := app.CapabilityKeeper.ScopeToModule(icacontrollertypes.SubModuleName) + + // ... + + app.IBCKeeper = ibckeeper.NewKeeper( + appCodec, + // remove-start + keys[ibchost.StoreKey], + app.GetSubspace(ibchost.ModuleName), + // remove-end + // highlight-start + keys[ibcexported.StoreKey], + app.GetSubspace(ibcexported.ModuleName), + // highlight-end + app.StakingKeeper, + app.UpgradeKeeper, + scopedIBCKeeper, + ) + + // ... + + app.mm.SetOrderBeginBlockers( + // ... + crisistypes.ModuleName, + ibctransfertypes.ModuleName, + // remove-next-line + ibchost.ModuleName, + // highlight-next-line + ibcexported.ModuleName, + // ... + ) + + app.mm.SetOrderEndBlockers( + // ... + stakingtypes.ModuleName, + ibctransfertypes.ModuleName, + // remove-next-line + ibchost.ModuleName, + // highlight-next-line + ibcexported.ModuleName, + // ... + ) + + genesisModuleOrder := []string{ + // ... + genutiltypes.ModuleName, + ibctransfertypes.ModuleName, + // remove-next-line + ibchost.ModuleName, + // highlight-next-line + ibcexported.ModuleName, + // ... + } + + // ... +) + +func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey storetypes.StoreKey) paramskeeper.Keeper { + // ... + paramsKeeper.Subspace(crisistypes.ModuleName) + paramsKeeper.Subspace(ibctransfertypes.ModuleName) + // remove-next-line + paramsKeeper.Subspace(ibchost.ModuleName) + // highlight-next-line + paramsKeeper.Subspace(ibcexported.ModuleName) + // ... +} +``` + + +You can follow other IBC migration steps in their [migration guide v6 to v7](https://github.com/cosmos/ibc-go/blob/v7.0.1/docs/migrations/v6-to-v7.md). 
+
+## Doctor command
+
+As a final step, it's recommended to run `ignite doctor` and `go mod tidy`.
diff --git a/docs/versioned_docs/version-v28/06-migration/v28.0.0.md b/docs/versioned_docs/version-v28/06-migration/v28.0.0.md
new file mode 100644
index 0000000..fd61d3c
--- /dev/null
+++ b/docs/versioned_docs/version-v28/06-migration/v28.0.0.md
@@ -0,0 +1,124 @@
+---
+sidebar_position: 990
+title: v28.0.0
+description: For chains that were scaffolded with Ignite CLI versions lower than v28.0.0 changes are required to use Ignite CLI v28.0.0
+---
+
+## **Upgrade to v28.0.0 - New Versioning Scheme in Ignite**
+
+With the latest update, Ignite has transitioned its versioning format from a leading-zero release system to a full number release system. This change marks a significant shift in how we communicate updates and stability in our software. Where the previous version was denoted as v0.27.0, it will now be upgraded to v28.0.0.
+
+This new versioning approach enhances our version control by clearly indicating major, minor, and patch releases.
+From now on, the first number indicates a major release with breaking API changes, the second number indicates a minor release that might include new features, while the last number is typically focused on bug fixes and minor improvements.
+[Learn more about semantic versioning](https://semver.org/).
+
+## **Plugins are now called Apps. Upgrade Configuration Files**
+
+Ignite `v28.0.0` changes the plugin system which is now called Ignite Apps. This version includes changes
+to the CLI command names and the plugin configuration file.
+
+The plugins configuration file is now called `igniteapps.yml` and "plugins" are now called "apps".
+
+The plugins configuration home directory is now `$HOME/.ignite/apps` instead of `$HOME/.ignite/plugins`.
+
+Updates can be automatically applied by running `ignite doctor` in your blockchain application directory.
+Running the command outside your blockchain application directory will only update the global plugins. + +## **Ignite and Cosmos SDK Upgrade Guide: From Ignite v0.27.0 to v28.0.0 and Cosmos SDK v0.47 to v0.50** + +### **Introduction** + +This guide provides a step-by-step process for developers to upgrade their applications from Ignite version 0.27.0 to 28.0.0, along with an upgrade in the Cosmos SDK from version 0.47 to v0.50. It covers essential changes, new features, and adjustments required for a smooth transition. + +### **Prerequisites** + +- Backup your current project. +- Ensure you have Ignite v0.27.0 and Cosmos SDK v0.47 installed. +- Basic familiarity with command line operations and the existing project structure. + +### **Step 1: Update Ignite CLI to Version 28.0.0** + +- **Command**: Run **`curl https://get.ignite.com/cli@v28.0.0 | bash`** in your terminal. +- **Note**: This command updates the Ignite CLI to the latest version. Ensure you have the necessary permissions to execute it. + +### **Step 2: Update Scaffold Chain Command** + +- **Old Command**: **`ignite scaffold chain github.com/alice/blog`** +- **New Command**: **`ignite scaffold chain blog`** +- **Explanation**: The command format has been simplified in the new version for ease of use. + +### **Step 3: Docker Version Upgrades** + +- **Action**: Upgrade the Ignite version for the Docker container to match the CLI version. +- **Note**: Ensure Docker compatibility with the new Ignite CLI version. + +### **Step 4: Change in Module Path** + +- **Old Path**: **`x/blog/module.go`** +- **New Path**: **`x/blog/module/module.go`** +- **Explanation**: The module path structure has been updated for better organization. + +### **Step 5: Frontend Scaffolding Options** + +- **Action**: Choose between Vue, React, Go, or TypeScript for frontend scaffolding. 
+- **Commands**:
+  - **`ignite scaffold react`**
+  - **`ignite scaffold vue`**
+- **Note**: Vue is no longer the default option for frontend scaffolding.
+
+### **Step 6: Update Scaffold Message for CreatePost Command**
+
+- **Action**: Review and update the output for the scaffolded createPost command as per the new format.
+
+### **Step 7: AutoCLI Path Change**
+
+- **Old Path**: **`x/blog/client/cli/tx_create_post.go`**
+- **New Path**: **`x/blog/module/autocli.go`**
+- **Explanation**: AutoCLI is now integrated at a different path to streamline command-line interactions.
+
+### **Step 8: Adjustment in Stored Game**
+
+- **Old Code**:
+
+    ```go
+    store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.PostKey))
+
+    ```
+
+- **New Code**:
+
+    ```go
+    storeAdapter := runtime.KVStoreAdapter(k.storeService.OpenKVStore(ctx))
+    store := prefix.NewStore(storeAdapter, types.KeyPrefix(types.PostKey))
+
+    ```
+
+- **Explanation**: The way the KVStore is accessed has changed, requiring an update in the code for stored games.
+
+### **Step 9: Chain-ID Requirements in CLI Transaction Commands**
+
+- **Action**: Add **`--chain-id`** flag to CLI transaction commands.
+- **Example**:
+  - **Old Command**: **`blogd tx blog create-post 'Hello, World!' 'This is a blog post' --from alice`**
+  - **New Command**: **`blogd tx blog create-post 'Hello, World!' 'This is a blog post' --from alice --chain-id blog`**
+- **Explanation**: The **`--chain-id`** flag is now required for transaction commands for identification purposes.
+
+### **Troubleshooting Common Issues**
+
+- **Dependency Conflicts**: Ensure compatibility of all dependencies with Ignite v28.0.0 and Cosmos SDK v0.50.
+- **Docker Image Compatibility**: Align Docker image versions with the CLI for seamless operations.
+- **Frontend Scaffolding**: For older projects, ensure correct scaffolding as per the new commands.
+- **AutoCLI Integration**: Address discrepancies due to the new AutoCLI integration path.
+ +### **Additional Resources** + +- [Ignite Documentation](https://docs.ignite.com/) +- [Cosmos SDK Release Notes](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.50.1) + +### **Feedback** + +We value your feedback on this guide. Please share your experiences and suggestions for improvements. + +### **Updates Log** + +- **[01/15/24]**: Guide created for Ignite v28.0.0 and Cosmos SDK v0.50.1 \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/07-packages/_category_.json b/docs/versioned_docs/version-v28/07-packages/_category_.json new file mode 100644 index 0000000..6dbb883 --- /dev/null +++ b/docs/versioned_docs/version-v28/07-packages/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Packages", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/07-packages/cosmostxcollector.md b/docs/versioned_docs/version-v28/07-packages/cosmostxcollector.md new file mode 100644 index 0000000..6684a84 --- /dev/null +++ b/docs/versioned_docs/version-v28/07-packages/cosmostxcollector.md @@ -0,0 +1,200 @@ +--- +sidebar_position: 0 +title: cosmostxcollector +slug: /packages/cosmostxcollector +--- + +# cosmostxcollector + +The package implements support for collecting transactions and events from Cosmos blockchains +into a data backend and it also adds support for querying the collected data. + +## Transaction and event data collecting + +Transactions and events can be collected using the `cosmostxcollector.Collector` type. This +type uses a `cosmosclient.Client` instance to fetch the data from each block and a data backend +adapter to save the data. + +### Data backend adapters + +Data backend adapters are used to query and save the collected data into different types of data +backends and must implement the `cosmostxcollector.adapter.Adapter` interface. + +An adapter for PostgreSQL is already implemented in `cosmostxcollector.adapter.postgres.Adapter`. +This is the one used in the examples. 
+ +### Example: Data collection + +The data collection example assumes that there is a PostgreSQL database running in the local +environment containing an empty database named "cosmos". + +The required database tables will be created automatically by the collector the first time it is run. + +When the application is run it will fetch all the transactions and events starting from one of the +recent blocks until the current block height and populate the database: + +```go +package main + +import ( + "context" + "log" + + "github.com/ignite/cli/v28/ignite/pkg/clictx" + "github.com/ignite/cli/v28/ignite/pkg/cosmosclient" + "github.com/ignite/cli/v28/ignite/pkg/cosmostxcollector" + "github.com/ignite/cli/v28/ignite/pkg/cosmostxcollector/adapter/postgres" +) + +const ( + // Name of a local PostgreSQL database + dbName = "cosmos" + + // Cosmos RPC address + rpcAddr = "https://rpc.cosmos.directory:443/cosmoshub" +) + +func collect(ctx context.Context, db postgres.Adapter) error { + // Make sure that the data backend schema is up to date + if err := db.Init(ctx); err != nil { + return err + } + + // Init the Cosmos client + client, err := cosmosclient.New(ctx, cosmosclient.WithNodeAddress(rpcAddr)) + if err != nil { + return err + } + + // Get the latest block height + latestHeight, err := client.LatestBlockHeight(ctx) + if err != nil { + return err + } + + // Collect transactions and events starting from a block height. + // The collector stops at the latest height available at the time of the call. 
+ collector := cosmostxcollector.New(db, client) + if err := collector.Collect(ctx, latestHeight-50); err != nil { + return err + } + + return nil +} + +func main() { + ctx := clictx.From(context.Background()) + + // Init an adapter for a local PostgreSQL database running with the default values + params := map[string]string{"sslmode": "disable"} + db, err := postgres.NewAdapter(dbName, postgres.WithParams(params)) + if err != nil { + log.Fatal(err) + } + + if err := collect(ctx, db); err != nil { + log.Fatal(err) + } +} +``` + +## Queries + +Collected data can be queried through the data backend adapters using event queries or +cursor-based queries. + +Queries support sorting, paging and filtering by using different options during creation. +The cursor-based ones also support the selection of specific fields or properties and also +passing arguments in cases where the query is a function. + +By default no sorting, filtering nor paging is applied to the queries. + +### Event queries + +The event queries return events and their attributes as `[]cosmostxcollector.query.Event`. + +### Example: Query events + +The example reads transfer events from Cosmos' bank module and paginates the results. 
+ +```go +import ( + "context" + + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/adapter/postgres" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/query" +) + +func queryBankTransferEvents(ctx context.Context, db postgres.Adapter) ([]query.Event, error) { + // Create an event query that returns events of type "transfer" + qry := query.NewEventQuery( + query.WithFilters( + // Filter transfer events from Cosmos' bank module + postgres.FilterByEventType(banktypes.EventTypeTransfer), + ), + query.WithPageSize(10), + query.AtPage(1), + ) + + // Execute the query + return db.QueryEvents(ctx, qry) +} +``` + +### Cursor-based queries + +This type of queries is meant to be used in contexts where the Event queries are not +useful. + +Cursor-based queries can query a single "entity" which can be a table, view or function +in relational databases or a collection or function in non relational data backends. + +The result of these types of queries is a cursor that implements the `cosmostxcollector.query.Cursor` +interface. 
+ +### Example: Query events using cursors + +```go +import ( + "context" + + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/adapter/postgres" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/query" +) + +func queryBankTransferEventIDs(ctx context.Context, db postgres.Adapter) (ids []int64, err error) { + // Create a query that returns the IDs for events of type "transfer" + qry := query.New( + "event", + query.Fields("id"), + query.WithFilters( + // Filter transfer events from Cosmos' bank module + postgres.NewFilter("type", banktypes.EventTypeTransfer), + ), + query.WithPageSize(10), + query.AtPage(1), + query.SortByFields(query.SortOrderAsc, "id"), + ) + + // Execute the query + cr, err := db.Query(ctx, qry) + if err != nil { + return nil, err + } + + // Read the results + for cr.Next() { + var eventID int64 + + if err := cr.Scan(&eventID); err != nil { + return nil, err + } + + ids = append(ids, eventID) + } + + return ids, nil +} +``` diff --git a/docs/versioned_docs/version-v28/08-references/01-cli.md b/docs/versioned_docs/version-v28/08-references/01-cli.md new file mode 100644 index 0000000..0a6520e --- /dev/null +++ b/docs/versioned_docs/version-v28/08-references/01-cli.md @@ -0,0 +1,3711 @@ +--- +description: Ignite CLI docs. +--- + +# CLI commands + +Documentation for Ignite CLI. +## ignite + +Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + +**Synopsis** + +Ignite CLI is a tool for creating sovereign blockchains built with Cosmos SDK, the world’s +most popular modular blockchain framework. Ignite CLI offers everything you need to scaffold, +test, build, and launch your blockchain. 
+ +To get started, create a blockchain: + + ignite scaffold chain example + + +**Options** + +``` + -h, --help help for ignite +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts +* [ignite app](#ignite-app) - Create and manage Ignite Apps +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node +* [ignite completion](#ignite-completion) - Generates shell completion script. +* [ignite docs](#ignite-docs) - Show Ignite CLI docs +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite node](#ignite-node) - Make requests to a live blockchain node +* [ignite relayer](#ignite-relayer) - Connect blockchains with an IBC relayer +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more +* [ignite tools](#ignite-tools) - Tools for advanced users +* [ignite version](#ignite-version) - Print the current build information + + +## ignite account + +Create, delete, and show Ignite accounts + +**Synopsis** + +Commands for managing Ignite accounts. An Ignite account is a private/public +keypair stored in a keyring. Currently Ignite accounts are used when interacting +with Ignite relayer commands and when using "ignite network" commands. + +Note: Ignite account commands are not for managing your chain's keys and accounts. Use +you chain's binary to manage accounts from "config.yml". For example, if your +blockchain is called "mychain", use "mychaind keys" to manage keys for the +chain. 
+ + +**Options** + +``` + -h, --help help for account + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite account create](#ignite-account-create) - Create a new account +* [ignite account delete](#ignite-account-delete) - Delete an account by name +* [ignite account export](#ignite-account-export) - Export an account as a private key +* [ignite account import](#ignite-account-import) - Import an account by using a mnemonic or a private key +* [ignite account list](#ignite-account-list) - Show a list of all accounts +* [ignite account show](#ignite-account-show) - Show detailed information about a particular account + + +## ignite account create + +Create a new account + +``` +ignite account create [name] [flags] +``` + +**Options** + +``` + -h, --help help for create +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account delete + +Delete an account by name + +``` +ignite account delete [name] [flags] +``` + +**Options** + +``` + -h, --help help for delete +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account export + +Export an account as a private key + +``` +ignite account export [name] 
[flags] +``` + +**Options** + +``` + -h, --help help for export + --non-interactive do not enter into interactive mode + --passphrase string passphrase to encrypt the exported key + --path string path to export private key. default: ./key_[name] +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account import + +Import an account by using a mnemonic or a private key + +``` +ignite account import [name] [flags] +``` + +**Options** + +``` + -h, --help help for import + --non-interactive do not enter into interactive mode + --passphrase string passphrase to decrypt the imported key (ignored when secret is a mnemonic) + --secret string Your mnemonic or path to your private key (use interactive mode instead to securely pass your mnemonic) +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account list + +Show a list of all accounts + +``` +ignite account list [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + -h, --help help for list +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account show + +Show detailed 
information about a particular account + +``` +ignite account show [name] [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite app + +Create and manage Ignite Apps + +**Options** + +``` + -h, --help help for app +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite app describe](#ignite-app-describe) - Print information about installed apps +* [ignite app install](#ignite-app-install) - Install app +* [ignite app list](#ignite-app-list) - List installed apps +* [ignite app scaffold](#ignite-app-scaffold) - Scaffold a new Ignite App +* [ignite app uninstall](#ignite-app-uninstall) - Uninstall app +* [ignite app update](#ignite-app-update) - Update app + + +## ignite app describe + +Print information about installed apps + +**Synopsis** + +Print information about an installed Ignite App commands and hooks. + +``` +ignite app describe [path] [flags] +``` + +**Examples** + +``` +ignite app describe github.com/org/my-app/ +``` + +**Options** + +``` + -h, --help help for describe +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app install + +Install app + +**Synopsis** + +Installs an Ignite App. + +Respects key value pairs declared after the app path to be added to the generated configuration definition. + +``` +ignite app install [path] [key=value]... 
[flags] +``` + +**Examples** + +``` +ignite app install github.com/org/my-app/ foo=bar baz=qux +``` + +**Options** + +``` + -g, --global use global plugins configuration ($HOME/.ignite/apps/igniteapps.yml) + -h, --help help for install +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app list + +List installed apps + +**Synopsis** + +Prints status and information of all installed Ignite Apps. + +``` +ignite app list [flags] +``` + +**Options** + +``` + -h, --help help for list +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app scaffold + +Scaffold a new Ignite App + +**Synopsis** + +Scaffolds a new Ignite App in the current directory. + +A git repository will be created with the given module name, unless the current directory is already a git repository. + +``` +ignite app scaffold [name] [flags] +``` + +**Examples** + +``` +ignite app scaffold github.com/org/my-app/ +``` + +**Options** + +``` + -h, --help help for scaffold +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app uninstall + +Uninstall app + +**Synopsis** + +Uninstalls an Ignite App specified by path. + +``` +ignite app uninstall [path] [flags] +``` + +**Examples** + +``` +ignite app uninstall github.com/org/my-app/ +``` + +**Options** + +``` + -g, --global use global plugins configuration ($HOME/.ignite/apps/igniteapps.yml) + -h, --help help for uninstall +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app update + +Update app + +**Synopsis** + +Updates an Ignite App specified by path. + +If no path is specified all declared apps are updated. 
+ +``` +ignite app update [path] [flags] +``` + +**Examples** + +``` +ignite app update github.com/org/my-app/ +``` + +**Options** + +``` + -h, --help help for update +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite chain + +Build, init and start a blockchain node + +**Synopsis** + +Commands in this namespace let you to build, initialize, and start your +blockchain node locally for development purposes. + +To run these commands you should be inside the project's directory so that +Ignite can find the source code. To ensure that you are, run "ls", you should +see the following files in the output: "go.mod", "x", "proto", "app", etc. + +By default the "build" command will identify the "main" package of the project, +install dependencies if necessary, set build flags, compile the project into a +binary and install the binary. The "build" command is useful if you just want +the compiled binary, for example, to initialize and start the chain manually. It +can also be used to release your chain's binaries automatically as part of +continuous integration workflow. + +The "init" command will build the chain's binary and use it to initialize a +local validator node. By default the validator node will be initialized in your +$HOME directory in a hidden directory that matches the name of your project. +This directory is called a data directory and contains a chain's genesis file +and a validator key. This command is useful if you want to quickly build and +initialize the data directory and use the chain's binary to manually start the +blockchain. The "init" command is meant only for development purposes, not +production. + +The "serve" command builds, initializes, and starts your blockchain locally with +a single validator node for development purposes. "serve" also watches the +source code directory for file changes and intelligently +re-builds/initializes/starts the chain, essentially providing "code-reloading". 
+The "serve" command is meant only for development purposes, not production. + +To distinguish between production and development consider the following. + +In production, blockchains often run the same software on many validator nodes +that are run by different people and entities. To launch a blockchain in +production, the validator entities coordinate the launch process to start their +nodes simultaneously. + +During development, a blockchain can be started locally on a single validator +node. This convenient process lets you restart a chain quickly and iterate +faster. Starting a chain on a single node in development is similar to starting +a traditional web application on a local server. + +The "faucet" command lets you send tokens to an address from the "faucet" +account defined in "config.yml". Alternatively, you can use the chain's binary +to send token from any other account that exists on chain. + +The "simulate" command helps you start a simulation testing process for your +chain. 
+ + +**Options** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -h, --help help for chain + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite chain build](#ignite-chain-build) - Build a node binary +* [ignite chain debug](#ignite-chain-debug) - Launch a debugger for a blockchain app +* [ignite chain faucet](#ignite-chain-faucet) - Send coins to an account +* [ignite chain init](#ignite-chain-init) - Initialize your chain +* [ignite chain serve](#ignite-chain-serve) - Start a blockchain node in development +* [ignite chain simulate](#ignite-chain-simulate) - Run simulation testing for the blockchain + + +## ignite chain build + +Build a node binary + +**Synopsis** + + +The build command compiles the source code of the project into a binary and +installs the binary in the $(go env GOPATH)/bin directory. + +You can customize the output directory for the binary using a flag: + + ignite chain build --output dist + +To compile the binary Ignite first compiles protocol buffer (proto) files into +Go source code. Proto files contain required type and services definitions. If +you're using another program to compile proto files, you can use a flag to tell +Ignite to skip the proto compilation step: + + ignite chain build --skip-proto + +Afterwards, Ignite install dependencies specified in the go.mod file. By default +Ignite doesn't check that dependencies of the main module stored in the module +cache have not been modified since they were downloaded. To enforce dependency +checking (essentially, running "go mod verify") use a flag: + + ignite chain build --check-dependencies + +Next, Ignite identifies the "main" package of the project. By default the "main" +package is located in "cmd/{app}d" directory, where "{app}" is the name of the +scaffolded project and "d" stands for daemon. 
If your project contains more +than one "main" package, specify the path to the one that Ignite should compile +in config.yml: + + build: + main: custom/path/to/main + +By default the binary name will match the top-level module name (specified in +go.mod) with a suffix "d". This can be customized in config.yml: + + build: + binary: mychaind + +You can also specify custom linker flags: + + build: + ldflags: + - "-X main.Version=development" + - "-X main.Date=01/05/2022T19:54" + +To build binaries for a release, use the --release flag. The binaries for one or +more specified release targets are built in a "release/" directory in the +project's source directory. Specify the release targets with GOOS:GOARCH build +tags. If the optional --release.targets is not specified, a binary is created +for your current environment. + + ignite chain build --release -t linux:amd64 -t darwin:amd64 -t darwin:arm64 + + +``` +ignite chain build [flags] +``` + +**Options** + +``` + --build.tags strings parameters to build the chain binary + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --debug build a debug binary + -h, --help help for build + -o, --output string binary output path + -p, --path string path of the app (default ".") + --release build for a release + --release.prefix string tarball prefix for each release target. Available only with --release flag + -t, --release.targets strings release targets. 
Available only with --release flag + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain debug + +Launch a debugger for a blockchain app + +**Synopsis** + +The debug command starts a debug server and launches a debugger. + +Ignite uses the Delve debugger by default. Delve enables you to interact with +your program by controlling the execution of the process, evaluating variables, +and providing information of thread / goroutine state, CPU register state and +more. + +A debug server can optionally be started in cases where default terminal client +is not desirable. When the server starts it first runs the blockchain app, +attaches to it and finally waits for a client connection. It accepts both +JSON-RPC or DAP client connections. + +To start a debug server use the following flag: + + ignite chain debug --server + +To start a debug server with a custom address use the following flags: + + ignite chain debug --server --server-address 127.0.0.1:30500 + +The debug server stops automatically when the client connection is closed. 
+ + +``` +ignite chain debug [flags] +``` + +**Options** + +``` + -h, --help help for debug + -p, --path string path of the app (default ".") + --server start a debug server + --server-address string debug server address (default "127.0.0.1:30500") +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain faucet + +Send coins to an account + +``` +ignite chain faucet [address] [coin<,...>] [flags] +``` + +**Options** + +``` + -h, --help help for faucet + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain init + +Initialize your chain + +**Synopsis** + +The init command compiles and installs the binary (like "ignite chain build") +and uses that binary to initialize the blockchain's data directory for one +validator. To learn how the build process works, refer to "ignite chain build +--help". + +By default, the data directory will be initialized in $HOME/.mychain, where +"mychain" is the name of the project. To set a custom data directory use the +--home flag or set the value in config.yml: + + validators: + - name: alice + bonded: '100000000stake' + home: "~/.customdir" + +The data directory contains three files in the "config" directory: app.toml, +config.toml, client.toml. These files let you customize the behavior of your +blockchain node and the client executable. 
When a chain is re-initialized the +data directory can be reset. To make some values in these files persistent, set +them in config.yml: + + validators: + - name: alice + bonded: '100000000stake' + app: + minimum-gas-prices: "0.025stake" + config: + consensus: + timeout_commit: "5s" + timeout_propose: "5s" + client: + output: "json" + +The configuration above changes the minimum gas price of the validator (by +default the gas price is set to 0 to allow "free" transactions), sets the block +time to 5s, and changes the output format to JSON. To see what kind of values +this configuration accepts see the generated TOML files in the data directory. + +As part of the initialization process Ignite creates on-chain accounts with +token balances. By default, config.yml has two accounts in the top-level +"accounts" property. You can add more accounts and change their token balances. +Refer to config.yml guide to see which values you can set. + +One of these accounts is a validator account and the amount of self-delegated +tokens can be set in the top-level "validator" property. + +One of the most important components of an initialized chain is the genesis +file, the 0th block of the chain. The genesis file is stored in the data +directory "config" subdirectory and contains the initial state of the chain, +including consensus and module parameters. You can customize the values of the +genesis in config.yml: + + genesis: + app_state: + staking: + params: + bond_denom: "foo" + +The example above changes the staking token to "foo". If you change the staking +denom, make sure the validator account has the right tokens. + +The init command is meant to be used ONLY FOR DEVELOPMENT PURPOSES. Under the +hood it runs commands like "appd init", "appd add-genesis-account", "appd +gentx", and "appd collect-gentx". For production, you may want to run these +commands manually to ensure a production-level node initialization. 
+ + +``` +ignite chain init [flags] +``` + +**Options** + +``` + --build.tags strings parameters to build the chain binary + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --debug build a debug binary + -h, --help help for init + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + --skip-proto skip file generation from proto +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain serve + +Start a blockchain node in development + +**Synopsis** + +The serve command compiles and installs the binary (like "ignite chain build"), +uses that binary to initialize the blockchain's data directory for one validator +(like "ignite chain init"), and starts the node locally for development purposes +with automatic code reloading. + +Automatic code reloading means Ignite starts watching the project directory. +Whenever a file change is detected, Ignite automatically rebuilds, reinitializes +and restarts the node. + +Whenever possible Ignite will try to keep the current state of the chain by +exporting and importing the genesis file. + +To force Ignite to start from a clean slate even if a genesis file exists, use +the following flag: + + ignite chain serve --reset-once + +To force Ignite to reset the state every time the source code is modified, use +the following flag: + + ignite chain serve --force-reset + +With Ignite it's possible to start more than one blockchain from the same source +code using different config files. 
This is handy if you're building +inter-blockchain functionality and, for example, want to try sending packets +from one blockchain to another. To start a node using a specific config file: + + ignite chain serve --config mars.yml + +The serve command is meant to be used ONLY FOR DEVELOPMENT PURPOSES. Under the +hood, it runs "appd start", where "appd" is the name of your chain's binary. For +production, you may want to run "appd start" manually. + + +``` +ignite chain serve [flags] +``` + +**Options** + +``` + --build.tags strings parameters to build the chain binary + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + -f, --force-reset force reset of the app state on start and every source change + --generate-clients generate code for the configured clients on reset or source code change + -h, --help help for serve + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + --quit-on-fail quit program if the app fails to start + -r, --reset-once reset the app state once on init + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain simulate + +Run simulation testing for the blockchain + +**Synopsis** + +Run simulation testing for the blockchain. 
It sends many randomized-input messages of each module to a simulated node and checks if invariants break + +``` +ignite chain simulate [flags] +``` + +**Options** + +``` + --blockSize int operations per block (default 30) + --exportParamsHeight int height to which export the randomly generated params + --exportParamsPath string custom file path to save the exported params JSON + --exportStatePath string custom file path to save the exported app state JSON + --exportStatsPath string custom file path to save the exported simulation statistics JSON + --genesis string custom simulation genesis file; cannot be used with params file + --genesisTime int override genesis UNIX time instead of using a random UNIX time + -h, --help help for simulate + --initialBlockHeight int initial block to start the simulation (default 1) + --lean lean simulation log output + --numBlocks int number of new blocks to simulate from the initial block height (default 200) + --params string custom simulation params file which overrides any random params; cannot be used with genesis + --period uint run slow invariants only once every period assertions + --printAllInvariants print all invariants if a broken invariant is found + --seed int simulation random seed (default 42) + --simulateEveryOperation run slow invariants every operation + -v, --verbose verbose log output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite completion + +Generates shell completion script. 
+ +``` +ignite completion [bash|zsh|fish|powershell] [flags] +``` + +**Options** + +``` + -h, --help help for completion +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + + +## ignite docs + +Show Ignite CLI docs + +``` +ignite docs [flags] +``` + +**Options** + +``` + -h, --help help for docs +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + + +## ignite generate + +Generate clients, API docs from source code + +**Synopsis** + +Generate clients, API docs from source code. + +Such as compiling protocol buffer files into Go or implement particular +functionality, for example, generating an OpenAPI spec. + +Produced source code can be regenerated by running a command again and is not +meant to be edited by hand. + + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -h, --help help for generate + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite generate composables](#ignite-generate-composables) - TypeScript frontend client and Vue 3 composables +* [ignite generate hooks](#ignite-generate-hooks) - TypeScript frontend client and React hooks +* [ignite generate openapi](#ignite-generate-openapi) - OpenAPI spec for your chain +* [ignite generate proto-go](#ignite-generate-proto-go) - Compile protocol buffer files to Go source code required by Cosmos SDK +* [ignite generate ts-client](#ignite-generate-ts-client) - TypeScript frontend client +* [ignite generate vuex](#ignite-generate-vuex) - *DEPRECATED* TypeScript frontend client and Vuex stores + + +## ignite generate composables + +TypeScript frontend client and Vue 3 composables + +``` +ignite 
generate composables [flags] +``` + +**Options** + +``` + -h, --help help for composables + -o, --output string Vue 3 composables output path + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate hooks + +TypeScript frontend client and React hooks + +``` +ignite generate hooks [flags] +``` + +**Options** + +``` + -h, --help help for hooks + -o, --output string React hooks output path + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate openapi + +OpenAPI spec for your chain + +``` +ignite generate openapi [flags] +``` + +**Options** + +``` + -h, --help help for openapi + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate proto-go + +Compile protocol buffer files to Go source code required by Cosmos SDK + +``` +ignite generate proto-go [flags] +``` + +**Options** + +``` + -h, --help help for proto-go + -y, --yes answers interactive yes/no questions with yes +``` + 
+**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate ts-client + +TypeScript frontend client + +**Synopsis** + +Generate a framework agnostic TypeScript client for your blockchain project. + +By default the TypeScript client is generated in the "ts-client/" directory. You +can customize the output directory in config.yml: + + client: + typescript: + path: new-path + +Output can also be customized by using a flag: + + ignite generate ts-client --output new-path + +TypeScript client code can be automatically regenerated on reset or source code +changes when the blockchain is started with a flag: + + ignite chain serve --generate-clients + + +``` +ignite generate ts-client [flags] +``` + +**Options** + +``` + -h, --help help for ts-client + -o, --output string TypeScript client output path + --use-cache use build cache to speed-up generation + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate vuex + +*DEPRECATED* TypeScript frontend client and Vuex stores + +``` +ignite generate vuex [flags] +``` + +**Options** + +``` + -h, --help help for vuex + -o, --output string Vuex store output path + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package 
vendor for missing Buf dependencies + -p, --path string path of the app (default ".") +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite network + +Launch a blockchain in production + +**Synopsis** + + +Ignite Network commands allow to coordinate the launch of sovereign Cosmos blockchains. + +To launch a Cosmos blockchain you need someone to be a coordinator and others to +be validators. These are just roles, anyone can be a coordinator or a validator. +A coordinator publishes information about a chain to be launched on the Ignite +blockchain, approves validator requests and coordinates the launch. Validators +send requests to join a chain and start their nodes when a blockchain is ready +for launch. + +To publish the information about your chain as a coordinator run the following +command (the URL should point to a repository with a Cosmos SDK chain): + + ignite network chain publish github.com/ignite/example + +This command will return a launch identifier you will be using in the following +commands. Let's say this identifier is 42. + +Next, ask validators to initialize their nodes and request to join the network +as validators. For a testnet you can use the default values suggested by the +CLI. + + ignite network chain init 42 + + ignite network chain join 42 --amount 95000000stake + +As a coordinator list all validator requests: + + ignite network request list 42 + +Approve validator requests: + + ignite network request approve 42 1,2 + +Once you've approved all validators you need in the validator set, announce that +the chain is ready for launch: + + ignite network chain launch 42 + +Validators can now prepare their nodes for launch: + + ignite network chain prepare 42 + +The output of this command will show a command that a validator would use to +launch their node, for example “exampled --home ~/.example”. After enough +validators launch their nodes, a blockchain will be live. 
+ + +**Options** + +``` + -h, --help help for network + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch +* [ignite network coordinator](#ignite-network-coordinator) - Show and update a coordinator profile +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests +* [ignite network tool](#ignite-network-tool) - Commands to run subsidiary tools +* [ignite network validator](#ignite-network-validator) - Show and update a validator profile +* [ignite network version](#ignite-network-version) - Version of the plugin + + +## ignite network chain + +Publish a chain, join as a validator and prepare node for launch + +**Synopsis** + +The "chain" namespace features the most commonly used commands for launching +blockchains with Ignite. + +As a coordinator you "publish" your blockchain to Ignite. When enough validators +are approved for the genesis and no changes are excepted to be made to the +genesis, a coordinator announces that the chain is ready for launch with the +"launch" command. In the case of an unsuccessful launch, the coordinator can revert it +using the "revert-launch" command. + +As a validator, you "init" your node and apply to become a validator for a +blockchain with the "join" command. After the launch of the chain is announced, +validators can generate the finalized genesis and download the list of peers with the +"prepare" command. + +The "install" command can be used to download, compile the source code and +install the chain's binary locally. 
The binary can be used, for example, to +initialize a validator node or to interact with the chain after it has been +launched. + +All chains published to Ignite can be listed by using the "list" command. + + +**Options** + +``` + -h, --help help for chain +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network chain init](#ignite-network-chain-init) - Initialize a chain from a published chain ID +* [ignite network chain install](#ignite-network-chain-install) - Install chain binary for a launch +* [ignite network chain join](#ignite-network-chain-join) - Request to join a network as a validator +* [ignite network chain launch](#ignite-network-chain-launch) - Trigger the launch of a chain +* [ignite network chain list](#ignite-network-chain-list) - List published chains +* [ignite network chain prepare](#ignite-network-chain-prepare) - Prepare the chain for launch +* [ignite network chain publish](#ignite-network-chain-publish) - Publish a new chain to start a new network +* [ignite network chain revert-launch](#ignite-network-chain-revert-launch) - Revert launch of a network as a coordinator +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain init + +Initialize a chain from a published chain ID + +**Synopsis** + +Ignite network chain init is a command used by validators to initialize a +validator node for a blockchain from the information stored on the Ignite chain. + + ignite network chain init 42 + +This command fetches the information about a chain with launch ID 42. 
The source +code of the chain is cloned in a temporary directory, and the node's binary is +compiled from the source. The binary is then used to initialize the node. By +default, Ignite uses "~/spn/[launch-id]/" as the home directory for the blockchain. + +An important part of initializing a validator node is creation of the gentx (a +transaction that adds a validator at the genesis of the chain). + +The "init" command will prompt for values like self-delegation and commission. +These values will be used in the validator's gentx. You can use flags to provide +the values in non-interactive mode. + +Use the "--home" flag to choose a different path for the home directory of the +blockchain: + + ignite network chain init 42 --home ~/mychain + +The end result of the "init" command is a validator home directory with a +genesis validator transaction (gentx) file. + +``` +ignite network chain init [launch-id] [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for init + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --validator-account string account for the chain validator (default "default") + --validator-details string details about the validator + --validator-gas-price string validator gas price + --validator-identity string validator identity signature (ex. 
UPort or Keybase) + --validator-moniker string custom validator moniker + --validator-security-contact string validator security contact email + --validator-self-delegation string validator minimum self delegation + --validator-website string associate a website with the validator + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain install + +Install chain binary for a launch + +``` +ignite network chain install [launch-id] [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for install +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain join + +Request to join a network as a validator + +**Synopsis** + +The "join" command is used by validators to send a request to join a blockchain. +The required argument is a launch ID of a blockchain. 
The "join" command expects +that the validator has already setup a home directory for the blockchain and has +a gentx either by running "ignite network chain init" or initializing the data +directory manually with the chain's binary. + +By default the "join" command just sends the request to join as a validator. +However, often a validator also needs to request an genesis account with a token +balance to afford self-delegation. + +The following command will send a request to join blockchain with launch ID 42 +as a validator and request to be added as an account with a token balance of +95000000 STAKE. + + ignite network chain join 42 --amount 95000000stake + +A request to join as a validator contains a gentx file. Ignite looks for gentx +in a home directory used by "ignite network chain init" by default. To use a +different directory, use the "--home" flag or pass a gentx file directly with +the "--gentx" flag. + +To join a chain as a validator, you must provide the IP address of your node so +that other validators can connect to it. The join command will ask you for the +IP address and will attempt to automatically detect and fill in the value. If +you want to manually specify the IP address, you can use the "--peer-address" +flag: + + ignite network chain join 42 --peer-address 0.0.0.0 + +Since "join" broadcasts a transaction to the Ignite blockchain, you will need an +account on the Ignite blockchain. During the testnet phase, however, Ignite +automatically requests tokens from a faucet. 
+ + +``` +ignite network chain join [launch-id] [flags] +``` + +**Options** + +``` + --amount string amount of coins for account request (ignored if coordinator has fixed the account balances or if --no-acount flag is set) + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --from string account name to use for sending transactions to SPN (default "default") + --gentx string path to a gentx json file + -h, --help help for join + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --no-account prevent sending a request for a genesis account + --peer-address string peer's address + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain launch + +Trigger the launch of a chain + +**Synopsis** + +The launch command communicates to the world that the chain is ready to be +launched. + +Only the coordinator of the chain can execute the launch command. + + ignite network chain launch 42 + +After the launch command is executed no changes to the genesis are accepted. For +example, validators will no longer be able to successfully execute the "ignite +network chain join" command to apply as a validator. + +The launch command sets the date and time after which the chain will start. By +default, the current time is set. 
To give validators more time to prepare for +the launch, set the time with the "--launch-time" flag: + + ignite network chain launch 42 --launch-time 2023-01-01T00:00:00Z + +After the launch command is executed, validators can generate the finalized +genesis and prepare their nodes for the launch. For example, validators can run +"ignite network chain prepare" to generate the genesis and populate the peer +list. + +If you want to change the launch time or open up the genesis file for changes +you can use "ignite network chain revert-launch" to make it possible, for +example, to accept new validators and add accounts. + + +``` +ignite network chain launch [launch-id] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for launch + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --launch-time string timestamp the chain is effectively launched (example "2022-01-01T00:00:00Z") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain list + +List published chains + +``` +ignite network chain list [flags] +``` + +**Options** + +``` + --advanced show advanced information about the chains + -h, --help help for list + --limit uint limit of results per page (default 100) + --page uint page for chain list result (default 1) +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN 
network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain prepare + +Prepare the chain for launch + +**Synopsis** + +The prepare command prepares a validator node for the chain launch by generating +the final genesis and adding IP addresses of peers to the validator's +configuration file. + + ignite network chain prepare 42 + +By default, Ignite uses "$HOME/spn/LAUNCH_ID" as the data directory. If you used +a different data directory when initializing the node, use the "--home" flag and +set the correct path to the data directory. + +Ignite generates the genesis file in "config/genesis.json" and adds peer IPs by +modifying "config/config.toml". + +The prepare command should be executed after the coordinator has triggered the +chain launch and finalized the genesis with "ignite network chain launch". You +can force Ignite to run the prepare command without checking if the launch has +been triggered with the "--force" flag (this is not recommended). + +After the prepare command is executed the node is ready to be started. 
+ + +``` +ignite network chain prepare [launch-id] [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + -f, --force force the prepare command to run even if the chain is not launched + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for prepare + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain publish + +Publish a new chain to start a new network + +**Synopsis** + +To begin the process of launching a blockchain with Ignite, a coordinator needs +to publish the information about a blockchain. The only required bit of +information is the URL of the source code of the blockchain. + +The following command publishes the information about an example blockchain: + + ignite network chain publish github.com/ignite/example + +This command fetches the source code of the blockchain, compiles the binary, +verifies that a blockchain can be started with the binary, and publishes the +information about the blockchain to Ignite. Currently, only public repositories +are supported. The command returns an integer number that acts as an identifier +of the chain on Ignite. 
+ +By publishing a blockchain on Ignite you become the "coordinator" of this +blockchain. A coordinator is an account that has the authority to approve and +reject validator requests, set parameters of the blockchain and trigger the +launch of the chain. + +The default Git branch is used when publishing a chain. If you want to use a +specific branch, tag or a commit hash, use "--branch", "--tag", or "--hash" +flags respectively. + +The repository name is used as the default chain ID. Ignite does not ensure that +chain IDs are unique, but they have to have a valid format: [string]-[integer]. +To set a custom chain ID use the "--chain-id" flag. + + ignite network chain publish github.com/ignite/example --chain-id foo-1 + +Once the chain is published users can request accounts with coin balances to be +added to the chain's genesis. By default, users are free to request any number +of tokens. If you want all users requesting tokens to get the same amount, use +the "--account-balance" flag with a list of coins. 
+ + ignite network chain publish github.com/ignite/example --account-balance 2000foocoin + + +``` +ignite network chain publish [source-url] [flags] +``` + +**Options** + +``` + --account-balance string balance for each approved genesis account for the chain + --amount string amount of coins for account request + --branch string Git branch to use for the repo + --chain-id string chain ID to use for this network + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + --genesis-config string name of an Ignite config file in the repo for custom Genesis + --genesis-url string URL to a custom Genesis + --hash string Git hash to use for the repo + -h, --help help for publish + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --mainnet initialize a mainnet project + --metadata string add chain metadata + --no-check skip verifying chain's integrity + --project uint project ID to use for this network + --reward.coins string reward coins + --reward.height int last reward height + --shares string add shares for the project + --tag string Git tag to use for the repo + --total-supply string add a total of the mainnet of a project + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and 
prepare node for launch + + +## ignite network chain revert-launch + +Revert launch of a network as a coordinator + +**Synopsis** + +The revert launch command reverts the previously scheduled launch of a chain. + +Only the coordinator of the chain can execute the launch command. + + ignite network chain revert-launch 42 + +After the revert launch command is executed, changes to the genesis of the chain +are allowed again. For example, validators will be able to request to join the +chain. Revert launch also resets the launch time. + + +``` +ignite network chain revert-launch [launch-id] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for revert-launch + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch + + +## ignite network chain show + +Show details of a chain + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain](#ignite-network-chain) - Publish a chain, join as a validator and prepare node for launch +* [ignite network chain show 
accounts](#ignite-network-chain-show-accounts) - Show all vesting and genesis accounts of the chain +* [ignite network chain show genesis](#ignite-network-chain-show-genesis) - Show the chain genesis file +* [ignite network chain show info](#ignite-network-chain-show-info) - Show info details of the chain +* [ignite network chain show peers](#ignite-network-chain-show-peers) - Show peers list of the chain +* [ignite network chain show validators](#ignite-network-chain-show-validators) - Show all validators of the chain + + +## ignite network chain show accounts + +Show all vesting and genesis accounts of the chain + +``` +ignite network chain show accounts [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for accounts + --prefix string account address prefix (default "spn") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show genesis + +Show the chain genesis file + +``` +ignite network chain show genesis [launch-id] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for genesis + --out string path to output Genesis file (default "./genesis.json") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show info + 
+Show info details of the chain + +``` +ignite network chain show info [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for info +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show peers + +Show peers list of the chain + +``` +ignite network chain show peers [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for peers + --out string path to output peers list (default "./peers.txt") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network chain show validators + +Show all validators of the chain + +``` +ignite network chain show validators [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for validators + --prefix string account address prefix (default "spn") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network chain show](#ignite-network-chain-show) - Show details of a chain + + +## ignite network coordinator + +Show and update a coordinator 
profile + +**Options** + +``` + -h, --help help for coordinator +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network coordinator set](#ignite-network-coordinator-set) - Set an information in a coordinator profile +* [ignite network coordinator show](#ignite-network-coordinator-show) - Show a coordinator profile + + +## ignite network coordinator set + +Set an information in a coordinator profile + +**Synopsis** + +Coordinators on Ignite can set a profile containing a description for the coordinator. +The coordinator set command allows to set information for the coordinator. +The following information can be set: +- details: general information about the coordinator. +- identity: a piece of information to verify the identity of the coordinator with a system like Keybase or Veramo. +- website: website of the coordinator. 
+ + +``` +ignite network coordinator set details|identity|website [value] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for set + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network coordinator](#ignite-network-coordinator) - Show and update a coordinator profile + + +## ignite network coordinator show + +Show a coordinator profile + +``` +ignite network coordinator show [address] [flags] +``` + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network coordinator](#ignite-network-coordinator) - Show and update a coordinator profile + + +## ignite network request + +Create, show, reject and approve requests + +**Synopsis** + +The "request" namespace contains commands for creating, showing, approving, and +rejecting requests. + +A request is mechanism in Ignite that allows changes to be made to the genesis +file like adding accounts with token balances and validators. Anyone can submit +a request, but only the coordinator of a chain can approve or reject a request. 
+ +Each request has a status: + +* Pending: waiting for the approval of the coordinator +* Approved: approved by the coordinator, its content has been applied to the + launch information +* Rejected: rejected by the coordinator or the request creator + + +**Options** + +``` + -h, --help help for request +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network request add-account](#ignite-network-request-add-account) - Send request to add account +* [ignite network request approve](#ignite-network-request-approve) - Approve requests +* [ignite network request change-param](#ignite-network-request-change-param) - Send request to change a module param +* [ignite network request list](#ignite-network-request-list) - List all requests for a chain +* [ignite network request reject](#ignite-network-request-reject) - Reject requests +* [ignite network request remove-account](#ignite-network-request-remove-account) - Send request to remove a genesis account +* [ignite network request remove-validator](#ignite-network-request-remove-validator) - Send request to remove a validator +* [ignite network request show](#ignite-network-request-show) - Show detailed information about a request +* [ignite network request verify](#ignite-network-request-verify) - Verify the request and simulate the chain genesis from them + + +## ignite network request add-account + +Send request to add account + +**Synopsis** + +The "add account" command creates a new request to add an account with a given +address and a specified coin balance to the genesis of the chain. 
+ +The request automatically fails to be applied if a genesis account or a vesting +account with an identical address is already specified in the launch +information. + +If a coordinator has specified that all genesis accounts on a chain should have +the same balance (useful for testnets, for example), the "add account" expects +only an address as an argument. Attempt to provide a token balance will result +in an error. + + +``` +ignite network request add-account [launch-id] [address] [coins] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for add-account + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request approve + +Approve requests + +**Synopsis** + +The "approve" command is used by a chain's coordinator to approve requests. +Multiple requests can be approved using a comma-separated list and/or using a +dash syntax. + + ignite network request approve 42 1,2,3-6,7,8 + +The command above approves requests with IDs from 1 to 8 included on a chain +with a launch ID 42. + +When requests are approved Ignite applies the requested changes and simulates +initializing and launching the chain locally. If the chain starts successfully, +requests are considered to be "verified" and are approved. 
If one or more +requested changes stop the chain from launching locally, the verification +process fails and the approval of all requests is canceled. To skip the +verification process use the "--no-verification" flag. + +Note that Ignite will try to approve requests in the same order as request IDs +are submitted to the "approve" command. + +``` +ignite network request approve [launch-id] [number<,...>] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for approve + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --no-verification approve the requests without verifying them +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request change-param + +Send request to change a module param + +``` +ignite network request change-param [launch-id] [module-name] [param-name] [value (json, string, number)] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for change-param + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options 
inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request list + +List all requests for a chain + +``` +ignite network request list [launch-id] [flags] +``` + +**Options** + +``` + -h, --help help for list + --prefix string account address prefix (default "spn") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request reject + +Reject requests + +**Synopsis** + +The "reject" command is used by a chain's coordinator to reject requests. + + ignite network request reject 42 1,2,3-6,7,8 + +The syntax of the "reject" command is similar to that of the "approve" command. 
+ + +``` +ignite network request reject [launch-id] [number<,...>] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for reject + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request remove-account + +Send request to remove a genesis account + +``` +ignite network request remove-account [launch-id] [address] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for remove-account + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request remove-validator + +Send request to remove a 
validator + +``` +ignite network request remove-validator [launch-id] [address] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for remove-validator + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request show + +Show detailed information about a request + +``` +ignite network request show [launch-id] [request-id] [flags] +``` + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network request verify + +Verify the request and simulate the chain genesis from them + +**Synopsis** + +The "verify" command applies selected requests to the genesis of a chain locally +to verify that approving these requests will result in a valid genesis that +allows a chain to launch without issues. This command does not approve requests, +only checks them. 
+ + +``` +ignite network request verify [launch-id] [number<,...>] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for verify + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network request](#ignite-network-request) - Create, show, reject and approve requests + + +## ignite network tool + +Commands to run subsidiary tools + +**Options** + +``` + -h, --help help for tool +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network tool proxy-tunnel](#ignite-network-tool-proxy-tunnel) - Setup a proxy tunnel via HTTP + + +## ignite network tool proxy-tunnel + +Setup a proxy tunnel via HTTP + +**Synopsis** + +Starts an HTTP proxy server and HTTP proxy clients for each node that +needs HTTP tunneling. + +HTTP tunneling is activated **ONLY** if SPN_CONFIG_FILE has "tunneled_peers" +field inside with a list of tunneled peers/nodes. 
+ +If you're using SPN as coordinator and do not want to allow HTTP tunneling +feature at all, you can prevent "spn.yml" file to being generated by not +approving validator requests that has HTTP tunneling enabled instead of plain +TCP connections. + +``` +ignite network tool proxy-tunnel SPN_CONFIG_FILE [flags] +``` + +**Options** + +``` + -h, --help help for proxy-tunnel +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network tool](#ignite-network-tool) - Commands to run subsidiary tools + + +## ignite network validator + +Show and update a validator profile + +**Options** + +``` + -h, --help help for validator +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production +* [ignite network validator set](#ignite-network-validator-set) - Set an information in a validator profile +* [ignite network validator show](#ignite-network-validator-show) - Show a validator profile + + +## ignite network validator set + +Set an information in a validator profile + +**Synopsis** + +Validators on Ignite can set a profile containing a description for the validator. +The validator set command allows to set information for the validator. +The following information can be set: +- details: general information about the validator. +- identity: piece of information to verify identity of the validator with a system like Keybase of Veramo. 
+- website: website of the validator. +- security: security contact for the validator. + + +``` +ignite network validator set details|identity|website|security [value] [flags] +``` + +**Options** + +``` + --from string account name to use for sending transactions to SPN (default "default") + -h, --help help for set + --home string home directory used for blockchains + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network validator](#ignite-network-validator) - Show and update a validator profile + + +## ignite network validator show + +Show a validator profile + +``` +ignite network validator show [address] [flags] +``` + +**Options** + +``` + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network validator](#ignite-network-validator) - Show and update a validator profile + + +## ignite network version + +Version of the plugin + +**Synopsis** + +The version of the plugin to use to interact with a chain might be specified by the coordinator. 
+ + +``` +ignite network version [flags] +``` + +**Options** + +``` + -h, --help help for version +``` + +**Options inherited from parent commands** + +``` + --local Use local SPN network + --nightly Use nightly SPN network + --spn-faucet-address string SPN faucet address (default "https://faucet.devnet.ignite.com:443") + --spn-node-address string SPN node address (default "https://rpc.devnet.ignite.com:443") +``` + +**SEE ALSO** + +* [ignite network](#ignite-network) - Launch a blockchain in production + + +## ignite node + +Make requests to a live blockchain node + +**Options** + +``` + -h, --help help for node + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.directory:443/cosmoshub") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite node query](#ignite-node-query) - Querying subcommands +* [ignite node tx](#ignite-node-tx) - Transactions subcommands + + +## ignite node query + +Querying subcommands + +**Options** + +``` + -h, --help help for query +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.directory:443/cosmoshub") +``` + +**SEE ALSO** + +* [ignite node](#ignite-node) - Make requests to a live blockchain node +* [ignite node query bank](#ignite-node-query-bank) - Querying commands for the bank module +* [ignite node query tx](#ignite-node-query-tx) - Query for transaction by hash + + +## ignite node query bank + +Querying commands for the bank module + +**Options** + +``` + -h, --help help for bank +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.directory:443/cosmoshub") +``` + +**SEE ALSO** + +* [ignite node query](#ignite-node-query) - Querying subcommands +* [ignite node query bank 
balances](#ignite-node-query-bank-balances) - Query for account balances by account name or address + + +## ignite node query bank balances + +Query for account balances by account name or address + +``` +ignite node query bank balances [from_account_or_address] [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + --count-total count total number of records in all balances to query for + -h, --help help for balances + --home string directory where the blockchain node is initialized + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --limit uint pagination limit of all balances to query for (default 100) + --offset uint pagination offset of all balances to query for + --page uint pagination page of all balances to query for. This sets offset to a multiple of limit (default 1) + --page-key string pagination page-key of all balances to query for + --reverse results are sorted in descending order +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.directory:443/cosmoshub") +``` + +**SEE ALSO** + +* [ignite node query bank](#ignite-node-query-bank) - Querying commands for the bank module + + +## ignite node query tx + +Query for transaction by hash + +``` +ignite node query tx [hash] [flags] +``` + +**Options** + +``` + -h, --help help for tx +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.directory:443/cosmoshub") +``` + +**SEE ALSO** + +* [ignite node query](#ignite-node-query) - Querying subcommands + + +## ignite node tx + +Transactions subcommands + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + --fees string fees to pay along 
with transaction; eg: 10uatom + --gas string gas limit to set per-transaction; set to "auto" to calculate sufficient gas automatically (default "auto") + --gas-adjustment float gas adjustment to set per-transaction + --gas-prices string gas prices in decimal format to determine the transaction fee (e.g. 0.1uatom) + --generate-only build an unsigned transaction and write it to STDOUT + -h, --help help for tx + --home string directory where the blockchain node is initialized + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**Options inherited from parent commands** + +``` + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.directory:443/cosmoshub") +``` + +**SEE ALSO** + +* [ignite node](#ignite-node) - Make requests to a live blockchain node +* [ignite node tx bank](#ignite-node-tx-bank) - Bank transaction subcommands + + +## ignite node tx bank + +Bank transaction subcommands + +**Options** + +``` + -h, --help help for bank +``` + +**Options inherited from parent commands** + +``` + --address-prefix string account address prefix (default "cosmos") + --fees string fees to pay along with transaction; eg: 10uatom + --gas string gas limit to set per-transaction; set to "auto" to calculate sufficient gas automatically (default "auto") + --gas-adjustment float gas adjustment to set per-transaction + --gas-prices string gas prices in decimal format to determine the transaction fee (e.g. 
0.1uatom) + --generate-only build an unsigned transaction and write it to STDOUT + --home string directory where the blockchain node is initialized + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.directory:443/cosmoshub") +``` + +**SEE ALSO** + +* [ignite node tx](#ignite-node-tx) - Transactions subcommands +* [ignite node tx bank send](#ignite-node-tx-bank-send) - Send funds from one account to another. + + +## ignite node tx bank send + +Send funds from one account to another. + +``` +ignite node tx bank send [from_account_or_address] [to_account_or_address] [amount] [flags] +``` + +**Options** + +``` + -h, --help help for send +``` + +**Options inherited from parent commands** + +``` + --address-prefix string account address prefix (default "cosmos") + --fees string fees to pay along with transaction; eg: 10uatom + --gas string gas limit to set per-transaction; set to "auto" to calculate sufficient gas automatically (default "auto") + --gas-adjustment float gas adjustment to set per-transaction + --gas-prices string gas prices in decimal format to determine the transaction fee (e.g. 
0.1uatom) + --generate-only build an unsigned transaction and write it to STDOUT + --home string directory where the blockchain node is initialized + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --node string <host>:<port> to tendermint rpc interface for this chain (default "https://rpc.cosmos.directory:443/cosmoshub") +``` + +**SEE ALSO** + +* [ignite node tx bank](#ignite-node-tx-bank) - Bank transaction subcommands + + +## ignite relayer + +Connect blockchains with an IBC relayer + +**Options** + +``` + -h, --help help for relayer +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite relayer configure](#ignite-relayer-configure) - Configure source and target chains for relaying +* [ignite relayer connect](#ignite-relayer-connect) - Link chains associated with paths and start relaying tx packets in between + + +## ignite relayer configure + +Configure source and target chains for relaying + +``` +ignite relayer configure [flags] +``` + +**Options** + +``` + -a, --advanced advanced configuration options for custom IBC modules + -h, --help help for configure + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") + --ordered set the channel as ordered + -r, --reset reset the relayer config + --source-account string source Account + --source-client-id string use a custom client id for source + --source-faucet string faucet address of the source chain + --source-gaslimit int gas limit used for transactions on source chain + --source-gasprice string gas price used for transactions on source chain + --source-port string IBC port ID on the source chain + --source-prefix string address prefix of the source chain + --source-rpc 
string RPC address of the source chain + --source-version string module version on the source chain + --target-account string target Account + --target-client-id string use a custom client id for target + --target-faucet string faucet address of the target chain + --target-gaslimit int gas limit used for transactions on target chain + --target-gasprice string gas price used for transactions on target chain + --target-port string IBC port ID on the target chain + --target-prefix string address prefix of the target chain + --target-rpc string RPC address of the target chain + --target-version string module version on the target chain +``` + +**SEE ALSO** + +* [ignite relayer](#ignite-relayer) - Connect blockchains with an IBC relayer + + +## ignite relayer connect + +Link chains associated with paths and start relaying tx packets in between + +``` +ignite relayer connect [<path>,...] [flags] +``` + +**Options** + +``` + -h, --help help for connect + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite relayer](#ignite-relayer) - Connect blockchains with an IBC relayer + + +## ignite scaffold + +Create a new blockchain, module, message, query, and more + +**Synopsis** + +Scaffolding is a quick way to generate code for major pieces of your +application. + +For details on each scaffolding target (chain, module, message, etc.) run the +corresponding command with a "--help" flag, for example, "ignite scaffold chain +--help". + +The Ignite team strongly recommends committing the code to a version control +system before running scaffolding commands. This will make it easier to see the +changes to the source code as well as undo the command if you've decided to roll +back the changes. 
+ +This blockchain you create with the chain scaffolding command uses the modular +Cosmos SDK framework and imports many standard modules for functionality like +proof of stake, token transfer, inter-blockchain connectivity, governance, and +more. Custom functionality is implemented in modules located by convention in +the "x/" directory. By default, your blockchain comes with an empty custom +module. Use the module scaffolding command to create an additional module. + +An empty custom module doesn't do much, it's basically a container for logic +that is responsible for processing transactions and changing the application +state. Cosmos SDK blockchains work by processing user-submitted signed +transactions, which contain one or more messages. A message contains data that +describes a state transition. A module can be responsible for handling any +number of messages. + +A message scaffolding command will generate the code for handling a new type of +Cosmos SDK message. Message fields describe the state transition that the +message is intended to produce if processed without errors. + +Scaffolding messages is useful to create individual "actions" that your module +can perform. Sometimes, however, you want your blockchain to have the +functionality to create, read, update and delete (CRUD) instances of a +particular type. Depending on how you want to store the data there are three +commands that scaffold CRUD functionality for a type: list, map, and single. +These commands create four messages (one for each CRUD action), and the logic to +add, delete, and fetch the data from the store. If you want to scaffold only the +logic, for example, you've decided to scaffold messages separately, you can do +that as well with the "--no-message" flag. + +Reading data from a blockchain happens with a help of queries. Similar to how +you can scaffold messages to write data, you can scaffold queries to read the +data back from your blockchain application. 
+ +You can also scaffold a type, which just produces a new protocol buffer file +with a proto message description. Note that proto messages produce (and +correspond with) Go types whereas Cosmos SDK messages correspond to proto "rpc" +in the "Msg" service. + +If you're building an application with custom IBC logic, you might need to +scaffold IBC packets. An IBC packet represents the data sent from one blockchain +to another. You can only scaffold IBC packets in IBC-enabled modules scaffolded +with an "--ibc" flag. Note that the default module is not IBC-enabled. + + +**Options** + +``` + -h, --help help for scaffold +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite scaffold chain](#ignite-scaffold-chain) - New Cosmos SDK blockchain +* [ignite scaffold list](#ignite-scaffold-list) - CRUD for data stored as an array +* [ignite scaffold map](#ignite-scaffold-map) - CRUD for data stored as key-value pairs +* [ignite scaffold message](#ignite-scaffold-message) - Message to perform state transition on the blockchain +* [ignite scaffold module](#ignite-scaffold-module) - Custom Cosmos SDK module +* [ignite scaffold packet](#ignite-scaffold-packet) - Message for sending an IBC packet +* [ignite scaffold query](#ignite-scaffold-query) - Query for fetching data from a blockchain +* [ignite scaffold react](#ignite-scaffold-react) - React web app template +* [ignite scaffold single](#ignite-scaffold-single) - CRUD for data stored in a single location +* [ignite scaffold type](#ignite-scaffold-type) - Type definition +* [ignite scaffold vue](#ignite-scaffold-vue) - Vue 3 web app template + + +## ignite scaffold chain + +New Cosmos SDK blockchain + +**Synopsis** + +Create a new application-specific Cosmos SDK blockchain. 
+ +For example, the following command will create a blockchain called "hello" in +the "hello/" directory: + + ignite scaffold chain hello + +A project name can be a simple name or a URL. The name will be used as the Go +module path for the project. Examples of project names: + + ignite scaffold chain foo + ignite scaffold chain foo/bar + ignite scaffold chain example.org/foo + ignite scaffold chain github.com/username/foo + +A new directory with source code files will be created in the current directory. +To use a different path use the "--path" flag. + +Most of the logic of your blockchain is written in custom modules. Each module +effectively encapsulates an independent piece of functionality. Following the +Cosmos SDK convention, custom modules are stored inside the "x/" directory. By +default, Ignite creates a module with a name that matches the name of the +project. To create a blockchain without a default module use the "--no-module" +flag. Additional modules can be added after a project is created with "ignite +scaffold module" command. + +Account addresses on Cosmos SDK-based blockchains have string prefixes. For +example, the Cosmos Hub blockchain uses the default "cosmos" prefix, so that +addresses look like this: "cosmos12fjzdtqfrrve7zyg9sv8j25azw2ua6tvu07ypf". To +use a custom address prefix use the "--address-prefix" flag. For example: + + ignite scaffold chain foo --address-prefix bar + +By default when compiling a blockchain's source code Ignite creates a cache to +speed up the build process. To clear the cache when building a blockchain use +the "--clear-cache" flag. It is very unlikely you will ever need to use this +flag. + +The blockchain is using the Cosmos SDK modular blockchain framework. 
Learn more
about Cosmos SDK on https://docs.cosmos.network


```
ignite scaffold chain [name] [flags]
```

**Options**

```
      --address-prefix string   account address prefix (default "cosmos")
      --clear-cache             clear the build cache (advanced)
  -h, --help                    help for chain
      --minimal                 create a minimal blockchain (with the minimum required Cosmos SDK modules)
      --no-module               create a project without a default module
      --params strings          add default module parameters
  -p, --path string             create a project in a specific path
      --skip-git                skip Git repository initialization
```

**SEE ALSO**

* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more


## ignite scaffold list

CRUD for data stored as an array

**Synopsis**

The "list" scaffolding command is used to generate files that implement the
logic for storing and interacting with data stored as a list in the blockchain
state.

The command accepts a NAME argument that will be used as the name of a new type
of data. It also accepts a list of FIELDs that describe the type.

The interaction with the data follows the create, read, update, and delete
(CRUD) pattern. For each type three Cosmos SDK messages are defined for writing
data to the blockchain: MsgCreate{Name}, MsgUpdate{Name}, MsgDelete{Name}. For
reading data two queries are defined: {Name} and {Name}All. The type, messages,
and queries are defined in the "proto/" directory as protocol buffer messages.
Messages and queries are mounted in the "Msg" and "Query" services respectively.

When messages are handled, the appropriate keeper methods are called. By
convention, the methods are defined in
"x/{moduleName}/keeper/msg_server_{name}.go". Helpful methods for getting,
setting, removing, and appending are defined in the same "keeper" package in
"{name}.go". 
+ +The "list" command essentially allows you to define a new type of data and +provides the logic to create, read, update, and delete instances of the type. +For example, let's review a command that generates the code to handle a list of +posts and each post has "title" and "body" fields: + + ignite scaffold list post title body + +This provides you with a "Post" type, MsgCreatePost, MsgUpdatePost, +MsgDeletePost and two queries: Post and PostAll. The compiled CLI, let's say the +binary is "blogd" and the module is "blog", has commands to query the chain (see +"blogd q blog") and broadcast transactions with the messages above (see "blogd +tx blog"). + +The code generated with the list command is meant to be edited and tailored to +your application needs. Consider the code to be a "skeleton" for the actual +business logic you will implement next. + +By default, all fields are assumed to be strings. If you want a field of a +different type, you can specify it after a colon ":". The following types are +supported: string, bool, int, uint, coin, array.string, array.int, array.uint, +array.coin. An example of using field types: + + ignite scaffold list pool amount:coin tags:array.string height:int + +For detailed type information use ignite scaffold type --help + +"Index" indicates whether the type can be used as an index in +"ignite scaffold map". + +Ignite also supports custom types: + + ignite scaffold list product-details name desc + ignite scaffold list product price:coin details:ProductDetails + +In the example above the "ProductDetails" type was defined first, and then used +as a custom type for the "details" field. Ignite doesn't support arrays of +custom types yet. + +Your chain will accept custom types in JSON-notation: + + exampled tx example create-product 100coin '{"name": "x", "desc": "y"}' --from alice + +By default the code will be scaffolded in the module that matches your project's +name. 
If you have several modules in your project, you might want to specify a +different module: + + ignite scaffold list post title body --module blog + +By default, each message comes with a "creator" field that represents the +address of the transaction signer. You can customize the name of this field with +a flag: + + ignite scaffold list post title body --signer author + +It's possible to scaffold just the getter/setter logic without the CRUD +messages. This is useful when you want the methods to handle a type, but would +like to scaffold messages manually. Use a flag to skip message scaffolding: + + ignite scaffold list post title body --no-message + +The "creator" field is not generated if a list is scaffolded with the +"--no-message" flag. + + +``` +ignite scaffold list NAME [field]... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for list + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold map + +CRUD for data stored as key-value pairs + +**Synopsis** + +The "map" scaffolding command is used to generate files that implement the logic +for storing and interacting with data stored as key-value pairs (or a +dictionary) in the blockchain state. + +The "map" command is very similar to "ignite scaffold list" with the main +difference in how values are indexed. With "list" values are indexed by an +incrementing integer, whereas "map" values are indexed by a user-provided value +(or multiple values). 
+ 

Let's use the same blog post example:

    ignite scaffold map post title body:string

This command scaffolds a "Post" type and CRUD functionality to create, read,
update, and delete posts. However, when creating a new post with your chain's
binary (or by submitting a transaction through the chain's API) you will be
required to provide an "index":

    blogd tx blog create-post [index] [title] [body]
    blogd tx blog create-post hello "My first post" "This is the body"

This command will create a post and store it in the blockchain's state under the
"hello" index. You will be able to fetch back the value of the post by querying
for the "hello" key.

    blogd q blog show-post hello

To customize the index, use the "--index" flag. Multiple indices can be
provided, which simplifies querying values. For example:

    ignite scaffold map product price desc --index category,guid

With this command, you would get a "Product" value indexed by both a category
and a GUID (globally unique ID). This will let you programmatically fetch
product values that have the same category but are using different GUIDs.

Since the behavior of "list" and "map" scaffolding is very similar, you can use
the "--no-message", "--module", "--signer" flags as well as the colon syntax for
custom types.

For detailed type information use ignite scaffold type --help


```
ignite scaffold map NAME [field]... 
[flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for map + --index strings fields that index the value (default [index]) + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold message + +Message to perform state transition on the blockchain + +**Synopsis** + +Message scaffolding is useful for quickly adding functionality to your +blockchain to handle specific Cosmos SDK messages. + +Messages are objects whose end goal is to trigger state transitions on the +blockchain. A message is a container for fields of data that affect how the +blockchain's state will change. You can think of messages as "actions" that a +user can perform. + +For example, the bank module has a "Send" message for token transfers between +accounts. The send message has three fields: from address (sender), to address +(recipient), and a token amount. When this message is successfully processed, +the token amount will be deducted from the sender's account and added to the +recipient's account. + +Ignite's message scaffolding lets you create new types of messages and add them +to your chain. For example: + + ignite scaffold message add-pool amount:coins denom active:bool --module dex + +The command above will create a new message MsgAddPool with three fields: amount +(in tokens), denom (a string), and active (a boolean). The message will be added +to the "dex" module. 
+ 

For detailed type information use ignite scaffold type --help

By default, the message is defined as a proto message in the
"proto/{app}/{module}/tx.proto" and registered in the "Msg" service. A CLI command to
create and broadcast a transaction with MsgAddPool is created in the module's
"cli" package. Additionally, Ignite scaffolds a message constructor and the code
to satisfy the sdk.Msg interface and register the message in the module.

Most importantly in the "keeper" package Ignite scaffolds an "AddPool" function.
Inside this function, you can implement message handling logic.

When successfully processed, a message can return data. Use the --response flag to
specify response fields and their types. For example:

    ignite scaffold message create-post title body --response id:int,title

The command above will scaffold MsgCreatePost which returns both an ID (an
integer) and a title (a string).

Message scaffolding follows the same rules as "ignite scaffold list/map/single"
and supports fields with standard and custom types. See "ignite scaffold list --help"
for details.


```
ignite scaffold message [name] [field1:type1] [field2:type2] ... [flags]
```

**Options**

```
      --clear-cache           clear the build cache (advanced)
  -d, --desc string           description of the command
  -h, --help                  help for message
      --module string         module to add the message into. Default: app's main module
      --no-simulation         disable CRUD simulation scaffolding
  -p, --path string           path of the app (default ".")
  -r, --response strings      response fields
      --signer string         label for the message signer (default: creator)
  -y, --yes                   answers interactive yes/no questions with yes
```

**SEE ALSO**

* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more


## ignite scaffold module

Custom Cosmos SDK module

**Synopsis**

Scaffold a new Cosmos SDK module. 
+ 

Cosmos SDK is a modular framework and each independent piece of functionality is
implemented in a separate module. By default your blockchain imports a set of
standard Cosmos SDK modules. To implement custom functionality of your
blockchain, scaffold a module and implement the logic of your application.

This command does the following:

* Creates a directory with module's protocol buffer files in "proto/"
* Creates a directory with module's boilerplate Go code in "x/"
* Imports the newly created module by modifying "app/app.go"
* Creates a file in "testutil/keeper/" that contains logic to create a keeper
  for testing purposes

This command will proceed with module scaffolding even if "app/app.go" doesn't
have the required default placeholders. If the placeholders are missing, you
will need to modify "app/app.go" manually to import the module. If you want the
command to fail if it can't import the module, use the "--require-registration"
flag.

To scaffold an IBC-enabled module use the "--ibc" flag. An IBC-enabled module is
like a regular module with the addition of IBC-specific logic and placeholders
to scaffold IBC packets with "ignite scaffold packet".

A module can depend on one or more other modules and import their keeper
methods. To scaffold a module with a dependency use the "--dep" flag.

For example, your new custom module "foo" might have functionality that requires
sending tokens between accounts. The method for sending tokens is defined in
the "bank" module's keeper. You can scaffold a "foo" module with the dependency
on "bank" with the following command:

    ignite scaffold module foo --dep bank

You can then define which methods you want to import from the "bank" keeper in
"expected_keepers.go". 
+ +You can also scaffold a module with a list of dependencies that can include both +standard and custom modules (provided they exist): + + ignite scaffold module bar --dep foo,mint,account,FeeGrant + +Note: the "--dep" flag doesn't install third-party modules into your +application, it just generates extra code that specifies which existing modules +your new custom module depends on. + +A Cosmos SDK module can have parameters (or "params"). Params are values that +can be set at the genesis of the blockchain and can be modified while the +blockchain is running. An example of a param is "Inflation rate change" of the +"mint" module. A module can be scaffolded with params using the "--params" flag +that accepts a list of param names. By default params are of type "string", but +you can specify a type for each param. For example: + + ignite scaffold module foo --params baz:uint,bar:bool + +Refer to Cosmos SDK documentation to learn more about modules, dependencies and +params. + + +``` +ignite scaffold module [name] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --dep strings add a dependency on another module + -h, --help help for module + --ibc add IBC functionality + --ordering string channel ordering of the IBC module [none|ordered|unordered] (default "none") + --params strings add module parameters + -p, --path string path of the app (default ".") + --require-registration fail if module can't be registered + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold packet + +Message for sending an IBC packet + +**Synopsis** + +Scaffold an IBC packet in a specific IBC-enabled Cosmos SDK module + +``` +ignite scaffold packet [packetName] [field1] [field2] ... --module [moduleName] [flags] +``` + +**Options** + +``` + --ack strings custom acknowledgment type (field1,field2,...) 
+ --clear-cache clear the build cache (advanced) + -h, --help help for packet + --module string IBC Module to add the packet into + --no-message disable send message scaffolding + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold query + +Query for fetching data from a blockchain + +**Synopsis** + +Query for fetching data from a blockchain. + +For detailed type information use ignite scaffold type --help. + +``` +ignite scaffold query [name] [field1:type1] [field2:type2] ... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -d, --desc string description of the CLI to broadcast a tx with the message + -h, --help help for query + --module string module to add the query into. Default: app's main module + --paginated define if the request can be paginated + -p, --path string path of the app (default ".") + -r, --response strings response fields + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold react + +React web app template + +``` +ignite scaffold react [flags] +``` + +**Options** + +``` + -h, --help help for react + -p, --path string path to scaffold content of the React app (default "./react") + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold single + +CRUD for data stored in a single location + +**Synopsis** + +CRUD for data stored in a single location. + +For detailed type information use ignite scaffold type --help. 
+ +``` +ignite scaffold single NAME [field:type]... [flags] +``` + +**Examples** + +``` + ignite scaffold single todo-single title:string done:bool +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for single + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold type + +Type definition + +**Synopsis** + +Type information + +Currently supports: + +| Type | Alias | Index | Code Type | Description | +|--------------|---------|-------|-----------|---------------------------------| +| string | - | yes | string | Text type | +| array.string | strings | no | []string | List of text type | +| bool | - | yes | bool | Boolean type | +| int | - | yes | int32 | Integer type | +| array.int | ints | no | []int32 | List of integers types | +| uint | - | yes | uint64 | Unsigned integer type | +| array.uint | uints | no | []uint64 | List of unsigned integers types | +| coin | - | no | sdk.Coin | Cosmos SDK coin type | +| array.coin | coins | no | sdk.Coins | List of Cosmos SDK coin types | + +Field Usage: + - fieldName + - fieldName:fieldType + +If no :fieldType, default (string) is used + + + +``` +ignite scaffold type NAME [field:type] ... 
[flags] +``` + +**Examples** + +``` + ignite scaffold type todo-item priority:int desc:string tags:array.string done:bool +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for type + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold vue + +Vue 3 web app template + +``` +ignite scaffold vue [flags] +``` + +**Options** + +``` + -h, --help help for vue + -p, --path string path to scaffold content of the Vue.js app (default "./vue") + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite tools + +Tools for advanced users + +**Options** + +``` + -h, --help help for tools +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite tools ibc-relayer](#ignite-tools-ibc-relayer) - TypeScript implementation of an IBC relayer +* [ignite tools ibc-setup](#ignite-tools-ibc-setup) - Collection of commands to quickly setup a relayer + + +## ignite tools ibc-relayer + +TypeScript implementation of an IBC relayer + +``` +ignite tools ibc-relayer [--] [...] [flags] +``` + +**Examples** + +``` +ignite tools ibc-relayer -- -h +``` + +**Options** + +``` + -h, --help help for ibc-relayer +``` + +**SEE ALSO** + +* [ignite tools](#ignite-tools) - Tools for advanced users + + +## ignite tools ibc-setup + +Collection of commands to quickly setup a relayer + +``` +ignite tools ibc-setup [--] [...] 
[flags] +``` + +**Examples** + +``` +ignite tools ibc-setup -- -h +ignite tools ibc-setup -- init --src relayer_test_1 --dest relayer_test_2 +``` + +**Options** + +``` + -h, --help help for ibc-setup +``` + +**SEE ALSO** + +* [ignite tools](#ignite-tools) - Tools for advanced users + + +## ignite version + +Print the current build information + +``` +ignite version [flags] +``` + +**Options** + +``` + -h, --help help for version +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + diff --git a/docs/versioned_docs/version-v28/08-references/02-config.md b/docs/versioned_docs/version-v28/08-references/02-config.md new file mode 100644 index 0000000..8b4c5e2 --- /dev/null +++ b/docs/versioned_docs/version-v28/08-references/02-config.md @@ -0,0 +1,336 @@ +--- +sidebar_position: 3 +description: Primary configuration file to describe the development environment for your blockchain. +title: Configuration file +--- + +# Configuration file reference + +The `config.yml` file generated in your blockchain folder uses key-value pairs +to describe the development environment for your blockchain. + +Only a default set of parameters is provided. If more nuanced configuration is +required, you can add these parameters to the `config.yml` file. + +## Validation + +Ignite uses the `validation` field to determine the kind of validation +of your blockchain. There are currently two supported kinds of validation: + +- `sovereign` which is the standard kind of validation where your blockchain + has its own validator set. This is the default value when this field is not + in the config file. +- `consumer` indicates your blockchain is a consumer chain, in the sense of + Replicated Security. That means it doesn't have a validator set, but + inherits the one of a provider chain. 
+ 

While the `sovereign` chain is the default validation when you run the `ignite scaffold
chain`, to scaffold a consumer chain, you have to run `ignite scaffold chain
--consumer`.

This field is, at the time of writing, only used by Ignite at the genesis
generation step, because the genesis of a sovereign chain and a consumer chain
are different.

## Accounts

A list of user accounts created during genesis of the blockchain.

```yml
accounts:
  - name: alice
    coins: ['20000token', '200000000stake']
  - name: bob
    coins: ['10000token', '100000000stake']
```

Ignite uses information from `accounts` when initializing the chain with `ignite
chain init` and `ignite chain start`. In the example above Ignite will add two
accounts to the `genesis.json` file of the chain.

`name` is a local name of a key pair associated with an account. Once the chain
is initialized and started, you will be able to use `name` when signing
transactions. With the configuration above, you'd be able to sign transactions
both with Alice's and Bob's accounts like so `exampled tx bank send ... --from
alice`.

`coins` is a list of token balances for the account. If a token denomination is
in this list, it will exist in the genesis balance and will be a valid token.
When initialized with the config file above, a chain will only have two accounts
at genesis (Alice and Bob) and two native tokens (with denominations `token` and
`stake`).

By default, every time a chain is re-initialized, Ignite will create a new key
pair for each account. So even though the account name can remain the same
(`bob`), on every reinitialization it will have a different mnemonic and address.

If you want an account to have a specific address, provide the `address` field
with a valid bech32 address. The prefix (by default, `cosmos`) should match the
one expected by your chain. 
When an account is provided with an `address` a key +pair will not be generated, because it's impossible to derive a key from an +address. An account with a given address will be added to the genesis file (with +an associated token balance), but because there is no key pair, you will not be +able to broadcast transactions from that address. This is useful when you have +generated a key pair outside of Ignite (for example, using your chain's CLI or +in an extension wallet) and want to have a token balance associated with the +address of this key pair. + +```yml +accounts: + - name: bob + coins: ['20000token', '200000000stake'] + address: cosmos1s39200s6v4c96ml2xzuh389yxpd0guk2mzn3mz +``` + +If you want an account to be initialized from a specific mnemonic, provide the +`mnemonic` field with a valid mnemonic. A private key, a public key and an +address will be derived from a mnemonic. + +```yml +accounts: + - name: bob + coins: ['20000token', '200000000stake'] + mnemonic: cargo ramp supreme review change various throw air figure humble soft steel slam pole betray inhale already dentist enough away office apple sample glue +``` + +You cannot have both `address` and `mnemonic` defined for a single account. + +Some accounts are used as validator accounts (see `validators` section). +Validator accounts cannot have an `address` field, because Ignite needs to be +able to derive a private key (either from a random mnemonic or from a specific +one provided in the `mnemonic` field). Validator accounts should have enough +tokens of the staking denomination for self-delegation. + +By default, the `alice` account is used as a validator account, its key is +derived from a mnemonic generated randomly at genesis, the staking denomination +is `stake`, and this account has enough `stake` for self-delegation. 
+ +If your chain is using its own +[cointype](https://github.com/satoshilabs/slips/blob/master/slip-0044.md), you +can use the `cointype` field to provide the integer value + +```yml +accounts: + - name: bob + coins: ['20000token', '200000000stake'] + cointype: 7777777 +``` + +## Validators + +Commands like `ignite chain init` and `ignite chain serve` initialize and launch +a validator node for development purposes. + +```yml +validators: + - name: alice + bonded: '100000000stake' +``` + +`name` refers to key name in the `accounts` list. + +`bonded` is the self-delegation amount of a validator. The `bonded` amount +should not be lower than `1000000` nor higher than the account's +balance in the `account` list. + +Validators store their node configuration files in the data directory. By +default, Ignite uses the name of the project as the name of the data directory, +for example, `$HOME/.example/`. To use a different path for the data directory +you can customize the `home` property. + +Configuration in the data directory is reset frequently by Ignite. To persist +some changes to configuration files you can use `app`, `config` and `client` +properties that correspond to `$HOME/.example/config/app.toml`, +`$HOME/.example/config/config.toml` and `$HOME/.example/config/client.toml`. + +```yml +validators: + - name: alice + bonded: '100000000stake' + home: "~/.mychain" + app: + pruning: "nothing" + config: + moniker: "mychain" + client: + output: "json" +``` + +To see which properties are available for `config.toml`, `app.toml` and +`client.toml`, initialize a chain with `ignite chain init` and open the file you +want to know more about. + +Currently, Ignite starts only one validator node, so the first item in the +`validators` list is used (the rest is ignored). Support for multiple validators +is in progress. + +## Build + +The `build` property lets you customize how Ignite builds your chain's binary. 
+ 

By default, Ignite builds the `main` package from `cmd/PROJECT_NAME/main.go`. If
you have more than one `main` package in your project, or you have renamed the
directory, use the `main` property to provide the path to the `main` Go package:

```yml
build:
  main: cmd/hello/cmd
```

Ignite compiles your project into a binary and uses the project's name with a
`d` suffix as the name for the binary. To customize the binary name use the `binary`
property:

```yml
build:
  binary: "helloworldd"
```

To customize the linker flags used in the build process:

```yml
build:
  ldflags: [ "-X main.Version=development", "-X main.Date=01/05/2022T19:54" ]
```

By default, custom protocol buffer (proto) files are located in the `proto`
directory. If your project keeps proto files in a different directory, you
should tell Ignite about this:

```yml
build:
  proto:
    path: "myproto"
```

Ignite comes with required third-party proto out of the box. Ignite also looks
into `third_party/proto` and `proto_vendor` directories for extra proto files.
If your project keeps third-party proto files in a different directory, you
should tell Ignite about this:

```yml
build:
  proto:
    third_party_paths: ["my_third_party/proto"]
```

## Faucet

The faucet service sends tokens to addresses.

```yml
faucet:
  name: bob
  coins: ["5token", "100000stake"]
```

`name` refers to a key name in the `accounts` list. This is a required property.

`coins` is the amount of tokens that will be sent to a user by the faucet. This
is a required property.

`coins_max` is a maximum amount of tokens that can be sent to a single address.
To reset the token limit use the `rate_limit_window` property (in seconds).

By default, the faucet works on port `4500`. To use a different port number use
the `port` property. 
+ +```yml +faucet: + name: faucet + coins: [ "100token", "5foo" ] + coins_max: [ "2000token", "1000foo" ] + port: 4500 + rate_limit_window: 3600 +``` + +## Genesis + +Genesis file is the initial block in the blockchain. It is required to launch a +blockchain, because it contains important information like token balances, and +modules' state. Genesis is stored in `$DATA_DIR/config/genesis.json`. + +Since the genesis file is reinitialized frequently during development, you can +set persistent options in the `genesis` property: + +```yml +genesis: + app_state: + staking: + params: + bond_denom: "denom" +``` + +To know which properties a genesis file supports, initialize a chain and look up +the genesis file in the data directory. + +## Client code generation + +Ignite can generate client-side code for interacting with your chain with the +`ignite generate` set of commands. Use the following properties to customize the +paths where the client-side code is generated. + +```yml +client: + openapi: + path: "docs/static/openapi.yml" + typescript: + path: "ts-client" + composables: + path: "vue/src/composables" + hooks: + path: "react/src/hooks" +``` + +## Include + +In your main `config.yml`, use the `include` field to reference other local or remote YAML files. +It allows you to split your chain configuration across multiple files, making it easier to manage and reuse configuration parts. 
+ +```yml +version: 1 +include: + - "./accounts.yml" + - "./validators.yml" +``` + +Include remote files via URL or server path are also valid: + +```yml +version: 1 +include: + - "localhost:3045/accounts.yml" + - "https://ignite.com/config/validators.yml" +``` + +#### Common Use Cases: + +Split your config into a base setup and an external `accounts.yml` for better separation of concerns: + +- `config.yml` +```yml +version: 1 +include: + - "./accounts.yml" +client: + typescript: + path: ts-client +``` + +- `accounts.yml` +```yml +accounts: + - name: alice + coins: + - 20000token + - 200000000stake + - name: bob + coins: + - 20000token + - 200000000stake +faucet: + name: alice + coins: + - 5token + - 100000stake +``` diff --git a/docs/versioned_docs/version-v28/08-references/_category_.json b/docs/versioned_docs/version-v28/08-references/_category_.json new file mode 100644 index 0000000..3bcc076 --- /dev/null +++ b/docs/versioned_docs/version-v28/08-references/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "References", + "link": null, + "collapsed": false +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v28/apps/01-using-apps.md b/docs/versioned_docs/version-v28/apps/01-using-apps.md new file mode 100644 index 0000000..c52ad9e --- /dev/null +++ b/docs/versioned_docs/version-v28/apps/01-using-apps.md @@ -0,0 +1,42 @@ +--- +description: Using and Developing Ignite Apps +--- + +# Using Ignite Apps + +Apps offer a way to extend the functionality of the Ignite CLI. There are two +core concepts within apps: `Commands` and `Hooks`. `Commands` extend the CLI's +functionality and `Hooks` extend existing CLI command functionality. + +Apps are registered in an Ignite scaffolded blockchain project through the +`igniteapps.yml`, or globally through `$HOME/.ignite/apps/igniteapps.yml`. 
+ +To use an app within your project execute the following command inside the +project directory: + +```sh +ignite app install github.com/project/cli-app +``` + +The app will be available only when running `ignite` inside the project +directory. + +To use an app globally on the other hand, execute the following command: + +```sh +ignite app install -g github.com/project/cli-app +``` + +The command will compile the app and make it immediately available to the +`ignite` command lists. + +## Listing installed apps + +When in an ignite scaffolded blockchain you can use the command `ignite app +list` to list all Ignite Apps and there statuses. + +## Updating apps + +When an app in a remote repository releases updates, running `ignite app +update <path/to/app>` will update an specific app declared in your +project's `config.yml`. diff --git a/docs/versioned_docs/version-v28/apps/02-developing-apps.md b/docs/versioned_docs/version-v28/apps/02-developing-apps.md new file mode 100644 index 0000000..1dbd37e --- /dev/null +++ b/docs/versioned_docs/version-v28/apps/02-developing-apps.md @@ -0,0 +1,258 @@ +--- +description: Using and Developing Ignite Apps +--- + +# Developing Ignite Apps + +It's easy to create an app and use it immediately in your project. First +choose a directory outside your project and run: + +```sh +$ ignite app scaffold my-app +``` + +This will create a new directory `my-app` that contains the app's code +and will output some instructions about how to use your app with the +`ignite` command. An app path can be a local directory which has several +benefits: + +- You don't need to use a Git repository during the development of your app. +- The app is recompiled each time you run the `ignite` binary in your + project if the source files are older than the app binary. + +Thus, app development workflow is as simple as: + +1. Scaffold an app with `ignite app scaffold my-app` +2. Add it to your config via `ignite app install -g /path/to/my-app` +3. 
Update app code +4. Run `ignite my-app` binary to compile and run the app +5. Go back to 3 + +Once your app is ready you can publish it to a Git repository and the +community can use it by calling `ignite app install github.com/foo/my-app`. + +Now let's detail how to update your app's code. + +## App interface + +Under the hood Ignite Apps are implemented using a plugin system based on +`github.com/hashicorp/go-plugin`. + +All apps must implement a predefined interface: + +```go title=ignite/services/plugin/interface.go +type Interface interface { + // Manifest declares app's Command(s) and Hook(s). + Manifest(context.Context) (*Manifest, error) + + // Execute will be invoked by ignite when an app Command is executed. + // It is global for all commands declared in Manifest, if you have declared + // multiple commands, use cmd.Path to distinguish them. + // The ClientAPI argument can be used by plugins to get chain app analysis info. + Execute(context.Context, *ExecutedCommand, ClientAPI) error + + // ExecuteHookPre is invoked by ignite when a command specified by the Hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + // The ClientAPI argument can be used by plugins to get chain app analysis info. + ExecuteHookPre(context.Context, *ExecutedHook, ClientAPI) error + + // ExecuteHookPost is invoked by ignite when a command specified by the hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + // The ClientAPI argument can be used by plugins to get chain app analysis info. + ExecuteHookPost(context.Context, *ExecutedHook, ClientAPI) error + + // ExecuteHookCleanUp is invoked by ignite when a command specified by the + // hook path is invoked. Unlike ExecuteHookPost, it is invoked regardless of + // execution status of the command and hooks. 
+ 
  // It is global for all hooks declared in Manifest, if you have declared
  // multiple hooks, use hook.Name to distinguish them.
  // The ClientAPI argument can be used by plugins to get chain app analysis info.
  ExecuteHookCleanUp(context.Context, *ExecutedHook, ClientAPI) error
}
```

The scaffolded code already implements this interface, you just need to update
the method's body.

## Defining app's manifest

Here is the `Manifest` proto message definition:

```protobuf title=proto/ignite/services/plugin/grpc/v1/types.proto
message Manifest {
  // App name.
  string name = 1;

  // Commands contains the commands that will be added to the list of ignite commands.
  // Each command is independent, for nested commands use the inner Commands field.
  repeated Command commands = 2;

  // Hooks contains the hooks that will be attached to the existing ignite commands.
  repeated Hook hooks = 3;

  // Enables sharing a single app server across all running instances of an Ignite App.
  // Useful if an app adds or extends long running commands.
  //
  // Example: if an app defines a hook on `ignite chain serve`, a server is instantiated
  // when the command is run. Now if you want to interact with that instance
  // from commands defined in that app, you need to enable shared host, or else the
  // commands will just instantiate separate app servers.
  //
  // When enabled, all apps of the same path loaded from the same configuration will
  // attach its RPC client to an existing RPC server.
  //
  // If an app instance has no other running app servers, it will create one and it
  // will be the host.
  bool shared_host = 4;
}
```

In your app's code the `Manifest` method already returns a predefined
`Manifest` struct as an example. You must adapt it according to your needs.

If your app adds one or more new commands to `ignite`, add them to the
`Commands` field. 
+ +If your app adds features to existing commands, add them to the `Hooks` field. + +Of course an app can declare both, `Commands` *and* `Hooks`. + +An app may also share a host process by setting `SharedHost` to `true`. +`SharedHost` is desirable if an app hooks into, or declares long running commands. +Commands executed from the same app context interact with the same app server. +Allowing all executing commands to share the same server instance, giving shared execution context. + +## Adding new commands + +App commands are custom commands added to Ignite CLI by an installed app. +Commands can use any path not defined already by the CLI. + +For instance, let's say your app adds a new `oracle` command to `ignite +scaffold`, then the `Manifest` method will look like : + +```go +func (app) Manifest(context.Context) (*plugin.Manifest, error) { + return &plugin.Manifest{ + Name: "oracle", + Commands: []*plugin.Command{ + { + Use: "oracle [name]", + Short: "Scaffold an oracle module", + Long: "Long description goes here...", + // Optional flags is required + Flags: []*plugin.Flag{ + {Name: "source", Type: plugin.FlagTypeString, Usage: "the oracle source"}, + }, + // Attach the command to `scaffold` + PlaceCommandUnder: "ignite scaffold", + }, + }, + }, nil +} +``` + +To update the app execution, you have to change the `Execute` command. For +example: + +```go +func (app) Execute(_ context.Context, cmd *plugin.ExecutedCommand, _ plugin.ClientAPI) error { + if len(cmd.Args) == 0 { + return fmt.Errorf("oracle name missing") + } + + flags, err := cmd.NewFlags() + if err != nil { + return err + } + + var ( + name = cmd.Args[0] + source, _ = flags.GetString("source") + ) + + // Read chain information + c, err := getChain(cmd) + if err != nil { + return err + } + + //... +} +``` + +Then, run `ignite scaffold oracle` to execute the app. + +## Adding hooks + +App `Hooks` allow existing CLI commands to be extended with new +functionality. 
Hooks are useful when you want to streamline functionality
without needing to run custom scripts after or before a command has been run.
This can streamline processes that were once error-prone or forgotten
altogether.

The following are hooks defined which will run on a registered `ignite`
command:

| Name | Description |
| -------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- |
| Pre | Runs before a command's main functionality is invoked in the `PreRun` scope |
| Post | Runs after a command's main functionality is invoked in the `PostRun` scope |
| Clean Up | Runs after a command's main functionality is invoked. If the command returns an error it will run before the error is returned to guarantee execution. |

*Note*: If a hook causes an error in the pre step, the command will not run,
resulting in `post` and `clean up` not executing.

The following is an example of a `hook` definition. 
+ +```go +func (app) Manifest(context.Context) (*plugin.Manifest, error) { + return &plugin.Manifest{ + Name: "oracle", + Hooks: []*plugin.Hook{ + { + Name: "my-hook", + PlaceHookOn: "ignite chain build", + }, + }, + }, nil +} + +func (app) ExecuteHookPre(_ context.Context, h *plugin.ExecutedHook, _ plugin.ClientAPI) error { + switch h.Hook.GetName() { + case "my-hook": + fmt.Println("I'm executed before ignite chain build") + default: + return fmt.Errorf("hook not defined") + } + return nil +} + +func (app) ExecuteHookPost(_ context.Context, h *plugin.ExecutedHook, _ plugin.ClientAPI) error { + switch h.Hook.GetName() { + case "my-hook": + fmt.Println("I'm executed after ignite chain build (if no error)") + default: + return fmt.Errorf("hook not defined") + } + return nil +} + +func (app) ExecuteHookCleanUp(_ context.Context, h *plugin.ExecutedHook, _ plugin.ClientAPI) error { + switch h.Hook.GetName() { + case "my-hook": + fmt.Println("I'm executed after ignite chain build (regardless errors)") + default: + return fmt.Errorf("hook not defined") + } + return nil +} +``` + +Above we can see a similar definition to `Command` where a hook has a `Name` +and a `PlaceHookOn`. You'll notice that the `Execute*` methods map directly to +each life cycle of the hook. All hooks defined within the app will invoke these +methods. 
diff --git a/docs/versioned_docs/version-v28/apps/_category_.json b/docs/versioned_docs/version-v28/apps/_category_.json new file mode 100644 index 0000000..d9804b8 --- /dev/null +++ b/docs/versioned_docs/version-v28/apps/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Ignite Apps", + "position": 7, + "link": null +} diff --git a/docs/versioned_docs/version-v29/01-welcome/01-index.md b/docs/versioned_docs/version-v29/01-welcome/01-index.md new file mode 100644 index 0000000..12a9057 --- /dev/null +++ b/docs/versioned_docs/version-v29/01-welcome/01-index.md @@ -0,0 +1,52 @@ +--- +slug: /welcome +--- + +import ProjectsTable from '@site/src/components/ProjectsTable'; + +# Introduction to IGNITE® CLI: Your Gateway to Blockchain Innovation + +[IGNITE® CLI](https://github.com/ignite/cli) is a powerful tool that simplifies the journey of building, testing, and launching diverse blockchain applications. Developed on top of the [Cosmos SDK](https://docs.cosmos.network), the leading framework for blockchain technology, IGNITE® CLI is pivotal in streamlining the development process. It enables developers to focus on the unique aspects of their projects, from DeFi and NFTs to supply chain solutions and smart contracts. +Beyond these, IGNITE® has been instrumental in a wide array of blockchain applications, ranging from VPNs and gaming platforms to blogs, oracle systems, and innovative consensus mechanisms. This demonstrates its versatility in supporting a broad spectrum of blockchain-based solutions. + +## Key Features of IGNITE® CLI + +- **Simplified Blockchain Development:** IGNITE® CLI, leveraging Cosmos SDK, makes building sovereign application-specific blockchains intuitive and efficient. +- **Comprehensive Scaffolding:** Easily scaffold modules, messages, CRUD operations, IBC packets, and more, expediting the development of complex functionalities. 
+- **Development with Live Reloading:** Start and test your blockchain node with real-time updates, enhancing your development workflow. +- **Frontend Flexibility:** Utilize pre-built templates for Vue.js, React, Typescript or Go, catering to diverse frontend development needs. +- **Inter-Blockchain Communication (IBC):** Seamlessly connect and interact with other blockchains using an integrated IBC relayer, a key feature of the Cosmos SDK. +- **CometBFT Integration:** Built with the CometBFT consensus engine (formerly Tendermint), ensuring robust consensus mechanisms in your blockchain solutions. +- **Cross-Domain Applications:** IGNITE® is perfectly suited for developing a diverse array of use cases across various sectors. These include DeFi, NFTs, supply chain management, smart contracts (both EVM and WASM), and decentralized exchanges (DEXes). + +## Embracing the Cosmos Ecosystem + +IGNITE® CLI is your entry point into the vibrant Cosmos ecosystem, a hub of innovation where you can explore a range of applications, from wallets and explorers to smart contracts and DEXes, all powered by CometBFT and the Cosmos SDK. +This ecosystem is home to over [$100 billion worth of blockchain projects](https://cosmos.network/ecosystem/tokens/), showcasing the scalability and versatility of the technologies at play. + +## Projects using Tendermint and Cosmos SDK + +Many projects already showcase the Tendermint BFT consensus engine and the Cosmos SDK. Explore +the [Cosmos ecosystem](https://cosmos.network/ecosystem/apps) to discover a wide variety of apps, blockchains, wallets, +and explorers that are built in the Cosmos ecosystem. 
+ +## Projects building with IGNITE® CLI + +<ProjectsTable data={[ + { name: "Stride Labs", logo: "img/logo/stride.svg"}, + { name: "KYVE Network", logo: "img/logo/kyve.svg"}, + { name: "Umee", logo: "img/logo/umee.svg"}, + { name: "MediBloc Core", logo: "img/logo/medibloc.svg"}, + { name: "Cudos", logo: "img/logo/cudos.svg"}, + { name: "Firma Chain", logo: "img/logo/firmachain.svg"}, + { name: "BitCanna", logo: "img/logo/bitcanna.svg"}, + { name: "Source Protocol", logo: "img/logo/source.svg"}, + { name: "Sonr", logo: "img/logo/sonr.svg"}, + { name: "Neutron", logo: "img/logo/neutron.svg"}, + { name: "OKP4 Blockchain", logo: "img/logo/okp4.svg"}, + { name: "Dymension Hub", logo: "img/logo/dymension.svg"}, + { name: "Electra Blockchain", logo: "img/logo/electra.svg"}, + { name: "OLLO Station", logo: "img/logo/ollostation.svg"}, + { name: "Mun", logo: "img/logo/mun.svg"}, + { name: "Aura Network", logo: "img/logo/aura.svg"}, +]}/> diff --git a/docs/versioned_docs/version-v29/01-welcome/02-install.md b/docs/versioned_docs/version-v29/01-welcome/02-install.md new file mode 100644 index 0000000..d337ff9 --- /dev/null +++ b/docs/versioned_docs/version-v29/01-welcome/02-install.md @@ -0,0 +1,119 @@ +--- +sidebar_position: 1 +description: Steps to install IGNITE® CLI on your local computer. +--- + +# Install IGNITE® CLI + +You can run [IGNITE® CLI](https://github.com/ignite/cli) in a web-based IDE or you can install IGNITE® CLI on your local computer. + +## Prerequisites + +Be sure you have met the prerequisites before you install and use IGNITE® CLI. + +### Operating systems + +IGNITE® CLI is supported for the following operating systems: + +- GNU/Linux +- macOS +- Windows Subsystem for Linux (WSL) + +### Go + +IGNITE® CLI is written in the Go programming language. 
To use IGNITE® CLI on a local system: + +- Install [Go](https://golang.org/doc/install) (**version 1.24.1** or higher) +- Ensure the Go environment variables are [set properly](https://golang.org/doc/gopath_code#GOPATH) on your system + +## Verify your IGNITE® CLI version + +To verify the version of IGNITE® CLI you have installed, run the following command: + +```bash +ignite version +``` + +## Installing IGNITE® CLI + +To install the latest version of IGNITE® use [HomeBrew](https://formulae.brew.sh/formula/ignite) on macOS and GNU/Linux: + +```sh +brew install ignite +``` + +### Install manually + +Alternatively, you can install the latest version of the `ignite` binary use the following command: + +```bash +curl https://get.ignite.com/cli! | bash +``` + +This command invokes `curl` to download the installation script and pipes the output to `bash` to perform the +installation. The `ignite` binary is installed in `/usr/local/bin`. + +IGNITE® CLI installation requires write permission to the `/usr/local/bin/` directory. If the installation fails because +you do not have write permission to `/usr/local/bin/`, run the following command: + +```bash +curl https://get.ignite.com/cli | bash +``` + +Then run this command to move the `ignite` executable to `/usr/local/bin/`: + +```bash +sudo mv ignite /usr/local/bin/ +``` + +On some machines, a permissions error occurs: + +```bash +mv: rename ./ignite to /usr/local/bin/ignite: Permission denied +============ +Error: mv failed +``` + +In this case, use sudo before `curl` and before `bash`: + +```bash +sudo curl https://get.ignite.com/cli | sudo bash +``` + +To learn more or customize the installation process, see the [installer docs](https://github.com/ignite/installer) on +GitHub. + +## Upgrading your IGNITE® CLI installation {#upgrade} + +Before you install a new version of IGNITE® CLI, remove all existing IGNITE® CLI installations. + +To remove the current IGNITE® CLI installation: + +1. 
On your terminal window, press `Ctrl+C` to stop the chain that you started with `ignite chain serve`. +2. Remove the IGNITE® CLI binary with `rm $(which ignite)`. + Depending on your user permissions, run the command with or without `sudo`. +3. Repeat this step until all `ignite` installations are removed from your system. + +After all existing IGNITE® CLI installations are removed, follow the [Installing IGNITE® CLI](#installing-ignite-cli) +instructions. + +For details on version features and changes, see +the [changelog.md](https://github.com/ignite/cli/blob/main/changelog.md) +in the repo. + +## Build from source + +To experiment with the source code, you can build from source: + +```bash +git clone https://github.com/ignite/cli --depth=1 +cd cli && make install +``` + +## Summary + +- Verify the prerequisites. +- To set up a local development environment, install IGNITE® CLI locally on your computer. +- Install IGNITE® CLI by fetching the binary using cURL or by building from source. +- The latest version is installed by default. You can install previous versions of the precompiled `ignite` binary. +- Stop the chain and remove existing versions before installing a new version. diff --git a/docs/versioned_docs/version-v29/01-welcome/_category_.json b/docs/versioned_docs/version-v29/01-welcome/_category_.json new file mode 100644 index 0000000..ac625fc --- /dev/null +++ b/docs/versioned_docs/version-v29/01-welcome/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Welcome", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v29/02-guide/02-introduction.md b/docs/versioned_docs/version-v29/02-guide/02-introduction.md new file mode 100644 index 0000000..c79cef6 --- /dev/null +++ b/docs/versioned_docs/version-v29/02-guide/02-introduction.md @@ -0,0 +1,154 @@ +--- +sidebar_position: 2 +--- + +# Introduction + +In this guide, we will be using IGNITE® CLI to create a new blockchain. 
IGNITE® +CLI is a command line interface that allows users to quickly and easily create +blockchain networks. By using IGNITE® CLI, we can quickly create a new blockchain +without having to manually set up all the necessary components. + +Once we have created our blockchain with IGNITE® CLI, we will take a look at the +directory structure and files that were created. This will give us an +understanding of how the blockchain is organized and how the different +components of the blockchain interact with each other. + +By the end of this guide, you will have a basic understanding of how to use +IGNITE® CLI to create a new blockchain, and you will have a high-level +understanding of the directory structure and files that make up a blockchain. +This knowledge will be useful as you continue to explore the world of blockchain +development. + +If you are looking for more tutorials and hands-on experience, check out our tutorials website: + +[IGNITE® Tutorials](https://tutorials.ignite.com) + +## Creating a new blockchain + +To create a new blockchain project with IGNITE®, you will need to run the +following command: + +``` +ignite scaffold chain example +``` + +The `ignite scaffold chain` command will create a new blockchain in a new +directory `example`. + +The new blockchain is built using the Cosmos SDK framework and imports several +standard modules to provide a range of functionality. These modules include +`staking`, which enables a delegated Proof-of-Stake consensus mechanism, `bank` +for facilitating fungible token transfers between accounts, and `gov` for +on-chain governance. In addition to these modules, the blockchain also imports +other modules from the Cosmos SDK framework. + +The `example` directory contains the generated files and directories that make +up the structure of a Cosmos SDK blockchain. This directory includes files for +the chain's configuration, application logic, and tests, among others. 
It +provides a starting point for developers to quickly set up a new Cosmos SDK +blockchain and build their desired functionality on top of it. + +By default, IGNITE® creates a new empty custom module with the same name as the +blockchain being created (in this case, `example`) in the `x/` directory. This +module doesn't have any functionality by itself, but can serve as a starting +point for building out the features of your application. If you don't want to +create this module, you can use the `--no-module` flag to skip it. + +## Directory structure + +In order to understand what the IGNITE® CLI has generated for your project, you +can inspect the contents of the `example/` directory. + +The `app/` directory contains the files that connect the different parts of the +blockchain together. The most important file in this directory is `app.go`, +which includes the type definition of the blockchain and functions for creating +and initializing it. This file is responsible for wiring together the various +components of the blockchain and defining how they will interact with each +other. + +The `cmd/` directory contains the main package responsible for the command-line +interface (CLI) of the compiled binary. This package defines the commands that +can be run from the CLI and how they should be executed. It is an important part +of the blockchain project as it provides a way for developers and users to +interact with the blockchain and perform various tasks, such as querying the +blockchain state or sending transactions. + +The `docs/` directory is used for storing project documentation. By default, +this directory includes an OpenAPI specification file, which is a +machine-readable format for defining the API of a software project. The OpenAPI +specification can be used to automatically generate human-readable documentation +for the project, as well as provide a way for other tools and services to +interact with the API. 
The `docs/` directory can be used to store any additional +documentation that is relevant to the project. + +The `proto/` directory contains protocol buffer files, which are used to +describe the data structure of the blockchain. Protocol buffers are a language- +and platform-neutral mechanism for serializing structured data, and are often +used in the development of distributed systems, such as blockchain networks. The +protocol buffer files in the `proto/` directory define the data structures and +messages that are used by the blockchain, and are used to generate code for +various programming languages that can be used to interact with the blockchain. +In the context of the Cosmos SDK, protocol buffer files are used to define the +specific types of data that can be sent and received by the blockchain, as well +as the specific RPC endpoints that can be used to access the blockchain's +functionality. + +The `testutil/` directory contains helper functions that are used for testing. +These functions provide a convenient way to perform common tasks that are needed +when writing tests for the blockchain, such as creating test accounts, +generating transactions, and checking the state of the blockchain. By using the +helper functions in the `testutil/` directory, developers can write tests more +quickly and efficiently, and can ensure that their tests are comprehensive and +effective. + +The `x/` directory contains custom Cosmos SDK modules that have been added to +the blockchain. Standard Cosmos SDK modules are pre-built components that +provide common functionality for Cosmos SDK-based blockchains, such as support +for staking and governance. Custom modules, on the other hand, are modules that +have been developed specifically for the blockchain project and provide +project-specific functionality. + +The `config.yml` file is a configuration file that can be used to customize the +blockchain during development. 
This file includes settings that control various +aspects of the blockchain, such as the network's ID, account balances, and the +node parameters. + +The `.github` directory contains a GitHub Actions workflow that can be used to +automatically build and release a blockchain binary. GitHub Actions is a tool +that allows developers to automate their software development workflows, +including building, testing, and deploying their projects. The workflow in the +`.github` directory is used to automate the process of building the blockchain +binary and releasing it, which can save time and effort for developers. + +The `readme.md` file is a readme file that provides an overview of the +blockchain project. This file typically includes information such as the +project's name and purpose, as well as instructions on how to build and run the +blockchain. By reading the `readme.md` file, developers and users can quickly +understand the purpose and capabilities of the blockchain project and get +started using it. + +## Starting a blockchain node + +To start a blockchain node in development, you can run the following command: + +``` +ignite chain serve +``` + +The `ignite chain serve` command is used to start a blockchain node in +development mode. It first compiles and installs the binary using the +`ignite chain build` command, then initializes the blockchain's data directory +for a single validator using the `ignite chain init` command. After that, it +starts the node locally and enables automatic code reloading so that changes to +the code can be reflected in the running blockchain without having to restart +the node. This allows for faster development and testing of the blockchain. + +**Congratulations!** 🥳 You have successfully created a brand-new Cosmos blockchain +using the IGNITE® CLI. This blockchain uses the delegated proof of stake (DPoS) +consensus algorithm, and comes with a set of standard modules for token +transfers, governance, and inflation. 
Now that you have a basic understanding of +your Cosmos blockchain, it's time to start building custom functionality. In the +following tutorials, you will learn how to build custom modules and add new +features to your blockchain, allowing you to create a unique and powerful +decentralized application. diff --git a/docs/versioned_docs/version-v29/02-guide/03-hello-world.md b/docs/versioned_docs/version-v29/02-guide/03-hello-world.md new file mode 100644 index 0000000..41b4bea --- /dev/null +++ b/docs/versioned_docs/version-v29/02-guide/03-hello-world.md @@ -0,0 +1,96 @@ +--- +description: Build your first blockchain and your first Cosmos SDK query. +title: Hello World +--- + +# "Hello world!" Blockchain Tutorial with IGNITE® CLI + +**Introduction** + +In this tutorial, you'll build a simple blockchain using IGNITE® CLI that responds to a custom query with `Hello %s!`, where `%s` is a name passed in the query. +This will enhance your understanding of creating custom queries in a Cosmos SDK blockchain. + +## Setup and Scaffold + +1. **Create a New Blockchain:** + + ```bash + ignite scaffold chain hello + ``` + +2. **Navigate to the Blockchain Directory:** + + ```bash + cd hello + ``` + +## Adding a Custom Query + +- **Scaffold the Query:** + +```bash +ignite scaffold query say-hello name --response name +``` + +This command generates code for a new query, `say-hello`, which accepts a name, an input, and returns it in the response. + +- **Understanding the Scaffolded Code:** + + - `proto/hello/hello/query.proto`: Defines the request and response structure. + - `x/hello/module/autocli.go`: Contains commands for the query, using [AutoCLI](../08-references/04-glossary.md#autocli). + - `x/hello/keeper/query_say_hello.go`: Houses the logic for the query response. + +## Customizing the Query Response + +In the Cosmos SDK, queries are requests for information from the blockchain, used to access data like the ledger's current state or transaction details. 
While the SDK offers several built-in query methods, developers can also craft custom queries for specific data retrieval or complex operations. + +- **Modify `query_say_hello.go`:** + +Update the `SayHello` function in `x/hello/keeper/query_say_hello.go` to return a personalized greeting query. + +```go title="x/hello/keeper/query_say_hello.go" +package keeper + +import ( + "context" + "fmt" + + "hello/x/hello/types" + + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func (q queryServer) SayHello(ctx context.Context, req *types.QuerySayHelloRequest) (*types.QuerySayHelloResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + // TODO: Process the query + + // Custom Response + return &types.QuerySayHelloResponse{Name: fmt.Sprintf("Hello %s!", req.Name)}, nil +} +``` + +## Running the Blockchain + +1. **Start the Blockchain:** + +```bash +ignite chain serve +``` + +2. **Test the Query:** + +Use the command-line interface to submit a query. + +``` +hellod q hello say-hello world +``` + +Expect a response: `Hello world!` + +## Conclusion + +Congratulations! 🎉 You've successfully created a blockchain module with a custom query using IGNITE® CLI. Through this tutorial, you've learned how to scaffold a chain, add a custom query, and modify the logic for personalized responses. This experience illustrates the power of IGNITE® CLI in streamlining blockchain development and the importance of understanding the underlying code for customization. diff --git a/docs/versioned_docs/version-v29/02-guide/04-ibc.md b/docs/versioned_docs/version-v29/02-guide/04-ibc.md new file mode 100644 index 0000000..c12166b --- /dev/null +++ b/docs/versioned_docs/version-v29/02-guide/04-ibc.md @@ -0,0 +1,669 @@ +--- +sidebar_position: 7 +description: Build an understanding of how to create and send packets across blockchains and navigate between blockchains. 
+title: "Inter-Blockchain Communication: Basics" +--- + +# Inter-Blockchain Communication: Basics + +The Inter-Blockchain Communication protocol (IBC) is an important part of the +Cosmos SDK ecosystem. The Hello World tutorial is a time-honored tradition in +computer programming. This tutorial builds an understanding of how to create and +send packets across blockchain. This foundational knowledge helps you navigate +between blockchains with the Cosmos SDK. + +**You will learn how to** + +- Use IBC to create and send packets between blockchains. +- Navigate between blockchains using the Cosmos SDK and the IGNITE® CLI Relayer. +- Create a basic blog post and save the post on another blockchain. + +## What is IBC? + +The Inter-Blockchain Communication protocol (IBC) allows blockchains to talk to +each other. IBC handles transport across different sovereign blockchains. This +end-to-end, connection-oriented, stateful protocol provides reliable, ordered, +and authenticated communication between heterogeneous blockchains. + +The [IBC protocol in the Cosmos +SDK](https://ibc.cosmos.network/main/ibc/overview) is the standard for the +interaction between two blockchains. The IBCmodule interface defines how packets +and messages are constructed to be interpreted by the sending and the receiving +blockchain. + +The IBC relayer lets you connect between sets of IBC-enabled chains. This +tutorial teaches you how to create two blockchains and then start and use the +relayer with IGNITE® CLI to connect two blockchains. + +This tutorial covers essentials like modules, IBC packets, relayer, and the +lifecycle of packets routed through IBC. + +## Create a blockchain + +Create a blockchain app with a blog module to write posts on other blockchains +that contain the Hello World message. For this tutorial, you can write posts for +the Cosmos SDK universe that contain Hello Mars, Hello Cosmos, and Hello Earth +messages. 
+ +For this simple example, create an app that contains a blog module that has a +post transaction with title and text. + +After you define the logic, run two blockchains that have this module installed. + +- The chains can send posts between each other using IBC. + +- On the sending chain, save the `acknowledged` and `timed out` posts. + +After the transaction is acknowledged by the receiving chain, you know that the +post is saved on both blockchains. + +- The sending chain has the additional data `postID`. + +- Sent posts that are acknowledged and timed out contain the title and the + target chain of the post. These identifiers +- are visible on the parameter `chain`. The following chart shows the lifecycle + of a packet that travels through IBC. + +![The Lifecycle of an IBC packet](./images/packet_sendpost.png) + +## Build your blockchain app + +Use IGNITE® CLI to scaffold the blockchain app and the blog module. + +### Build a new blockchain + +To scaffold a new blockchain named `planet`: + +```bash +ignite scaffold chain planet --no-module +cd planet +``` + +A new directory named `planet` is created in your home directory. The `planet` +directory contains a working blockchain app. + +### Scaffold the blog module inside your blockchain + +Next, use IGNITE® CLI to scaffold a blog module with IBC capabilities. The blog +module contains the logic for creating blog posts and routing them through IBC +to the second blockchain. + +To scaffold a module named `blog`: + +```bash +ignite scaffold module blog --ibc +``` + +A new directory with the code for an IBC module is created in `planet/x/blog`. +Modules scaffolded with the `--ibc` flag include all the logic for the +scaffolded IBC module. + +### Generate CRUD actions for types + +Next, create the CRUD actions for the blog module types. + +Use the `ignite scaffold list` command to scaffold the boilerplate code for the +create, read, update, and delete (CRUD) actions. 
+ +These `ignite scaffold list` commands create CRUD code for the following +transactions: + +- Creating blog posts + +```bash +ignite scaffold list post title content creator --no-message --module blog +``` + +- Processing acknowledgments for sent posts + +```bash +ignite scaffold list sentPost postID:uint title chain creator --no-message --module blog +``` + +- Managing post timeouts + +```bash +ignite scaffold list timeoutPost title chain creator --no-message --module blog +``` + +The scaffolded code includes proto files for defining data structures, messages, +messages handlers, keepers for modifying the state, and CLI commands. + +### IGNITE® CLI Scaffold List Command Overview + +``` +ignite scaffold list [typeName] [field1] [field2] ... [flags] +``` + +The first argument of the `ignite scaffold list [typeName]` command specifies +the name of the type being created. For the blog app, you created `post`, +`sentPost`, and `timeoutPost` types. + +The next arguments define the fields that are associated with the type. For the +blog app, you created `title`, `content`, `postID`, and `chain` fields. + +The `--module` flag defines which module the new transaction type is added to. +This optional flag lets you manage multiple modules within your IGNITE® CLI app. +When the flag is not present, the type is scaffolded in the module that matches +the name of the repo. + +When a new type is scaffolded, the default behavior is to scaffold messages that +can be sent by users for CRUD operations. The `--no-message` flag disables this +feature. Disable the messages option for the app since you want the posts to be +created upon reception of IBC packets and not directly created from a user's +messages. + +### Scaffold a sendable and interpretable IBC packet + +You must generate code for a packet that contains the title and the content of +the blog post. + +The `ignite packet` command creates the logic for an IBC packet that can be sent +to another blockchain. 
+ +- The `title` and `content` are stored on the target chain. + +- The `postID` is acknowledged on the sending chain. + +To scaffold a sendable and interpretable IBC packet: + +```bash +ignite scaffold packet ibcPost title content --ack postID:uint --module blog +``` + +Notice the fields in the `ibcPost` packet match the fields in the `post` type +that you created earlier. + +- The `--ack` flag defines which identifier is returned to the sending + blockchain. + +- The `--module` flag specifies to create the packet in a particular IBC module. + +The `ignite packet` command also scaffolds the CLI command that is capable of +sending an IBC packet: + +```bash +planetd tx blog send-ibcPost [portID] [channelID] [title] [content] +``` + +## Modify the source code + +After you create the types and transactions, you must manually insert the logic +to manage updates in the database. Modify the source code to save the data as +specified earlier in this tutorial. + +### Add creator to the blog post packet + +Start with the proto file that defines the structure of the IBC packet. + +To identify the creator of the post in the receiving blockchain, add the +`creator` field inside the packet. This field was not specified directly in the +command because it would automatically become a parameter in the `SendIbcPost` +CLI command. + +```protobuf title="proto/planet/blog/v1/packet.proto" +message IbcPostPacketData { + string title = 1; + string content = 2; + // highlight-next-line + string creator = 3; +} +``` + +To make sure the receiving chain has content on the creator of a blog post, add +the `msg.Creator` value to the IBC `packet`. + +- The content of the `sender` of the message is automatically included in + `SendIbcPost` message. +- The sender is verified as the signer of the message, so you can add the + `msg.Sender` as the creator to the new packet +- before it is sent over IBC. 
+ +```go title="x/blog/keeper/msg_server_ibc_post.go" +package keeper + +func (k msgServer) SendIbcPost(goCtx context.Context, msg *types.MsgSendIbcPost) (*types.MsgSendIbcPostResponse, error) { + // validate incoming message + if _, err := k.addressCodec.StringToBytes(msg.Creator); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidAddress, fmt.Sprintf("invalid address: %s", err)) + } + + if msg.Port == "" { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "invalid packet port") + } + + if msg.ChannelID == "" { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "invalid packet channel") + } + + if msg.TimeoutTimestamp == 0 { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "invalid packet timeout") + } + + // TODO: logic before transmitting the packet + + // Construct the packet + var packet types.IbcPostPacketData + + packet.Title = msg.Title + packet.Content = msg.Content + // highlight-next-line + packet.Creator = msg.Creator + + // Transmit the packet + ctx := sdk.UnwrapSDKContext(goCtx) + _, err := k.TransmitIbcPostPacket( + ctx, + packet, + msg.Port, + msg.ChannelID, + clienttypes.ZeroHeight(), + msg.TimeoutTimestamp, + ) + if err != nil { + return nil, err + } + + return &types.MsgSendIbcPostResponse{}, nil +} +``` + +### Receive the post + +The methods for primary transaction logic are in the `x/blog/keeper/ibc_post.go` +file. Use these methods to manage IBC packets: + +- `TransmitIbcPostPacket` is called manually to send the packet over IBC. This + method also defines the logic before the packet is sent over IBC to another + blockchain app. +- `OnRecvIbcPostPacket` hook is automatically called when a packet is received + on the chain. This method defines the packet reception logic. +- `OnAcknowledgementIbcPostPacket` hook is called when a sent packet is + acknowledged on the source chain. This method defines the logic when the + packet has been received. 
+- `OnTimeoutIbcPostPacket` hook is called when a sent packet times out. This + method defines the logic when the packet is not received on the target chain + +You must modify the source code to add the logic inside those functions so that +the data tables are modified accordingly. + +On reception of the post message, create a new post with the title and the +content on the receiving chain. + +To identify the blockchain app that a message is originating from and who +created the message, use an identifier in the following format: + +`<portID>-<channelID>-<creatorAddress>` + +Finally, the IGNITE® CLI-generated AppendPost function returns the ID of the new +appended post. You can return this value to the source chain through +acknowledgment. + +Append the type instance as `PostId` on receiving the packet: + +- The context `ctx` is an [immutable data + structure](https://docs.cosmos.network/main/core/context#go-context-package) + that has header data from the transaction. See [how the context is + initiated](https://github.com/cosmos/cosmos-sdk/blob/main/types/context.go#L71) +- The identifier format that you defined earlier +- The `title` is the Title of the blog post +- The `content` is the Content of the blog post + +Then modify the `OnRecvIbcPostPacket` keeper function with the following code: + +```go title="x/blog/keeper/ibc_post.go" +package keeper + +func (k Keeper) OnRecvIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData) (packetAck types.IbcPostPacketAck, err error) { + packetAck.PostId, err = k.PostSeq.Next(ctx) + if err != nil { + return packetAck, err + } + return packetAck, k.Post.Set(ctx, packetAck.PostId, types.Post{Title: data.Title, Content: data.Content}) +} +``` + +### Receive the post acknowledgement + +On the sending blockchain, store a `sentPost` so you know that the post has been +received on the target chain. + +Store the title and the target to identify the post. 
+ +When a packet is scaffolded, the default type for the received acknowledgment +data is a type that identifies if the packet treatment has failed. The +`Acknowledgement_Error` type is set if `OnRecvIbcPostPacket` returns an error +from the packet. + +```go title="x/blog/keeper/ibc_post.go" +package keeper + +import transfertypes "github.com/cosmos/ibc-go/v10/modules/apps/transfer/types" + +func (k Keeper) OnAcknowledgementIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + // We will not treat acknowledgment error in this tutorial + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.IbcPostPacketAck + if err := k.cdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + seq, err := k.SentPostSeq.Next(ctx) + if err != nil { + return err + } + + return k.SentPost.Set(ctx, seq, + types.SentPost{ + PostId: packetAck.PostId, + Title: data.Title, + Chain: packet.DestinationPort + "-" + packet.DestinationChannel, + }, + ) + default: + return errors.New("the counter-party module does not implement the correct acknowledgment format") + } +} +``` + +### Store information about the timed-out packet + +Store posts that have not been received by target chains in `timeoutPost` +posts. This logic follows the same format as `sentPost`. 
+ +```go title="x/blog/keeper/ibc_post.go" +func (k Keeper) OnTimeoutIbcPostPacket(ctx sdk.Context, packet channeltypes.Packet, data types.IbcPostPacketData) error { + seq, err := k.TimeoutPostSeq.Next(ctx) + if err != nil { + return err + } + + return k.TimeoutPost.Set(ctx, seq, + types.TimeoutPost{ + Title: data.Title, + Chain: packet.DestinationPort + "-" + packet.DestinationChannel, + }, + ) +} +``` + +This last step completes the basic `blog` module setup. The blockchain is now +ready! + +## Use the IBC modules + +You can now spin up the blockchain and send a blog post from one blockchain app +to the other. Multiple terminal windows are required to complete these next +steps. + +### Test the IBC modules + +To test the IBC module, start two blockchain networks on the same machine. Both +blockchains use the same source code. Each blockchain has a unique chain ID. + +One blockchain is named `earth` and the other blockchain is named `mars`. + +The `earth.yml` and `mars.yml` files are required in the project directory: + +```yaml title="earth.yml" +version: 1 +validation: sovereign +build: + proto: + path: proto +accounts: +- name: alice + coins: + - 1000token + - 100000000stake +- name: bob + coins: + - 500token + - 100000000stake +faucet: + name: bob + coins: + - 5token + - 100000stake + host: 0.0.0.0:4500 +genesis: + chain_id: earth +validators: +- name: alice + bonded: 100000000stake + home: $HOME/.earth +``` + +```yaml title="mars.yml" +version: 1 +validation: sovereign +build: + proto: + path: proto +accounts: +- name: alice + coins: + - 1000token + - 1000000000stake +- name: bob + coins: + - 500token + - 100000000stake +faucet: + name: bob + coins: + - 5token + - 100000stake + host: :4501 +genesis: + chain_id: mars +validators: +- name: alice + bonded: 100000000stake + app: + api: + address: :1318 + grpc: + address: :9092 + grpc-web: + address: :9093 + config: + p2p: + laddr: :26658 + rpc: + laddr: :26659 + pprof_laddr: :6061 + home: $HOME/.mars +``` + +Open 
a terminal window and run the following command to start the `earth` +blockchain: + +```bash +ignite chain serve -c earth.yml +``` + +Open a different terminal window and run the following command to start the +`mars` blockchain: + +```bash +ignite chain serve -c mars.yml +``` +If no existing relayer configurations are found, the command returns `no matches +found` and no action is taken. + +### Configure and start the relayer + +First, add the Hermes relayer app. + +```bash +ignite app install -g github.com/ignite/apps/hermes +``` + +If you previously used the relayer, follow these steps to remove existing relayer +and IGNITE® CLI configurations: + +- Stop your blockchains and delete previous configuration files: + +```bash +ignite relayer hermes clear binaries +ignite relayer hermes clear configs +``` + +Then configure the relayer: + +```bash +ignite relayer hermes configure \ +"earth" "http://localhost:26657" "http://localhost:9090" \ +"mars" "http://localhost:26659" "http://localhost:9092" \ +--chain-a-faucet "http://0.0.0.0:4500" \ +--chain-b-faucet "http://0.0.0.0:4501" \ +--chain-a-port-id "blog" \ +--chain-b-port-id "blog" \ +--channel-version "blog-1" +``` + +When prompted, press Enter to accept the default values for `Chain A Account` and +`Chain B Account`. + +The output looks like: + +``` +Hermes config created at /Users/danilopantani/.ignite/relayer/hermes/earth_mars +? Chain earth doesn't have a default Hermes key. Type your mnemonic to continue or type enter to generate a new one: (optional) +New mnemonic generated: danger plate flavor twist chimney myself sketch assist copy expand core tattoo ignore ensure quote mean forum carbon enroll gadget immense grab early maze +Chain earth key created +Chain earth relayer wallet: cosmos1jk6wmyl880j6t9vw6umy9v8ex0yhrfwgx0vv2d +New balance from faucet: 100000stake,5token +? Chain mars doesn't have a default Hermes key. 
Type your mnemonic to continue or type enter to generate a new one: (optional) +New mnemonic generated: invest box icon session lens demise purse link boss dwarf give minimum jazz eye vocal seven sunset coach express want ask version anger ranch +Chain mars key created +Chain mars relayer wallet: cosmos1x9kt37c0sutanaqwy9gxpvq5990yt0qnpqntmp +New balance from faucet: 100000stake,5token +Client '07-tendermint-0' created (earth -> mars) +Client 07-tendermint-0' created (mars -> earth) +Connection 'earth (connection-0) <-> mars (connection-0)' created +Channel 'earth (channel-0) <-> mars (channel-0)' created +``` + +Now start the relayer: + +```bash +ignite relayer hermes start "earth" "mars" +``` + +### Send packets + +You can now send packets and verify the received posts: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Hello" "Hello Mars, I'm Alice from Earth" --from alice --chain-id earth --home ~/.earth +``` + +To verify that the post has been received on Mars: + +```bash +planetd q blog list-post --node tcp://localhost:26659 +``` + +The packet has been received: + +```yaml +Post: + - content: Hello Mars, I'm Alice from Earth + creator: blog-channel-0-cosmos1aew8dk9cs3uzzgeldatgzvm5ca2k4m98xhy20x + id: "0" + title: Hello +pagination: + next_key: null + total: "1" +``` + +To check if the packet has been acknowledged on Earth: + +```bash +planetd q blog list-sent-post +``` + +Output: + +```yaml +SentPost: + - chain: blog-channel-0 + creator: cosmos1aew8dk9cs3uzzgeldatgzvm5ca2k4m98xhy20x + id: "0" + postID: "0" + title: Hello +pagination: + next_key: null + total: "1" +``` + +To test timeout, set the timeout time of a packet to 1 nanosecond, verify that +the packet is timed out, and check the timed-out posts: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Sorry" "Sorry Mars, you will never see this post" --from alice --chain-id earth --home ~/.earth --packet-timeout-timestamp 1 +``` + +Check the timed-out posts: + +```bash +planetd q blog 
list-timeout-post +``` + +Results: + +```yaml +TimeoutPost: + - chain: blog-channel-0 + creator: cosmos1fhpcsxn0g8uask73xpcgwxlfxtuunn3ey5ptjv + id: "0" + title: Sorry +pagination: + next_key: null + total: "2" +``` + +You can also send a post from Mars: + +```bash +planetd tx blog send-ibc-post blog channel-0 "Hello" "Hello Earth, I'm Alice from Mars" --from alice --chain-id mars --home ~/.mars --node tcp://localhost:26659 +``` + +List post on Earth: + +```bash +planetd q blog list-post +``` + +Results: + +```yaml +Post: + - content: Hello Earth, I'm Alice from Mars + creator: blog-channel-0-cosmos1xtpx43l826348s59au24p22pxg6q248638q2tf + id: "0" + title: Hello +pagination: + next_key: null + total: "1" +``` + +## Congratulations 🎉 + +By completing this tutorial, you've learned to use the Inter-Blockchain +Communication protocol (IBC). + +Here's what you accomplished in this tutorial: + +- Built two Hello blockchain apps as IBC modules +- Modified the generated code to add CRUD action logic +- Configured and used the IGNITE® CLI relayer to connect two blockchains with + each other +- Transferred IBC packets from one blockchain to another diff --git a/docs/versioned_docs/version-v29/02-guide/05-debug.md b/docs/versioned_docs/version-v29/02-guide/05-debug.md new file mode 100644 index 0000000..545086f --- /dev/null +++ b/docs/versioned_docs/version-v29/02-guide/05-debug.md @@ -0,0 +1,208 @@ +--- +description: Debugging your Cosmos SDK blockchain +--- + +# Debugging a chain + +IGNITE® chain debug command can help you find issues during development. It uses +[Delve](https://github.com/go-delve/delve) debugger which enables you to +interact with your blockchain app by controlling the execution of the process, +evaluating variables, and providing information of thread / goroutine state, CPU +register state and more. + +## Debug Command + +The debug command requires that the blockchain app binary is build with +debugging support by removing optimizations and inlining. 
A debug binary is +built by default by the `ignite chain serve` command or can optionally be +created using the `--debug` flag when running `ignite chain init` or `ignite +chain build` sub-commands. + +To start a debugging session in the terminal run: + +``` +ignite chain debug +``` + +The command runs your blockchain app in the background, attaches to it and +launches a terminal debugger shell: + +``` +Type 'help' for list of commands. +(dlv) +``` + +At this point the blockchain app blocks execution, so you can set one or more +breakpoints before continuing execution. + +Use the +[break](https://github.com/go-delve/delve/blob/master/Documentation/cli/README.md#break) +(alias `b`) command to set any number of breakpoints using, for example the +`<filename>:<line>` notation: + +``` +(dlv) break x/hello/keeper/query_say_hello.go:13 +``` + +This command adds a breakpoint to the `x/hello/keeper/query_say_hello.go` +file at line 14. + +Once all breakpoints are set resume blockchain execution using the +[continue](https://github.com/go-delve/delve/blob/master/Documentation/cli/README.md#continue) +(alias `c`) command: + +``` +(dlv) continue +``` + +The debugger will launch the shell and stop blockchain execution again when a +breakpoint is triggered. + +Within the debugger shell use the `quit` (alias `q`) or `exit` commands to stop +the blockchain app and exit the debugger. + +## Debug Server + +A debug server can optionally be started in cases where the default terminal +client is not desirable. When the server starts it first runs the blockchain +app, attaches to it and finally waits for a client connection. The default +server address is *tcp://127.0.0.1:30500* and it accepts both JSON-RPC or DAP +client connections. 
+ +To start a debug server use the following flag: + +``` +ignite chain debug --server +``` + +To start a debug server with a custom address use the following flags: + +``` +ignite chain debug --server --server-address 127.0.0.1:30500 +``` + +The debug server stops automatically when the client connection is closed. + +## Debugging Clients + +### Gdlv: Multiplatform Delve UI + +[Gdlv](https://github.com/aarzilli/gdlv) is a graphical frontend to Delve for +Linux, Windows and macOS. + +Using it as debugging client is straightforward as it doesn't require any +configuration. Once the debug server is running and listening for client +requests connect to it by running: + +``` +gdlv connect 127.0.0.1:30500 +``` + +Setting breakpoints and continuing execution is done in the same way as Delve, +by using the `break` and `continue` commands. + +### Visual Studio Code + +Using [Visual Studio Code](https://code.visualstudio.com/) as debugging client +requires an initial configuration to allow it to connect to the debug server. + +Make sure that the [Go](https://code.visualstudio.com/docs/languages/go) +extension is installed. + +VS Code debugging is configured using the `launch.json` file which is usually +located inside the `.vscode` folder in your workspace. + +You can use the following launch configuration to set up VS Code as debugging +client: + +```json title=launch.json +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Connect to Debug Server", + "type": "go", + "request": "attach", + "mode": "remote", + "remotePath": "${workspaceFolder}", + "port": 30500, + "host": "127.0.0.1" + } + ] +} +``` + +Alternatively it's possible to create a custom `launch.json` file from the "Run +and Debug" panel. When prompted choose the Go debugger option labeled "Go: +Connect to Server" and enter the debug host address and then the port number. 
+ +## Example: Debugging a Blockchain App + +In this short example we will be using IGNITE® CLI to create a new blockchain and +a query to be able to trigger a debugging breakpoint when the query is called. + +Create a new blockchain: + +``` +ignite scaffold chain hello +``` + +Scaffold a new query in the `hello` directory: + +``` +ignite scaffold query say-hello name --response name +``` + +The next step initializes the blockchain's data directory and compiles a debug +binary: + +``` +ignite chain init --debug +``` + +Once the initialization finishes launch the debugger shell: + +``` +ignite chain debug +``` + +Within the debugger shell create a breakpoint that will be triggered when the +`SayHello` function is called and then continue execution: + +``` +(dlv) break x/hello/keeper/query_say_hello.go:12 +(dlv) continue +``` + +From a different terminal use the `hellod` binary to call the query: + +``` +hellod query hello say-hello bob +``` + +A debugger shell will be launched when the breakpoint is triggered: + +``` + 7: "google.golang.org/grpc/codes" + 8: "google.golang.org/grpc/status" + 9: "hello/x/hello/types" + 10: ) + 11: +=> 12: func (k Keeper) SayHello(ctx context.Context, req *types.QuerySayHelloRequest) (*types.QuerySayHelloResponse, error) { + 13: if req == nil { + 14: return nil, status.Error(codes.InvalidArgument, "invalid request") + 15: } + 16: +``` + +From then on you can use Delve commands like `next` (alias `n`) or `print` +(alias `p`) to control execution and print values. For example, to print the +*name* argument value use the `print` command followed by "req.Name": + +``` +(dlv) print req.Name +"bob" +``` + +Finally, use `quit` (alias `q`) to stop the blockchain app and finish the +debugging session. 
diff --git a/docs/versioned_docs/version-v29/02-guide/06-docker.md b/docs/versioned_docs/version-v29/02-guide/06-docker.md new file mode 100644 index 0000000..0f47d37 --- /dev/null +++ b/docs/versioned_docs/version-v29/02-guide/06-docker.md @@ -0,0 +1,142 @@ +--- +description: Run IGNITE® CLI using a Docker container. +--- + +# Running inside a Docker container + +You can run IGNITE® CLI inside a Docker container without installing the IGNITE® +CLI binary directly on your machine. + +Running IGNITE® CLI in Docker can be useful for various reasons; isolating your +test environment, running IGNITE® CLI on an unsupported operating system, or +experimenting with a different version of IGNITE® CLI without installing it. + +Docker containers are like virtual machines because they provide an isolated +environment to programs that runs inside them. In this case, you can run IGNITE® +CLI in an isolated environment. + +Experimentation and file system impact is limited to the Docker instance. The +host machine is not impacted by changes to the container. + +## Prerequisites + +Docker must be installed. See [Get Started with +Docker](https://www.docker.com/get-started). + +## IGNITE® CLI Commands in Docker + +After you scaffold and start a chain in your Docker container, all IGNITE® CLI +commands are available. Just type the commands after `docker run -ti +ignite/cli`. For example: + +```bash +docker run -ti ignitehq/cli -h +docker run -ti ignitehq/cli scaffold chain planet +docker run -ti ignitehq/cli chain serve +``` + +## Scaffolding a chain + +When Docker is installed, you can build a blockchain with a single command. + +IGNITE® CLI, and the chains you serve with IGNITE® CLI, persist some files. When +using the CLI binary directly, those files are located in `$HOME/.ignite` and +`$HOME/.cache`, but in the context of Docker it's better to use a directory +different from `$HOME`, so we use `$HOME/sdh`. 
This folder should be created +manually prior to the docker commands below, or else Docker creates it with the +root user. + +```bash +mkdir $HOME/sdh +``` + +To scaffold a blockchain `planet` in the `/apps` directory in the container, run +this command in a terminal window: + +```bash +docker run -ti -v $HOME/sdh:/home/tendermint -v $PWD:/apps ignitehq/cli:0.25.2 scaffold chain planet +``` + +Be patient, this command takes a minute or two to run because it does everything +for you: + +- Creates a container that runs from the `ignitehq/cli:0.25.2` image. +- Executes the IGNITE® CLI binary inside the image. +- `-v $HOME/sdh:/home/tendermint` maps the `$HOME/sdh` directory in your local + computer (the host machine) to the home directory `/home/tendermint` inside + the container. +- `-v $PWD:/apps` maps the current directory in the terminal window on the host + machine to the `/apps` directory in the container. You can optionally specify + an absolute path instead of `$PWD`. + + Using `-w` and `-v` together provides file persistence on the host machine. + The application source code on the Docker container is mirrored to the file + system of the host machine. + + **Note:** The directory name for the `-w` and `-v` flags can be a name other + than `/app`, but the same directory must be specified for both flags. If you + omit `-w` and `-v`, the changes are made in the container only and are lost + when that container is shut down. + +## Starting a blockchain + +To start the blockchain node in the Docker container you just created, run this +command: + +```bash +docker run -ti -v $HOME/sdh:/home/tendermint -v $PWD:/apps -p 1317:1317 -p 26657:26657 ignitehq/cli:0.25.2 chain serve -p planet +``` + +This command does the following: + +- `-v $HOME/sdh:/home/tendermint` maps the `$HOME/sdh` directory in your local + computer (the host machine) to the home directory `/home/tendermint` inside + the container. 
+- `-v $PWD:/apps` persists the scaffolded app in the container to the host + machine at current working directory. +- `serve -p planet` specifies to use the `planet` directory that contains the + source code of the blockchain. +- `-p 1317:1317` maps the API server port (cosmos-sdk) to the host machine to + forward port 1317 listening inside the container to port 1317 on the host + machine. +- `-p 26657:26657` maps RPC server port 26657 (tendermint) on the host machine + to port 26657 in Docker. +- After the blockchain is started, open `http://localhost:26657` to see the + Tendermint API. +- The `-v` flag specifies for the container to access the application's source + code from the host machine, so it can build and run it. + +## Versioning + +You can specify which version of IGNITE® CLI to install and run in your Docker +container. + +### Latest version + +- By default, `ignite/cli` resolves to `ignite/cli:latest`. +- The `latest` image tag is always the latest stable [IGNITE® CLI + release](https://github.com/ignite/cli/releases). + +For example, if latest release is +[v0.25.2](https://github.com/ignite/cli/releases/tag/v0.25.2), the `latest` tag +points to the `0.25.2` tag. + +### Specific version + +You can specify to use a specific version of IGNITE® CLI. All available tags are +in the [ignite/cli +image](https://hub.docker.com/r/ignitehq/cli/tags?page=1&ordering=last_updated) on +Docker Hub. + +For example: + +- Use `ignitehq/cli:0.25.2` (without the `v` prefix) to use version `0.25.2`. +- Use `ignitehq/cli` to use the latest version. +- Use `ignitehq/cli:main` to use the `main` branch, so you can experiment with + the upcoming version. + +To get the latest image, run `docker pull`. 
+
+```bash
+docker pull ignitehq/cli:main
+```
diff --git a/docs/versioned_docs/version-v29/02-guide/07-simapp.md b/docs/versioned_docs/version-v29/02-guide/07-simapp.md
new file mode 100644
index 0000000..9d87f97
--- /dev/null
+++ b/docs/versioned_docs/version-v29/02-guide/07-simapp.md
@@ -0,0 +1,103 @@
+---
+sidebar_position: 10
+description: Test different scenarios for your chain.
+---
+
+# Chain simulation
+
+The IGNITE® CLI chain simulator can help you run your chain with
+randomized inputs so you can perform fuzz testing and also benchmark tests for your
+chain, simulating the messages, blocks, and accounts. You can scaffold a
+template to perform simulation testing in each module along with boilerplate
+simulation methods for each scaffolded message.
+
+## Module simulation
+
+Every new module that is scaffolded with IGNITE® CLI implements the Cosmos SDK
+[Module
+Simulation](https://docs.cosmos.network/main/building-modules/simulator).
+
+- Each new message creates a file with the simulation methods required for the
+  tests.
+- Scaffolding a `CRUD` type like a `list` or `map` creates a simulation file
+  with `create`, `update`, and `delete` simulation methods in the
+  `x/<module>/simulation` folder and registers these methods in
+  `x/<module>/module_simulation.go`.
+- Scaffolding a single message creates an empty simulation method to be
+  implemented by the user.
+
+We recommend that you maintain the simulation methods for each new modification
+to the message keeper methods.
+
+Every simulation is weighted because the sender of the operation is assigned
+randomly. The weight defines how often the simulation calls the message.
+
+For better randomization, you can define a random seed. The simulation with the
+same random seed is deterministic with the same output.
+
+## Scaffold a simulation
+
+To create a new chain:
+
+```
+ignite scaffold chain mars
+```
+
+Review the empty `x/mars/simulation` folder and the
+`x/mars/module_simulation.go` file to see that a simulation is not registered.
+
+Now, scaffold a new message:
+
+```
+ignite scaffold list user address balance:uint state
+```
+
+A new file `x/mars/simulation/user.go` is created and is registered with the
+weight in the `x/mars/module_simulation.go` file.
+
+Be sure to define the proper simulation weight with a minimum weight of 0 and a
+maximum weight of 100.
+
+For this example, change the `defaultWeightMsgDeleteUser` to 30 and the
+`defaultWeightMsgUpdateUser` to 50.
+
+Run the `BenchmarkSimulation` method in `app/simulation_test.go` to run
+simulation tests for all modules:
+
+```
+ignite chain simulate
+```
+
+You can also define flags that are provided by the simulation. Flags are defined
+by the method `simapp.GetSimulatorFlags()`:
+
+```
+ignite chain simulate -v --numBlocks 200 --blockSize 50 --seed 33
+```
+
+Wait for the entire simulation to finish and check the result of the messages.
+
+The default `go test` command works to run the simulation:
+
+```
+go test -v -benchmem -run=^$ -bench ^BenchmarkSimulation -cpuprofile cpu.out ./app -Commit=true
+```
+
+### Skip message
+
+Use logic to avoid sending a message without returning an error. Return only
+`simtypes.NoOpMsg(...)` from the simulation message handler.
+
+## Params
+
+Scaffolding a module with params automatically adds the module in the
+`module_simulation.go` file:
+
+```
+ignite s module earth --params channel:string,minLaunch:uint,maxLaunch:int
+```
+
+After the parameters are scaffolded, change the
+`x/<module>/module_simulation.go` file to set the random parameters in the
+`RandomizedParams` method. The simulation will change the params randomly
+each time the function is called.
diff --git a/docs/versioned_docs/version-v29/02-guide/08-state.md b/docs/versioned_docs/version-v29/02-guide/08-state.md new file mode 100644 index 0000000..b57cfd2 --- /dev/null +++ b/docs/versioned_docs/version-v29/02-guide/08-state.md @@ -0,0 +1,230 @@ +--- +description: Learn how Cosmos SDK modules manage state with collections +title: State Management +--- + +# State Management in Modules + +In blockchain applications, state refers to the current data stored on the blockchain at a specific point in time. Handling state is usually the core of any blockchain application. The Cosmos SDK provides powerful tools for state management, with the `collections` package being the recommended approach for modern applications. + +## Collections Package + +IGNITE® scaffolds using the [`collections`](https://pkg.go.dev/cosmossdk.io/collections) package for module code. This package provides a type-safe and efficient way to set and query values from the module store. + +### Key Features of Collections + +- **Type Safety**: Collections are type-safe, reducing the risk of runtime errors. +- **Simplified API**: Easy-to-use methods for common operations like Get, Set, and Has. +- **Performance**: Optimized for performance with minimal overhead. +- **Integration**: Seamlessly integrates with the Cosmos SDK ecosystem. + +## Understand keeper field + +IGNITE® creates all the necessary boilerplate for collections in the `x/<module>/keeper/keeper.go` file. The `Keeper` struct contains fields for each collection you define in your module. Each field is an instance of a collection type, such as `collections.Map`, `collections.Item`, or `collections.List`. + +```go +type Keeper struct { + // ... 
+ + Params collections.Item[Params] + Counters collections.Map[string, uint64] + Profiles collections.Map[sdk.AccAddress, Profile] +} +``` + +## Common State Operations + +### Reading State + +To read values from state, use the `Get` method: + +```go +// getting a single item +params, err := k.Params.Get(ctx) +if err != nil { + // handle error + // collections.ErrNotFound is returned when an item doesn't exist +} + +// getting a map entry +counter, err := k.Counters.Get(ctx, "my-counter") +if err != nil { + // handle error +} +``` + +### Writing State + +To write values to state, use the `Set` method: + +```go +// setting a single item +err := k.Params.Set(ctx, params) +if err != nil { + // handle error +} + +// setting a map entry +err = k.Counters.Set(ctx, "my-counter", 42) +if err != nil { + // handle error +} +``` + +### Checking Existence + +Use the `Has` method to check if a value exists without retrieving it: + +```go +exists, err := k.Counters.Has(ctx, "my-counter") +if err != nil { + // handle error +} +if exists { + // value exists +} +``` + +### Removing State + +To remove values from state, use the `Remove` method: + +```go +err := k.Counters.Remove(ctx, "my-counter") +if err != nil { + // handle error +} +``` + +## Implementing Business Logic in Messages + +Messages in Cosmos SDK modules modify state based on user transactions. 
Here's how to implement business logic in a message handler using collections: + +```go +func (k msgServer) CreateProfile(ctx context.Context, msg *types.MsgCreateProfile) (*types.MsgCreateProfileResponse, error) { + // validate message + if err := msg.ValidateBasic(); err != nil { + return nil, err + } + + // parse sender address + senderBz, err := k.addressCodec.StringToBytes(msg.Creator) + if err != nil { + return nil, err + } + sender := sdk.AccAddress(senderBz) + + // check if profile already exists + exists, err := k.Profiles.Has(ctx, sender) + if err != nil { + return nil, err + } + if exists { + return nil, sdkerrors.Wrap(types.ErrProfileExists, "profile already exists") + } + + // create new profile + sdkCtx := sdk.UnwrapSDKContext(ctx) + profile := types.Profile{ + Name: msg.Name, + Bio: msg.Bio, + CreatedAt: sdkCtx.BlockTime().Unix(), + } + + // store the profile + err = k.Profiles.Set(ctx, sender, profile) + if err != nil { + return nil, err + } + + // increment profile counter + counter, err := k.Counters.Get(ctx, "profiles") + if err != nil && !errors.Is(err, collections.ErrNotFound) { + return nil, err + } + // set the counter (adding 1) + err = k.Counters.Set(ctx, "profiles", counter+1) + if err != nil { + return nil, err + } + + return &types.MsgCreateProfileResponse{}, nil +} +``` + +## Implementing Queries + +Queries allow users to read state without modifying it. 
Here's how to implement a query handler using collections:
+
+```go
+func (q queryServer) GetProfile(ctx context.Context, req *types.QueryGetProfileRequest) (*types.QueryGetProfileResponse, error) {
+	if req == nil {
+		return nil, status.Error(codes.InvalidArgument, "invalid request")
+	}
+
+	// parse address
+	addressBz, err := q.k.addressCodec.StringToBytes(req.Address)
+	if err != nil {
+		return nil, status.Error(codes.InvalidArgument, "invalid address")
+	}
+	address := sdk.AccAddress(addressBz)
+
+	// get profile
+	profile, err := q.k.Profiles.Get(ctx, address)
+	if err != nil {
+		if errors.Is(err, collections.ErrNotFound) {
+			return nil, status.Error(codes.NotFound, "profile not found")
+		}
+		return nil, status.Error(codes.Internal, "internal error")
+	}
+
+	return &types.QueryGetProfileResponse{Profile: profile}, nil
+}
+```
+
+## Error Handling with Collections
+
+When working with collections, proper error handling is essential:
+
+```go
+// example from a query function
+params, err := q.k.Params.Get(ctx)
+if err != nil && !errors.Is(err, collections.ErrNotFound) {
+	return nil, status.Error(codes.Internal, "internal error")
+}
+```
+
+The snippet above uses the `Get` method to get a collection item. A `collections.ErrNotFound` can be a valid error when the collection is empty, whereas any other error is considered an internal error that should be handled appropriately.
+ +## Iterating Over Collections + +Collections also support iteration: + +```go +// iterate over all profiles +err := k.Profiles.Walk(ctx, nil, func(key sdk.AccAddress, value types.Profile) (bool, error) { + // process each profile + // return true to stop iteration, false to continue + return false, nil +}) +if err != nil { + // handle error +} + +// iterate over a range of counters +startKey := "a" +endKey := "z" +err = k.Counters.Walk(ctx, collections.NewPrefixedPairRange[string, uint64](startKey, endKey), func(key string, value uint64) (bool, error) { + // process each counter in the range + return false, nil +}) +if err != nil { + // handle error +} +``` + +## Conclusion + +The `collections` package provides a powerful and type-safe way to manage state in Cosmos SDK modules. By understanding how to use collections effectively, you can build robust and efficient blockchain applications that handle state transitions reliably. + +When developing with IGNITE® CLI, you are already taking advantage of collections which significantly simplify the state management code and reduce the potential for errors. 
diff --git a/docs/versioned_docs/version-v29/02-guide/_category_.json b/docs/versioned_docs/version-v29/02-guide/_category_.json new file mode 100644 index 0000000..8021cf4 --- /dev/null +++ b/docs/versioned_docs/version-v29/02-guide/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Getting Started", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v29/02-guide/images/packet_sendpost.png b/docs/versioned_docs/version-v29/02-guide/images/packet_sendpost.png new file mode 100644 index 0000000..0bb080c Binary files /dev/null and b/docs/versioned_docs/version-v29/02-guide/images/packet_sendpost.png differ diff --git a/docs/versioned_docs/version-v29/03-CLI-Commands/01-cli-commands.md b/docs/versioned_docs/version-v29/03-CLI-Commands/01-cli-commands.md new file mode 100644 index 0000000..aec4362 --- /dev/null +++ b/docs/versioned_docs/version-v29/03-CLI-Commands/01-cli-commands.md @@ -0,0 +1,2362 @@ +--- +description: Ignite CLI docs. +--- + +# CLI commands + +Documentation for Ignite CLI. +## ignite + +Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + +**Synopsis** + +Ignite CLI is a tool for creating sovereign blockchains built with Cosmos SDK, the world's +most popular modular blockchain framework. Ignite CLI offers everything you need to scaffold, +test, build, and launch your blockchain. + +To get started, create a blockchain: + +$ ignite scaffold chain example + +Announcements: + +⋆ A new release has appeared! v29.8.0 has just been released :) +⋆ Satisfied with Ignite? Or totally fed-up with it? 
Tell us: https://bit.ly/3WZS2uS
+
+
+**Options**
+
+```
+  -h, --help   help for ignite
+```
+
+**SEE ALSO**
+
+* [ignite account](#ignite-account)	 - Create, delete, and show Ignite accounts
+* [ignite app](#ignite-app)	 - Create and manage Ignite Apps
+* [ignite appregistry](#ignite-appregistry)	 - Browse the Ignite App Registry App
+* [ignite chain](#ignite-chain)	 - Build, init and start a blockchain node
+* [ignite completion](#ignite-completion)	 - Generates shell completion script.
+* [ignite docs](#ignite-docs)	 - Show Ignite CLI docs
+* [ignite generate](#ignite-generate)	 - Generate clients, API docs from source code
+* [ignite relayer](#ignite-relayer)	 - Connect blockchains with an IBC relayer
+* [ignite scaffold](#ignite-scaffold)	 - Create a new blockchain, module, message, query, and more
+* [ignite testnet](#ignite-testnet)	 - Simulate and manage test networks
+* [ignite version](#ignite-version)	 - Print the current build information
+
+
+## ignite account
+
+Create, delete, and show Ignite accounts
+
+**Synopsis**
+
+Commands for managing Ignite accounts. An Ignite account is a private/public
+keypair stored in a keyring. Currently Ignite accounts are used when interacting
+with Ignite Apps (namely ignite relayer, ignite network and ignite connect).
+
+Note: Ignite account commands are not for managing your chain's keys and accounts. Use
+your chain's binary to manage accounts from "config.yml". For example, if your
+blockchain is called "mychain", use "mychaind keys" to manage keys for the
+chain.
+ + +**Options** + +``` + -h, --help help for account + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite account create](#ignite-account-create) - Create a new account +* [ignite account delete](#ignite-account-delete) - Delete an account by name +* [ignite account export](#ignite-account-export) - Export an account as a private key +* [ignite account import](#ignite-account-import) - Import an account by using a mnemonic or a private key +* [ignite account list](#ignite-account-list) - Show a list of all accounts +* [ignite account show](#ignite-account-show) - Show detailed information about a particular account + + +## ignite account create + +Create a new account + +``` +ignite account create [name] [flags] +``` + +**Options** + +``` + --coin-type uint32 coin type to use for the account (default 118) + -h, --help help for create +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account delete + +Delete an account by name + +``` +ignite account delete [name] [flags] +``` + +**Options** + +``` + -h, --help help for delete +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account export + 
+Export an account as a private key + +``` +ignite account export [name] [flags] +``` + +**Options** + +``` + -h, --help help for export + --non-interactive do not enter into interactive mode + --passphrase string passphrase to encrypt the exported key + --path string path to export private key. default: ./key_[name] +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account import + +Import an account by using a mnemonic or a private key + +``` +ignite account import [name] [flags] +``` + +**Options** + +``` + --coin-type uint32 coin type to use for the account (default 118) + -h, --help help for import + --non-interactive do not enter into interactive mode + --passphrase string passphrase to decrypt the imported key (ignored when secret is a mnemonic) + --secret string Your mnemonic or path to your private key (use interactive mode instead to securely pass your mnemonic) +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account list + +Show a list of all accounts + +``` +ignite account list [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + -h, --help help for list +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE 
ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite account show + +Show detailed information about a particular account + +``` +ignite account show [name] [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + -h, --help help for show +``` + +**Options inherited from parent commands** + +``` + --keyring-backend string keyring backend to store your account keys (default "test") + --keyring-dir string accounts keyring directory (default "/home/runner/.ignite/accounts") +``` + +**SEE ALSO** + +* [ignite account](#ignite-account) - Create, delete, and show Ignite accounts + + +## ignite app + +Create and manage Ignite Apps + +**Options** + +``` + -h, --help help for app +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite app describe](#ignite-app-describe) - Print information about installed apps +* [ignite app install](#ignite-app-install) - Install app +* [ignite app list](#ignite-app-list) - List installed apps +* [ignite app scaffold](#ignite-app-scaffold) - Scaffold a new Ignite App +* [ignite app uninstall](#ignite-app-uninstall) - Uninstall app +* [ignite app update](#ignite-app-update) - Update app + + +## ignite app describe + +Print information about installed apps + +**Synopsis** + +Print information about an installed Ignite App commands and hooks. + +``` +ignite app describe [path] [flags] +``` + +**Examples** + +``` +ignite app describe github.com/org/my-app/ +``` + +**Options** + +``` + -h, --help help for describe +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app install + +Install app + +**Synopsis** + +Installs an Ignite App. + +Respects key value pairs declared after the app path to be added to the generated configuration definition. + +``` +ignite app install [path] [key=value]... 
[flags] +``` + +**Examples** + +``` +ignite app install github.com/org/my-app/ foo=bar baz=qux +``` + +**Options** + +``` + -g, --global use global plugins configuration ($HOME/.ignite/apps/igniteapps.yml) + -h, --help help for install +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app list + +List installed apps + +**Synopsis** + +Prints status and information of all installed Ignite Apps. + +``` +ignite app list [flags] +``` + +**Options** + +``` + -h, --help help for list +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app scaffold + +Scaffold a new Ignite App + +**Synopsis** + +Scaffolds a new Ignite App in the current directory. + +A git repository will be created with the given module name, unless the current directory is already a git repository. + +``` +ignite app scaffold [name] [flags] +``` + +**Examples** + +``` +ignite app scaffold github.com/org/my-app/ +``` + +**Options** + +``` + -h, --help help for scaffold +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app uninstall + +Uninstall app + +**Synopsis** + +Uninstalls an Ignite App specified by path. + +``` +ignite app uninstall [path] [flags] +``` + +**Examples** + +``` +ignite app uninstall github.com/org/my-app/ +``` + +**Options** + +``` + -g, --global use global plugins configuration ($HOME/.ignite/apps/igniteapps.yml) + -h, --help help for uninstall +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite app update + +Update app + +**Synopsis** + +Updates an Ignite App specified by path. + +If no path is specified all declared apps are updated. 
+ +``` +ignite app update [path] [flags] +``` + +**Examples** + +``` +ignite app update github.com/org/my-app/ +``` + +**Options** + +``` + -h, --help help for update +``` + +**SEE ALSO** + +* [ignite app](#ignite-app) - Create and manage Ignite Apps + + +## ignite appregistry + +Browse the Ignite App Registry App + +``` +ignite appregistry [flags] +``` + +**Options** + +``` + -h, --help help for appregistry +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + + +## ignite chain + +Build, init and start a blockchain node + +**Synopsis** + +Commands in this namespace let you to build, initialize, and start your +blockchain node locally for development purposes. + +To run these commands you should be inside the project's directory so that +Ignite can find the source code. To ensure that you are, run "ls", you should +see the following files in the output: "go.mod", "x", "proto", "app", etc. + +By default the "build" command will identify the "main" package of the project, +install dependencies if necessary, set build flags, compile the project into a +binary and install the binary. The "build" command is useful if you just want +the compiled binary, for example, to initialize and start the chain manually. It +can also be used to release your chain's binaries automatically as part of +continuous integration workflow. + +The "init" command will build the chain's binary and use it to initialize a +local validator node. By default the validator node will be initialized in your +$HOME directory in a hidden directory that matches the name of your project. +This directory is called a data directory and contains a chain's genesis file +and a validator key. This command is useful if you want to quickly build and +initialize the data directory and use the chain's binary to manually start the +blockchain. The "init" command is meant only for development purposes, not +production. 
+ +The "serve" command builds, initializes, and starts your blockchain locally with +a single validator node for development purposes. "serve" also watches the +source code directory for file changes and intelligently +re-builds/initializes/starts the chain, essentially providing "code-reloading". +The "serve" command is meant only for development purposes, not production. + +To distinguish between production and development consider the following. + +In production, blockchains often run the same software on many validator nodes +that are run by different people and entities. To launch a blockchain in +production, the validator entities coordinate the launch process to start their +nodes simultaneously. + +During development, a blockchain can be started locally on a single validator +node. This convenient process lets you restart a chain quickly and iterate +faster. Starting a chain on a single node in development is similar to starting +a traditional web application on a local server. + +The "faucet" command lets you send tokens to an address from the "faucet" +account defined in "config.yml". Alternatively, you can use the chain's binary +to send token from any other account that exists on chain. + +The "simulate" command helps you start a simulation testing process for your +chain. 
+ + +**Options** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -h, --help help for chain + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite chain build](#ignite-chain-build) - Build a node binary +* [ignite chain debug](#ignite-chain-debug) - Launch a debugger for a blockchain app +* [ignite chain faucet](#ignite-chain-faucet) - Send coins to an account +* [ignite chain init](#ignite-chain-init) - Initialize your chain +* [ignite chain lint](#ignite-chain-lint) - Lint codebase using golangci-lint +* [ignite chain modules](#ignite-chain-modules) - Manage modules +* [ignite chain serve](#ignite-chain-serve) - Start a blockchain node in development +* [ignite chain simulate](#ignite-chain-simulate) - Run simulation testing for the blockchain + + +## ignite chain build + +Build a node binary + +**Synopsis** + + +The build command compiles the source code of the project into a binary and +installs the binary in the $(go env GOPATH)/bin directory. + +You can customize the output directory for the binary using a flag: + + ignite chain build --output dist + +To compile the binary Ignite first compiles protocol buffer (proto) files into +Go source code. Proto files contain required type and services definitions. If +you're using another program to compile proto files, you can use a flag to tell +Ignite to skip the proto compilation step: + + ignite chain build --skip-proto + +Afterwards, Ignite install dependencies specified in the go.mod file. By default +Ignite doesn't check that dependencies of the main module stored in the module +cache have not been modified since they were downloaded. To enforce dependency +checking (essentially, running "go mod verify") use a flag: + + ignite chain build --check-dependencies + +Next, Ignite identifies the "main" package of the project. 
By default the "main" +package is located in "cmd/{app}d" directory, where "{app}" is the name of the +scaffolded project and "d" stands for daemon. If your project contains more +than one "main" package, specify the path to the one that Ignite should compile +in config.yml: + + build: + main: custom/path/to/main + +By default the binary name will match the top-level module name (specified in +go.mod) with a suffix "d". This can be customized in config.yml: + + build: + binary: mychaind + +You can also specify custom linker flags: + + build: + ldflags: + - "-X main.Version=development" + - "-X main.Date=01/05/2022T19:54" + +To build binaries for a release, use the --release flag. The binaries for one or +more specified release targets are built in a "release/" directory in the +project's source directory. Specify the release targets with GOOS:GOARCH build +tags. If the optional --release.targets is not specified, a binary is created +for your current environment. + + ignite chain build --release -t linux:amd64 -t darwin:amd64 -t darwin:arm64 + + +``` +ignite chain build [flags] +``` + +**Options** + +``` + --build.tags strings parameters to build the chain binary + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --debug build a debug binary + -h, --help help for build + -o, --output string binary output path + -p, --path string path of the app (default ".") + --release build for a release + --release.prefix string tarball prefix for each release target. Available only with --release flag + -t, --release.targets strings release targets. 
Available only with --release flag + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain debug + +Launch a debugger for a blockchain app + +**Synopsis** + +The debug command starts a debug server and launches a debugger. + +Ignite uses the Delve debugger by default. Delve enables you to interact with +your program by controlling the execution of the process, evaluating variables, +and providing information of thread / goroutine state, CPU register state and +more. + +A debug server can optionally be started in cases where default terminal client +is not desirable. When the server starts it first runs the blockchain app, +attaches to it and finally waits for a client connection. It accepts both +JSON-RPC or DAP client connections. + +To start a debug server use the following flag: + + ignite chain debug --server + +To start a debug server with a custom address use the following flags: + + ignite chain debug --server --server-address 127.0.0.1:30500 + +The debug server stops automatically when the client connection is closed. 
+ + +``` +ignite chain debug [flags] +``` + +**Options** + +``` + -h, --help help for debug + -p, --path string path of the app (default ".") + --server start a debug server + --server-address string debug server address (default "127.0.0.1:30500") +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain faucet + +Send coins to an account + +``` +ignite chain faucet [address] [coin<,...>] [flags] +``` + +**Options** + +``` + -h, --help help for faucet + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain init + +Initialize your chain + +**Synopsis** + +The init command compiles and installs the binary (like "ignite chain build") +and uses that binary to initialize the blockchain's data directory for one +validator. To learn how the build process works, refer to "ignite chain build +--help". + +By default, the data directory will be initialized in $HOME/.mychain, where +"mychain" is the name of the project. To set a custom data directory use the +--home flag or set the value in config.yml: + + validators: + - name: alice + bonded: '100000000stake' + home: "~/.customdir" + +The data directory contains three files in the "config" directory: app.toml, +config.toml, client.toml. These files let you customize the behavior of your +blockchain node and the client executable. 
When a chain is re-initialized the +data directory can be reset. To make some values in these files persistent, set +them in config.yml: + + validators: + - name: alice + bonded: '100000000stake' + app: + minimum-gas-prices: "0.025stake" + config: + consensus: + timeout_commit: "5s" + timeout_propose: "5s" + client: + output: "json" + +The configuration above changes the minimum gas price of the validator (by +default the gas price is set to 0 to allow "free" transactions), sets the block +time to 5s, and changes the output format to JSON. To see what kind of values +this configuration accepts see the generated TOML files in the data directory. + +As part of the initialization process Ignite creates on-chain accounts with +token balances. By default, config.yml has two accounts in the top-level +"accounts" property. You can add more accounts and change their token balances. +Refer to config.yml guide to see which values you can set. + +One of these accounts is a validator account and the amount of self-delegated +tokens can be set in the top-level "validator" property. + +One of the most important components of an initialized chain is the genesis +file, the 0th block of the chain. The genesis file is stored in the data +directory "config" subdirectory and contains the initial state of the chain, +including consensus and module parameters. You can customize the values of the +genesis in config.yml: + + genesis: + app_state: + staking: + params: + bond_denom: "foo" + +The example above changes the staking token to "foo". If you change the staking +denom, make sure the validator account has the right tokens. + +The init command is meant to be used ONLY FOR DEVELOPMENT PURPOSES. Under the +hood it runs commands like "appd init", "appd add-genesis-account", "appd +gentx", and "appd collect-gentx". For production, you may want to run these +commands manually to ensure a production-level node initialization. 
+ + +``` +ignite chain init [flags] +``` + +**Options** + +``` + --build.tags strings parameters to build the chain binary + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --debug build a debug binary + -h, --help help for init + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain lint + +Lint codebase using golangci-lint + +**Synopsis** + +The lint command runs the golangci-lint tool to lint the codebase. + +``` +ignite chain lint [flags] +``` + +**Options** + +``` + -h, --help help for lint +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain modules + +Manage modules + +**Synopsis** + +The modules command allows you to manage modules in the codebase. 
+ +**Options** + +``` + -h, --help help for modules +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node +* [ignite chain modules list](#ignite-chain-modules-list) - List all Cosmos SDK modules in the app + + +## ignite chain modules list + +List all Cosmos SDK modules in the app + +**Synopsis** + +The list command lists all modules in the app. + +``` +ignite chain modules list [flags] +``` + +**Options** + +``` + -h, --help help for list +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain modules](#ignite-chain-modules) - Manage modules + + +## ignite chain serve + +Start a blockchain node in development + +**Synopsis** + +The serve command compiles and installs the binary (like "ignite chain build"), +uses that binary to initialize the blockchain's data directory for one validator +(like "ignite chain init"), and starts the node locally for development purposes +with automatic code reloading. + +Automatic code reloading means Ignite starts watching the project directory. +Whenever a file change is detected, Ignite automatically rebuilds, reinitializes +and restarts the node. + +Whenever possible Ignite will try to keep the current state of the chain by +exporting and importing the genesis file. 
+ +To force Ignite to start from a clean slate even if a genesis file exists, use +the following flag: + + ignite chain serve --reset-once + +To force Ignite to reset the state every time the source code is modified, use +the following flag: + + ignite chain serve --force-reset + +With Ignite it's possible to start more than one blockchain from the same source +code using different config files. This is handy if you're building +inter-blockchain functionality and, for example, want to try sending packets +from one blockchain to another. To start a node using a specific config file: + + ignite chain serve --config mars.yml + +The serve command is meant to be used ONLY FOR DEVELOPMENT PURPOSES. Under the +hood, it runs "appd start", where "appd" is the name of your chain's binary. For +production, you may want to run "appd start" manually. + + +``` +ignite chain serve [flags] +``` + +**Options** + +``` + --build.tags strings parameters to build the chain binary + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + -f, --force-reset force reset of the app state on start and every source change + --generate-clients generate code for the configured clients on reset or source code change + -h, --help help for serve + --home string directory where the blockchain node is initialized + -o, --output-file string output file logging the chain output (no UI, no stdin, listens for SIGTERM, implies --yes) (default: stdout) + -p, --path string path of the app (default ".") + --quit-on-fail quit program if the app fails to start + -r, --reset-once reset the app state once on init + --skip-build skip initial build of the app (uses local binary) + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive 
yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite chain simulate + +Run simulation testing for the blockchain + +**Synopsis** + +Run simulation testing for the blockchain. It sends many randomized-input messages of each module to a simulated node. + +``` +ignite chain simulate [flags] +``` + +**Options** + +``` + --blockSize int operations per block (default 30) + --exportParamsHeight int height to which export the randomly generated params + --exportParamsPath string custom file path to save the exported params JSON + --exportStatePath string custom file path to save the exported app state JSON + --exportStatsPath string custom file path to save the exported simulation statistics JSON + --genesis string custom simulation genesis file; cannot be used with params file + --genesisTime int override genesis UNIX time instead of using a random UNIX time + -h, --help help for simulate + --initialBlockHeight int initial block to start the simulation (default 1) + --lean lean simulation log output + --numBlocks int number of new blocks to simulate from the initial block height (default 200) + --params string custom simulation params file which overrides any random params; cannot be used with genesis + --seed int simulation random seed (default 42) + --simName string name of the simulation to run (default "TestFullAppSimulation") +``` + +**Options inherited from parent commands** + +``` + -c, --config string path to Ignite config file (default: ./config.yml) + -y, --yes answers interactive yes/no questions with yes +``` + +**SEE ALSO** + +* [ignite chain](#ignite-chain) - Build, init and start a blockchain node + + +## ignite completion + +Generates shell completion script. 
+ +``` +ignite completion [bash|zsh|fish|powershell] [flags] +``` + +**Options** + +``` + -h, --help help for completion +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + + +## ignite docs + +Show Ignite CLI docs + +``` +ignite docs [flags] +``` + +**Options** + +``` + -h, --help help for docs +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + + +## ignite generate + +Generate clients, API docs from source code + +**Synopsis** + +Generate clients, API docs from source code. + +Such as compiling protocol buffer files into Go or implement particular +functionality, for example, generating an OpenAPI spec. + +Produced source code can be regenerated by running a command again and is not +meant to be edited by hand. + + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -h, --help help for generate + -p, --path string path of the app (default ".") + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite generate composables](#ignite-generate-composables) - TypeScript frontend client and Vue 3 composables +* [ignite generate openapi](#ignite-generate-openapi) - OpenAPI spec for your chain +* [ignite generate proto-go](#ignite-generate-proto-go) - Compile protocol buffer files to Go source code required by Cosmos SDK +* [ignite generate ts-client](#ignite-generate-ts-client) - TypeScript frontend client + + +## ignite generate composables + +TypeScript frontend client and Vue 3 composables + +``` +ignite generate composables [flags] +``` + +**Options** + +``` + -h, --help help for composables + -o, --output string Vue 3 composables output path + -y, --yes answers interactive 
yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate openapi + +OpenAPI spec for your chain + +``` +ignite generate openapi [flags] +``` + +**Options** + +``` + --exclude strings List of proto files or directories to exclude from the OpenAPI spec generation + -h, --help help for openapi + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate proto-go + +Compile protocol buffer files to Go source code required by Cosmos SDK + +``` +ignite generate proto-go [flags] +``` + +**Options** + +``` + -h, --help help for proto-go + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite generate ts-client + +TypeScript frontend client + +**Synopsis** + +Generate a framework agnostic TypeScript client for your blockchain project. + +By default the TypeScript client is generated in the "ts-client/" directory. 
You +can customize the output directory in config.yml: + + client: + typescript: + path: new-path + +Output can also be customized by using a flag: + + ignite generate ts-client --output new-path + +TypeScript client code can be automatically regenerated on reset or source code +changes when the blockchain is started with a flag: + + ignite chain serve --generate-clients + + +``` +ignite generate ts-client [flags] +``` + +**Options** + +``` + --disable-cache disable build cache + -h, --help help for ts-client + -o, --output string TypeScript client output path + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + --clear-cache clear the build cache (advanced) + --enable-proto-vendor enable proto package vendor for missing Buf dependencies + -p, --path string path of the app (default ".") + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite generate](#ignite-generate) - Generate clients, API docs from source code + + +## ignite relayer + +Connect blockchains with an IBC relayer + +``` +ignite relayer [flags] +``` + +**Options** + +``` + -h, --help help for relayer +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + + +## ignite scaffold + +Create a new blockchain, module, message, query, and more + +**Synopsis** + +Scaffolding is a quick way to generate code for major pieces of your +application. + +For details on each scaffolding target (chain, module, message, etc.) run the +corresponding command with a "--help" flag, for example, "ignite scaffold chain +--help". + +The Ignite team strongly recommends committing the code to a version control +system before running scaffolding commands. This will make it easier to see the +changes to the source code as well as undo the command if you've decided to roll +back the changes. 


The blockchain you create with the chain scaffolding command uses the modular
Cosmos SDK framework and imports many standard modules for functionality like
proof of stake, token transfer, inter-blockchain connectivity, governance, and
more. Custom functionality is implemented in modules located by convention in
the "x/" directory. By default, your blockchain comes with an empty custom
module. Use the module scaffolding command to create an additional module.

An empty custom module doesn't do much, it's basically a container for logic
that is responsible for processing transactions and changing the application
state. Cosmos SDK blockchains work by processing user-submitted signed
transactions, which contain one or more messages. A message contains data that
describes a state transition. A module can be responsible for handling any
number of messages.

A message scaffolding command will generate the code for handling a new type of
Cosmos SDK message. Message fields describe the state transition that the
message is intended to produce if processed without errors.

Scaffolding messages is useful to create individual "actions" that your module
can perform. Sometimes, however, you want your blockchain to have the
functionality to create, read, update and delete (CRUD) instances of a
particular type. Depending on how you want to store the data there are three
commands that scaffold CRUD functionality for a type: list, map, and single.
These commands create four messages (one for each CRUD action), and the logic to
add, delete, and fetch the data from the store. If you want to scaffold only the
logic, for example, you've decided to scaffold messages separately, you can do
that as well with the "--no-message" flag.

Reading data from a blockchain happens with the help of queries. Similar to how
you can scaffold messages to write data, you can scaffold queries to read the
data back from your blockchain application. 
+ +You can also scaffold a type, which just produces a new protocol buffer file +with a proto message description. Note that proto messages produce (and +correspond with) Go types whereas Cosmos SDK messages correspond to proto "rpc" +in the "Msg" service. + +If you're building an application with custom IBC logic, you might need to +scaffold IBC packets. An IBC packet represents the data sent from one blockchain +to another. You can only scaffold IBC packets in IBC-enabled modules scaffolded +with an "--ibc" flag. Note that the default module is not IBC-enabled. + + +**Options** + +``` + -h, --help help for scaffold + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite scaffold chain](#ignite-scaffold-chain) - New Cosmos SDK blockchain +* [ignite scaffold chain-registry](#ignite-scaffold-chain-registry) - Configs for the chain registry +* [ignite scaffold configs](#ignite-scaffold-configs) - Configs for a custom Cosmos SDK module +* [ignite scaffold list](#ignite-scaffold-list) - CRUD for data stored as an array +* [ignite scaffold map](#ignite-scaffold-map) - CRUD for data stored as key-value pairs +* [ignite scaffold message](#ignite-scaffold-message) - Message to perform state transition on the blockchain +* [ignite scaffold module](#ignite-scaffold-module) - Custom Cosmos SDK module +* [ignite scaffold packet](#ignite-scaffold-packet) - Message for sending an IBC packet +* [ignite scaffold params](#ignite-scaffold-params) - Parameters for a custom Cosmos SDK module +* [ignite scaffold query](#ignite-scaffold-query) - Query for fetching data from a blockchain +* [ignite scaffold single](#ignite-scaffold-single) - CRUD for data stored in a single location +* [ignite scaffold type](#ignite-scaffold-type) - Type definition +* [ignite scaffold type-list](#ignite-scaffold-type-list) - List scaffold types +* [ignite scaffold 
vue](#ignite-scaffold-vue) - Vue 3 web app template + + +## ignite scaffold chain + +New Cosmos SDK blockchain + +**Synopsis** + +Create a new application-specific Cosmos SDK blockchain. + +For example, the following command will create a blockchain called "hello" in +the "hello/" directory: + + ignite scaffold chain hello + +A project name can be a simple name or a URL. The name will be used as the Go +module path for the project. Examples of project names: + + ignite scaffold chain foo + ignite scaffold chain foo/bar + ignite scaffold chain example.org/foo + ignite scaffold chain github.com/username/foo + +A new directory with source code files will be created in the current directory. +To use a different path use the "--path" flag. + +Most of the logic of your blockchain is written in custom modules. Each module +effectively encapsulates an independent piece of functionality. Following the +Cosmos SDK convention, custom modules are stored inside the "x/" directory. By +default, Ignite creates a module with a name that matches the name of the +project. To create a blockchain without a default module use the "--no-module" +flag. Additional modules can be added after a project is created with "ignite +scaffold module" command. + +Account addresses on Cosmos SDK-based blockchains have string prefixes. For +example, the Cosmos Hub blockchain uses the default "cosmos" prefix, so that +addresses look like this: "cosmos12fjzdtqfrrve7zyg9sv8j25azw2ua6tvu07ypf". To +use a custom address prefix use the "--address-prefix" flag. For example: + + ignite scaffold chain foo --address-prefix bar + +By default when compiling a blockchain's source code Ignite creates a cache to +speed up the build process. To clear the cache when building a blockchain use +the "--clear-cache" flag. It is very unlikely you will ever need to use this +flag. + +The blockchain is using the Cosmos SDK modular blockchain framework. 
Learn more +about Cosmos SDK on https://docs.cosmos.network + + +``` +ignite scaffold chain [name] [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + --clear-cache clear the build cache (advanced) + --coin-type uint32 coin type to use for the account (default 118) + --default-denom string default staking denom (default "stake") + -h, --help help for chain + --minimal create a minimal blockchain (with the minimum required Cosmos SDK modules) + --module-configs strings add module configs + --no-module create a project without a default module + --params strings add default module parameters + -p, --path string create a project in a specific path + --proto-dir string chain proto directory (default "proto") + --skip-git skip Git repository initialization + --skip-proto skip proto generation +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold chain-registry + +Configs for the chain registry + +**Synopsis** + +Scaffold the chain registry chain.json and assets.json files. + +The chain registry is a GitHub repo, hosted at https://github.com/cosmos/chain-registry, that +contains the chain.json and assets.json files of most of chains in the Cosmos ecosystem. +It is good practices, when creating a new chain, and about to launch a testnet or mainnet, to +publish the chain's metadata in the chain registry. 
+ +Read more about the chain.json at https://github.com/cosmos/chain-registry?tab=readme-ov-file#chainjson +Read more about the assets.json at https://github.com/cosmos/chain-registry?tab=readme-ov-file#assetlists + +``` +ignite scaffold chain-registry [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for chain-registry + -p, --path string path of the app (default ".") + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold configs + +Configs for a custom Cosmos SDK module + +**Synopsis** + +Scaffold a new config for a Cosmos SDK module. + +A Cosmos SDK module can have configurations. An example of a config is "address prefix" of the +"auth" module. A config can be scaffolded into a module using the "--module-configs" into +the scaffold module command or using the "scaffold configs" command. By default +configs are of type "string", but you can specify a type for each config. For example: + + ignite scaffold configs foo baz:uint bar:bool + +Refer to Cosmos SDK documentation to learn more about modules, dependencies and +configs. + + +``` +ignite scaffold configs [configs]... 
[flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for configs + --module string module to add the query into (default: app's main module) + -p, --path string path of the app (default ".") + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold list + +CRUD for data stored as an array + +**Synopsis** + +The "list" scaffolding command is used to generate files that implement the +logic for storing and interacting with data stored as a list in the blockchain +state. + +The command accepts a NAME argument that will be used as the name of a new type +of data. It also accepts a list of FIELDs that describe the type. + +The interaction with the data follows the create, read, updated, and delete +(CRUD) pattern. For each type three Cosmos SDK messages are defined for writing +data to the blockchain: MsgCreate{Name}, MsgUpdate{Name}, MsgDelete{Name}. For +reading data two queries are defined: {Name} and {Name}All. The type, messages, +and queries are defined in the "proto/" directory as protocol buffer messages. +Messages and queries are mounted in the "Msg" and "Query" services respectively. + +When messages are handled, the appropriate keeper methods are called. By +convention, the methods are defined in +"x/{moduleName}/keeper/msg_server_{name}.go". Helpful methods for getting, +setting, removing, and appending are defined in the same "keeper" package in +"{name}.go". + +The "list" command essentially allows you to define a new type of data and +provides the logic to create, read, update, and delete instances of the type. 
+For example, let's review a command that generates the code to handle a list of +posts and each post has "title" and "body" fields: + + ignite scaffold list post title body + +This provides you with a "Post" type, MsgCreatePost, MsgUpdatePost, +MsgDeletePost and two queries: Post and PostAll. The compiled CLI, let's say the +binary is "blogd" and the module is "blog", has commands to query the chain (see +"blogd q blog") and broadcast transactions with the messages above (see "blogd +tx blog"). + +The code generated with the list command is meant to be edited and tailored to +your application needs. Consider the code to be a "skeleton" for the actual +business logic you will implement next. + +By default, all fields are assumed to be strings. If you want a field of a +different type, you can specify it after a colon ":". The following types are +supported: string, bool, int, uint, coin, array.string, array.int, array.uint, +array.coin. An example of using field types: + + ignite scaffold list pool amount:coin tags:array.string height:int + +For detailed type information use ignite scaffold type --help + +"Index" indicates whether the type can be used as an index in +"ignite scaffold map". + +Ignite also supports custom types: + + ignite scaffold list product-details name desc + ignite scaffold list product price:coin details:ProductDetails + +In the example above the "ProductDetails" type was defined first, and then used +as a custom type for the "details" field. + +Your chain will accept custom types in JSON-notation: + + exampled tx example create-product 100coin '{"name": "x", "desc": "y"}' --from alice + +By default the code will be scaffolded in the module that matches your project's +name. If you have several modules in your project, you might want to specify a +different module: + + ignite scaffold list post title body --module blog + +By default, each message comes with a "creator" field that represents the +address of the transaction signer. 
You can customize the name of this field with +a flag: + + ignite scaffold list post title body --signer author + +It's possible to scaffold just the getter/setter logic without the CRUD +messages. This is useful when you want the methods to handle a type, but would +like to scaffold messages manually. Use a flag to skip message scaffolding: + + ignite scaffold list post title body --no-message + +The "creator" field is not generated if a list is scaffolded with the +"--no-message" flag. + + +``` +ignite scaffold list NAME [field]... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for list + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold map + +CRUD for data stored as key-value pairs + +**Synopsis** + +The "map" scaffolding command is used to generate files that implement the logic +for storing and interacting with data stored as key-value pairs (or a +dictionary) in the blockchain state. + +The "map" command is very similar to "ignite scaffold list" with the main +difference in how values are indexed. With "list" values are indexed by an +incrementing integer, whereas "map" values are indexed by a user-provided value +(or multiple values). + +Let's use the same blog post example: + + ignite scaffold map post title body:string + +This command scaffolds a "Post" type and CRUD functionality to create, read, +updated, and delete posts. 
However, when creating a new post with your chain's +binary (or by submitting a transaction through the chain's API) you will be +required to provide an "index": + + blogd tx blog create-post [index] [title] [body] + blogd tx blog create-post hello "My first post" "This is the body" + +This command will create a post and store it in the blockchain's state under the +"hello" index. You will be able to fetch back the value of the post by querying +for the "hello" key. + + blogd q blog show-post hello + +By default, the index is called "index", to customize the index, use the "--index" flag. + +Since the behavior of "list" and "map" scaffolding is very similar, you can use +the "--no-message", "--module", "--signer" flags as well as the colon syntax for +custom types. + +For detailed type information use ignite scaffold type --help + + +``` +ignite scaffold map NAME [field]... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for map + --index string field that index the value (default "index") + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold message + +Message to perform state transition on the blockchain + +**Synopsis** + +Message scaffolding is useful for quickly adding functionality to your +blockchain to handle specific Cosmos SDK messages. + +Messages are objects whose end goal is to trigger state transitions on the +blockchain. 
A message is a container for fields of data that affect how the
blockchain's state will change. You can think of messages as "actions" that a
user can perform.

For example, the bank module has a "Send" message for token transfers between
accounts. The send message has three fields: from address (sender), to address
(recipient), and a token amount. When this message is successfully processed,
the token amount will be deducted from the sender's account and added to the
recipient's account.

Ignite's message scaffolding lets you create new types of messages and add them
to your chain. For example:

    ignite scaffold message add-pool amount:coins denom active:bool --module dex

The command above will create a new message MsgAddPool with three fields: amount
(in tokens), denom (a string), and active (a boolean). The message will be added
to the "dex" module.

For detailed type information use ignite scaffold type --help

By default, the message is defined as a proto message in the
"proto/{app}/{module}/tx.proto" and registered in the "Msg" service. A CLI command to
create and broadcast a transaction with MsgAddPool is created in the module's
"cli" package. Additionally, Ignite scaffolds a message constructor and the code
to satisfy the sdk.Msg interface and register the message in the module.

Most importantly in the "keeper" package Ignite scaffolds an "AddPool" function.
Inside this function, you can implement message handling logic.

When successfully processed a message can return data. Use the "--response" flag to
specify response fields and their types. For example:

    ignite scaffold message create-post title body --response id:int,title

The command above will scaffold MsgCreatePost which returns both an ID (an
integer) and a title (a string).

Message scaffolding follows the same rules as "ignite scaffold list/map/single" and
supports fields with standard and custom types. See "ignite scaffold list --help"
for details. 
+ + +``` +ignite scaffold message [name] [field1:type1] [field2:type2] ... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -d, --desc string description of the command + -h, --help help for message + --module string module to add the message into. Default: app's main module + --no-simulation disable CRUD simulation scaffolding + -p, --path string path of the app (default ".") + -r, --response strings response fields + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold module + +Custom Cosmos SDK module + +**Synopsis** + +Scaffold a new Cosmos SDK module. + +Cosmos SDK is a modular framework and each independent piece of functionality is +implemented in a separate module. By default your blockchain imports a set of +standard Cosmos SDK modules. To implement custom functionality of your +blockchain, scaffold a module and implement the logic of your application. + +This command does the following: + +* Creates a directory with module's protocol buffer files in "proto/" +* Creates a directory with module's boilerplate Go code in "x/" +* Imports the newly created module by modifying "app/app.go" + +This command will proceed with module scaffolding even if "app/app.go" doesn't +have the required default placeholders. If the placeholders are missing, you +will need to modify "app/app.go" manually to import the module. If you want the +command to fail if it can't import the module, use the "--require-registration" +flag. + +To scaffold an IBC-enabled module use the "--ibc" flag. 
An IBC-enabled module is
+like a regular module with the addition of IBC-specific logic and placeholders
+to scaffold IBC packets with "ignite scaffold packet".
+
+A module can depend on one or more other modules and import their keeper
+methods. To scaffold a module with a dependency use the "--dep" flag.
+
+For example, your new custom module "foo" might have functionality that requires
+sending tokens between accounts. The method for sending tokens is defined in
+the "bank" module's keeper. You can scaffold a "foo" module with the dependency
+on "bank" with the following command:
+
+	ignite scaffold module foo --dep bank
+
+You can then define which methods you want to import from the "bank" keeper in
+"expected_keepers.go".
+
+You can also scaffold a module with a list of dependencies that can include both
+standard and custom modules (provided they exist):
+
+	ignite scaffold module bar --dep foo,mint,account,FeeGrant
+
+Note: the "--dep" flag doesn't install third-party modules into your
+application, it just generates extra code that specifies which existing modules
+your new custom module depends on.
+
+A Cosmos SDK module can have parameters (or "params"). Params are values that
+can be set at the genesis of the blockchain and can be modified while the
+blockchain is running. An example of a param is "Inflation rate change" of the
+"mint" module. A module can be scaffolded with params using the "--params" flag
+that accepts a list of param names. By default params are of type "string", but
+you can specify a type for each param. For example:
+
+	ignite scaffold module foo --params baz:uint,bar:bool
+
+Refer to Cosmos SDK documentation to learn more about modules, dependencies and
+params.
+ + +``` +ignite scaffold module [name] [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + --dep strings add a dependency on another module + -h, --help help for module + --ibc add IBC functionality + --module-configs strings add module configs + --ordering string channel ordering of the IBC module [none|ordered|unordered] (default "none") + --params strings add module parameters + -p, --path string path of the app (default ".") + --require-registration fail if module can't be registered + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold packet + +Message for sending an IBC packet + +**Synopsis** + +Scaffold an IBC packet in a specific IBC-enabled Cosmos SDK module + +``` +ignite scaffold packet [packetName] [field1] [field2] ... --module [moduleName] [flags] +``` + +**Options** + +``` + --ack strings custom acknowledgment type (field1,field2,...) + --clear-cache clear the build cache (advanced) + -h, --help help for packet + --module string IBC Module to add the packet into + --no-message disable send message scaffolding + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold params + +Parameters for a custom Cosmos SDK module + +**Synopsis** + +Scaffold a new parameter for a Cosmos SDK module. + +A Cosmos SDK module can have parameters (or "params"). 
Params are values that +can be set at the genesis of the blockchain and can be modified while the +blockchain is running. An example of a param is "Inflation rate change" of the +"mint" module. A params can be scaffolded into a module using the "--params" into +the scaffold module command or using the "scaffold params" command. By default +params are of type "string", but you can specify a type for each param. For example: + + ignite scaffold params foo baz:uint bar:bool + +Refer to Cosmos SDK documentation to learn more about modules, dependencies and +params. + + +``` +ignite scaffold params [param]... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for params + --module string module to add the query into. Default: app's main module + -p, --path string path of the app (default ".") + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold query + +Query for fetching data from a blockchain + +**Synopsis** + +Query for fetching data from a blockchain. + +For detailed type information use ignite scaffold type --help. + +``` +ignite scaffold query [name] [field1:type1] [field2:type2] ... [flags] +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -d, --desc string description of the CLI to broadcast a tx with the message + -h, --help help for query + --module string module to add the query into. 
Default: app's main module + --paginated define if the request can be paginated + -p, --path string path of the app (default ".") + -r, --response strings response fields + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold single + +CRUD for data stored in a single location + +**Synopsis** + +CRUD for data stored in a single location. + +For detailed type information use ignite scaffold type --help. + +``` +ignite scaffold single NAME [field:type]... [flags] +``` + +**Examples** + +``` + ignite scaffold single todo-single title:string done:bool +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for single + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold type + +Type definition + +**Synopsis** + +Type information + +Types Usage +address use '<FIELD_NAME>:address' to scaffold string types (eg: cosmos1abcdefghijklmnopqrstuvwxyz0123456). +array.coin use '<FIELD_NAME>:array.coin' to scaffold sdk.Coins types (eg: 20stake). Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations. +array.dec.coin use '<FIELD_NAME>:array.dec.coin' to scaffold sdk.DecCoins types (eg: 20000002stake). 
Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations. +array.int use '<FIELD_NAME>:array.int' to scaffold []int64 types (eg: 5,4,3,2,1). +array.string use '<FIELD_NAME>:array.string' to scaffold []string types (eg: abc,xyz). +array.uint use '<FIELD_NAME>:array.uint' to scaffold []uint64 types (eg: 13,26,31,40). +bool use '<FIELD_NAME>:bool' to scaffold bool types (eg: true). +bytes use '<FIELD_NAME>:bytes' to scaffold []byte types (eg: 3,2,3,5). +coin use '<FIELD_NAME>:coin' to scaffold sdk.Coin types (eg: 10token). +coins use '<FIELD_NAME>:array.coin' to scaffold sdk.Coins types (eg: 20stake). Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations. +custom use the custom type to scaffold already created chain types. +dec.coin use '<FIELD_NAME>:dec.coin' to scaffold sdk.DecCoin types (eg: 100001token). +dec.coins use '<FIELD_NAME>:array.dec.coin' to scaffold sdk.DecCoins types (eg: 20000002stake). Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations. +int use '<FIELD_NAME>:int' to scaffold int64 types (eg: 111). +int64 use '<FIELD_NAME>:int' to scaffold int64 types (eg: 111). +ints use '<FIELD_NAME>:array.int' to scaffold []int64 types (eg: 5,4,3,2,1). +string use '<FIELD_NAME>:string' to scaffold string types (eg: xyz). +strings use '<FIELD_NAME>:array.string' to scaffold []string types (eg: abc,xyz). +uint use '<FIELD_NAME>:uint' to scaffold uint64 types (eg: 111). +uint64 use '<FIELD_NAME>:uint' to scaffold uint64 types (eg: 111). +uints use '<FIELD_NAME>:array.uint' to scaffold []uint64 types (eg: 13,26,31,40). + +Field Usage: + - fieldName + - fieldName:fieldType + +If no :fieldType, default (string) is used + + + +``` +ignite scaffold type NAME [field:type] ... 
[flags] +``` + +**Examples** + +``` + ignite scaffold type todo-item priority:int desc:string tags:array.string done:bool +``` + +**Options** + +``` + --clear-cache clear the build cache (advanced) + -h, --help help for type + --module string specify which module to generate code in + --no-message skip generating message handling logic + --no-simulation skip simulation logic + -p, --path string path of the app (default ".") + --signer string label for the message signer (default: creator) + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold type-list + +List scaffold types + +**Synopsis** + +List all available scaffold types + +``` +ignite scaffold type-list [flags] +``` + +**Options** + +``` + -h, --help help for type-list +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite scaffold vue + +Vue 3 web app template + +``` +ignite scaffold vue [flags] +``` + +**Options** + +``` + -h, --help help for vue + -y, --yes answers interactive yes/no questions with yes +``` + +**Options inherited from parent commands** + +``` + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite scaffold](#ignite-scaffold) - Create a new blockchain, module, message, query, and more + + +## ignite testnet + +Simulate and manage test networks + +**Synopsis** + +Comprehensive toolset for managing and simulating blockchain test networks. It allows users to either run a test network in place using mainnet data or set up a multi-node environment for more complex testing scenarios. 
Additionally, it includes a subcommand for simulating the chain, which is useful for fuzz testing and other testing-related tasks. + +**Options** + +``` + -h, --help help for testnet +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain +* [ignite testnet in-place](#ignite-testnet-in-place) - Create and start a testnet from current local net state +* [ignite testnet multi-node](#ignite-testnet-multi-node) - Initialize and provide multi-node on/off functionality +* [ignite testnet simulate](#ignite-testnet-simulate) - Run simulation testing for the blockchain + + +## ignite testnet in-place + +Create and start a testnet from current local net state + +**Synopsis** + +Testnet in-place command is used to create and start a testnet from current local net state(including mainnet). +After using this command in the repo containing the config.yml file, the network will start. +We can create a testnet from the local network state and mint additional coins for the desired accounts from the config.yml file. + +``` +ignite testnet in-place [flags] +``` + +**Options** + +``` + --address-prefix string account address prefix (default "cosmos") + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + --coin-type uint32 coin type to use for the account (default 118) + -h, --help help for in-place + --home string directory where the blockchain node is initialized + -p, --path string path of the app (default ".") + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite testnet](#ignite-testnet) - Simulate and manage test networks + + +## ignite testnet multi-node + +Initialize and provide multi-node on/off functionality + +**Synopsis** + +Initialize the test network with the number of nodes and bonded from the config.yml file:: + ... 
+ validators: + - name: alice + bonded: 100000000stake + - name: validator1 + bonded: 100000000stake + - name: validator2 + bonded: 200000000stake + - name: validator3 + bonded: 300000000stake + + + The "multi-node" command allows developers to easily set up, initialize, and manage multiple nodes for a + testnet environment. This command provides full flexibility in enabling or disabling each node as desired, + making it a powerful tool for simulating a multi-node blockchain network during development. + + Usage: + ignite testnet multi-node [flags] + + + +``` +ignite testnet multi-node [flags] +``` + +**Options** + +``` + --check-dependencies verify that cached dependencies have not been modified since they were downloaded + --clear-cache clear the build cache (advanced) + -h, --help help for multi-node + --home string directory where the blockchain node is initialized + --node-dir-prefix string prefix of dir node (default "validator") + -p, --path string path of the app (default ".") + -r, --reset-once reset the app state once on init + --skip-proto skip file generation from proto + -v, --verbose verbose output +``` + +**SEE ALSO** + +* [ignite testnet](#ignite-testnet) - Simulate and manage test networks + + +## ignite testnet simulate + +Run simulation testing for the blockchain + +**Synopsis** + +Run simulation testing for the blockchain. It sends many randomized-input messages of each module to a simulated node. 
+ +``` +ignite testnet simulate [flags] +``` + +**Options** + +``` + --blockSize int operations per block (default 30) + --exportParamsHeight int height to which export the randomly generated params + --exportParamsPath string custom file path to save the exported params JSON + --exportStatePath string custom file path to save the exported app state JSON + --exportStatsPath string custom file path to save the exported simulation statistics JSON + --genesis string custom simulation genesis file; cannot be used with params file + --genesisTime int override genesis UNIX time instead of using a random UNIX time + -h, --help help for simulate + --initialBlockHeight int initial block to start the simulation (default 1) + --lean lean simulation log output + --numBlocks int number of new blocks to simulate from the initial block height (default 200) + --params string custom simulation params file which overrides any random params; cannot be used with genesis + --seed int simulation random seed (default 42) + --simName string name of the simulation to run (default "TestFullAppSimulation") +``` + +**SEE ALSO** + +* [ignite testnet](#ignite-testnet) - Simulate and manage test networks + + +## ignite version + +Print the current build information + +``` +ignite version [flags] +``` + +**Options** + +``` + -h, --help help for version +``` + +**SEE ALSO** + +* [ignite](#ignite) - Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain + +# Scaffold Type + +Ignites provides a set of scaffold types that can be used to generate code for your application. +These types are used in the `ignite scaffold` command. + +## Available Scaffold Types + +| Type | Usage | +| --- | --- | +| address | use '<FIELD_NAME>:address' to scaffold string types (eg: cosmos1abcdefghijklmnopqrstuvwxyz0123456). | +| array.coin | use '<FIELD_NAME>:array.coin' to scaffold sdk.Coins types (eg: 20stake). 
Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations. | +| array.dec.coin | use '<FIELD_NAME>:array.dec.coin' to scaffold sdk.DecCoins types (eg: 20000002stake). Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations. | +| array.int | use '<FIELD_NAME>:array.int' to scaffold []int64 types (eg: 5,4,3,2,1). | +| array.string | use '<FIELD_NAME>:array.string' to scaffold []string types (eg: abc,xyz). | +| array.uint | use '<FIELD_NAME>:array.uint' to scaffold []uint64 types (eg: 13,26,31,40). | +| bool | use '<FIELD_NAME>:bool' to scaffold bool types (eg: true). | +| bytes | use '<FIELD_NAME>:bytes' to scaffold []byte types (eg: 3,2,3,5). | +| coin | use '<FIELD_NAME>:coin' to scaffold sdk.Coin types (eg: 10token). | +| coins | use '<FIELD_NAME>:array.coin' to scaffold sdk.Coins types (eg: 20stake). Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations. | +| custom | use the custom type to scaffold already created chain types. | +| dec.coin | use '<FIELD_NAME>:dec.coin' to scaffold sdk.DecCoin types (eg: 100001token). | +| dec.coins | use '<FIELD_NAME>:array.dec.coin' to scaffold sdk.DecCoins types (eg: 20000002stake). Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations. | +| int | use '<FIELD_NAME>:int' to scaffold int64 types (eg: 111). | +| int64 | use '<FIELD_NAME>:int' to scaffold int64 types (eg: 111). | +| ints | use '<FIELD_NAME>:array.int' to scaffold []int64 types (eg: 5,4,3,2,1). | +| string | use '<FIELD_NAME>:string' to scaffold string types (eg: xyz). | +| strings | use '<FIELD_NAME>:array.string' to scaffold []string types (eg: abc,xyz). | +| uint | use '<FIELD_NAME>:uint' to scaffold uint64 types (eg: 111). | +| uint64 | use '<FIELD_NAME>:uint' to scaffold uint64 types (eg: 111). 
| +| uints | use '<FIELD_NAME>:array.uint' to scaffold []uint64 types (eg: 13,26,31,40). | + + +Field Usage: + + - fieldName + - fieldName:fieldType + + +If no :fieldType, default (string) is used diff --git a/docs/versioned_docs/version-v29/03-CLI-Commands/_category_.json b/docs/versioned_docs/version-v29/03-CLI-Commands/_category_.json new file mode 100644 index 0000000..b549261 --- /dev/null +++ b/docs/versioned_docs/version-v29/03-CLI-Commands/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "CLI Commands", + "link": null + } \ No newline at end of file diff --git a/docs/versioned_docs/version-v29/04-clients/01-go-client.md b/docs/versioned_docs/version-v29/04-clients/01-go-client.md new file mode 100644 index 0000000..f592fcb --- /dev/null +++ b/docs/versioned_docs/version-v29/04-clients/01-go-client.md @@ -0,0 +1,298 @@ +--- +description: Blockchain client in Go +title: Go client +--- + +# A client in the Go programming language + +In this tutorial, we will show you how to create a standalone Go program that +serves as a client for a blockchain. We will use the IGNITE® CLI to set up a +standard blockchain. To communicate with the blockchain, we will utilize the +`cosmosclient` package, which provides an easy-to-use interface for interacting +with the blockchain. You will learn how to use the `cosmosclient` package to +send transactions and query the blockchain. By the end of this tutorial, you +will have a good understanding of how to build a client for a blockchain using +Go and the `cosmosclient` package. + +## Create a blockchain + +To create a blockchain using the IGNITE® CLI, use the following command: + +``` +ignite scaffold chain blog +``` + +This will create a new Cosmos SDK blockchain called "blog". + +Once the blockchain has been created, you can generate code for a "blog" model +that will enable you to perform create, read, update, and delete (CRUD) +operations on blog posts. 
To do this, you can use the following command: + +``` +cd blog +ignite scaffold list post title body +``` + +This will generate the necessary code for the "blog" model, including functions +for creating, reading, updating, and deleting blog posts. With this code in +place, you can now use your blockchain to perform CRUD operations on blog posts. +You can use the generated code to create new blog posts, retrieve existing ones, +update their content, and delete them as needed. This will give you a fully +functional Cosmos SDK blockchain with the ability to manage blog posts. + +Start your blockchain node with the following command: + +``` +ignite chain serve +``` + +## Creating a blockchain client + +Create a new directory called `blogclient` on the same level as `blog` +directory. As the name suggests, `blogclient` will contain a standalone Go +program that acts as a client to your `blog` blockchain. + +```bash +mkdir blogclient +``` + +This command will create a new directory called `blogclient` in your current +location. If you type `ls` in your terminal window, you should see both the +`blog` and `blogclient` directories listed. + +To initialize a new Go package inside the `blogclient` directory, you can use +the following command: + +``` +cd blogclient +go mod init blogclient +``` + +This will create a `go.mod` file in the `blogclient` directory, which contains +information about the package and the Go version being used. 
+ +To import dependencies for your package, you can add the following code to the +`go.mod` file: + +```text title="blogclient/go.mod" +module blogclient + +go 1.24.1 + +require ( + blog v0.0.0-00010101000000-000000000000 + github.com/ignite/cli/v28 v28.8.2 +) + +replace blog => ../blog +``` + +Your package will import two dependencies: + +* `blog`, which contains `types` of messages and a query client +* `ignite` for the `cosmosclient` package + +The `replace` directive uses the package from the local `blog` directory and is +specified as a relative path to the `blogclient` directory. + +Cosmos SDK uses a custom version of the `protobuf` package, so use the `replace` + +Finally, install dependencies for your `blogclient`: + +```bash +go mod tidy +``` + +### Main logic of the client in `main.go` + +Create a `main.go` file inside the `blogclient` directory and add the following +code: + +```go title="blogclient/main.go" +package main + +import ( + "context" + "fmt" + "log" + + // Importing the general purpose Cosmos blockchain client + "github.com/ignite/cli/v29/ignite/pkg/cosmosclient" + + // Importing the types package of your blog blockchain + "blog/x/blog/types" +) + +func main() { + ctx := context.Background() + addressPrefix := "cosmos" + + // Create a Cosmos client instance + client, err := cosmosclient.New(ctx, cosmosclient.WithAddressPrefix(addressPrefix)) + if err != nil { + log.Fatal(err) + } + + // Account `alice` was initialized during `ignite chain serve` + accountName := "alice" + + // Get account from the keyring + account, err := client.Account(accountName) + if err != nil { + log.Fatal(err) + } + + addr, err := account.Address(addressPrefix) + if err != nil { + log.Fatal(err) + } + + // Define a message to create a post + msg := &types.MsgCreatePost{ + Creator: addr, + Title: "Hello!", + Body: "This is the first post", + } + + // Broadcast a transaction from account `alice` with the message + // to create a post store response in txResp + txResp, 
err := client.BroadcastTx(ctx, account, msg) + if err != nil { + log.Fatal(err) + } + + // Print response from broadcasting a transaction + fmt.Print("MsgCreatePost:\n\n") + fmt.Println(txResp) + + // Instantiate a query client for your `blog` blockchain + queryClient := types.NewQueryClient(client.Context()) + + // Query the blockchain using the client's `PostAll` method + // to get all posts store all posts in queryResp + queryResp, err := queryClient.PostAll(ctx, &types.QueryAllPostRequest{}) + if err != nil { + log.Fatal(err) + } + + // Print response from querying all the posts + fmt.Print("\n\nAll posts:\n\n") + fmt.Println(queryResp) +} +``` + +The code above creates a standalone Go program that acts as a client to the +`blog` blockchain. It begins by importing the required packages, including the +general purpose Cosmos blockchain client and the `types` package of the `blog` +blockchain. + +In the `main` function, the code creates a Cosmos client instance and sets the +address prefix to "cosmos". It then retrieves an account named `"alice"` from +the keyring and gets the address of the account using the address prefix. + +Next, the code defines a message to create a blog post with the title "Hello!" +and body "This is the first post". It then broadcasts a transaction from the +account "alice" with the message to create the post, and stores the response in +the variable `txResp`. + +The code then instantiates a query client for the blog blockchain and uses it to +query the blockchain to retrieve all the posts. It stores the response in the +variable `queryResp` and prints it to the console. + +Finally, the code prints the response from broadcasting the transaction to the +console. This allows the user to see the results of creating and querying a blog +post on the `blog` blockchain using the client. 
+ +To find out more about the `cosmosclient` package, you can refer to the Go +package documentation for +[`cosmosclient`](https://pkg.go.dev/github.com/ignite/cli/ignite/pkg/cosmosclient). +This documentation provides information on how to use the `Client` type with +`Options` and `KeyringBackend`. + +## Run the blockchain and the client + +Make sure your blog blockchain is still running with `ignite chain serve`. + +Run the blockchain client: + +```bash +go run main.go +``` + +If the command is successful, the results of running the command will be printed +to the terminal. The output may include some warnings, which can be ignored. + +```yml +MsgCreatePost: + +code: 0 +codespace: "" +data: 12220A202F626C6F672E626C6F672E4D7367437265617465506F7374526573706F6E7365 +events: +- attributes: + - index: true + key: ZmVl + value: null + - index: true + key: ZmVlX3BheWVy + value: Y29zbW9zMWR6ZW13NzZ3enQ3cDBnajd3MzQyN2E0eHg3MjRkejAzd3hnOGhk + type: tx +- attributes: + - index: true + key: YWNjX3NlcQ== + value: Y29zbW9zMWR6ZW13NzZ3enQ3cDBnajd3MzQyN2E0eHg3MjRkejAzd3hnOGhkLzE= + type: tx +- attributes: + - index: true + key: c2lnbmF0dXJl + value: UWZncUJCUFQvaWxWVzJwNUJNTngzcDlvRzVpSXp0elhXdE9yMHcwVE00OEtlSkRqR0FEdU9VNjJiY1ZRNVkxTHdEbXNuYUlsTmc3VE9uMnJ2ZWRHSlE9PQ== + type: tx +- attributes: + - index: true + key: YWN0aW9u + value: L2Jsb2cuYmxvZy5Nc2dDcmVhdGVQb3N0 + type: message +gas_used: "52085" +gas_wanted: "300000" +height: "20" +info: "" +logs: +- events: + - attributes: + - key: action + value: /blog.blog.MsgCreatePost + type: message + log: "" + msg_index: 0 +raw_log: '[{"msg_index":0,"events":[{"type":"message","attributes":[{"key":"action","value":"/blog.blog.MsgCreatePost"}]}]}]' +timestamp: "" +tx: null +txhash: 4F53B75C18254F96EF159821DDD665E965DBB576A5AC2B94CE863EB62E33156A + +All posts: + +Post:<title:"Hello!" 
body:"This is the first post" creator:"cosmos1dzemw76wzt7p0gj7w3427a4xx724dz03wxg8hd" > pagination:<total:1 > +``` + +As you can see the client has successfully broadcasted a transaction and queried +the chain for blog posts. + +Please note, that some values in the output on your terminal (like transaction +hash and block height) might be different from the output above. + +You can confirm the new post with using the `blogd q blog list-post` command: + +```yaml +Post: +- body: This is the first post + creator: cosmos1dzemw76wzt7p0gj7w3427a4xx724dz03wxg8hd + id: "0" + title: Hello! +pagination: + next_key: null + total: "0" +``` + +Great job! You have successfully completed the process of creating a Go client +for your Cosmos SDK blockchain, submitting a transaction, and querying the +chain. diff --git a/docs/versioned_docs/version-v29/04-clients/02-typescript.md b/docs/versioned_docs/version-v29/04-clients/02-typescript.md new file mode 100644 index 0000000..7c57d52 --- /dev/null +++ b/docs/versioned_docs/version-v29/04-clients/02-typescript.md @@ -0,0 +1,441 @@ +--- +description: Information about the generated TypeScript client code. +--- + +# TypeScript library + +IGNITE® offers powerful functionality for generating client-side code for your +blockchain. Think of this as a one-click client SDK generation tailored +specifically for your blockchain. + +See `ignite generate ts-client --help` learn more on how to use TypeScript code generation. + +## Starting a node + +Create a new blockchain with `ignite scaffold chain`. You can use an existing +blockchain project if you have one, instead. 
+ +``` +ignite scaffold chain example +``` + +For testing purposes add a new account to `config.yml` with a mnemonic: + +```yml title="config.yml" +accounts: + - name: frank + coins: ["1000token", "100000000stake"] + mnemonic: play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint +``` + +Run a command to generate TypeScript clients for both standard and custom Cosmos +SDK modules: + +``` +ignite generate ts-client --clear-cache +``` + +:::tip +In order to not rely on the remote `buf.build` service, you can install the +`protoc-gen-ts_proto` binary locally and IGNITE® will use it instead of the remote plugin. + +```sh +npm install -g ts-proto +``` + +Learn more at <https://github.com/stephenh/ts-proto> +::: + +Run a command to start your blockchain node: + +``` +ignite chain serve -r +``` + +## Setting up a TypeScript frontend client + +The best way to get started building with the TypeScript client is by using +[Vite](https://vitejs.dev). Vite provides boilerplate code for +vanilla TS projects as well as React, Vue, Lit, Svelte and Preact frameworks. +You can find additional information at the [Vite Getting Started +guide](https://vitejs.dev/guide). + +You will also need to [polyfill](https://developer.mozilla.org/en-US/docs/Glossary/Polyfill) the client's dependencies. The following is an +example of setting up a vanilla TS project with the necessary polyfills: + +```bash +npm create vite@latest my-frontend-app -- --template vanilla-ts +cd my-frontend-app +npm install --save-dev @esbuild-plugins/node-globals-polyfill @rollup/plugin-node-resolve +``` + +You must then create the necessary `vite.config.ts` file. 
+ +```typescript title="my-frontend-app/vite.config.ts" +import { nodeResolve } from "@rollup/plugin-node-resolve"; +import { NodeGlobalsPolyfillPlugin } from "@esbuild-plugins/node-globals-polyfill"; +import { defineConfig } from "vite"; + +export default defineConfig({ + plugins: [nodeResolve()], + + optimizeDeps: { + esbuildOptions: { + define: { + global: "globalThis", + }, + plugins: [ + NodeGlobalsPolyfillPlugin({ + buffer: true, + }), + ], + }, + }, +}); +``` + +You are then ready to use the generated client code inside this project directly +or by publishing the client and installing it like any other `npm` package. + +After the chain starts, you will see Frank's address is +`cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7`. We'll be using Frank's account +for querying data and broadcasting transactions in the next section. + +## Querying + +The code generated in `ts-client` comes with a `package.json` file ready to +publish which you can modify to suit your needs. To use`ts-client` install the +required dependencies: + +``` +cd ts-client +npm install +``` + +The client is based on a modular architecture where you can configure a client +class to support the modules you need and instantiate it. + +By default, the generated client exports a client class that includes all the +Cosmos SDK, custom and 3rd party modules in use in your project. + +To instantiate the client you need to provide environment information (endpoints +and chain prefix). For querying that's all you need: + +```typescript title="my-frontend-app/src/main.ts" +import { Client } from "../../ts-client"; + +const client = new Client( + { + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos", + } +); +``` + +The example above uses `ts-client` from a local directory. If you have published +your `ts-client` on `npm` replace `../../ts-client` with a package name. 
+
+The resulting client instance contains namespaces for each module, each with a
+`query` and `tx` namespace containing the module's relevant querying and
+transacting methods with full type and auto-completion support.
+
+To query for a balance of an address:
+
+```typescript
+const balances = await client.CosmosBankV1Beta1.query.queryAllBalances(
+  'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7'
+);
+```
+
+## Broadcasting a transaction
+
+Add signing capabilities to the client by creating a wallet from a mnemonic
+(we're using Frank's mnemonic added to `config.yml` earlier) and passing it
+as an optional argument to `Client()`. The wallet implements the CosmJS
+`OfflineSigner` interface.
+
+```typescript title="my-frontend-app/src/main.ts"
+import { Client } from "../../ts-client";
+// highlight-start
+import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing";
+
+const mnemonic =
+  "play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint";
+const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic);
+// highlight-end
+
+const client = new Client(
+  {
+    apiURL: "http://localhost:1317",
+    rpcURL: "http://localhost:26657",
+    prefix: "cosmos",
+  },
+  // highlight-next-line
+  wallet
+);
+```
+
+Broadcasting a transaction:
+
+```typescript title="my-frontend-app/src/main.ts"
+const tx_result = await client.CosmosBankV1Beta1.tx.sendMsgSend({
+  value: {
+    amount: [
+      {
+        amount: '200',
+        denom: 'token',
+      },
+    ],
+    fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7',
+    toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc',
+  },
+  fee: {
+    amount: [{ amount: '0', denom: 'stake' }],
+    gas: '200000',
+  },
+  memo: '',
+})
+```
+
+## Broadcasting a transaction with a custom message
+
+If your chain already has custom messages defined, you can use those. If not,
+we'll be using IGNITE®'s scaffolded code as an example. 
Create a post with CRUD +messages: + +``` +ignite scaffold list post title body +``` + +After adding messages to your chain you may need to re-generate the TypeScript +client: + +``` +ignite generate ts-client --clear-cache +``` + +Broadcast a transaction containing the custom `MsgCreatePost`: + +```typescript title="my-frontend-app/src/main.ts" +import { Client } from "../../ts-client"; +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; + +const mnemonic = + "play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint"; +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic); + +const client = new Client( + { + apiURL: "http://localhost:1317", + rpcURL: "http://localhost:26657", + prefix: "cosmos", + }, + wallet +); +// highlight-start +const tx_result = await client.ExampleExample.tx.sendMsgCreatePost({ + value: { + title: 'foo', + body: 'bar', + creator: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + }, + fee: { + amount: [{ amount: '0', denom: 'stake' }], + gas: '200000', + }, + memo: '', +}) +// highlight-end +``` + +## Lightweight client + +If you prefer, you can construct a lighter client using only the modules you are +interested in by importing the generic client class and expanding it with the +modules you need: + +```typescript title="my-frontend-app/src/main.ts" +// highlight-start +import { IgniteClient } from '../../ts-client/client' +import { Module as CosmosBankV1Beta1 } from '../../ts-client/cosmos.bank.v1beta1' +import { Module as CosmosStakingV1Beta1 } from '../../ts-client/cosmos.staking.v1beta1' +// highlight-end +import { DirectSecp256k1HdWallet } from '@cosmjs/proto-signing' + +const mnemonic = + 'play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint' +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic) +// 
highlight-next-line +const Client = IgniteClient.plugin([CosmosBankV1Beta1, CosmosStakingV1Beta1]) + +const client = new Client( + { + apiURL: 'http://localhost:1317', + rpcURL: 'http://localhost:26657', + prefix: 'cosmos', + }, + wallet, +) +``` + +## Broadcasting a multi-message transaction + +You can also construct TX messages separately and send them in a single TX using +a global signing client like so: + +```typescript title="my-frontend-app/src/main.ts" +const msg1 = await client.CosmosBankV1Beta1.tx.msgSend({ + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc', + }, +}) + +const msg2 = await client.CosmosBankV1Beta1.tx.msgSend({ + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc', + }, +}) + +const tx_result = await client.signAndBroadcast( + [msg1, msg2], + { + amount: [{ amount: '0', denom: 'stake' }], + gas: '200000', + }, + '', +) +``` + +Finally, for additional ease-of-use, apart from the modular client mentioned +above, each generated module is usable on its own in a stripped-down way by +exposing a separate txClient and queryClient. 
+ +```typescript title="my-frontend-app/src/main.ts" +import { txClient } from '../../ts-client/cosmos.bank.v1beta1' +import { DirectSecp256k1HdWallet } from '@cosmjs/proto-signing' + +const mnemonic = + 'play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint' +const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic) + +const client = txClient({ + signer: wallet, + prefix: 'cosmos', + addr: 'http://localhost:26657', +}) + +const tx_result = await client.sendMsgSend({ + value: { + amount: [ + { + amount: '200', + denom: 'token', + }, + ], + fromAddress: 'cosmos13xkhcx2dquhqdml0k37sr7yndquwteuvt2cml7', + toAddress: 'cosmos15uw6qpxqs6zqh0zp3ty2ac29cvnnzd3qwjntnc', + }, + fee: { + amount: [{ amount: '0', denom: 'stake' }], + gas: '200000', + }, + memo: '', +}) +``` + +## Usage with Keplr + +Normally, Keplr provides a wallet object implementing the `OfflineSigner` +interface, so you can simply replace the `wallet` argument in client +instantiation with `window.keplr.getOfflineSigner(chainId)`. However, Keplr +requires information about your chain, like chain ID, denoms, fees, etc. +[`experimentalSuggestChain()`](https://docs.keplr.app/api/guide/suggest-chain) is +a method Keplr provides to pass this information to the Keplr extension. + +The generated client makes this easier by offering a `useKeplr()` method that +automatically discovers the chain information and sets it up for you. 
Thus, you
+can instantiate the client without a wallet and then call `useKeplr()` to enable
+transacting via Keplr like so:
+
+```typescript title="my-frontend-app/src/main.ts"
+import { Client } from '../../ts-client';
+
+const client = new Client({
+    apiURL: "http://localhost:1317",
+    rpcURL: "http://localhost:26657",
+    prefix: "cosmos"
+  }
+);
+await client.useKeplr();
+```
+
+`useKeplr()` optionally accepts an object argument that contains one or more of
+the same keys as the `ChainInfo` type argument of `experimentalSuggestChain()`
+allowing you to override the auto-discovered values.
+
+For example, the default chain name and token precision (which are not recorded
+on-chain) are set to `<chainId> Network` and `0` while the ticker for the denom
+is set to the denom name in uppercase. If you want to override these, you can do
+something like:
+
+```typescript title="my-frontend-app/src/main.ts"
+import { Client } from '../../ts-client';
+
+const client = new Client({
+    apiURL: "http://localhost:1317",
+    rpcURL: "http://localhost:26657",
+    prefix: "cosmos"
+  }
+);
+await client.useKeplr({
+  chainName: 'My Great Chain',
+  stakeCurrency: {
+    coinDenom: 'TOKEN',
+    coinMinimalDenom: 'utoken',
+    coinDecimals: 6,
+  },
+})
+```
+
+## Wallet switching
+
+The client also allows you to switch out the wallet for a different one on an
+already instantiated client like so:
+
+```typescript
+import { Client } from '../../ts-client';
+import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing";
+
+const mnemonic =
+  'play butter frown city voyage pupil rabbit wheat thrive mind skate turkey helmet thrive door either differ gate exhibit impose city swallow goat faint'
+const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic);
+
+const client = new Client({
+    apiURL: "http://localhost:1317",
+    rpcURL: "http://localhost:26657",
+    prefix: "cosmos"
+  }
+);
+await client.useKeplr();
+
+// broadcast transactions using the Keplr wallet
+
+client.useSigner(wallet);
+
+// broadcast transactions using the CosmJS wallet +``` diff --git a/docs/versioned_docs/version-v29/04-clients/03-vue.md b/docs/versioned_docs/version-v29/04-clients/03-vue.md new file mode 100644 index 0000000..ead1c00 --- /dev/null +++ b/docs/versioned_docs/version-v29/04-clients/03-vue.md @@ -0,0 +1,181 @@ +# Vue frontend + +:::warning +The Vue frontend is being reworked and is not yet stable. +In the meantime, refer to the [IGNITE® CCA App](https://ignite.com/marketplace/cca). +::: + +Welcome to this tutorial on using IGNITE® to develop a web application for your +blockchain with Vue 3. IGNITE® is a tool that simplifies the process of building +a blockchain application by providing a set of templates and generators that can +be used to get up and running quickly. + +One of the features of IGNITE® is its support for [Vue 3](https://vuejs.org/), a +popular JavaScript framework for building user interfaces. In this tutorial, you +will learn how to use IGNITE® to create a new blockchain and scaffold a Vue +frontend template. This will give you a basic foundation for your web +application and make it easier to get started building out the rest of your +application. + +Once you have your blockchain and Vue template set up, the next step is to +generate an API client. This will allow you to easily interact with your +blockchain from your web application, enabling you to retrieve data and make +transactions. By the end of this tutorial, you will have a fully functional web +application that is connected to your own blockchain. + +Prerequisites: + +* [Node.js](https://nodejs.org/en/) +* [Keplr](https://www.keplr.app/) Chrome extension + +## Create a blockchain and a Vue app + +Create a new blockchain project: + +``` +ignite scaffold chain example +``` + +To create a Vue frontend template, go to the `example` directory and run the +following command: + +``` +ignite scaffold vue +``` + +This will create a new Vue project in the `vue` directory. 
This project can be +used with any blockchain, but it depends on an API client to interact with the +blockchain. To generate an API client, run the following command in the +`example` directory: + +``` +ignite generate composables +``` + +This command generates two directories: + +* `ts-client`: a framework-agnostic TypeScript client that can be used to + interact with your blockchain. You can learn more about how to use this client + in the [TypeScript client tutorial](/clients/typescript). +* `vue/src/composables`: a collection of Vue 3 + [composables](https://vuejs.org/guide/reusability/composables.html) that wrap + the TypeScript client and make it easier to interact with your blockchain from + your Vue application. + +## Set up Keplr and an account + +Open your browser with the Keplr wallet extension installed. Follow [the +instructions](https://keplr.crunch.help/en/getting-started/creating-a-new-keplr-account) +to create a new account or use an existing one. Make sure to save the mnemonic +phrase as you will need it in the next step. + +Do not use a mnemonic phrase that is associated with an account that holds +assets you care about. If you do, you risk losing those assets. It's a good +practice to create a new account for development purposes. + +Add the account you're using in Keplr to your blockchain's `config.yml` file: + +```yml +accounts: + - name: alice + coins: [20000token, 200000000stake] + - name: bob + coins: [10000token, 100000000stake] + # highlight-start + - name: frank + coins: [10000token, 100000000stake] + mnemonic: struggle since inmate safe logic kite tag web win stay security wonder + # highlight-end +``` + +Replace the `struggle since...` mnemonic with the one you saved in the previous +step. + +Adding an account with a mnemonic to the config file will tell IGNITE® CLI to add +the account to the blockchain when you start it. This is useful for development +purposes, but you should not do this in production. 
+ +## Start a blockchain and a Vue app + +In the `example` directory run the following command to start your blockchain: + +```bash +ignite chain serve +``` + +To start your Vue application, go to the `vue` directory and run the following +command in a separate terminal window: + +:::note +Make sure you have [pnpm](https://pnpm.io/) installed. +::: + +```bash +pnpm install && pnpm dev +``` + +It is recommended to run `pnpm install` before starting your app with `pnpm dev` to ensure that all dependencies are installed (including the ones that the API client has, see `vue/postinstall.js`). + +Open your browser and navigate to +[http://localhost:5173/](http://localhost:5173/). + +![Web app](/img/web-1.png) + +Press "Connect wallet", enter your password into Keplr and press "Approve" to +add your blockchain to Keplr. + +<img src="/img/web-4.png" width="300"/> + +Make sure to select the account you're using for development purposes and the +"Example Network" in Keplr's blockchain dropdown. You should see a list of +assets in your Vue app. + +![Web app](/img/web-5.png) + +Congratulations! You have successfully created a client-side Vue application and +connected it to your blockchain. You can modify the source code of your Vue +application to build out the rest of your project. + +## Setting the address prefix + +It is necessary to set the correct address prefix in order for the Vue app to +properly interact with a Cosmos chain. The address prefix is used to identify +the chain that the app is connected to, and must match the prefix used by the +chain. + +By default, IGNITE® creates a chain with the `cosmos` prefix. If you have +created your chain with `ignite scaffold chain ... --address-prefix foo` or +manually changed the prefix in the source code of the chain, you need to set the +prefix in the Vue app. + +There are two ways to set the address prefix in a Vue app. 
+ +### Using an environment variable + +You can set the `VITE_ADDRESS_PREFIX` environment variable to the correct +address prefix for your chain. This will override the default prefix used by the +app. + +To set the `VITE_ADDRESS_PREFIX` environment variable, you can use the following +command: + +```bash +export VITE_ADDRESS_PREFIX=your-prefix +``` + +Replace `your-prefix` with the actual address prefix for your chain. + +### Setting address prefix in the code + +Alternatively, you can manually set the correct address prefix by replacing the +fallback value of the `prefix` variable in the file `./vue/src/env.ts`. + +To do this, open the file `./vue/src/env.ts` and find the following line: + +```ts title="./vue/src/env.ts" +const prefix = process.env.VITE_ADDRESS_PREFIX || 'your-prefix'; +``` + +Replace `your-prefix` with the actual address prefix for your chain. + +Save the file and restart the Vue app to apply the changes. diff --git a/docs/versioned_docs/version-v29/04-clients/_category_.json b/docs/versioned_docs/version-v29/04-clients/_category_.json new file mode 100644 index 0000000..4d95374 --- /dev/null +++ b/docs/versioned_docs/version-v29/04-clients/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Create an Interface", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v29/05-contributing/01-docs.md b/docs/versioned_docs/version-v29/05-contributing/01-docs.md new file mode 100644 index 0000000..b0b722d --- /dev/null +++ b/docs/versioned_docs/version-v29/05-contributing/01-docs.md @@ -0,0 +1,105 @@ +--- +sidebar_position: 1 +slug: /contributing +--- + +# Improving documentation + +Thank you for visiting our repository and considering making contributions. We +appreciate your interest in helping us to create and maintain awesome tutorials +and documentation. + +## Using this repo + +Review existing [IGNITE® CLI issues](https://github.com/ignite/cli/issues) to see +if your question has already been asked and answered. 
+ +- To provide feedback, file an issue and provide generous details to help us + understand how we can make it better. +- To provide a fix, make a direct contribution. If you're not a member or + maintainer, fork the repo and then submit a pull request (PR) from your forked + repo to the `main` branch. +- Start by creating a draft pull request. Create your draft PR early, even if + your work is just beginning or incomplete. Your draft PR indicates to the + community that you're working on something and provides a space for + conversations early in the development process. Merging is blocked for `Draft` + PRs, so they provide a safe place to experiment and invite comments. + +## Reviewing technical content PRs + +Some of the best content contributions come during the PR review cycles. Follow +best practices for technical content PR reviews just like you do for code +reviews. + +- For in-line suggestions, use the [GitHub suggesting + feature](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/commenting-on-a-pull-request) + . +- The PR owner can merge in your suggested commits one at a time or in batch + (preferred). +- When you are providing a more granular extensive review that results in more + than 20 in-line suggestions, go ahead and check out the branch and make the + changes yourself. + +## Writing and contributing + +We welcome contributions to the docs and tutorials. + +Our technical content follows the [Google developer documentation style +guide](https://developers.google.com/style). 
Highlights to help you get started: + +- [Highlights](https://developers.google.com/style/highlights) +- [Word list](https://developers.google.com/style/word-list) +- [Style and tone](https://developers.google.com/style/tone) +- [Writing for a global + audience](https://developers.google.com/style/translation) +- [Cross-references](https://developers.google.com/style/cross-references) +- [Present tense](https://developers.google.com/style/tense) + +The Google guidelines include more material than is listed here and are used as +a guide that enables easy decision-making about proposed content changes. + +Other useful resources: + +- [Google Technical Writing Courses](https://developers.google.com/tech-writing) +- [GitHub Guides Mastering + Markdown](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax) + +## Where can I find the tutorials and docs? + +Technical content includes knowledge base articles and interactive tutorials. + +- The IGNITE® CLI Developer Tutorials content is in the `docs/guide` folder. +- The Knowledge Base content is in the `docs/kb` folder. +- Upgrade information is in the `docs/migration` folder. + +Note: The CLI docs are auto-generated and do not support doc updates. + +Locations and folders for other content can vary. Explore the self-describing +folders for the content that you are interested in. Some articles and tutorials +reside in a single Markdown file while sub-folders might be present for other +tutorials. + +As always, work-in-progress content might be happening in other locations and +repos. + +## Who works on the tutorials? + +The IGNITE® product team developers are focused on building IGNITE® CLI and +improving the developer experience. The IGNITE® Ecosystem Development team owns +the technical content and tutorials and manages developer onboarding. 
+ +Meet the [people behind IGNITE® CLI and our +contributors](https://github.com/ignite/cli/graphs/contributors). + +## Viewing docs builds + +Use a preview to see what your changes will look like in production before the +updated pages are published. + +- While a PR is in draft mode, you can rely on using the preview feature in + Markdown. +- After the PR moves from **Draft** to **Ready for review**, the CI status + checks generate a deployment preview. This preview stays up to date as you + continue to work and commit new changes to the same branch. A `Docs Deploy + Preview / build_and_deploy (pull_request)` preview on a GitHub actions URL is + unique for that PR. diff --git a/docs/versioned_docs/version-v29/05-contributing/_category_.json b/docs/versioned_docs/version-v29/05-contributing/_category_.json new file mode 100644 index 0000000..5077538 --- /dev/null +++ b/docs/versioned_docs/version-v29/05-contributing/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Contribute to IGNITE®", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v29/06-migration/_category_.json b/docs/versioned_docs/version-v29/06-migration/_category_.json new file mode 100644 index 0000000..9460d57 --- /dev/null +++ b/docs/versioned_docs/version-v29/06-migration/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Migration", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v29/06-migration/readme.md b/docs/versioned_docs/version-v29/06-migration/readme.md new file mode 100644 index 0000000..2c9851a --- /dev/null +++ b/docs/versioned_docs/version-v29/06-migration/readme.md @@ -0,0 +1,74 @@ +--- +sidebar_position: 0 +--- + +# Migration Guides + +Welcome to the section on upgrading to a newer version of IGNITE® CLI! If you're +looking to update to the latest version, you'll want to start by checking the +documentation to see if there are any special considerations or instructions you +need to follow. 
+ +If there is no documentation for the latest version of IGNITE® CLI, it's +generally safe to assume that there were no breaking changes, and you can +proceed with using the latest version with your project. + +## Create your own Migration Guide + +The `gen-mig-diffs` tool helps developers manage and visualize code changes across multiple major versions of IGNITE®. With each major upgrade, the codebase might undergo significant changes, making it challenging for developers to track these differences after several updates. The `gen-mig-diffs` tool simplifies this process by scaffolding blockchains with both the old and new versions and displaying the differences. + +It is located in the [IGNITE® CLI GitHub repository](https://github.com/ignite/cli/tree/main/ignite/internal/tools/gen-mig-diffs) +directory and has been made into a standalone project. + +To set up this tool in your development environment: + +```shell +gen-mig-diffs [flags] +``` + +This tool generates migration diff files for each of IGNITE®'s scaffold commands. It compares two specified versions of IGNITE® and provides a clear, organized view of the changes. + +## How to Get Started + +1. Clone the IGNITE® CLI repository: + +```shell +git clone https://github.com/ignite/cli.git --depth=1 && \ +cd cli/ignite/internal/tools/gen-mig-diffs +``` + +2. Install and show usage: + +```shell +go install . && gen-mig-diffs -h +``` + +### Example Migration + +As an example, to generate migration diffs between versions 0.27.2 and 28.3.0, use the following command: + +```shell +gen-mig-diffs --output temp/migration --from v0.27.2 --to v28.3.0 +``` + +This command scaffolds blockchains with the specified versions and shows the differences, making it easier for developers to understand and apply necessary changes when upgrading their projects. 
+ +## Usage + +```bash +This tool is used to generate migration diff files for each of ignites scaffold commands + +Usage: + gen-mig-diffs [flags] + +Flags: + -f, --from string Version of IGNITE® or path to IGNITE® source code to generate the diff from + -h, --help help for gen-mig-diffs + -o, --output string Output directory to save the migration document (default "docs/docs/06-migration") + --repo-output string Output path to clone the IGNITE® repository + -s, --repo-source string Path to IGNITE® source code repository. Set the source automatically set the cleanup to false + --repo-url string Git URL for the IGNITE® repository (default "https://github.com/ignite/cli.git") + --scaffold-cache string Path to cache directory + --scaffold-output string Output path to clone the IGNITE® repository + -t, --to string Version of IGNITE® or path to IGNITE® source code to generate the diff to +``` diff --git a/docs/versioned_docs/version-v29/06-migration/v0.18.md b/docs/versioned_docs/version-v29/06-migration/v0.18.md new file mode 100644 index 0000000..556d973 --- /dev/null +++ b/docs/versioned_docs/version-v29/06-migration/v0.18.md @@ -0,0 +1,458 @@ +--- +sidebar_position: 999 +title: v0.18.0 +description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.18, changes are required to use IGNITE® CLI v0.18. +--- + +# Upgrading a Blockchain to use IGNITE® CLI v0.18 + +IGNITE® CLI v0.18 comes with Cosmos SDK v0.44. This version of Cosmos SDK introduced changes that are not compatible with +chains that were scaffolded with IGNITE® CLI versions lower than v0.18. + +**Important:** After upgrading from IGNITE® CLI v0.17.3 to IGNITE® CLI v0.18, you must update the default blockchain +template to use blockchains that were scaffolded with earlier versions. 
+ +These instructions are written for a blockchain that was scaffolded with the following command: + +``` +ignite scaffold chain github.com/username/mars +``` + +If you used a different module path, replace `username` and `mars` with the correct values for your blockchain. + +## Blockchain + +For each file listed, make the required changes to the source code of the blockchain template. + +### go.mod + +``` +module github.com/username/mars + +go 1.16 + +require ( + github.com/cosmos/cosmos-sdk v0.44.0 + github.com/cosmos/ibc-go v1.2.0 + github.com/gogo/protobuf v1.3.3 + github.com/google/go-cmp v0.5.6 // indirect + github.com/gorilla/mux v1.8.0 + github.com/grpc-ecosystem/grpc-gateway v1.16.0 + github.com/spf13/cast v1.3.1 + github.com/spf13/cobra v1.1.3 + github.com/stretchr/testify v1.7.0 + github.com/tendermint/spm v0.1.6 + github.com/tendermint/tendermint v0.34.13 + github.com/tendermint/tm-db v0.6.4 + google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83 + google.golang.org/grpc v1.40.0 +) + +replace ( + github.com/99designs/keyring => github.com/cosmos/keyring v1.1.7-0.20210622111912-ef00f8ac3d76 + github.com/gogo/protobuf => github.com/regen-network/protobuf v1.3.3-alpha.regen.1 + google.golang.org/grpc => google.golang.org/grpc v1.33.2 +) +``` + +### app/app.go + +```go +package app + +import ( + //... 
+ // Add the following packages: + "github.com/cosmos/cosmos-sdk/x/feegrant" + feegrantkeeper "github.com/cosmos/cosmos-sdk/x/feegrant/keeper" + feegrantmodule "github.com/cosmos/cosmos-sdk/x/feegrant/module" + + "github.com/cosmos/ibc-go/modules/apps/transfer" + ibctransferkeeper "github.com/cosmos/ibc-go/modules/apps/transfer/keeper" + ibctransfertypes "github.com/cosmos/ibc-go/modules/apps/transfer/types" + ibc "github.com/cosmos/ibc-go/modules/core" + ibcclient "github.com/cosmos/ibc-go/modules/core/02-client" + ibcporttypes "github.com/cosmos/ibc-go/modules/core/05-port/types" + ibchost "github.com/cosmos/ibc-go/modules/core/24-host" + ibckeeper "github.com/cosmos/ibc-go/modules/core/keeper" + // Remove the following packages: + // transfer "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer" + // ibctransferkeeper "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer/keeper" + // ibctransfertypes "github.com/cosmos/cosmos-sdk/x/ibc/applications/transfer/types" + // ibc "github.com/cosmos/cosmos-sdk/x/ibc/core" + // ibcclient "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client" + // porttypes "github.com/cosmos/cosmos-sdk/x/ibc/core/05-port/types" + // ibchost "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" + // ibckeeper "github.com/cosmos/cosmos-sdk/x/ibc/core/keeper" +) + +var ( + //... + ModuleBasics = module.NewBasicManager( + //... + slashing.AppModuleBasic{}, + // Add feegrantmodule.AppModuleBasic{}, + feegrantmodule.AppModuleBasic{}, // <-- + ibc.AppModuleBasic{}, + //... + ) + //... +) + +type App struct { + //... + // Replace codec.Marshaler with codec.Codec + appCodec codec.Codec // <-- + // Add FeeGrantKeeper + FeeGrantKeeper feegrantkeeper.Keeper // <-- +} + +func New( /*...*/ ) { + //bApp.SetAppVersion(version.Version) + bApp.SetVersion(version.Version) // <-- + + keys := sdk.NewKVStoreKeys( + //... + upgradetypes.StoreKey, + // Add feegrant.StoreKey + feegrant.StoreKey, // <-- + evidencetypes.StoreKey, + //... 
+ ) + + app.FeeGrantKeeper = feegrantkeeper.NewKeeper(appCodec, keys[feegrant.StoreKey], app.AccountKeeper) // <-- + // Add app.BaseApp as the last argument to upgradekeeper.NewKeeper + app.UpgradeKeeper = upgradekeeper.NewKeeper(skipUpgradeHeights, keys[upgradetypes.StoreKey], appCodec, homePath, app.BaseApp) + + app.IBCKeeper = ibckeeper.NewKeeper( + // Add app.UpgradeKeeper + appCodec, keys[ibchost.StoreKey], app.GetSubspace(ibchost.ModuleName), app.StakingKeeper, app.UpgradeKeeper, scopedIBCKeeper, + ) + + govRouter.AddRoute(govtypes.RouterKey, govtypes.ProposalHandler). + //... + // Replace NewClientUpdateProposalHandler with NewClientProposalHandler + AddRoute(ibchost.RouterKey, ibcclient.NewClientProposalHandler(app.IBCKeeper.ClientKeeper)) + + // Replace porttypes with ibcporttypes + ibcRouter := ibcporttypes.NewRouter() + + app.mm.SetOrderBeginBlockers( + upgradetypes.ModuleName, + // Add capabilitytypes.ModuleName, + capabilitytypes.ModuleName, + minttypes.ModuleName, + //... + // Add feegrant.ModuleName, + feegrant.ModuleName, + ) + + // Add app.appCodec as an argument to module.NewConfigurator: + app.mm.RegisterServices(module.NewConfigurator(app.appCodec, app.MsgServiceRouter(), app.GRPCQueryRouter())) + + // Replace: + // app.SetAnteHandler( + // ante.NewAnteHandler( + // app.AccountKeeper, app.BankKeeper, ante.DefaultSigVerificationGasConsumer, + // encodingConfig.TxConfig.SignModeHandler(), + // ), + // ) + + // With the following: + anteHandler, err := ante.NewAnteHandler( + ante.HandlerOptions{ + AccountKeeper: app.AccountKeeper, + BankKeeper: app.BankKeeper, + SignModeHandler: encodingConfig.TxConfig.SignModeHandler(), + FeegrantKeeper: app.FeeGrantKeeper, + SigGasConsumer: ante.DefaultSigVerificationGasConsumer, + }, + ) + if err != nil { + panic(err) + } + app.SetAnteHandler(anteHandler) + + // Remove the following: + // ctx := app.BaseApp.NewUncachedContext(true, tmproto.Header{}) + // app.CapabilityKeeper.InitializeAndSeal(ctx) +} + +func 
(app *App) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci.ResponseInitChain { + var genesisState GenesisState + if err := tmjson.Unmarshal(req.AppStateBytes, &genesisState); err != nil { + panic(err) + } + // Add the following: + app.UpgradeKeeper.SetModuleVersionMap(ctx, app.mm.GetVersionMap()) + return app.mm.InitGenesis(ctx, app.appCodec, genesisState) +} + +// Replace Marshaler with Codec +func (app *App) AppCodec() codec.Codec { + return app.appCodec +} + +// Replace BinaryMarshaler with BinaryCodec +func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey sdk.StoreKey) paramskeeper.Keeper { + //... +} +``` + +### app/genesis.go + +```go +// Replace codec.JSONMarshaler with codec.JSONCodec +func NewDefaultGenesisState(cdc codec.JSONCodec) GenesisState { + // ... +} +``` + +### testutil/keeper/mars.go + +Add the following code: + +```go +package keeper + +import ( + "testing" + + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/store" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/stretchr/testify/require" + "github.com/tendermint/tendermint/libs/log" + tmproto "github.com/tendermint/tendermint/proto/tendermint/types" + tmdb "github.com/tendermint/tm-db" + "github.com/username/mars/x/mars/keeper" + "github.com/username/mars/x/mars/types" +) + +func MarsKeeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + + db := tmdb.NewMemDB() + stateStore := store.NewCommitMultiStore(db) + stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + require.NoError(t, stateStore.LoadLatestVersion()) + + registry := codectypes.NewInterfaceRegistry() + k := keeper.NewKeeper( + codec.NewProtoCodec(registry), 
+ storeKey, + memStoreKey, + ) + + ctx := sdk.NewContext(stateStore, tmproto.Header{}, false, log.NewNopLogger()) + return k, ctx +} +``` + +If `mars` is an IBC-enabled module, add the following code, instead: + +```go +package keeper + +import ( + "testing" + + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/store" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + capabilitykeeper "github.com/cosmos/cosmos-sdk/x/capability/keeper" + typesparams "github.com/cosmos/cosmos-sdk/x/params/types" + ibckeeper "github.com/cosmos/ibc-go/modules/core/keeper" + "github.com/stretchr/testify/require" + "github.com/tendermint/tendermint/libs/log" + tmproto "github.com/tendermint/tendermint/proto/tendermint/types" + tmdb "github.com/tendermint/tm-db" + "github.com/username/test/x/mars/keeper" + "github.com/username/test/x/mars/types" +) + +func MarsKeeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + logger := log.NewNopLogger() + + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + + db := tmdb.NewMemDB() + stateStore := store.NewCommitMultiStore(db) + stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + require.NoError(t, stateStore.LoadLatestVersion()) + + registry := codectypes.NewInterfaceRegistry() + appCodec := codec.NewProtoCodec(registry) + capabilityKeeper := capabilitykeeper.NewKeeper(appCodec, storeKey, memStoreKey) + + amino := codec.NewLegacyAmino() + ss := typesparams.NewSubspace(appCodec, + amino, + storeKey, + memStoreKey, + "MarsSubSpace", + ) + IBCKeeper := ibckeeper.NewKeeper( + appCodec, + storeKey, + ss, + nil, + nil, + capabilityKeeper.ScopeToModule("MarsIBCKeeper"), + ) + + k := keeper.NewKeeper( + codec.NewProtoCodec(registry), + storeKey, + memStoreKey, + IBCKeeper.ChannelKeeper, + 
&IBCKeeper.PortKeeper, + capabilityKeeper.ScopeToModule("MarsScopedKeeper"), + ) + + ctx := sdk.NewContext(stateStore, tmproto.Header{}, false, logger) + return k, ctx +} +``` + +### testutil/network/network.go + +```go +func DefaultConfig() network.Config { + // ... + return network.Config{ + // ... + // Add sdk.DefaultPowerReduction + AccountTokens: sdk.TokensFromConsensusPower(1000, sdk.DefaultPowerReduction), + StakingTokens: sdk.TokensFromConsensusPower(500, sdk.DefaultPowerReduction), + BondedTokens: sdk.TokensFromConsensusPower(100, sdk.DefaultPowerReduction), + // ... + } +} +``` + +### testutil/sample/sample.go + +Add the following code: + +```go +package sample + +import ( + "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" + sdk "github.com/cosmos/cosmos-sdk/types" +) + +// AccAddress returns a sample account address +func AccAddress() string { + pk := ed25519.GenPrivKey().PubKey() + addr := pk.Address() + return sdk.AccAddress(addr).String() +} +``` + +### BandChain Support + +If your module includes integration with BandChain, added manually or scaffolded with `ignite scaffold band`, upgrade +the `github.com/bandprotocol/bandchain-packet` package to `v0.0.2` in `go.mod`. + +## Module + +### x/mars/keeper/keeper.go + +```go +package keeper + +// ... + +type ( + Keeper struct { + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec + //... + } +) + +func NewKeeper( + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec, + // ... +) *Keeper { + // ... +} +``` + +### x/mars/keeper/msg_server_test.go + +```go +package keeper_test + +import ( + //... 
+ // Add the following: + keepertest "github.com/username/mars/testutil/keeper" + "github.com/username/mars/x/mars/keeper" +) + +func setupMsgServer(t testing.TB) (types.MsgServer, context.Context) { + // Replace + // keeper, ctx := setupKeeper(t) + // return NewMsgServerImpl(*keeper), sdk.WrapSDKContext(ctx) + + // With the following: + k, ctx := keepertest.MarsKeeper(t) + return keeper.NewMsgServerImpl(*k), sdk.WrapSDKContext(ctx) +} +``` + +### x/mars/module.go + +```go +package mars + +type AppModuleBasic struct { + // Replace Marshaler with BinaryCodec + cdc codec.BinaryCodec +} + +// Replace Marshaler with BinaryCodec +func NewAppModuleBasic(cdc codec.BinaryCodec) AppModuleBasic { + return AppModuleBasic{cdc: cdc} +} + +// Replace JSONMarshaler with JSONCodec +func (AppModuleBasic) DefaultGenesis(cdc codec.JSONCodec) json.RawMessage { + return cdc.MustMarshalJSON(types.DefaultGenesis()) +} + +// Replace JSONMarshaler with JSONCodec +func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncodingConfig, bz json.RawMessage) error { + //... +} + +// Replace codec.Marshaller with codec.Codec +func NewAppModule(cdc codec.Codec, keeper keeper.Keeper) AppModule { + //... +} + +// Replace JSONMarshaler with JSONCodec +func (am AppModule) InitGenesis(ctx sdk.Context, cdc codec.JSONCodec, gs json.RawMessage) []abci.ValidatorUpdate { + //... +} + +// Replace JSONMarshaler with JSONCodec +func (am AppModule) ExportGenesis(ctx sdk.Context, cdc codec.JSONCodec) json.RawMessage { + //... 
+} + +// Add the following +func (AppModule) ConsensusVersion() uint64 { return 2 } +``` diff --git a/docs/versioned_docs/version-v29/06-migration/v0.19.2.md b/docs/versioned_docs/version-v29/06-migration/v0.19.2.md new file mode 100644 index 0000000..294ec85 --- /dev/null +++ b/docs/versioned_docs/version-v29/06-migration/v0.19.2.md @@ -0,0 +1,26 @@ +--- +sidebar_position: 998 +title: v0.19.2 +description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.19.2, changes are required to use IGNITE® CLI v0.19.2. +--- + +# Upgrading a blockchain to use IGNITE® CLI v0.19.2 + +IGNITE® CLI v0.19.2 comes with IBC v2.0.2. + +With IGNITE® CLI v0.19.2, the contents of the deprecated IGNITE® CLI Modules `tendermint/spm` repo are moved to the +official IGNITE® CLI repo which introduces breaking changes. + +To migrate your chain that was scaffolded with IGNITE® CLI versions lower than v0.19.2: + +1. IBC upgrade: Use + the [IBC migration documents](https://github.com/cosmos/ibc-go/blob/v6.2.0/docs/migrations/v1-to-v2.md) + +2. In your chain's `go.mod` file, remove `tendermint/spm` and add the v0.19.2 version of `tendermint/starport`. 
If your
+   chain uses these packages, change the import paths as shown:
+
+   - `github.com/tendermint/spm/ibckeeper` moved to `github.com/tendermint/starport/starport/pkg/cosmosibckeeper`
+   - `github.com/tendermint/spm/cosmoscmd` moved to `github.com/tendermint/starport/starport/pkg/cosmoscmd`
+   - `github.com/tendermint/spm/openapiconsole` moved to `github.com/tendermint/starport/starport/pkg/openapiconsole`
+   - `github.com/tendermint/spm/testutil/sample` moved
+     to `github.com/tendermint/starport/starport/pkg/cosmostestutil/sample`
diff --git a/docs/versioned_docs/version-v29/06-migration/v0.20.0.md b/docs/versioned_docs/version-v29/06-migration/v0.20.0.md
new file mode 100644
index 0000000..d46c902
--- /dev/null
+++ b/docs/versioned_docs/version-v29/06-migration/v0.20.0.md
@@ -0,0 +1,12 @@
+---
+sidebar_position: 997
+title: v0.20.0
+description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.20.0, changes are required to use IGNITE® CLI v0.20.0.
+---
+
+# Upgrading a blockchain to use IGNITE® CLI v0.20.0
+
+1. Upgrade your Cosmos SDK version to [v0.45.3](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.45.3).
+
+2. Update your `SetOrderBeginBlockers` and `SetOrderEndBlockers` in your `app/app.go` to explicitly add entries for all
+   the modules you use in your chain.
diff --git a/docs/versioned_docs/version-v29/06-migration/v0.22.0.md b/docs/versioned_docs/version-v29/06-migration/v0.22.0.md
new file mode 100644
index 0000000..b2ce2f8
--- /dev/null
+++ b/docs/versioned_docs/version-v29/06-migration/v0.22.0.md
@@ -0,0 +1,36 @@
+---
+sidebar_position: 996
+title: v0.22.0
+description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.22.0, changes are required to use IGNITE® CLI v0.22.0.
+---
+
+# Upgrading a blockchain to use IGNITE® CLI v0.22.0
+
+IGNITE® CLI v0.22.2 changed the GitHub username from "ignite-hq" to "ignite", which means the imports must be fixed to
+reflect this change.
+
+1.
In your `go.mod` file find the require line for IGNITE® CLI that starts with `github.com/ignite-hq/cli` and is
+   followed by a version.
+   It looks something like `github.com/ignite-hq/cli v0.22.0`, and replace it with `github.com/ignite/cli v0.22.2`.
+
+2. Make a bulk find and replace in the import statements for `github.com/ignite-hq/cli` to be replaced
+   by `github.com/ignite/cli`.
+
+3. Finally, run `go mod tidy` and ensure there's no mention of `ignite-hq/cli` in your `go.sum` file.
+
+This update includes an upgrade to the `ibc-go` packages. Please make the corresponding changes:
+
+1. Upgrade your IBC version to [v3](https://github.com/cosmos/ibc-go/releases/tag/v3.0.0).
+
+   1. Search for `github.com/cosmos/ibc-go/v2` in the import statements of your `.go` files and replace `v2` at the end
+      with `v3`
+
+   2. Open your `app.go`,
+
+      - Update your transfer keeper by adding another `app.IBCKeeper.ChannelKeeper` as an argument
+        after `app.IBCKeeper.ChannelKeeper`
+
+      - Define `var transferIBCModule = transfer.NewIBCModule(app.TransferKeeper)` in your `New()` func, and update
+        your existing IBC router to use it: `ibcRouter.AddRoute(ibctransfertypes.ModuleName, transferIBCModule)`
+
+   3. Open your `go.mod` and change the IBC line with `github.com/cosmos/ibc-go/v3 v3.0.0`
diff --git a/docs/versioned_docs/version-v29/06-migration/v0.24.0.md b/docs/versioned_docs/version-v29/06-migration/v0.24.0.md
new file mode 100644
index 0000000..85e42ee
--- /dev/null
+++ b/docs/versioned_docs/version-v29/06-migration/v0.24.0.md
@@ -0,0 +1,330 @@
+---
+sidebar_position: 995
+title: v0.24.0
+description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.24, changes are required to use IGNITE® CLI v0.24.0.
+---
+
+## Cosmos SDK v0.46 upgrade notes
+
+### Update dependencies
+
+Cosmos SDK v0.46 is compatible with the latest version of IBC Go v5. If you have a chain that is using an older version,
+update the dependencies in your project.
+
+Throughout the code you might see the following dependencies:
+
+```go
+package pkg_name
+
+import (
+	"github.com/cosmos/ibc-go/v3/..."
+)
+```
+
+Where `v3` is the version of IBC Go and `...` are different IBC Go packages.
+
+To upgrade the version to `v5`, a global find-and-replace should work. Replace `cosmos/ibc-go/v3` (or whichever version
+you're using) with `cosmos/ibc-go/v5` only in `*.go` files (to exclude unwanted changes to files like `go.sum`).
+
+### Module keeper
+
+Add an import:
+
+```go
+// x/{moduleName}/keeper/keeper.go
+
+package keeper
+
+// ...
+
+import (
+	//...
+	storetypes "github.com/cosmos/cosmos-sdk/store/types"
+)
+```
+
+In the `Keeper` struct replace `sdk.StoreKey` with `storetypes.StoreKey`:
+
+```go
+// x/{moduleName}/keeper/keeper.go
+
+package keeper
+
+// ...
+
+type (
+	Keeper struct {
+		cdc codec.BinaryCodec
+		storeKey storetypes.StoreKey
+		memKey storetypes.StoreKey
+		paramstore paramtypes.Subspace
+	}
+)
+```
+
+In the argument list of the `NewKeeper` function definition:
+
+```go
+package keeper
+
+// ...
+
+// x/{moduleName}/keeper/keeper.go
+
+func NewKeeper(
+	//...
+	memKey storetypes.StoreKey,
+)
+```
+
+Store type aliases have been removed from the Cosmos SDK `types` package and now have to be imported from `store/types`,
+instead.
+
+In the `testutil/keeper/{moduleName}.go` replace `types.StoreKey` with `storetypes.StoreKey` and `types.MemStoreKey`
+with `storetypes.MemStoreKey`.
+
+```go
+// testutil/keeper/{moduleName}.go
+
+package keeper
+
+// ...
+
+func {moduleName}Keeper(t testing.TB) (*keeper.Keeper, sdk.Context) {
+	storeKey := sdk.NewKVStoreKey(storetypes.StoreKey)
+	memStoreKey := storetypes.NewMemoryStoreKey(storetypes.MemStoreKey)
+	//...
+}
+```
+
+### Testutil network package
+
+Add the `require` package for testing and `pruningtypes` and remove `storetypes`:
+
+```go
+// testutil/network/network.go
+
+package network
+
+// ...
+ +import ( + pruningtypes "github.com/cosmos/cosmos-sdk/pruning/types" + "github.com/stretchr/testify/require" + // storetypes "github.com/cosmos/cosmos-sdk/store/types" <-- remove this line +) +``` + +In the `DefaultConfig` function replace `storetypes.NewPruningOptionsFromString` +with `pruningtypes.NewPruningOptionsFromString` + +```go +// testutil/network/network.go + +package network + +// ... + +func DefaultConfig() network.Config { + //... + return network.Config{ + AppConstructor: func(val network.Validator) servertypes.Application { + return app.New( + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.AppConfig.Pruning)), + //... + ) + }, + //... + } +} +``` + +The `New` function in the Cosmos SDK `testutil/network` package now +accepts [three arguments](https://github.com/cosmos/cosmos-sdk/blob/v0.46.0/testutil/network/network.go#L206) instead of +two. + +In the `New` function add `t.TempDir()` as the second argument to `network.New()` and test that no error is thrown +with `require.NoError(t, err)`: + +```go +// testutil/network/network.go + +package network + +// ... + +func New(t *testing.T, configs ...network.Config) *network.Network { + //... + net, err := network.New(t, t.TempDir(), cfg) + require.NoError(t, err) + //... +} +``` + +### Testutil keeper package + +In the `{moduleName}Keeper` function make the following replacements: + +- `storetypes.StoreKey` → `types.StoreKey` +- `storetypes.MemStoreKey` → `types.MemStoreKey` +- `sdk.StoreTypeIAVL` → `storetypes.StoreTypeIAVL` +- `sdk.StoreTypeMemory` → `storetypes.StoreTypeMemory` + +```go +// testutil/keeper/{moduleName}.go + +package keeper + +// ... + +func {moduleName}Keeper(t testing.TB) (*keeper.Keeper, sdk.Context) { + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + //... 
stateStore.MountStoreWithDB(storeKey, storetypes.StoreTypeIAVL, db)
+	stateStore.MountStoreWithDB(memStoreKey, storetypes.StoreTypeMemory, nil)
+	//...
+}
+```
+
+### IBC modules
+
+If you have IBC-enabled modules (for example, added with `ignite scaffold module ... --ibc` or created manually), make
+the following changes to the source code.
+
+Cosmos SDK expects IBC modules
+to [implement the `IBCModule` interface](https://ibc.cosmos.network/main/ibc/apps/ibcmodule/). Create an `IBCModule`
+type that embeds the module's keeper and a method that returns a new `IBCModule`. Methods in this file will be defined
+on this type.
+
+```go
+// x/{moduleName}/module_ibc.go
+
+package module_name
+
+// ...
+
+type IBCModule struct {
+	keeper keeper.Keeper
+}
+
+func NewIBCModule(k keeper.Keeper) IBCModule {
+	return IBCModule{
+		keeper: k,
+	}
+}
+```
+
+Replace receivers for all methods in this file from `(am AppModule)` to `(im IBCModule)`. Replace all instances of `am.`
+with `im.` to fix the errors.
+
+`OnChanOpenInit` now returns two values: a `string` and an `error`:
+
+```go
+// x/{moduleName}/module_ibc.go
+
+package module_name
+
+// ...
+
+func (im IBCModule) OnChanOpenInit( /*...*/ ) (string, error)
+```
+
+Ensure that all return statements (five, in the default template) in `OnChanOpenInit` return two values. For example:
+
+```go
+// x/{moduleName}/module_ibc.go
+
+package module_name
+
+// ...
+
+func (im IBCModule) OnChanOpenInit( /*...*/ ) (string, error) {
+	//...
+	return "", errorsmod.Wrapf(porttypes.ErrInvalidPort, "invalid port: %s, expected %s", portID, boundPort)
+	//...
+}
+```
+
+Error acknowledgments returned from Transfer `OnRecvPacket` now include a deterministic ABCI code and error message.
+Remove the `.Error()` call:
+
+```go
+// x/{moduleName}/module_ibc.go
+
+package module_name
+
+// ...
+
+func (im IBCModule) OnRecvPacket( /*...*/ ) {
+	//...
if err := modulePacketData.Unmarshal(modulePacket.GetData()); err != nil {
+		// return channeltypes.NewErrorAcknowledgement(errorsmod.Wrapf(sdkerrors.ErrUnknownRequest, "cannot unmarshal packet data: %s", err.Error()).Error())
+		return channeltypes.NewErrorAcknowledgement(errorsmod.Wrapf(sdkerrors.ErrUnknownRequest, "cannot unmarshal packet data: %s", err.Error()))
+	}
+
+	// ...
+
+	// Dispatch packet
+	switch packet := modulePacketData.Packet.(type) {
+	// ...
+	default:
+		// errMsg := fmt.Sprintf("unrecognized %s packet type: %T", types.ModuleName, packet)
+		// return channeltypes.NewErrorAcknowledgement(errMsg)
+		err := fmt.Errorf("unrecognized %s packet type: %T", types.ModuleName, packet)
+		return channeltypes.NewErrorAcknowledgement(err)
+	}
+}
+```
+
+After switching to using both `AppModule` and `IBCModule`, modify the following line:
+
+```go
+// x/{moduleName}/module.go
+
+package module_name
+
+// ...
+
+var (
+	//...
+	_ porttypes.IBCModule = IBCModule{} // instead of "= AppModule{}"
+)
+```
+
+### Main
+
+The `Execute` function in Cosmos SDK `server/cmd` package now
+accepts [three arguments](https://github.com/cosmos/cosmos-sdk/blob/v0.46.0/server/cmd/execute.go#L20) instead of two.
+
+```go
+// cmd/{{projectName}}d/main.go
+
+package projectNamed
+
+// ...
+
+func main() {
+	//...
+	if err := svrcmd.Execute(rootCmd, "", app.DefaultNodeHome); err != nil {
+		os.Exit(1)
+	}
+}
+```
+
+### Handler
+
+Cosmos SDK v0.46 no longer needs a `NewHandler` function that was used to handle messages and call appropriate keeper
+methods based on message types. Feel free to remove the `x/{moduleName}/handler.go` file.
+
+Since there is no `NewHandler` now, modify the deprecated `Route` function to return `sdk.Route{}`:
+
+```go
+// x/{moduleName}/module.go
+
+package module_name
+
+// ...
+
+func (am AppModule) Route() sdk.Route { return sdk.Route{} }
+```
diff --git a/docs/versioned_docs/version-v29/06-migration/v0.25.0.md b/docs/versioned_docs/version-v29/06-migration/v0.25.0.md
new file mode 100644
index 0000000..c739d18
--- /dev/null
+++ b/docs/versioned_docs/version-v29/06-migration/v0.25.0.md
@@ -0,0 +1,1187 @@
+---
+sidebar_position: 994
+title: v0.25.0
+description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.25.0, changes are required to use IGNITE® CLI v0.25.0.
+---
+
+## Protobuf directory migration
+
+`v0.25.0` changes the location of scaffolded `.proto` files. Previously, `.proto` files were located in `./proto/{moduleName}/`,
+where `moduleName` is the same name of the Cosmos SDK module found in `./x/{moduleName}/`. This new version of `ignite`
+modifies the scaffolded protobuf files so that they are now generated in `./proto/{appName}/{moduleName}`.
+
+The only change needed is to create an `{appName}` folder in the `proto` directory, and then place the
+sub-directories within it.
An example below demonstrates this change: + +### Previous Directory Structure + +This example shows a chain that was generated using `ignite` with `v0.24.0` using the following command: + +```bash +ignite s chain github.com/cosmos/planet --no-module +ignite s module mars +``` + +```bash +├── app +├── cmd +├── docs +├── proto +│ ├── mars +├── x +│ ├── mars +├── README.md +├── config.yml +├── go.mod +├── go.sum +└── .gitignore +``` + +### `v0.25.0` Directory Structure + +This example shows a chain that was generated using `ignite` with `v0.25.0` using the following command: + +```bash +ignite s chain github.com/cosmos/planet --no-module +ignite s module mars +``` + +```bash +├── app +├── cmd +├── docs +├── proto +│ ├── planet +│ │ ├── mars +├── x +│ ├── mars +├── README.md +├── config.yml +├── go.mod +├── go.sum +└── .gitignore +``` + +The only difference is the additional directory `planet` which is the name of the application. The name of the app can +be verified by checking the package in the `go.mod` file. In this example, the package is `github.com/cosmos/planet` +where `planet` is the app name. + + --- + +## Removing `cosmoscmd` + +`v0.25.0` removes the `cosmoscmd` package from scaffolded chains. This package provided utility for creating +commands and starting up their application. The `cosmoscmd` package is now deprecated, and it is suggested that chains +implement this functionality in their codebase so they can be more easily upgraded and customized. + +The main functionality of `cosmoscmd` will be moved to the `app` package of your chain. Some imports in these +examples contain the sample string, `{ModulePath}`. Replace this string with the Go module path of your blockchain. +For example, if your blockchain module path is `github.com/planet/mars`, `{ModulePath}/app/params` would be become +`github.com/planet/mars/app/params`. 
+ +#### Migration in `app` package + +To begin, create a new file, `./app/params/encoding.go`, containing the following code: + +```go +package params + +import ( + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/codec/types" +) + +// EncodingConfig specifies the concrete encoding types to use for a given app. +// This is provided for compatibility between protobuf and amino implementations. +type EncodingConfig struct { + InterfaceRegistry types.InterfaceRegistry + Marshaler codec.Codec + TxConfig client.TxConfig + Amino *codec.LegacyAmino +} +``` + +Next, create a new file, `./app/encoding.go`, containing the following code: + +```go +package app + +import ( + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/std" + "github.com/cosmos/cosmos-sdk/x/auth/tx" + + "{ModulePath}/app/params" +) + +// makeEncodingConfig creates an EncodingConfig for an amino based test configuration. 
+func makeEncodingConfig() params.EncodingConfig { + amino := codec.NewLegacyAmino() + interfaceRegistry := types.NewInterfaceRegistry() + marshaler := codec.NewProtoCodec(interfaceRegistry) + txCfg := tx.NewTxConfig(marshaler, tx.DefaultSignModes) + + return params.EncodingConfig{ + InterfaceRegistry: interfaceRegistry, + Marshaler: marshaler, + TxConfig: txCfg, + Amino: amino, + } +} + +// MakeEncodingConfig creates an EncodingConfig for testing +func MakeEncodingConfig() params.EncodingConfig { + encodingConfig := makeEncodingConfig() + std.RegisterLegacyAminoCodec(encodingConfig.Amino) + std.RegisterInterfaces(encodingConfig.InterfaceRegistry) + ModuleBasics.RegisterLegacyAminoCodec(encodingConfig.Amino) + ModuleBasics.RegisterInterfaces(encodingConfig.InterfaceRegistry) + return encodingConfig +} +``` + +Next, modify `./app/simulation_test.go` so that it looks like the following: + +```go +package app_test + +import ( + "os" + "testing" + "time" + + "github.com/cosmos/cosmos-sdk/simapp" + simulationtypes "github.com/cosmos/cosmos-sdk/types/simulation" + "github.com/cosmos/cosmos-sdk/x/simulation" + "github.com/stretchr/testify/require" + abci "github.com/tendermint/tendermint/abci/types" + tmproto "github.com/tendermint/tendermint/proto/tendermint/types" + tmtypes "github.com/tendermint/tendermint/types" + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" + + // highlight-next-line + "{ModulePath}/app" +) + +// remove-start +type SimApp interface { + cosmoscmd.App + GetBaseApp() *baseapp.BaseApp + AppCodec() codec.Codec + SimulationManager() *module.SimulationManager + ModuleAccountAddrs() map[string]bool + Name() string + LegacyAmino() *codec.LegacyAmino + BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) + abci.ResponseBeginBlock + EndBlocker(ctx sdk.Context, req abci.RequestEndBlock) + abci.ResponseEndBlock + InitChainer(ctx sdk.Context, req abci.RequestInitChain) + abci.ResponseInitChain +} + +// remove-end + +// ... 
+ +// BenchmarkSimulation run the chain simulation +// Running using starport command: +// `starport chain simulate -v --numBlocks 200 --blockSize 50` +// Running as go benchmark test: +// `go test -benchmem -run=^$ -bench ^BenchmarkSimulation ./app -NumBlocks=200 -BlockSize 50 -Commit=true -Verbose=true -Enabled=true` +func BenchmarkSimulation(b *testing.B) { + + // ... + + // remove-next-line + encoding := cosmoscmd.MakeEncodingConfig(app.ModuleBasics) + // highlight-next-line + encoding := app.MakeEncodingConfig() + + app := app.New( + logger, + db, + nil, + true, + map[int64]bool{}, + app.DefaultNodeHome, + 0, + encoding, + simapp.EmptyAppOptions{}, + ) + + // remove-start + simApp, ok := app.(SimApp) + require.True(b, ok, "can't use simapp") + // remove-end + + // Run randomized simulations + _, simParams, simErr := simulation.SimulateFromSeed( + b, + os.Stdout, + // highlight-next-line + app.BaseApp, + // highlight-next-line + simapp.AppStateFn(app.AppCodec(), app.SimulationManager()), + simulationtypes.RandomAccounts, + // highlight-next-line + simapp.SimulationOperations(app, app.AppCodec(), config), + // highlight-next-line + app.ModuleAccountAddrs(), + config, + // highlight-next-line + app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + // highlight-next-line + err = simapp.CheckExportSimulation(app, config, simParams) + require.NoError(b, err) + require.NoError(b, simErr) + + // ... +} +``` + +The main changes here are that the `SimApp` interface has been removed and is being replaced with `app`. + +The final modification in the `app` package is in `app/app.go`: + +```go +package app + +import ( + // ... + + // this line is used by starport scaffolding # stargate/app/moduleImport + + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" + + // highlight-start + appparams "{ModulePath}/app/params" + "{ModulePath}/docs" + // highlight-end +) + +// ... 
+ +var ( + // remove-next-line + _ cosmoscmd.App = (*App)(nil) + _ servertypes.Application = (*App)(nil) + _ simapp.App = (*App)(nil) +) + +// ... + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + // highlight-next-line + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), + // highlight-next-line +) *App { + appCodec := encodingConfig.Marshaler + cdc := encodingConfig.Amino + interfaceRegistry := encodingConfig.InterfaceRegistry + + bApp := baseapp.NewBaseApp( + Name, + logger, + db, + encodingConfig.TxConfig.TxDecoder(), + baseAppOptions..., + ) + + // ... + +} + +// ... + +// Name returns the name of the App +func (app *App) Name() string { return app.BaseApp.Name() } + +// remove-start +// GetBaseApp returns the base app of the application +func (app App) GetBaseApp() *baseapp.BaseApp { return app.BaseApp } + +// remove-end + +// BeginBlocker application updates every begin block +func (app *App) BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) abci.ResponseBeginBlock { + return app.mm.BeginBlock(ctx, req) +} + +// ... +``` + +Again, here we are removing the use of `cosmoscmd` and replacing it with `app`. + +#### Migration in `cmd` package + +Some imports in these +examples contain the sample string, `{binaryNamePrefix}d`. Replace this string with the binary name of your blockchain. +For example, if your blockchain module path is `github.com/planet/mars`, `./cmd/{binaryNamePrefix}d/cmd/` would be +become `./cmd/marsd/cmd/`. 
+ +First, create the new file `./cmd/{binaryNamePrefix}d/cmd/config.go` with the following code: + +```go +package cmd + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + + "{ModulePath}/app" +) + +func initSDKConfig() { + // Set prefixes + accountPubKeyPrefix := app.AccountAddressPrefix + "pub" + validatorAddressPrefix := app.AccountAddressPrefix + "valoper" + validatorPubKeyPrefix := app.AccountAddressPrefix + "valoperpub" + consNodeAddressPrefix := app.AccountAddressPrefix + "valcons" + consNodePubKeyPrefix := app.AccountAddressPrefix + "valconspub" + + // Set and seal config + config := sdk.GetConfig() + config.SetBech32PrefixForAccount(app.AccountAddressPrefix, accountPubKeyPrefix) + config.SetBech32PrefixForValidator(validatorAddressPrefix, validatorPubKeyPrefix) + config.SetBech32PrefixForConsensusNode(consNodeAddressPrefix, consNodePubKeyPrefix) + config.Seal() +} +``` + +Next, create the new file `./cmd/{binaryNamePrefix}d/cmd/genaccounts.go` with the following code: + +```go +package cmd + +import ( + "bufio" + "encoding/json" + "errors" + "fmt" + + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/crypto/keyring" + "github.com/cosmos/cosmos-sdk/server" + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + authvesting "github.com/cosmos/cosmos-sdk/x/auth/vesting/types" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/cosmos/cosmos-sdk/x/genutil" + genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" + "github.com/spf13/cobra" +) + +const ( + flagVestingStart = "vesting-start-time" + flagVestingEnd = "vesting-end-time" + flagVestingAmt = "vesting-amount" +) + +// AddGenesisAccountCmd returns add-genesis-account cobra Command. 
+func AddGenesisAccountCmd(defaultNodeHome string) *cobra.Command { + cmd := &cobra.Command{ + Use: "add-genesis-account [address_or_key_name] [coin][,[coin]]", + Short: "Add a genesis account to genesis.json", + Long: `Add a genesis account to genesis.json. The provided account must specify +the account address or key name and a list of initial coins. If a key name is given, +the address will be looked up in the local Keybase. The list of initial tokens must +contain valid denominations. Accounts may optionally be supplied with vesting parameters. +`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + clientCtx := client.GetClientContextFromCmd(cmd) + cdc := clientCtx.Codec + + serverCtx := server.GetServerContextFromCmd(cmd) + config := serverCtx.Config + + config.SetRoot(clientCtx.HomeDir) + + coins, err := sdk.ParseCoinsNormalized(args[1]) + if err != nil { + return fmt.Errorf("failed to parse coins: %w", err) + } + + addr, err := sdk.AccAddressFromBech32(args[0]) + if err != nil { + inBuf := bufio.NewReader(cmd.InOrStdin()) + keyringBackend, err := cmd.Flags().GetString(flags.FlagKeyringBackend) + if err != nil { + return err + } + + // attempt to lookup address from Keybase if no address was provided + kb, err := keyring.New(sdk.KeyringServiceName(), keyringBackend, clientCtx.HomeDir, inBuf, cdc) + if err != nil { + return err + } + + info, err := kb.Key(args[0]) + if err != nil { + return fmt.Errorf("failed to get address from Keybase: %w", err) + } + + addr, err = info.GetAddress() + if err != nil { + return fmt.Errorf("failed to get address from Keybase: %w", err) + } + } + + vestingStart, err := cmd.Flags().GetInt64(flagVestingStart) + if err != nil { + return err + } + vestingEnd, err := cmd.Flags().GetInt64(flagVestingEnd) + if err != nil { + return err + } + vestingAmtStr, err := cmd.Flags().GetString(flagVestingAmt) + if err != nil { + return err + } + + vestingAmt, err := sdk.ParseCoinsNormalized(vestingAmtStr) + if 
err != nil { + return fmt.Errorf("failed to parse vesting amount: %w", err) + } + + // create concrete account type based on input parameters + var genAccount authtypes.GenesisAccount + + balances := banktypes.Balance{Address: addr.String(), Coins: coins.Sort()} + baseAccount := authtypes.NewBaseAccount(addr, nil, 0, 0) + + if !vestingAmt.IsZero() { + baseVestingAccount := authvesting.NewBaseVestingAccount(baseAccount, vestingAmt.Sort(), vestingEnd) + + if (balances.Coins.IsZero() && !baseVestingAccount.OriginalVesting.IsZero()) || + baseVestingAccount.OriginalVesting.IsAnyGT(balances.Coins) { + return errors.New("vesting amount cannot be greater than total amount") + } + + switch { + case vestingStart != 0 && vestingEnd != 0: + genAccount = authvesting.NewContinuousVestingAccountRaw(baseVestingAccount, vestingStart) + + case vestingEnd != 0: + genAccount = authvesting.NewDelayedVestingAccountRaw(baseVestingAccount) + + default: + return errors.New("invalid vesting parameters; must supply start and end time or end time") + } + } else { + genAccount = baseAccount + } + + if err := genAccount.Validate(); err != nil { + return fmt.Errorf("failed to validate new genesis account: %w", err) + } + + genFile := config.GenesisFile() + appState, genDoc, err := genutiltypes.GenesisStateFromGenFile(genFile) + if err != nil { + return fmt.Errorf("failed to unmarshal genesis state: %w", err) + } + + authGenState := authtypes.GetGenesisStateFromAppState(cdc, appState) + + accs, err := authtypes.UnpackAccounts(authGenState.Accounts) + if err != nil { + return fmt.Errorf("failed to get accounts from any: %w", err) + } + + if accs.Contains(addr) { + return fmt.Errorf("cannot add account at existing address %s", addr) + } + + // Add the new account to the set of genesis accounts and sanitize the + // accounts afterwards. 
+ accs = append(accs, genAccount) + accs = authtypes.SanitizeGenesisAccounts(accs) + + genAccs, err := authtypes.PackAccounts(accs) + if err != nil { + return fmt.Errorf("failed to convert accounts into any's: %w", err) + } + authGenState.Accounts = genAccs + + authGenStateBz, err := cdc.MarshalJSON(&authGenState) + if err != nil { + return fmt.Errorf("failed to marshal auth genesis state: %w", err) + } + + appState[authtypes.ModuleName] = authGenStateBz + + bankGenState := banktypes.GetGenesisStateFromAppState(cdc, appState) + bankGenState.Balances = append(bankGenState.Balances, balances) + bankGenState.Balances = banktypes.SanitizeGenesisBalances(bankGenState.Balances) + + bankGenStateBz, err := cdc.MarshalJSON(bankGenState) + if err != nil { + return fmt.Errorf("failed to marshal bank genesis state: %w", err) + } + + appState[banktypes.ModuleName] = bankGenStateBz + + appStateJSON, err := json.Marshal(appState) + if err != nil { + return fmt.Errorf("failed to marshal application genesis state: %w", err) + } + + genDoc.AppState = appStateJSON + return genutil.ExportGenesisFile(genDoc, genFile) + }, + } + + cmd.Flags().String(flags.FlagKeyringBackend, flags.DefaultKeyringBackend, "Select keyring's backend (os|file|kwallet|pass|test)") + cmd.Flags().String(flags.FlagHome, defaultNodeHome, "The application home directory") + cmd.Flags().String(flagVestingAmt, "", "amount of coins for vesting accounts") + cmd.Flags().Int64(flagVestingStart, 0, "schedule start time (unix epoch) for vesting accounts") + cmd.Flags().Int64(flagVestingEnd, 0, "schedule end time (unix epoch) for vesting accounts") + flags.AddQueryFlagsToCmd(cmd) + + return cmd +} +``` + +This command allows one to generate new accounts: `appd add-genesis-account`. 
+ +Next, create the new file `./cmd/{binaryNamePrefix}d/cmd/root.go` with the following code: + +```go +package cmd + +import ( + "errors" + "io" + "os" + "path/filepath" + "strings" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/config" + "github.com/cosmos/cosmos-sdk/client/debug" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/client/keys" + "github.com/cosmos/cosmos-sdk/client/rpc" + "github.com/cosmos/cosmos-sdk/server" + serverconfig "github.com/cosmos/cosmos-sdk/server/config" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + "github.com/cosmos/cosmos-sdk/snapshots" + snapshottypes "github.com/cosmos/cosmos-sdk/snapshots/types" + "github.com/cosmos/cosmos-sdk/store" + sdk "github.com/cosmos/cosmos-sdk/types" + authcmd "github.com/cosmos/cosmos-sdk/x/auth/client/cli" + "github.com/cosmos/cosmos-sdk/x/auth/types" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/cosmos/cosmos-sdk/x/crisis" + genutilcli "github.com/cosmos/cosmos-sdk/x/genutil/client/cli" + "github.com/ignite/cli/ignite/services/network" + "github.com/spf13/cast" + "github.com/spf13/cobra" + "github.com/spf13/pflag" + tmcfg "github.com/tendermint/tendermint/config" + tmcli "github.com/tendermint/tendermint/libs/cli" + "github.com/tendermint/tendermint/libs/log" + dbm "github.com/tendermint/tm-db" + // this line is used by starport scaffolding # root/moduleImport + + "{ModulePath}/app" + appparams "{ModulePath}/app/params" +) + +// NewRootCmd creates a new root command for a Cosmos SDK application +func NewRootCmd() (*cobra.Command, appparams.EncodingConfig) { + encodingConfig := app.MakeEncodingConfig() + initClientCtx := client.Context{}. + WithCodec(encodingConfig.Marshaler). + WithInterfaceRegistry(encodingConfig.InterfaceRegistry). + WithTxConfig(encodingConfig.TxConfig). + WithLegacyAmino(encodingConfig.Amino). + WithInput(os.Stdin). 
+ WithAccountRetriever(types.AccountRetriever{}). + WithHomeDir(app.DefaultNodeHome). + WithViper("") + + rootCmd := &cobra.Command{ + Use: app.Name + "d", + Short: "Stargate CosmosHub App", + PersistentPreRunE: func(cmd *cobra.Command, _ []string) error { + // set the default command outputs + cmd.SetOut(cmd.OutOrStdout()) + cmd.SetErr(cmd.ErrOrStderr()) + initClientCtx, err := client.ReadPersistentCommandFlags(initClientCtx, cmd.Flags()) + if err != nil { + return err + } + initClientCtx, err = config.ReadFromClientConfig(initClientCtx) + if err != nil { + return err + } + + if err := client.SetCmdClientContextHandler(initClientCtx, cmd); err != nil { + return err + } + + customAppTemplate, customAppConfig := initAppConfig() + customTMConfig := initTendermintConfig() + return server.InterceptConfigsPreRunHandler( + cmd, customAppTemplate, customAppConfig, customTMConfig, + ) + }, + } + + initRootCmd(rootCmd, encodingConfig) + overwriteFlagDefaults(rootCmd, map[string]string{ + flags.FlagChainID: strings.ReplaceAll(app.Name, "-", ""), + flags.FlagKeyringBackend: "test", + }) + + return rootCmd, encodingConfig +} + +// initTendermintConfig helps to override default Tendermint Config values. +// return tmcfg.DefaultConfig if no custom configuration is required for the application. 
+func initTendermintConfig() *tmcfg.Config { + cfg := tmcfg.DefaultConfig() + return cfg +} + +func initRootCmd( + rootCmd *cobra.Command, + encodingConfig appparams.EncodingConfig, +) { + // Set config + initSDKConfig() + + rootCmd.AddCommand( + genutilcli.InitCmd(app.ModuleBasics, app.DefaultNodeHome), + genutilcli.CollectGenTxsCmd(banktypes.GenesisBalancesIterator{}, app.DefaultNodeHome), + genutilcli.MigrateGenesisCmd(), + genutilcli.GenTxCmd( + app.ModuleBasics, + encodingConfig.TxConfig, + banktypes.GenesisBalancesIterator{}, + app.DefaultNodeHome, + ), + genutilcli.ValidateGenesisCmd(app.ModuleBasics), + AddGenesisAccountCmd(app.DefaultNodeHome), + tmcli.NewCompletionCmd(rootCmd, true), + debug.Cmd(), + config.Cmd(), + // this line is used by starport scaffolding # root/commands + ) + + a := appCreator{ + encodingConfig, + } + + // add server commands + server.AddCommands( + rootCmd, + app.DefaultNodeHome, + a.newApp, + a.appExport, + addModuleInitFlags, + ) + + // add keybase, auxiliary RPC, query, and tx child commands + rootCmd.AddCommand( + rpc.StatusCommand(), + queryCommand(), + txCommand(), + keys.Commands(app.DefaultNodeHome), + ) +} + +// queryCommand returns the sub-command to send queries to the app +func queryCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "query", + Aliases: []string{"q"}, + Short: "Querying subcommands", + DisableFlagParsing: true, + SuggestionsMinimumDistance: 2, + RunE: client.ValidateCmd, + } + + cmd.AddCommand( + authcmd.GetAccountCmd(), + rpc.ValidatorCommand(), + rpc.BlockCommand(), + authcmd.QueryTxsByEventsCmd(), + authcmd.QueryTxCmd(), + ) + + app.ModuleBasics.AddQueryCommands(cmd) + cmd.PersistentFlags().String(flags.FlagChainID, "", "The network chain ID") + + return cmd +} + +// txCommand returns the sub-command to send transactions to the app +func txCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "tx", + Short: "Transactions subcommands", + DisableFlagParsing: true, + 
SuggestionsMinimumDistance: 2, + RunE: client.ValidateCmd, + } + + cmd.AddCommand( + authcmd.GetSignCommand(), + authcmd.GetSignBatchCommand(), + authcmd.GetMultiSignCommand(), + authcmd.GetValidateSignaturesCommand(), + flags.LineBreak, + authcmd.GetBroadcastCommand(), + authcmd.GetEncodeCommand(), + authcmd.GetDecodeCommand(), + ) + + app.ModuleBasics.AddTxCommands(cmd) + cmd.PersistentFlags().String(flags.FlagChainID, "", "The network chain ID") + + return cmd +} + +func addModuleInitFlags(startCmd *cobra.Command) { + crisis.AddModuleInitFlags(startCmd) + // this line is used by starport scaffolding # root/arguments +} + +func overwriteFlagDefaults(c *cobra.Command, defaults map[string]string) { + set := func(s *pflag.FlagSet, key, val string) { + if f := s.Lookup(key); f != nil { + f.DefValue = val + f.Value.Set(val) + } + } + for key, val := range defaults { + set(c.Flags(), key, val) + set(c.PersistentFlags(), key, val) + } + for _, c := range c.Commands() { + overwriteFlagDefaults(c, defaults) + } +} + +type appCreator struct { + encodingConfig appparams.EncodingConfig +} + +// newApp creates a new Cosmos SDK app +func (a appCreator) newApp( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + appOpts servertypes.AppOptions, +) servertypes.Application { + var cache sdk.MultiStorePersistentCache + + if cast.ToBool(appOpts.Get(server.FlagInterBlockCache)) { + cache = store.NewCommitKVStoreCacheManager() + } + + skipUpgradeHeights := make(map[int64]bool) + for _, h := range cast.ToIntSlice(appOpts.Get(server.FlagUnsafeSkipUpgrades)) { + skipUpgradeHeights[int64(h)] = true + } + + pruningOpts, err := server.GetPruningOptionsFromFlags(appOpts) + if err != nil { + panic(err) + } + + snapshotDir := filepath.Join(cast.ToString(appOpts.Get(flags.FlagHome)), "data", "snapshots") + snapshotDB, err := dbm.NewDB("metadata", dbm.GoLevelDBBackend, snapshotDir) + if err != nil { + panic(err) + } + snapshotStore, err := snapshots.NewStore(snapshotDB, snapshotDir) + if 
err != nil { + panic(err) + } + + snapshotOptions := snapshottypes.NewSnapshotOptions( + cast.ToUint64(appOpts.Get(server.FlagStateSyncSnapshotInterval)), + cast.ToUint32(appOpts.Get(server.FlagStateSyncSnapshotKeepRecent)), + ) + + return app.New( + logger, + db, + traceStore, + true, + skipUpgradeHeights, + cast.ToString(appOpts.Get(flags.FlagHome)), + cast.ToUint(appOpts.Get(server.FlagInvCheckPeriod)), + a.encodingConfig, + appOpts, + baseapp.SetPruning(pruningOpts), + baseapp.SetMinGasPrices(cast.ToString(appOpts.Get(server.FlagMinGasPrices))), + baseapp.SetMinRetainBlocks(cast.ToUint64(appOpts.Get(server.FlagMinRetainBlocks))), + baseapp.SetHaltHeight(cast.ToUint64(appOpts.Get(server.FlagHaltHeight))), + baseapp.SetHaltTime(cast.ToUint64(appOpts.Get(server.FlagHaltTime))), + baseapp.SetInterBlockCache(cache), + baseapp.SetTrace(cast.ToBool(appOpts.Get(server.FlagTrace))), + baseapp.SetIndexEvents(cast.ToStringSlice(appOpts.Get(server.FlagIndexEvents))), + baseapp.SetSnapshot(snapshotStore, snapshotOptions), + ) +} + +// appExport creates a new simapp (optionally at a given height) +func (a appCreator) appExport( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + height int64, + forZeroHeight bool, + jailAllowedAddrs []string, + appOpts servertypes.AppOptions, +) (servertypes.ExportedApp, error) { + homePath, ok := appOpts.Get(flags.FlagHome).(string) + if !ok || homePath == "" { + return servertypes.ExportedApp{}, errors.New("application home not set") + } + + app := app.New( + logger, + db, + traceStore, + height == -1, // -1: no height provided + map[int64]bool{}, + homePath, + uint(1), + a.encodingConfig, + appOpts, + ) + + if height != -1 { + if err := app.LoadHeight(height); err != nil { + return servertypes.ExportedApp{}, err + } + } + + return app.ExportAppStateAndValidators(forZeroHeight, jailAllowedAddrs) +} + +// initAppConfig helps to override default appConfig template and configs. 
+// return "", nil if no custom configuration is required for the application. +func initAppConfig() (string, interface{}) { + // The following code snippet is just for reference. + + // WASMConfig defines configuration for the wasm module. + type WASMConfig struct { + // This is the maximum sdk gas (wasm and storage) that we allow for any x/wasm "smart" queries + QueryGasLimit uint64 `mapstructure:"query_gas_limit"` + + // Address defines the gRPC-web server to listen on + LruSize uint64 `mapstructure:"lru_size"` + } + + type CustomAppConfig struct { + serverconfig.Config + + WASM WASMConfig `mapstructure:"wasm"` + } + + // Optionally allow the chain developer to overwrite the SDK's default + // server config. + srvCfg := serverconfig.DefaultConfig() + // The SDK's default minimum gas price is set to "" (empty value) inside + // app.toml. If left empty by validators, the node will halt on startup. + // However, the chain developer can set a default app.toml value for their + // validators here. + // + // In summary: + // - if you leave srvCfg.MinGasPrices = "", all validators MUST tweak their + // own app.toml config, + // - if you set srvCfg.MinGasPrices non-empty, validators CAN tweak their + // own app.toml to override, or use this default value. + // + // In simapp, we set the min gas prices to 0. 
+ srvCfg.MinGasPrices = "0stake" + + customAppConfig := CustomAppConfig{ + Config: *srvCfg, + WASM: WASMConfig{ + LruSize: 1, + QueryGasLimit: 300000, + }, + } + + customAppTemplate := serverconfig.DefaultConfigTemplate + ` +[wasm] +# This is the maximum sdk gas (wasm and storage) that we allow for any x/wasm "smart" queries +query_gas_limit = 300000 +# This is the number of wasm vm instances we keep cached in memory for speed-up +# Warning: this is currently unstable and may lead to crashes, best to keep for 0 unless testing locally +lru_size = 0` + + return customAppTemplate, customAppConfig +} +``` + +Finally, modify `./cmd/{binaryNamePrefix}d/main.go` to include the new changes: + +```go +package main + +import ( + "os" + + "github.com/cosmos/cosmos-sdk/server" + svrcmd "github.com/cosmos/cosmos-sdk/server/cmd" + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" + + "{ModulePath}/app" + "{ModulePath}/cmd/{BinaryNamePrefix}d/cmd" +) + +func main() { + // highlight-start + rootCmd, _ := cmd.NewRootCmd() + if err := svrcmd.Execute(rootCmd, "", app.DefaultNodeHome); err != nil { + switch e := err.(type) { + case server.ErrorCode: + os.Exit(e.Code) + + default: + os.Exit(1) + } + } + // highlight-end +} +``` + +#### Migration in `testutil` package + +Modify `./testutil/network/network.go` to include the new changes: + + +```go +package network + +import ( + "fmt" + "testing" + "time" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/crypto/hd" + "github.com/cosmos/cosmos-sdk/crypto/keyring" + pruningtypes "github.com/cosmos/cosmos-sdk/pruning/types" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + "github.com/cosmos/cosmos-sdk/simapp" + "github.com/cosmos/cosmos-sdk/testutil/network" + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + "github.com/stretchr/testify/require" + tmrand "github.com/tendermint/tendermint/libs/rand" + tmdb "github.com/tendermint/tm-db" + + 
// highlight-next-line + "{ModulePath}/app" + + // remove-next-line + "github.com/ignite/cli/ignite/pkg/cosmoscmd" +) + +// ... + +// DefaultConfig will initialize config for the network with custom application, +// genesis and single validator. All other parameters are inherited from cosmos-sdk/testutil/network.DefaultConfig +func DefaultConfig() network.Config { + // highlight-next-line + encoding := app.MakeEncodingConfig() + // remove-next-line + encoding := cosmoscmd.MakeEncodingConfig(app.ModuleBasics) + return network.Config{ + Codec: encoding.Marshaler, + TxConfig: encoding.TxConfig, + LegacyAmino: encoding.Amino, + InterfaceRegistry: encoding.InterfaceRegistry, + AccountRetriever: authtypes.AccountRetriever{}, + AppConstructor: func(val network.Validator) servertypes.Application { + return app.New( + val.Ctx.Logger, tmdb.NewMemDB(), nil, true, map[int64]bool{}, val.Ctx.Config.RootDir, 0, + encoding, + simapp.EmptyAppOptions{}, + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.AppConfig.Pruning)), + baseapp.SetMinGasPrices(val.AppConfig.MinGasPrices), + ) + }, + GenesisState: app.ModuleBasics.DefaultGenesis(encoding.Marshaler), + TimeoutCommit: 2 * time.Second, + ChainID: "chain-" + tmrand.NewRand().Str(6), + NumValidators: 1, + BondDenom: sdk.DefaultBondDenom, + MinGasPrices: fmt.Sprintf("0.000006%s", sdk.DefaultBondDenom), + AccountTokens: sdk.TokensFromConsensusPower(1000, sdk.DefaultPowerReduction), + StakingTokens: sdk.TokensFromConsensusPower(500, sdk.DefaultPowerReduction), + BondedTokens: sdk.TokensFromConsensusPower(100, sdk.DefaultPowerReduction), + PruningStrategy: pruningtypes.PruningOptionNothing, + CleanupDir: true, + SigningAlgo: string(hd.Secp256k1Type), + KeyringOptions: []keyring.Option{}, + } +} +``` + + --- + +## Fix ICA controller keeper wiring + +Related issue: https://github.com/ignite/cli/issues/2867 + +Apply the following changes to `app/app.go` file : + +```go +package app + +import ( + + // highlight-start + 
icacontrollerkeeper "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts/controller/keeper" + icacontrollertypes "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts/controller/types" + // highlight-end + // ... +) + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + + // ... + + keys := sdk.NewKVStoreKeys( + authtypes.StoreKey, authz.ModuleName, banktypes.StoreKey, + stakingtypes.StoreKey, + minttypes.StoreKey, distrtypes.StoreKey, slashingtypes.StoreKey, + govtypes.StoreKey, + paramstypes.StoreKey, ibchost.StoreKey, upgradetypes.StoreKey, + feegrant.StoreKey, evidencetypes.StoreKey, + ibctransfertypes.StoreKey, icahosttypes.StoreKey, + capabilitytypes.StoreKey, group.StoreKey, + // highlight-next-line + icacontrollertypes.StoreKey, + yourchainmoduletypes.StoreKey, + // this line is used by starport scaffolding # stargate/app/storeKey + ) + + // ... + + // remove-next-line + icaModule := ica.NewAppModule(nil, &app.ICAHostKeeper) + // highlight-start + icaControllerKeeper := icacontrollerkeeper.NewKeeper( + appCodec, keys[icacontrollertypes.StoreKey], + app.GetSubspace(icacontrollertypes.SubModuleName), + app.IBCKeeper.ChannelKeeper, // may be replaced with middleware such as ics29 fee + app.IBCKeeper.ChannelKeeper, &app.IBCKeeper.PortKeeper, + scopedICAControllerKeeper, app.MsgServiceRouter(), + ) + icaModule := ica.NewAppModule(&icaControllerKeeper, &app.ICAHostKeeper) + // highlight-end + icaHostIBCModule := icahost.NewIBCModule(app.ICAHostKeeper) + + // ... +} + +// ... 
+ +// initParamsKeeper init params keeper and its subspaces +func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey storetypes.StoreKey) paramskeeper.Keeper { + paramsKeeper := paramskeeper.NewKeeper(appCodec, legacyAmino, key, tkey) + + paramsKeeper.Subspace(authtypes.ModuleName) + paramsKeeper.Subspace(banktypes.ModuleName) + paramsKeeper.Subspace(stakingtypes.ModuleName) + paramsKeeper.Subspace(minttypes.ModuleName) + paramsKeeper.Subspace(distrtypes.ModuleName) + paramsKeeper.Subspace(slashingtypes.ModuleName) + paramsKeeper.Subspace(govtypes.ModuleName).WithKeyTable(govv1.ParamKeyTable()) + paramsKeeper.Subspace(crisistypes.ModuleName) + paramsKeeper.Subspace(ibctransfertypes.ModuleName) + paramsKeeper.Subspace(ibchost.ModuleName) + // highlight-next-line + paramsKeeper.Subspace(icacontrollertypes.SubModuleName) + paramsKeeper.Subspace(icahosttypes.SubModuleName) + paramsKeeper.Subspace(mychainmoduletypes.ModuleName) + // this line is used by starport scaffolding # stargate/app/paramSubspace + + return paramsKeeper +} +``` + + --- + +## Fix capability keeper not sealed + +Related issue: https://github.com/ignite/cli/issues/1921 + +Apply the following change to `app/app.go` file : + +```go +package app + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + + // ... + + // this line is used by starport scaffolding # stargate/app/keeperDefinition + + // highlight-start + // Sealing prevents other modules from creating scoped sub-keepers + app.CapabilityKeeper.Seal() + // highlight-end + + // Create static IBC router, add transfer route, then set and seal it + + // ... 
+ +} +``` diff --git a/docs/versioned_docs/version-v29/06-migration/v0.25.1.md b/docs/versioned_docs/version-v29/06-migration/v0.25.1.md new file mode 100644 index 0000000..b231522 --- /dev/null +++ b/docs/versioned_docs/version-v29/06-migration/v0.25.1.md @@ -0,0 +1,67 @@ +--- +sidebar_position: 993 +title: v0.25.1 +description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.25.1, changes are required to use IGNITE® CLI v0.25.1. +--- + +## Dragonberry fix + +`v0.25.1` contains the Dragonberry fix, update your `go.mod` as follows: + +```sh +require ( + // remove-next-line + github.com/ignite/cli v0.24.0 + // highlight-next-line + github.com/ignite/cli v0.25.1 +) + +// highlight-next-line +replace github.com/confio/ics23/go => github.com/cosmos/cosmos-sdk/ics23/go v0.8.0 +``` + +Then run: + +``` +$ go mod tidy +``` + +As a result, you should see `cosmos-sdk` and `ibc-go` upgraded as well. + +Finally, apply the following change to `app/app.go`: + +```go +package app + +// New returns a reference to an initialized blockchain app +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + + // ... + + app.ICAHostKeeper = icahostkeeper.NewKeeper( + appCodec, keys[icahosttypes.StoreKey], + app.GetSubspace(icahosttypes.SubModuleName), + app.IBCKeeper.ChannelKeeper, + // highlight-next-line + app.IBCKeeper.ChannelKeeper, + &app.IBCKeeper.PortKeeper, + app.AccountKeeper, + scopedICAHostKeeper, + app.MsgServiceRouter(), + ) + + // ... 
+ +} +``` diff --git a/docs/versioned_docs/version-v29/06-migration/v0.26.0.md b/docs/versioned_docs/version-v29/06-migration/v0.26.0.md new file mode 100644 index 0000000..3a7301b --- /dev/null +++ b/docs/versioned_docs/version-v29/06-migration/v0.26.0.md @@ -0,0 +1,263 @@ +--- +sidebar_position: 992 +title: v0.26.0 +description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.26.0. changes are required to use IGNITE® CLI v0.26.0. +--- + +IGNITE® CLI `v0.26.0` is fully compatible with chains that are compatible with `v0.25.1`. Please follow the existing +migration guides if your chain is not upgraded to `v0.25.1` support. + +## Go Version + +Chains that are newly scaffolded with IGNITE® CLI `v0.26.0` now require `go 1.19` in their `go.mod` files. It is +recommended that chains scaffolded with an older version of IGNITE® CLI also bump their required `go` version and update +their tooling to the latest version. + +## ibc-go v6 + +Chains that are newly scaffolded with IGNITE® CLI `v0.26.0` now use `ibc-go/v6` for ibc functionality. It is not +necessary, but recommended to upgrade to the newest version of `ibc-go`. Most migrations can be done by following the +`ibc-go` [migration guide](https://github.com/cosmos/ibc-go/blob/v6.2.0/docs/migrations/v5-to-v6.md), but there are some +specific changes that will need to be followed for IGNITE® scaffolded chains. + +### Removing `cosmosibckeeper` + +IGNITE® CLI `v0.26.0` has deprecated [pkg/cosmosibckeeper](https://github.com/ignite/cli/tree/v0.26.0/ignite/pkg/cosmosibckeeper). +This package contained interfaces for ibc-related keepers. Newly scaffolded chains now include the interface files in their +`./x/{moduleName}/types` directory in a new `expected_ibc_keeper.go` file. 
To migrate, create the following file for +each module: + +```go title="x/{moduleName}/types/expected_ibc_keeper.go" +package types + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + clienttypes "github.com/cosmos/ibc-go/v6/modules/core/02-client/types" + channeltypes "github.com/cosmos/ibc-go/v6/modules/core/04-channel/types" +) + +// ChannelKeeper defines the expected IBC channel keeper. +type ChannelKeeper interface { + GetChannel(ctx sdk.Context, portID, channelID string) (channeltypes.Channel, bool) + GetNextSequenceSend(ctx sdk.Context, portID, channelID string) (uint64, bool) + SendPacket( + ctx sdk.Context, + channelCap *capabilitytypes.Capability, + sourcePort string, + sourceChannel string, + timeoutHeight clienttypes.Height, + timeoutTimestamp uint64, + data []byte, + ) (uint64, error) + ChanCloseInit(ctx sdk.Context, portID, channelID string, chanCap *capabilitytypes.Capability) error +} + +// PortKeeper defines the expected IBC port keeper. +type PortKeeper interface { + BindPort(ctx sdk.Context, portID string) *capabilitytypes.Capability +} + +// ScopedKeeper defines the expected IBC scoped keeper. 
+type ScopedKeeper interface { + GetCapability(ctx sdk.Context, name string) (*capabilitytypes.Capability, bool) + AuthenticateCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) bool + ClaimCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) error +} +``` + +Next, make the following updates to each `x/{moduleName}/keeper/keeper.go` file for each ibc-enabled +module in your project: + +```go title="x/{moduleName}/keeper/keeper.go" +package keeper + +import ( + "fmt" + + // remove-start + "blogibc/x/testibc/types" + "github.com/cosmos/cosmos-sdk/codec" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" + "github.com/ignite/cli/ignite/pkg/cosmosibckeeper" + "github.com/tendermint/tendermint/libs/log" + // remove-end + // highlight-start + "github.com/cosmos/cosmos-sdk/codec" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" + channeltypes "github.com/cosmos/ibc-go/v6/modules/core/04-channel/types" + host "github.com/cosmos/ibc-go/v6/modules/core/24-host" + "github.com/cosmos/ibc-go/v6/modules/core/exported" + "github.com/tendermint/tendermint/libs/log" + + "{appName}/x/{moduleName}/types" + // highlight-end +) + +type ( + Keeper struct { + // remove-next-line + *cosmosibckeeper.Keeper + cdc codec.BinaryCodec + storeKey storetypes.StoreKey + memKey storetypes.StoreKey + paramstore paramtypes.Subspace + + // highlight-start + channelKeeper types.ChannelKeeper + portKeeper types.PortKeeper + scopedKeeper exported.ScopedKeeper + // highlight-end + } +) + +func NewKeeper( + cdc codec.BinaryCodec, + storeKey, + memKey storetypes.StoreKey, + ps paramtypes.Subspace, + // highlight-start + 
channelKeeper types.ChannelKeeper, + portKeeper types.PortKeeper, + scopedKeeper types.ScopedKeeper, + // highlight-end +) *Keeper { + // set KeyTable if it has not already been set + if !ps.HasKeyTable() { + ps = ps.WithKeyTable(types.ParamKeyTable()) + } + + return &Keeper{ + // remove-start + Keeper: cosmosibckeeper.NewKeeper( + types.PortKey, + storeKey, + channelKeeper, + portKeeper, + scopedKeeper, + ), + // remove-end + cdc: cdc, + storeKey: storeKey, + memKey: memKey, + paramstore: ps, + // highlight-start + channelKeeper: channelKeeper, + portKeeper: portKeeper, + scopedKeeper: scopedKeeper, + // highlight-end + } +} + +// highlight-start +// ---------------------------------------------------------------------------- +// IBC Keeper Logic +// ---------------------------------------------------------------------------- + +// ChanCloseInit defines a wrapper function for the channel Keeper's function. +func (k Keeper) ChanCloseInit(ctx sdk.Context, portID, channelID string) error { + capName := host.ChannelCapabilityPath(portID, channelID) + chanCap, ok := k.scopedKeeper.GetCapability(ctx, capName) + if !ok { + return errorsmod.Wrapf(channeltypes.ErrChannelCapabilityNotFound, "could not retrieve channel capability at: %s", capName) + } + return k.channelKeeper.ChanCloseInit(ctx, portID, channelID, chanCap) +} + +// IsBound checks if the IBC app module is already bound to the desired port +func (k Keeper) IsBound(ctx sdk.Context, portID string) bool { + _, ok := k.scopedKeeper.GetCapability(ctx, host.PortPath(portID)) + return ok +} + +// BindPort defines a wrapper function for the port Keeper's function in +// order to expose it to module's InitGenesis function +func (k Keeper) BindPort(ctx sdk.Context, portID string) error { + cap := k.portKeeper.BindPort(ctx, portID) + return k.ClaimCapability(ctx, cap, host.PortPath(portID)) +} + +// GetPort returns the portID for the IBC app module. 
Used in ExportGenesis +func (k Keeper) GetPort(ctx sdk.Context) string { + store := ctx.KVStore(k.storeKey) + return string(store.Get(types.PortKey)) +} + +// SetPort sets the portID for the IBC app module. Used in InitGenesis +func (k Keeper) SetPort(ctx sdk.Context, portID string) { + store := ctx.KVStore(k.storeKey) + store.Set(types.PortKey, []byte(portID)) +} + +// AuthenticateCapability wraps the scopedKeeper's AuthenticateCapability function +func (k Keeper) AuthenticateCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) bool { + return k.scopedKeeper.AuthenticateCapability(ctx, cap, name) +} + +// ClaimCapability allows the IBC app module to claim a capability that core IBC +// passes to it +func (k Keeper) ClaimCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) error { + return k.scopedKeeper.ClaimCapability(ctx, cap, name) +} + +//highlight-end + +func (k Keeper) Logger(ctx sdk.Context) log.Logger { + return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) +} +``` + +### Remaining migration + +After all uses of `cosmosibckeeper` have been removed, you can follow any remaining steps in the`ibc-go`[migration guide](https://github.com/cosmos/ibc-go/blob/v6.2.0/docs/migrations/v5-to-v6.md). + +## Scaffolded Release Workflow + +The develop branch of the CLI has been deprecated. To continue using the release workflow that uses the CLI to +automatically build and release your chain's binaries, replace develop with main in the following lines: + +```yaml title=".github/workflows/release.yml" +... 
+ +jobs: + might_release: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Prepare Release Variables + id: vars + // highlight-next-line + uses: ignite/cli/actions/release/vars@main + - name: Issue Release Assets + // highlight-next-line + uses: ignite/cli/actions/cli@main + if: ${{ steps.vars.outputs.should_release == 'true' }} + with: + args: chain build --release --release.prefix ${{ steps.vars.outputs.tarball_prefix }} -t linux:amd64 -t darwin:amd64 -t darwin:arm64 + - name: Delete the "latest" Release + uses: dev-drprasad/delete-tag-and-release@v0.2.0 + if: ${{ steps.vars.outputs.is_release_type_latest == 'true' }} + with: + tag_name: ${{ steps.vars.outputs.tag_name }} + delete_release: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Publish the Release + uses: softprops/action-gh-release@v1 + if: ${{ steps.vars.outputs.should_release == 'true' }} + with: + tag_name: ${{ steps.vars.outputs.tag_name }} + files: release/* + prerelease: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} +``` diff --git a/docs/versioned_docs/version-v29/06-migration/v0.27.1.md b/docs/versioned_docs/version-v29/06-migration/v0.27.1.md new file mode 100644 index 0000000..f2bf48b --- /dev/null +++ b/docs/versioned_docs/version-v29/06-migration/v0.27.1.md @@ -0,0 +1,1208 @@ +--- +sidebar_position: 991 +title: v0.27.1 +description: For chains that were scaffolded with IGNITE® CLI versions lower than v0.27.0. changes are required to use IGNITE® CLI v0.27.1. 
+--- + +## Cosmos SDK v0.47.3 upgrade notes + +### Imports + +To use the new Cosmos SDK make sure you update `go.mod` dependencies: + +```text title="go.mod" +go 1.20 + +require ( + // remove-start + github.com/cosmos/cosmos-sdk v0.46.7 + github.com/tendermint/tendermint v0.34.24 + github.com/tendermint/tm-db v0.6.7 + github.com/cosmos/ibc-go/v6 v6.1.0 + github.com/gogo/protobuf v1.3.3 + github.com/regen-network/cosmos-proto v0.3.1 + // remove-end + // highlight-start + cosmossdk.io/api v0.3.1 + github.com/cosmos/cosmos-sdk v0.47.3 + github.com/cometbft/cometbft v0.37.1 + github.com/cometbft/cometbft-db v0.7.0 + github.com/cosmos/ibc-go/v7 v7.1.0 + github.com/cosmos/gogoproto v1.4.7 + // highlight-end + + // ... +) + +replace ( + // remove-start + github.com/confio/ics23/go => github.com/cosmos/cosmos-sdk/ics23/go v0.8.0 + github.com/gogo/protobuf => github.com/regen-network/protobuf v1.3.3-alpha.regen.1 + // remove-end + // highlight-next-line + github.com/syndtr/goleveldb => github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 +) +``` + +The Cosmos SDK has migrated to CometBFT as its default consensus engine which requires +changes in your app imports: + +1. Replace `github.com/tendermint/tendermint` by `github.com/cometbft/cometbft` +2. Replace `github.com/tendermint/tm-db` by `github.com/cometbft/cometbft-db` +3. Verify `github.com/tendermint/tendermint` is not an indirect or direct dependency + +The SDK has also migrated from `gogo/protobuf` to `cosmos/gogoproto`. This means you must +replace all `github.com/gogo/protobuf` imports with `github.com/cosmos/gogoproto`. This change +might introduce breaking changes to your proto layout. Follow the official +[Cosmos migration guide](https://docs.cosmos.network/main/migrations/upgrading#gogoproto-import-paths) +to make sure you are using the correct layout. + +You might need to replace the following imports: + +1. 
Replace `github.com/cosmos/cosmos-sdk/simapp` by `cosmossdk.io/simapp` + +### App changes + +Applications scaffolded with older version of IGNITE® CLI would require the following changes +to some of the app files: + +```text title="app/app.go" +import ( + //... + + // remove-next-line + tmjson "github.com/tendermint/tendermint/libs/json" + // highlight-next-line + "encoding/json" + + // highlight-start + autocliv1 "cosmossdk.io/api/cosmos/autocli/v1" + reflectionv1 "cosmossdk.io/api/cosmos/reflection/v1" + "github.com/cosmos/cosmos-sdk/runtime" + runtimeservices "github.com/cosmos/cosmos-sdk/runtime/services" + "github.com/cosmos/cosmos-sdk/x/consensus" + consensusparamkeeper "github.com/cosmos/cosmos-sdk/x/consensus/keeper" + consensusparamtypes "github.com/cosmos/cosmos-sdk/x/consensus/types" + // highlight-end +) + +func getGovProposalHandlers() []govclient.ProposalHandler { + // ... + govProposalHandlers = append(govProposalHandlers, + paramsclient.ProposalHandler, + // remove-next-line + distrclient.ProposalHandler, + upgradeclient.LegacyProposalHandler, + // ... + ) + + return govProposalHandlers +} + +var ( + // ... + + ModuleBasics = module.NewBasicManager( + auth.AppModuleBasic{}, + authzmodule.AppModuleBasic{}, + // remove-next-line + genutil.AppModuleBasic{}, + // highlight-next-line + genutil.NewAppModuleBasic(genutiltypes.DefaultMessageValidator), + bank.AppModuleBasic{}, + // ... + vesting.AppModuleBasic{}, + // highlight-next-line + consensus.AppModuleBasic{}, + //... + ) +) + +var ( + // highlight-next-line + _ runtime.AppI = (*App)(nil) + _ servertypes.Application = (*App)(nil) + // remove-next-line + _ simapp.App = (*App)(nil) +) + +type App struct { + *baseapp.BaseApp + + cdc *codec.LegacyAmino + appCodec codec.Codec + interfaceRegistry types.InterfaceRegistry + // highlight-next-line + txConfig client.TxConfig + + invCheckPeriod uint + + // ... 
+ // remove-start + StakingKeeper stakingkeeper.Keeper + CrisisKeeper crisiskeeper.Keeper + UpgradeKeeper upgradekeeper.Keeper + // remove-end + // highlight-start + StakingKeeper *stakingkeeper.Keeper + CrisisKeeper *crisiskeeper.Keeper + UpgradeKeeper *upgradekeeper.Keeper + // highlight-end + // ... + FeeGrantKeeper feegrantkeeper.Keeper + GroupKeeper groupkeeper.Keeper + // highlight-next-line + ConsensusParamsKeeper consensusparamkeeper.Keeper + + // ... +} + +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + appCodec := encodingConfig.Marshaler + cdc := encodingConfig.Amino + interfaceRegistry := encodingConfig.InterfaceRegistry + // highlight-next-line + txConfig := encodingConfig.TxConfig + + // ... + + bApp.SetCommitMultiStoreTracer(traceStore) + bApp.SetVersion(version.Version) + bApp.SetInterfaceRegistry(interfaceRegistry) + // highlight-next-line + bApp.SetTxEncoder(txConfig.TxEncoder()) + + keys := sdk.NewKVStoreKeys( + // ... + banktypes.StoreKey, + stakingtypes.StoreKey, + // highlight-next-line + crisistypes.StoreKey, + // ... + group.StoreKey, + icacontrollertypes.StoreKey, + // highlight-next-line + consensusparamtypes.StoreKey, + // ... + ) + + // ... + + app := &App{ + // ... + interfaceRegistry: interfaceRegistry, + // highlight-next-line + txConfig: txConfig, + invCheckPeriod: invCheckPeriod, + // ... + } + + // ... 
+ + // set the BaseApp's parameter store + // remove-next-line + bApp.SetParamStore(app.ParamsKeeper.Subspace(baseapp.Paramspace).WithKeyTable(paramstypes.ConsensusParamsKeyTable())) + // highlight-start + app.ConsensusParamsKeeper = consensusparamkeeper.NewKeeper(appCodec, keys[upgradetypes.StoreKey], authtypes.NewModuleAddress(govtypes.ModuleName).String()) + bApp.SetParamStore(&app.ConsensusParamsKeeper) + // highlight-end + + // ... + + app.AccountKeeper = authkeeper.NewAccountKeeper( + appCodec, + keys[authtypes.StoreKey], + // remove-next-line + app.GetSubspace(authtypes.ModuleName), + authtypes.ProtoBaseAccount, + maccPerms, + sdk.Bech32PrefixAccAddr, + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + app.BankKeeper = bankkeeper.NewBaseKeeper( + appCodec, + keys[banktypes.StoreKey], + app.AccountKeeper, + // remove-next-line + app.GetSubspace(banktypes.ModuleName), + app.BlockedModuleAccountAddrs(), + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + app.StakingKeeper = stakingkeeper.NewKeeper( + appCodec, + keys[stakingtypes.StoreKey], + app.AccountKeeper, + app.BankKeeper, + // remove-next-line + app.GetSubspace(stakingtypes.ModuleName), + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + app.MintKeeper = mintkeeper.NewKeeper( + appCodec, + keys[minttypes.StoreKey], + // remove-next-line + app.GetSubspace(minttypes.ModuleName), + // remove-next-line + &app.StakingKeeper, + // highlight-next-line + app.StakingKeeper, + app.AccountKeeper, + app.BankKeeper, + authtypes.FeeCollectorName, + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + app.DistrKeeper = distrkeeper.NewKeeper( + appCodec, + keys[distrtypes.StoreKey], + // remove-next-line + app.GetSubspace(distrtypes.ModuleName), + app.AccountKeeper, + app.BankKeeper, + // remove-next-line + &app.StakingKeeper, + // highlight-next-line + 
app.StakingKeeper, + authtypes.FeeCollectorName, + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + app.SlashingKeeper = slashingkeeper.NewKeeper( + appCodec, + // highlight-next-line + cdc, + keys[slashingtypes.StoreKey], + // remove-next-line + &app.StakingKeeper, + // highlight-next-line + app.StakingKeeper, + // remove-next-line + app.GetSubspace(slashingtypes.ModuleName), + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + app.CrisisKeeper = crisiskeeper.NewKeeper( + // remove-next-line + app.GetSubspace(crisistypes.ModuleName), + // highlight-start + appCodec, + keys[crisistypes.StoreKey], + // highlight-end + invCheckPeriod, + app.BankKeeper, + authtypes.FeeCollectorName, + // highlight-next-line + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + // ... + + // Create evidence Keeper for to register the IBC light client misbehaviour evidence route + evidenceKeeper := evidencekeeper.NewKeeper( + appCodec, + keys[evidencetypes.StoreKey], + // remove-next-line + &app.StakingKeeper, + // highlight-next-line + app.StakingKeeper, + app.SlashingKeeper, + ) + // If evidence needs to be handled for the app, set routes in router here and seal + app.EvidenceKeeper = *evidenceKeeper + + // highlight-start + govConfig := govtypes.DefaultConfig() + govKeeper := govkeeper.NewKeeper( + appCodec, + keys[govtypes.StoreKey], + app.AccountKeeper, + app.BankKeeper, + app.StakingKeeper, + app.MsgServiceRouter(), + govConfig, + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + // highlight-end + + govRouter := govv1beta1.NewRouter() + govRouter. + AddRoute(govtypes.RouterKey, govv1beta1.ProposalHandler). + AddRoute(paramproposal.RouterKey, params.NewParamChangeProposalHandler(app.ParamsKeeper)). + // remove-next-line + AddRoute(distrtypes.RouterKey, distr.NewCommunityPoolSpendProposalHandler(app.DistrKeeper)). 
+ AddRoute(upgradetypes.RouterKey, upgrade.NewSoftwareUpgradeProposalHandler(app.UpgradeKeeper)). + AddRoute(ibcclienttypes.RouterKey, ibcclient.NewClientProposalHandler(app.IBCKeeper.ClientKeeper)) + // highlight-next-line + govKeeper.SetLegacyRouter(govRouter) + + // remove-start + govConfig := govtypes.DefaultConfig() + app.GovKeeper = govkeeper.NewKeeper( + appCodec, + keys[govtypes.StoreKey], + app.GetSubspace(govtypes.ModuleName), + app.AccountKeeper, + app.BankKeeper, + &app.StakingKeeper, + govRouter, + app.MsgServiceRouter(), + govConfig, + ) + // remove-end + // highlight-start + app.GovKeeper = *govKeeper.SetHooks( + govtypes.NewMultiGovHooks( + // register the governance hooks + ), + ) + // highlight-end + + // ... + + // remove-start + app.GovKeeper.SetHooks( + govtypes.NewMultiGovHooks( + // insert governance hooks receivers here + ), + ) + // remove-end + + // ... + + app.mm = module.NewManager( + genutil.NewAppModule( + app.AccountKeeper, + app.StakingKeeper, + app.BaseApp.DeliverTx, + encodingConfig.TxConfig, + ), + // remove-next-line + auth.NewAppModule(appCodec, app.AccountKeeper, nil), + // highlight-next-line + auth.NewAppModule(appCodec, app.AccountKeeper, authsims.RandomGenesisAccounts, app.GetSubspace(authtypes.ModuleName)), + authzmodule.NewAppModule(appCodec, app.AuthzKeeper, app.AccountKeeper, app.BankKeeper, app.interfaceRegistry), + vesting.NewAppModule(app.AccountKeeper, app.BankKeeper), + // remove-start + bank.NewAppModule(appCodec, app.BankKeeper, app.AccountKeeper), + capability.NewAppModule(appCodec, *app.CapabilityKeeper), + // remove-end + // highlight-start + bank.NewAppModule(appCodec, app.BankKeeper, app.AccountKeeper, app.GetSubspace(banktypes.ModuleName)), + capability.NewAppModule(appCodec, *app.CapabilityKeeper, false), + // highlight-end + feegrantmodule.NewAppModule(appCodec, app.AccountKeeper, app.BankKeeper, app.FeeGrantKeeper, app.interfaceRegistry), + groupmodule.NewAppModule(appCodec, app.GroupKeeper, 
app.AccountKeeper, app.BankKeeper, app.interfaceRegistry), + // remove-start + crisis.NewAppModule(&app.CrisisKeeper, skipGenesisInvariants), + gov.NewAppModule(appCodec, app.GovKeeper, app.AccountKeeper, app.BankKeeper), + mint.NewAppModule(appCodec, app.MintKeeper, app.AccountKeeper, minttypes.DefaultInflationCalculationFn), + slashing.NewAppModule(appCodec, app.SlashingKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper), + distr.NewAppModule(appCodec, app.DistrKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper), + staking.NewAppModule(appCodec, app.StakingKeeper, app.AccountKeeper, app.BankKeeper), + // remove-end + // highlight-start + crisis.NewAppModule(app.CrisisKeeper, skipGenesisInvariants, app.GetSubspace(crisistypes.ModuleName)), + gov.NewAppModule(appCodec, &app.GovKeeper, app.AccountKeeper, app.BankKeeper, app.GetSubspace(govtypes.ModuleName)), + mint.NewAppModule(appCodec, app.MintKeeper, app.AccountKeeper, nil, app.GetSubspace(minttypes.ModuleName)), + slashing.NewAppModule(appCodec, app.SlashingKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper, app.GetSubspace(slashingtypes.ModuleName)), + distr.NewAppModule(appCodec, app.DistrKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper, app.GetSubspace(distrtypes.ModuleName)), + staking.NewAppModule(appCodec, app.StakingKeeper, app.AccountKeeper, app.BankKeeper, app.GetSubspace(stakingtypes.ModuleName)), + // highlight-end + upgrade.NewAppModule(app.UpgradeKeeper), + evidence.NewAppModule(app.EvidenceKeeper), + // highlight-next-line + consensus.NewAppModule(appCodec, app.ConsensusParamsKeeper), + ibc.NewAppModule(app.IBCKeeper), + params.NewAppModule(app.ParamsKeeper), + transferModule, + icaModule, + // this line is used by starport scaffolding # stargate/app/appModule + + ) + + app.mm.SetOrderBeginBlockers( + // ... + paramstypes.ModuleName, + vestingtypes.ModuleName, + // highlight-next-line + consensusparamtypes.ModuleName, + // ... 
+ ) + + app.mm.SetOrderEndBlockers( + // ... + paramstypes.ModuleName, + upgradetypes.ModuleName, + vestingtypes.ModuleName, + // highlight-next-line + consensusparamtypes.ModuleName, + // ... + ) + + // remove-next-line + app.mm.SetOrderInitGenesis( + // highlight-next-line + genesisModuleOrder := []string{ + // ... + paramstypes.ModuleName, + upgradetypes.ModuleName, + vestingtypes.ModuleName, + // highlight-next-line + consensusparamtypes.ModuleName, + // ... + // remove-next-line + ) + // highlight-start + } + app.mm.SetOrderInitGenesis(genesisModuleOrder...) + app.mm.SetOrderExportGenesis(genesisModuleOrder...) + // highlight-end + + // remove-start + app.mm.RegisterInvariants(&app.CrisisKeeper) + app.mm.RegisterRoutes(app.Router(), app.QueryRouter(), encodingConfig.Amino) + // remove-end + // highlight-next-line + app.mm.RegisterInvariants(app.CrisisKeeper) + + app.configurator = module.NewConfigurator(app.appCodec, app.MsgServiceRouter(), app.GRPCQueryRouter()) + app.mm.RegisterServices(app.configurator) + + // highlight-start + autocliv1.RegisterQueryServer(app.GRPCQueryRouter(), runtimeservices.NewAutoCLIQueryService(app.mm.Modules)) + reflectionSvc, err := runtimeservices.NewReflectionService() + if err != nil { + panic(err) + } + reflectionv1.RegisterReflectionServiceServer(app.GRPCQueryRouter(), reflectionSvc) + // highlight-end + + // create the simulation manager and define the order of the modules for deterministic simulations + // remove-start + app.sm = module.NewSimulationManager( + // ... + ) + // remove-end + // highlight-start + overrideModules := map[string]module.AppModuleSimulation{ + authtypes.ModuleName: auth.NewAppModule(app.appCodec, app.AccountKeeper, authsims.RandomGenesisAccounts, app.GetSubspace(authtypes.ModuleName)), + } + app.sm = module.NewSimulationManagerFromAppModules(app.mm.Modules, overrideModules) + // highlight-end + app.sm.RegisterStoreDecoders() + + // ... 
+ + // remove-start + app.SetInitChainer(app.InitChainer) + app.SetBeginBlocker(app.BeginBlocker) + // remove-end + + // ... +} + +func (app *App) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci.ResponseInitChain { + var genesisState GenesisState + // remove-next-line + if err := tmjson.Unmarshal(req.AppStateBytes, &genesisState); err != nil { + // highlight-next-line + if err := json.Unmarshal(req.AppStateBytes, &genesisState); err != nil { + panic(err) + } + // ... +} + +// remove-start +// GetMaccPerms returns a copy of the module account permissions +func GetMaccPerms() map[string][]string { + dupMaccPerms := make(map[string][]string) + for k, v := range maccPerms { + dupMaccPerms[k] = v + } + return dupMaccPerms +} +// remove-end + +// highlight-start +// TxConfig returns App's TxConfig. +func (app *App) TxConfig() client.TxConfig { + return app.txConfig +} + +// Configurator get app configurator +func (app *App) Configurator() module.Configurator { + return app.configurator +} + +// ModuleManager returns the app ModuleManager +func (app *App) ModuleManager() *module.Manager { + return app.mm +} +// highlight-end +``` + +```text title="app/simulation_test.go" +import ( + // ... 
+ // remove-start + "cosmossdk.io/simapp" + tmtypes "github.com/tendermint/tendermint/types" + // remove-end + // highlight-start + "encoding/json" + "fmt" + "math/rand" + "runtime/debug" + "strings" + + dbm "github.com/cometbft/cometbft-db" + "github.com/cometbft/cometbft/libs/log" + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/server" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims" + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + authzkeeper "github.com/cosmos/cosmos-sdk/x/authz/keeper" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + distrtypes "github.com/cosmos/cosmos-sdk/x/distribution/types" + evidencetypes "github.com/cosmos/cosmos-sdk/x/evidence/types" + govtypes "github.com/cosmos/cosmos-sdk/x/gov/types" + minttypes "github.com/cosmos/cosmos-sdk/x/mint/types" + paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" + simcli "github.com/cosmos/cosmos-sdk/x/simulation/client/cli" + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + // highlight-end +) + +// highlight-start +type storeKeysPrefixes struct { + A storetypes.StoreKey + B storetypes.StoreKey + Prefixes [][]byte +} +// highlight-end + +// Get flags every time the simulator is run +func init() { + // remove-next-line + simapp.GetSimulatorFlags() + // highlight-next-line + simcli.GetSimulatorFlags() +} + +// remove-start +var defaultConsensusParams = &abci.ConsensusParams{ + Block: &abci.BlockParams{ + MaxBytes: 200000, + MaxGas: 2000000, + }, + Evidence: &tmproto.EvidenceParams{ + MaxAgeNumBlocks: 302400, + MaxAgeDuration: 504 * time.Hour, // 3 weeks is the max duration + MaxBytes: 10000, + }, + Validator: &tmproto.ValidatorParams{ + PubKeyTypes: []string{ + 
tmtypes.ABCIPubKeyTypeEd25519, + }, + }, +} +// remove-end +// highlight-start +func fauxMerkleModeOpt(bapp *baseapp.BaseApp) { + bapp.SetFauxMerkleMode() +} +// highlight-end + +func BenchmarkSimulation(b *testing.B) { + // remove-start + simapp.FlagEnabledValue = true + simapp.FlagCommitValue = true + + config, db, dir, logger, _, err := simapp.SetupSimulation("goleveldb-app-sim", "Simulation") + // remove-end + // highlight-start + simcli.FlagSeedValue = time.Now().Unix() + simcli.FlagVerboseValue = true + simcli.FlagCommitValue = true + simcli.FlagEnabledValue = true + + config := simcli.NewConfigFromFlags() + config.ChainID = "mars-simapp" + db, dir, logger, _, err := simtestutil.SetupSimulation( + config, + "leveldb-bApp-sim", + "Simulation", + simcli.FlagVerboseValue, + simcli.FlagEnabledValue, + ) + // highlight-end + + require.NoError(b, err, "simulation setup failed") + + b.Cleanup(func() { + // remove-start + db.Close() + err = os.RemoveAll(dir) + require.NoError(b, err) + // remove-end + // highlight-start + require.NoError(b, db.Close()) + require.NoError(b, os.RemoveAll(dir)) + // highlight-end + }) + + + // remove-next-line + encoding := app.MakeEncodingConfig() + // highlight-start + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = app.DefaultNodeHome + appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue + // highlight-end + + // remove-next-line + app := app.New( + // highlight-next-line + bApp := app.New( + logger, + db, + nil, + true, + map[int64]bool{}, + app.DefaultNodeHome, + 0, + // remove-start + encoding, + simapp.EmptyAppOptions{}, + // remove-end + // highlight-start + app.MakeEncodingConfig(), + appOptions, + baseapp.SetChainID(config.ChainID), + // highlight-end + ) + // highlight-next-line + require.Equal(b, app.Name, bApp.Name()) + + _, simParams, simErr := simulation.SimulateFromSeed( + b, + os.Stdout, + // remove-start + app.BaseApp, + simapp.AppStateFn(app.AppCodec(), 
app.SimulationManager()), + simulationtypes.RandomAccounts, + simapp.SimulationOperations(app, app.AppCodec(), config), + app.ModuleAccountAddrs(), + config, + app.AppCodec(), + // remove-end + // highlight-start + bApp.BaseApp, + simtestutil.AppStateFn( + bApp.AppCodec(), + bApp.SimulationManager(), + app.NewDefaultGenesisState(bApp.AppCodec()), + ), + simulationtypes.RandomAccounts, + simtestutil.SimulationOperations(bApp, bApp.AppCodec(), config), + bApp.ModuleAccountAddrs(), + config, + bApp.AppCodec(), + // highlight-end + ) + + // remove-next-line + err = simapp.CheckExportSimulation(app, config, simParams) + // highlight-next-line + err = simtestutil.CheckExportSimulation(bApp, config, simParams) + require.NoError(b, err) + require.NoError(b, simErr) + + if config.Commit { + // remove-next-line + simapp.PrintStats(db) + // highlight-next-line + simtestutil.PrintStats(db) + } +} +``` + +```text title="x/{{moduleName}}/module_simulation.go" +import ( + // ... + // remove-next-line + simappparams "cosmossdk.io/simapp/params" +) + +var ( + // ... + // remove-next-line + _ = simappparams.StakePerAccount + // highlight-next-line + _ = rand.Rand{} +) + +// remove-start +func (am AppModule) RandomizedParams(_ *rand.Rand) []simtypes.ParamChange { + // ... +} +// remove-end +// highlight-start +// ProposalMsgs returns msgs used for governance proposals for simulations. 
+func (am AppModule) ProposalMsgs(simState module.SimulationState) []simtypes.WeightedProposalMsg {
+	return []simtypes.WeightedProposalMsg{
+		// this line is used by starport scaffolding # simapp/module/OpMsg
+	}
+}
+// highlight-end
+```
+
+### Deprecations
+
+The app module might contain some legacy methods that are deprecated and can be removed:
+
+```text title="x/{{moduleName}}/module.go"
+// remove-start
+// Deprecated: use RegisterServices
+func (am AppModule) Route() sdk.Route { return sdk.Route{} }
+
+// Deprecated: use RegisterServices
+func (AppModule) QuerierRoute() string { return types.RouterKey }
+
+// Deprecated: use RegisterServices
+func (am AppModule) LegacyQuerierHandler(_ *codec.LegacyAmino) sdk.Querier {
+	return nil
+}
+// remove-end
+```
+
+### Other required changes
+
+Changes required to the network test util:
+
+```text title="testutil/network/network.go"
+import (
+	// ...
+
+	// remove-start
+	"github.com/cosmos/cosmos-sdk/simapp"
+	pruningtypes "github.com/cosmos/cosmos-sdk/pruning/types"
+	// remove-end
+	// highlight-start
+	pruningtypes "github.com/cosmos/cosmos-sdk/store/pruning/types"
+	simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims"
+	// highlight-end
+)
+
+func New(t *testing.T, configs ...Config) *Network {
+	// ...
+
+	net, err := network.New(t, t.TempDir(), cfg)
+	require.NoError(t, err)
+	// highlight-start
+	_, err = net.WaitForHeight(1)
+	require.NoError(t, err)
+	// highlight-end
+
+	// ...
+}
+
+func DefaultConfig() network.Config {
+	// remove-next-line
+	encoding := app.MakeEncodingConfig()
+	// highlight-start
+	var (
+		encoding = app.MakeEncodingConfig()
+		chainID  = "chain-" + tmrand.NewRand().Str(6)
+	)
+	// highlight-end
+
+	return network.Config{
+		// ...
+ // remove-next-line + AppConstructor: func(val network.Validator) servertypes.Application { + // highlight-next-line + AppConstructor: func(val network.ValidatorI) servertypes.Application { + return app.New( + // remove-next-line + val.Ctx.Logger, + // highlight-next-line + val.GetCtx().Logger, + tmdb.NewMemDB(), + nil, + true, + map[int64]bool{}, + // remove-next-line + val.Ctx.Config.RootDir, + // highlight-next-line + val.GetCtx().Config.RootDir, + 0, + encoding, + // remove-start + simapp.EmptyAppOptions{}, + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.AppConfig.Pruning)), + baseapp.SetMinGasPrices(val.AppConfig.MinGasPrices), + // remove-end + // highlight-start + simtestutil.EmptyAppOptions{}, + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.GetAppConfig().Pruning)), + baseapp.SetMinGasPrices(val.GetAppConfig().MinGasPrices), + baseapp.SetChainID(chainID), + // highlight-end + ) + }, + // ... + // remove-next-line + ChainID: "chain-" + tmrand.NewRand().Str(6), + // highlight-next-line + ChainID: chainID, + // ... + } +} +``` + +Update the collect genesis transactions command and add the new message validator argument: + +```text title="cmd/{{binaryNamePrefix}}d/cmd/root.go" +import ( + // ... + + // highlight-start + tmtypes "github.com/cometbft/cometbft/types" + "github.com/cosmos/cosmos-sdk/x/genutil" + genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" + // highlight-end +) + +func initRootCmd(rootCmd *cobra.Command, encodingConfig params.EncodingConfig) { + // ... + + // highlight-next-line + gentxModule := app.ModuleBasics[genutiltypes.ModuleName].(genutil.AppModuleBasic) + rootCmd.AddCommand( + // ... + // remove-next-line + genutilcli.CollectGenTxsCmd(banktypes.GenesisBalancesIterator{}, app.DefaultHome), + // highlight-next-line + genutilcli.CollectGenTxsCmd(banktypes.GenesisBalancesIterator{}, app.DefaultNodeHome, gentxModule.GenTxValidator), + // ... + ) + + // ... 
+} + +func (a appCreator) newApp( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + appOpts servertypes.AppOptions, +) servertypes.Application { + // ... + + pruningOpts, err := server.GetPruningOptionsFromFlags(appOpts) + if err != nil { + panic(err) + } + + // highlight-start + homeDir := cast.ToString(appOpts.Get(flags.FlagHome)) + chainID := cast.ToString(appOpts.Get(flags.FlagChainID)) + if chainID == "" { + // fallback to genesis chain-id + appGenesis, err := tmtypes.GenesisDocFromFile(filepath.Join(homeDir, "config", "genesis.json")) + if err != nil { + panic(err) + } + + chainID = appGenesis.ChainID + } + // highlight-end + + // ... + + return app.New( + // ... + baseapp.SetPruning(pruningOpts), + baseapp.SetMinGasPrices(cast.ToString(appOpts.Get(server.FlagMinGasPrices))), + // remove-next-line + baseapp.SetMinRetainBlocks(cast.ToUint64(appOpts.Get(server.FlagMinRetainBlocks))), + baseapp.SetHaltHeight(cast.ToUint64(appOpts.Get(server.FlagHaltHeight))), + baseapp.SetHaltTime(cast.ToUint64(appOpts.Get(server.FlagHaltTime))), + // highlight-next-line + baseapp.SetMinRetainBlocks(cast.ToUint64(appOpts.Get(server.FlagMinRetainBlocks))), + // ... + baseapp.SetIAVLDisableFastNode(cast.ToBool(appOpts.Get(server.FlagDisableIAVLFastNode))), + // highlight-next-line + baseapp.SetChainID(chainID), + ) +) + +func (a appCreator) appExport( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + height int64, + forZeroHeight bool, + jailAllowedAddrs []string, + appOpts servertypes.AppOptions, + // highlight-next-line + modulesToExport []string, +) (servertypes.ExportedApp, error) { + // ... 
+
+	// remove-next-line
+	return app.ExportAppStateAndValidators(forZeroHeight, jailAllowedAddrs)
+	// highlight-next-line
+	return app.ExportAppStateAndValidators(forZeroHeight, jailAllowedAddrs, modulesToExport)
+}
+```
+
+Add the new extra argument to `ExportAppStateAndValidators`:
+
+```text title="app/export.go"
+func (app *App) ExportAppStateAndValidators(
+	forZeroHeight bool,
+	jailAllowedAddrs []string,
+	// highlight-next-line
+	modulesToExport []string,
+) (servertypes.ExportedApp, error) {
+	// ...
+
+	// remove-next-line
+	genState := app.mm.ExportGenesis(ctx, app.appCodec)
+	// highlight-next-line
+	genState := app.mm.ExportGenesisForModules(ctx, app.appCodec, modulesToExport)
+	appState, err := json.MarshalIndent(genState, "", " ")
+	if err != nil {
+		return servertypes.ExportedApp{}, err
+	}
+
+	// ...
+}
+```
+
+### Migration
+
+You can also follow other Cosmos SDK migration steps in their [upgrade guide](https://docs.cosmos.network/main/migrations/upgrading#v047x).
+Especially the [parameter migration](https://docs.cosmos.network/main/migrations/upgrading#xconsensus) which
+is required if you want to run the updated version keeping your current app state.
+
+## Query commands
+
+Query commands context initialization should be changed to:
+
+```text title="x/{moduleName}/client/cli/query_{typeName}.go"
+RunE: func(cmd *cobra.Command, args []string) (err error) {
+	// remove-next-line
+	clientCtx := client.GetClientContextFromCmd(cmd)
+	// highlight-start
+	clientCtx, err := client.GetClientQueryContext(cmd)
+	if err != nil {
+		return err
+	}
+	// highlight-end
+
+	// ...
+}
+```
+
+
+## ibc-go v7
+
+Chains that are newly scaffolded with IGNITE® CLI `v0.27.1` now use `ibc-go/v7` for IBC functionality. It is
+required to upgrade to the newest version of `ibc-go`.
+
+Applications scaffolded with an older version of IGNITE® CLI require the following changes to the app file:
+
+```text title="app/app.go"
+import (
+	// ...
+ // remove-start + ica "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts" + icacontrollerkeeper "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/controller/keeper" + icacontrollertypes "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/controller/types" + icahost "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/host" + icahostkeeper "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/host/keeper" + icahosttypes "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/host/types" + icatypes "github.com/cosmos/ibc-go/v6/modules/apps/27-interchain-accounts/types" + "github.com/cosmos/ibc-go/v6/modules/apps/transfer" + ibctransferkeeper "github.com/cosmos/ibc-go/v6/modules/apps/transfer/keeper" + ibctransfertypes "github.com/cosmos/ibc-go/v6/modules/apps/transfer/types" + ibc "github.com/cosmos/ibc-go/v6/modules/core" + ibcclient "github.com/cosmos/ibc-go/v6/modules/core/02-client" + ibcclientclient "github.com/cosmos/ibc-go/v6/modules/core/02-client/client" + ibcclienttypes "github.com/cosmos/ibc-go/v6/modules/core/02-client/types" + ibcporttypes "github.com/cosmos/ibc-go/v6/modules/core/05-port/types" + ibchost "github.com/cosmos/ibc-go/v6/modules/core/24-host" + ibckeeper "github.com/cosmos/ibc-go/v6/modules/core/keeper" + // remove-end + // highlight-start + ica "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts" + icacontrollerkeeper "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/controller/keeper" + icacontrollertypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/controller/types" + icahost "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/host" + icahostkeeper "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/host/keeper" + icahosttypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/host/types" + icatypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/types" + 
"github.com/cosmos/ibc-go/v7/modules/apps/transfer" + ibctransferkeeper "github.com/cosmos/ibc-go/v7/modules/apps/transfer/keeper" + ibctransfertypes "github.com/cosmos/ibc-go/v7/modules/apps/transfer/types" + ibc "github.com/cosmos/ibc-go/v7/modules/core" + ibcclient "github.com/cosmos/ibc-go/v7/modules/core/02-client" + ibcclientclient "github.com/cosmos/ibc-go/v7/modules/core/02-client/client" + ibcclienttypes "github.com/cosmos/ibc-go/v7/modules/core/02-client/types" + ibcporttypes "github.com/cosmos/ibc-go/v7/modules/core/05-port/types" + ibcexported "github.com/cosmos/ibc-go/v7/modules/core/exported" + ibckeeper "github.com/cosmos/ibc-go/v7/modules/core/keeper" + solomachine "github.com/cosmos/ibc-go/v7/modules/light-clients/06-solomachine" + ibctm "github.com/cosmos/ibc-go/v7/modules/light-clients/07-tendermint" + // highlight-end +) + +var ( + // ... + + ModuleBasics = module.NewBasicManager( + // ... + groupmodule.AppModuleBasic{}, + ibc.AppModuleBasic{}, + // highlight-start + ibctm.AppModuleBasic{}, + solomachine.AppModuleBasic{}, + // highlight-end + upgrade.AppModuleBasic{}, + // ... + ) +) + +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig appparams.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + // ... + + keys := sdk.NewKVStoreKeys( + // ... + govtypes.StoreKey, + paramstypes.StoreKey, + // remove-next-line + ibchost.StoreKey, + // highlight-next-line + ibcexported.StoreKey, + // ... + ) + + // ... 
+ // grant capabilities for the ibc and ibc-transfer modules + // remove-next-line + scopedIBCKeeper := app.CapabilityKeeper.ScopeToModule(ibchost.ModuleName) + // highlight-next-line + scopedIBCKeeper := app.CapabilityKeeper.ScopeToModule(ibcexported.ModuleName) + scopedICAControllerKeeper := app.CapabilityKeeper.ScopeToModule(icacontrollertypes.SubModuleName) + + // ... + + app.IBCKeeper = ibckeeper.NewKeeper( + appCodec, + // remove-start + keys[ibchost.StoreKey], + app.GetSubspace(ibchost.ModuleName), + // remove-end + // highlight-start + keys[ibcexported.StoreKey], + app.GetSubspace(ibcexported.ModuleName), + // highlight-end + app.StakingKeeper, + app.UpgradeKeeper, + scopedIBCKeeper, + ) + + // ... + + app.mm.SetOrderBeginBlockers( + // ... + crisistypes.ModuleName, + ibctransfertypes.ModuleName, + // remove-next-line + ibchost.ModuleName, + // highlight-next-line + ibcexported.ModuleName, + // ... + ) + + app.mm.SetOrderEndBlockers( + // ... + stakingtypes.ModuleName, + ibctransfertypes.ModuleName, + // remove-next-line + ibchost.ModuleName, + // highlight-next-line + ibcexported.ModuleName, + // ... + ) + + genesisModuleOrder := []string{ + // ... + genutiltypes.ModuleName, + ibctransfertypes.ModuleName, + // remove-next-line + ibchost.ModuleName, + // highlight-next-line + ibcexported.ModuleName, + // ... + } + + // ... +) + +func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey storetypes.StoreKey) paramskeeper.Keeper { + // ... + paramsKeeper.Subspace(crisistypes.ModuleName) + paramsKeeper.Subspace(ibctransfertypes.ModuleName) + // remove-next-line + paramsKeeper.Subspace(ibchost.ModuleName) + // highlight-next-line + paramsKeeper.Subspace(ibcexported.ModuleName) + // ... +} +``` + + +You can follow other IBC migration steps in their [migration guide v6 to v7](https://github.com/cosmos/ibc-go/blob/v7.0.1/docs/migrations/v6-to-v7.md). 
+
+## Doctor command
+
+As the final steps it's recommended to run `ignite doctor` and `go mod tidy`. diff --git a/docs/versioned_docs/version-v29/06-migration/v28.0.0.md b/docs/versioned_docs/version-v29/06-migration/v28.0.0.md new file mode 100644 index 0000000..d830bb3 --- /dev/null +++ b/docs/versioned_docs/version-v29/06-migration/v28.0.0.md @@ -0,0 +1,124 @@ +--- +sidebar_position: 990 +title: v28.0.0 +description: For chains that were scaffolded with IGNITE® CLI versions lower than v28.0.0 changes are required to use IGNITE® CLI v28.0.0 +--- + +## **Upgrade to v28.0.0 - New Versioning Scheme in IGNITE®** + +With the latest update, IGNITE® has transitioned its versioning format from a leading-zero release system to a full number release system. This change marks a significant shift in how we communicate updates and stability in our software. Where the previous version was denoted as v0.27.0, it will now be upgraded to v28.0.0. + +This new versioning approach enhances our version control by clearly indicating major, minor, and patch releases. +From now on first number indicates a major release with breaking API changes, second number indicates minor release that might include new features while the last number is typically focused on bug fixes and minor improvements. +[Learn more about semantic versioning](https://semver.org/). + +## **Plugins are now called Apps. Upgrade Configuration Files** + +IGNITE® `v28.0.0` changes the plugin system which is now called IGNITE® Apps. This version includes changes +to the CLI command names and the plugin configuration file. + +The plugins configuration file is now called `igniteapps.yml` and "plugins" are now called "apps". + +The plugins configuration home directory is now `$HOME/.ignite/apps` instead of `$HOME/.ignite/plugins`. + +Updates can be automatically applied by running `ignite doctor` in your blockchain application directory. 
+Running the command outside your blockchain application directory will only update the global plugins. + +## **IGNITE® and Cosmos SDK Upgrade Guide: From IGNITE® v0.27.0 to v28.0.0 and Cosmos SDK v0.47 to v0.50** + +### **Introduction** + +This guide provides a step-by-step process for developers to upgrade their applications from IGNITE® version 0.27.0 to 28.0.0, along with an upgrade in the Cosmos SDK from version 0.47 to v0.50. It covers essential changes, new features, and adjustments required for a smooth transition. + +### **Prerequisites** + +- Backup your current project. +- Ensure you have IGNITE® v0.27.0 and Cosmos SDK v0.47 installed. +- Basic familiarity with command line operations and the existing project structure. + +### **Step 1: Update IGNITE® CLI to Version 28.0.0** + +- **Command**: Run **`curl https://get.ignite.com/cli@v28.0.0 | bash`** in your terminal. +- **Note**: This command updates the IGNITE® CLI to the latest version. Ensure you have the necessary permissions to execute it. + +### **Step 2: Update Scaffold Chain Command** + +- **Old Command**: **`ignite scaffold chain github.com/alice/blog`** +- **New Command**: **`ignite scaffold chain blog`** +- **Explanation**: The command format has been simplified in the new version for ease of use. + +### **Step 3: Docker Version Upgrades** + +- **Action**: Upgrade the IGNITE® version for the Docker container to match the CLI version. +- **Note**: Ensure Docker compatibility with the new IGNITE® CLI version. + +### **Step 4: Change in Module Path** + +- **Old Path**: **`x/blog/module.go`** +- **New Path**: **`x/blog/module/module.go`** +- **Explanation**: The module path structure has been updated for better organization. + +### **Step 5: Frontend Scaffolding Options** + +- **Action**: Choose between Vue, React, Go, or TypeScript for frontend scaffolding. 
+
+- **Commands**:
+  - **`ignite scaffold react`**
+  - **`ignite scaffold vue`**
+- **Note**: Vue is no longer the default option for frontend scaffolding.
+
+### **Step 6: Update Scaffold Message for CreatePost Command**
+
+- **Action**: Review and update the output for the scaffolded createPost command as per the new format.
+
+### **Step 7: AutoCLI Path Change**
+
+- **Old Path**: **`x/blog/client/cli/tx_create_post.go`**
+- **New Path**: **`x/blog/module/autocli.go`**
+- **Explanation**: AutoCLI is now integrated at a different path to streamline command-line interactions.
+
+### **Step 8: Adjustment in Stored Game**
+
+- **Old Code**:
+
+    ```go
+    store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.PostKey))
+
+    ```
+
+- **New Code**:
+
+    ```go
+    storeAdapter := runtime.KVStoreAdapter(k.storeService.OpenKVStore(ctx))
+    store := prefix.NewStore(storeAdapter, types.KeyPrefix(types.PostKey))
+
+    ```
+
+- **Explanation**: The way the KVStore is accessed has changed, requiring an update in the code for stored games.
+
+### **Step 9: Chain-ID Requirements in CLI Transaction Commands**
+
+- **Action**: Add **`--chain-id`** flag to CLI transaction commands.
+- **Example**:
+    - **Old Command**: **`blogd tx blog create-post 'Hello, World!' 'This is a blog post' --from alice`**
+    - **New Command**: **`blogd tx blog create-post 'Hello, World!' 'This is a blog post' --from alice --chain-id blog`**
+- **Explanation**: The **`chain-id`** flag is now required for transaction commands for identification purposes.
+
+### **Troubleshooting Common Issues**
+
+- **Dependency Conflicts**: Ensure compatibility of all dependencies with IGNITE® v28.0.0 and Cosmos SDK v0.50.
+- **Docker Image Compatibility**: Align Docker image versions with the CLI for seamless operations.
+- **Frontend Scaffolding**: For older projects, ensure correct scaffolding as per the new commands.
+- **AutoCLI Integration**: Address discrepancies due to the new AutoCLI integration path. 
+ +### **Additional Resources** + +- [IGNITE® Documentation](https://docs.ignite.com/) +- [Cosmos SDK Release Notes](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.50.1) + +### **Feedback** + +We value your feedback on this guide. Please share your experiences and suggestions for improvements. + +### **Updates Log** + +- **[01/15/24]**: Guide created for IGNITE® v28.0.0 and Cosmos SDK v0.50.1 \ No newline at end of file diff --git a/docs/versioned_docs/version-v29/06-migration/v29.0.0.md b/docs/versioned_docs/version-v29/06-migration/v29.0.0.md new file mode 100644 index 0000000..5f52591 --- /dev/null +++ b/docs/versioned_docs/version-v29/06-migration/v29.0.0.md @@ -0,0 +1,98 @@ +--- +sidebar_position: 989 +title: v29.0.0 +description: For chains that were scaffolded with IGNITE® CLI versions lower than v29.0.0 changes are required to use IGNITE® CLI v29.0.0 +--- + +## Upgrade to v29.0.0 + +The changes between v28.0.0 and v29.0.0 are not as significant as the changes between v0.27.0 and v28.0.0. + +In v29.0.0, the Cosmos SDK version has been upgraded to 0.53.0 and IBC to v10. + +Please see the [Changelog](https://github.com/ignite/cli/commit/1b7f19f08d0fa91e3ae71b4b37b8bb4171a9e320#diff-b027e7b11ff55b21dd50b32abcbdd35d95be87a889f0f6562417fbf0995d402a) for more details. + +:::tip +If you wish to keep using a chain scaffolded with IGNITE® v28, simply run the doctor command: + +```bash +ignite doctor +``` + +Note that some scaffolding commands may not work as expected, and you may need to manually adjust your code, unless you follow the migration steps below. +::: + +## Upgrade Cosmos SDK to v0.53.0 + +In order to upgrade, please navigate to the `go.mod` file in your blockchain directory and replace an earlier Cosmos-SDK version with v0.53.0. 
+
+```diff
+-github.com/cosmos/cosmos-sdk v0.50.0
++github.com/cosmos/cosmos-sdk v0.53.0
+```
+
+Review the [Cosmos SDK v0.53.0 release notes](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.53.0) for changes like updated x/auth vesting or sdk.Context APIs.
+
+If you have custom modules, test for deprecated APIs and update as needed.
+
+## Add Auth to PreBlockers
+
+v29 configures preblockers to include the `auth` module (`authtypes.ModuleName`) for transaction processing. Ensure this is set in your v28 scaffold.
+
+**Edit PreBlockers**:
+
+- Open `mychain/app/app_config.go`.
+
+- Find or add the `preBlockers` slice. Ensure it includes `authtypes.ModuleName`, matching v29’s configuration:
+
+```go
+import (
+    "github.com/cosmos/cosmos-sdk/x/auth/types"
+    "github.com/cosmos/cosmos-sdk/x/upgrade/types"
+)
+
+var preBlockers = []string{
+    upgradetypes.ModuleName,
+    authtypes.ModuleName,
+    // this line is used by starport scaffolding # stargate/app/preBlockers
+}
+```
+
+## Upgrade to IBC v10
+
+```diff
+-github.com/cosmos/ibc-go/v8 v8.5.2
++github.com/cosmos/ibc-go/v10 v10.0.0
+```
+
+The easiest path is to copy the relevant files in the `app` directory from a chain scaffolded with v29 into your old v28 project, in case you did not modify anything in there.
+
+In case you want to see the entire difference with scaffolded chains, use our "Generate Migration Difference" Tool.
+
+[Check out the guide to use the Gen-Mig-Diff Tool](https://tutorials.ignite.com/guide-to-use-gen-mig-diffs-for/).
+
+Then run the command
+
+`gen-mig-diffs --output temp/migration --from v28 --to v29`
+
+Now, test if your blockchain runs using IGNITE® v29:
+
+Update the dependencies with:
+
+```bash
+go mod tidy
+```
+
+Then run the IGNITE® doctor to update configuration files.
+
+```bash
+ignite doctor
+```
+
+Now start your chain.
+
+```bash
+ignite chain serve
+```
+
+If you need our help and support, do not hesitate to visit our [Discord](https://discord.com/invite/ignitecli). 
diff --git a/docs/versioned_docs/version-v29/07-packages/_category_.json b/docs/versioned_docs/version-v29/07-packages/_category_.json new file mode 100644 index 0000000..6dbb883 --- /dev/null +++ b/docs/versioned_docs/version-v29/07-packages/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Packages", + "link": null +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v29/07-packages/chaincmd.md b/docs/versioned_docs/version-v29/07-packages/chaincmd.md new file mode 100644 index 0000000..c4f498c --- /dev/null +++ b/docs/versioned_docs/version-v29/07-packages/chaincmd.md @@ -0,0 +1,51 @@ +--- +sidebar_position: 7 +title: Chain Command Builder (chaincmd) +slug: /packages/chaincmd +--- + +# Chain Command Builder (chaincmd) + +The `chaincmd` package builds `step.Option` command definitions for Cosmos SDK daemon binaries (`simd`, `gaiad`, and others). It does not execute commands directly. + +For full API details, see the +[`chaincmd` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/chaincmd). + +## When to use + +- Build consistent daemon command lines from typed options. +- Reuse command composition across services and tests. +- Keep chain binary-specific flags centralized. 
+ +## Key APIs + +- `New(appCmd string, options ...Option) ChainCmd` +- `WithHome(home string) Option` +- `WithChainID(chainID string) Option` +- `InitCommand(moniker string, options ...string) step.Option` +- `BankSendCommand(fromAddress, toAddress, amount string, options ...BankSendOption) step.Option` + +## Example + +```go +package main + +import ( + "fmt" + + "github.com/ignite/cli/v29/ignite/pkg/chaincmd" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" +) + +func main() { + cmd := chaincmd.New( + "simd", + chaincmd.WithHome("./.simapp"), + chaincmd.WithChainID("demo-1"), + ) + + initStep := step.New(cmd.InitCommand("validator")) + fmt.Println(initStep.Exec.Command) + fmt.Println(initStep.Exec.Args) +} +``` diff --git a/docs/versioned_docs/version-v29/07-packages/chaincmdrunner.md b/docs/versioned_docs/version-v29/07-packages/chaincmdrunner.md new file mode 100644 index 0000000..58db8ca --- /dev/null +++ b/docs/versioned_docs/version-v29/07-packages/chaincmdrunner.md @@ -0,0 +1,40 @@ +--- +sidebar_position: 4 +title: Chain Command Runner (chaincmd/runner) +slug: /packages/chaincmdrunner +--- + +# Chain Command Runner (chaincmd/runner) + +The `chaincmdrunner` package wraps chain binary commands into typed, higher-level operations (accounts, genesis setup, tx queries, node control). + +For full API details, see the +[`chaincmdrunner` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/chaincmd/runner). + +## When to use + +- Execute chain lifecycle commands without manually assembling CLI arguments. +- Manage accounts and genesis setup from automation/test flows. +- Query transaction events using typed selectors instead of raw command output parsing. 
+ +## Key APIs + +- `New(ctx context.Context, chainCmd chaincmd.ChainCmd, options ...Option) (Runner, error)` +- `(Runner) Init(ctx context.Context, moniker string, args ...string) error` +- `(Runner) Start(ctx context.Context, args ...string) error` +- `(Runner) AddAccount(ctx context.Context, name, mnemonic, coinType, accountNumber, addressIndex string) (Account, error)` +- `(Runner) AddGenesisAccount(ctx context.Context, address, coins string) error` +- `(Runner) QueryTxByEvents(ctx context.Context, selectors ...EventSelector) ([]Event, error)` +- `(Runner) WaitTx(ctx context.Context, txHash string, retryDelay time.Duration, maxRetry int) error` + +## Common Tasks + +- Build a `Runner` from a configured `chaincmd.ChainCmd` and then call `Init`/`Start` for local node workflows. +- Use `AddAccount`, `ListAccounts`, and `ShowAccount` to manage keyring state in scripted flows. +- Query and filter tx events with `NewEventSelector` plus `QueryTxByEvents`. + +## Basic import + +```go +import chaincmdrunner "github.com/ignite/cli/v29/ignite/pkg/chaincmd/runner" +``` diff --git a/docs/versioned_docs/version-v29/07-packages/chainregistry.md b/docs/versioned_docs/version-v29/07-packages/chainregistry.md new file mode 100644 index 0000000..b2558ad --- /dev/null +++ b/docs/versioned_docs/version-v29/07-packages/chainregistry.md @@ -0,0 +1,43 @@ +--- +sidebar_position: 3 +title: Chain Registry Types (chainregistry) +slug: /packages/chainregistry +--- + +# Chain Registry Types (chainregistry) + +The `chainregistry` package defines strongly-typed Go structs for Cosmos chain-registry data (`chain.json` and `assetlist.json`). + +For full API details, see the +[`chainregistry` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/chainregistry). + +## When to use + +- Parse chain-registry JSON into typed values. +- Build tooling that reads chain metadata (APIs, fees, staking tokens, assets). 
+- Validate or transform registry documents before writing them back. + +## Key APIs + +- `type Chain struct{ ... }` +- `type APIs struct{ ... }` +- `type APIProvider struct{ ... }` +- `type AssetList struct{ ... }` +- `type Asset struct{ ... }` +- `type Fees struct{ ... }` +- `type Staking struct{ ... }` +- `type Codebase struct{ ... }` +- `type ChainStatus string` +- `type ChainType string` + +## Common Tasks + +- Decode `chain.json` data into a `Chain` value and inspect RPC/REST metadata. +- Decode `assetlist.json` into `AssetList` to access denom units and logo URIs. +- Use enum-like types (`ChainStatus`, `NetworkType`, `ChainType`) to keep metadata checks explicit. + +## Basic import + +```go +import "github.com/ignite/cli/v29/ignite/pkg/chainregistry" +``` diff --git a/docs/versioned_docs/version-v29/07-packages/cosmosaccount.md b/docs/versioned_docs/version-v29/07-packages/cosmosaccount.md new file mode 100644 index 0000000..7097055 --- /dev/null +++ b/docs/versioned_docs/version-v29/07-packages/cosmosaccount.md @@ -0,0 +1,43 @@ +--- +sidebar_position: 2 +title: Account Registry (cosmosaccount) +slug: /packages/cosmosaccount +--- + +# Account Registry (cosmosaccount) + +The `cosmosaccount` package manages Cosmos keyring accounts (create/import/export/list/delete) with configurable backend and Bech32 settings. + +For full API details, see the +[`cosmosaccount` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/cosmosaccount). + +## When to use + +- Manage CLI account keys in Ignite services and commands. +- Switch between `test`, `os`, and `memory` keyring backends. +- Resolve addresses/public keys from named keyring entries. 
+ +## Key APIs + +- `New(options ...Option) (Registry, error)` +- `NewInMemory(options ...Option) (Registry, error)` +- `WithKeyringBackend(backend KeyringBackend) Option` +- `WithHome(path string) Option` +- `(Registry) Create(name string) (Account, mnemonic string, err error)` +- `(Registry) Import(name, secret, passphrase string) (Account, error)` +- `(Registry) Export(name, passphrase string) (key string, err error)` +- `(Registry) GetByName(name string) (Account, error)` +- `(Registry) List() ([]Account, error)` +- `(Account) Address(accPrefix string) (string, error)` + +## Common Tasks + +- Instantiate one `Registry` with backend/home options and reuse it for all key operations. +- Call `EnsureDefaultAccount` in setup paths that require a predictable signer account. +- Resolve addresses with `Account.Address(prefix)` when your app uses non-default Bech32 prefixes. + +## Basic import + +```go +import "github.com/ignite/cli/v29/ignite/pkg/cosmosaccount" +``` diff --git a/docs/versioned_docs/version-v29/07-packages/cosmosanalysis.md b/docs/versioned_docs/version-v29/07-packages/cosmosanalysis.md new file mode 100644 index 0000000..5f1b019 --- /dev/null +++ b/docs/versioned_docs/version-v29/07-packages/cosmosanalysis.md @@ -0,0 +1,40 @@ +--- +sidebar_position: 13 +title: Cosmos Source Analysis (cosmosanalysis) +slug: /packages/cosmosanalysis +--- + +# Cosmos Source Analysis (cosmosanalysis) + +The `cosmosanalysis` package provides static analysis helpers for Cosmos SDK-based projects, especially for app structure and interface/embed discovery. + +For full API details, see the +[`cosmosanalysis` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis). + +## When to use + +- Validate that a directory is a Cosmos chain project before running codegen. +- Locate key app files and embedded types in Cosmos app sources. +- Detect interface implementations across module files. 
+ +## Key APIs + +- `IsChainPath(path string) error` +- `FindAppFilePath(chainRoot string) (path string, err error)` +- `ValidateGoMod(module *modfile.File) error` +- `FindImplementation(modulePath string, interfaceList []string) (found []string, err error)` +- `DeepFindImplementation(modulePath string, interfaceList []string) (found []string, err error)` +- `FindEmbed(modulePath string, targetEmbeddedTypes []string) (found []string, err error)` +- `FindEmbedInFile(n ast.Node, targetEmbeddedTypes []string) (found []string)` + +## Common Tasks + +- Call `IsChainPath` early to fail fast on unsupported project layouts. +- Use `FindAppFilePath` before AST transformations that require the chain app entrypoint. +- Use `FindImplementation`/`DeepFindImplementation` to verify generated modules are wired as expected. + +## Basic import + +```go +import "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis" +``` diff --git a/docs/versioned_docs/version-v29/07-packages/cosmosbuf.md b/docs/versioned_docs/version-v29/07-packages/cosmosbuf.md new file mode 100644 index 0000000..89eeec5 --- /dev/null +++ b/docs/versioned_docs/version-v29/07-packages/cosmosbuf.md @@ -0,0 +1,58 @@ +--- +sidebar_position: 14 +title: Buf Integration (cosmosbuf) +slug: /packages/cosmosbuf +--- + +# Buf Integration (cosmosbuf) + +The `cosmosbuf` package wraps Buf workflows (`generate`, `export`, `format`, `migrate`, `dep update`) used by Ignite's protobuf pipelines. + +For full API details, see the +[`cosmosbuf` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/cosmosbuf). + +## When to use + +- Trigger Buf code generation from Go services. +- Keep Buf invocation flags and error handling consistent. +- Reuse cache-aware generation behavior. 
+ +## Key APIs + +- `New(cacheStorage cache.Storage, goModPath string) (Buf, error)` +- `(Buf) Generate(ctx, protoPath, output, template, options...)` +- `(Buf) Format(ctx, path)` +- `(Buf) Export(ctx, protoDir, output)` +- `Version(ctx context.Context) (string, error)` + +## Example + +```go +package main + +import ( + "context" + "log" + "os" + "path/filepath" + + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/cosmosbuf" +) + +func main() { + storage, err := cache.NewStorage(filepath.Join(os.TempDir(), "ignite-cache.db")) + if err != nil { + log.Fatal(err) + } + + buf, err := cosmosbuf.New(storage, "github.com/acme/my-chain") + if err != nil { + log.Fatal(err) + } + + if err := buf.Format(context.Background(), "./proto"); err != nil { + log.Fatal(err) + } +} +``` diff --git a/docs/versioned_docs/version-v29/07-packages/cosmosclient.md b/docs/versioned_docs/version-v29/07-packages/cosmosclient.md new file mode 100644 index 0000000..bab074c --- /dev/null +++ b/docs/versioned_docs/version-v29/07-packages/cosmosclient.md @@ -0,0 +1,43 @@ +--- +sidebar_position: 1 +title: Blockchain Client (cosmosclient) +slug: /packages/cosmosclient +--- + +# Blockchain Client (cosmosclient) + +The `cosmosclient` package provides a high-level client for querying Cosmos SDK chains and building/signing/broadcasting transactions. + +For full API details, see the +[`cosmosclient` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/cosmosclient). + +## When to use + +- Connect Ignite tooling to a running node for status and block queries. +- Build and broadcast SDK messages with shared gas/fees/keyring settings. +- Wait for transaction inclusion and inspect block transactions/events. 
+ +## Key APIs + +- `New(ctx context.Context, options ...Option) (Client, error)` +- `WithNodeAddress(addr string) Option` +- `WithHome(path string) Option` +- `WithKeyringBackend(backend cosmosaccount.KeyringBackend) Option` +- `WithGas(gas string) Option` +- `WithGasPrices(gasPrices string) Option` +- `(Client) BroadcastTx(ctx, account, msgs...) (Response, error)` +- `(Client) WaitForTx(ctx context.Context, hash string) (*ctypes.ResultTx, error)` +- `(Client) Status(ctx context.Context) (*ctypes.ResultStatus, error)` +- `(Client) LatestBlockHeight(ctx context.Context) (int64, error)` + +## Common Tasks + +- Initialize one `Client` instance with node and keyring options, then reuse it across operations. +- Call `CreateTxWithOptions` or `BroadcastTx` depending on whether you need fine-grained tx overrides. +- Use `WaitForTx`, `WaitForNextBlock`, or `WaitForBlockHeight` for deterministic flows in tests/automation. + +## Basic import + +```go +import "github.com/ignite/cli/v29/ignite/pkg/cosmosclient" +``` diff --git a/docs/versioned_docs/version-v29/07-packages/cosmosfaucet.md b/docs/versioned_docs/version-v29/07-packages/cosmosfaucet.md new file mode 100644 index 0000000..e2804fb --- /dev/null +++ b/docs/versioned_docs/version-v29/07-packages/cosmosfaucet.md @@ -0,0 +1,40 @@ +--- +sidebar_position: 5 +title: Token Faucet (cosmosfaucet) +slug: /packages/cosmosfaucet +--- + +# Token Faucet (cosmosfaucet) + +The `cosmosfaucet` package provides a local faucet service and client helpers to fund Cosmos accounts during development and tests. + +For full API details, see the +[`cosmosfaucet` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/cosmosfaucet). + +## When to use + +- Automatically fund accounts in local/devnet environments. +- Expose a faucet HTTP endpoint backed by a chain key. +- Request funds from an existing faucet endpoint from automation code. 
+ +## Key APIs + +- `New(ctx context.Context, ccr chaincmdrunner.Runner, options ...Option) (Faucet, error)` +- `TryRetrieve(ctx context.Context, chainID, rpcAddress, faucetAddress, accountAddress string) (string, error)` +- `OpenAPI(apiAddress string) Option` +- `Coin(amount, maxAmount sdkmath.Int, denom string) Option` +- `FeeAmount(amount sdkmath.Int, denom string) Option` +- `RefreshWindow(refreshWindow time.Duration) Option` +- `NewTransferRequest(accountAddress string, coins []string) TransferRequest` + +## Common Tasks + +- Construct a `Faucet` with chain runner + options, then expose transfer endpoints for local users. +- Use `TryRetrieve` in tests before broadcasting txs to ensure accounts have spendable balance. +- Tune coin amount, max amount, and refresh window to limit faucet abuse. + +## Basic import + +```go +import "github.com/ignite/cli/v29/ignite/pkg/cosmosfaucet" +``` diff --git a/docs/versioned_docs/version-v29/07-packages/cosmosgen.md b/docs/versioned_docs/version-v29/07-packages/cosmosgen.md new file mode 100644 index 0000000..180a3a5 --- /dev/null +++ b/docs/versioned_docs/version-v29/07-packages/cosmosgen.md @@ -0,0 +1,63 @@ +--- +sidebar_position: 15 +title: Code Generation (cosmosgen) +slug: /packages/cosmosgen +--- + +# Code Generation (cosmosgen) + +The `cosmosgen` package orchestrates multi-target code generation from protobuf sources, including Go code, TS clients, composables, and OpenAPI output. + +For full API details, see the +[`cosmosgen` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/cosmosgen). + +## When to use + +- Run full generation pipelines from application services. +- Configure selective outputs (Go only, TS only, OpenAPI only, etc.). +- Check tool availability and maintain buf-related configuration. 
+ +## Key APIs + +- `Generate(ctx, cacheStorage, appPath, protoDir, goModPath, frontendPath, options...)` +- `WithGoGeneration()` +- `WithTSClientGeneration(out, tsClientRootPath, useCache)` +- `WithOpenAPIGeneration(out, excludeList)` +- `DepTools() []string` + +## Example + +```go +package main + +import ( + "context" + "log" + "os" + "path/filepath" + + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/cosmosgen" +) + +func main() { + storage, err := cache.NewStorage(filepath.Join(os.TempDir(), "ignite-cache.db")) + if err != nil { + log.Fatal(err) + } + + err = cosmosgen.Generate( + context.Background(), + storage, + ".", + "proto", + "github.com/acme/my-chain", + "./web", + cosmosgen.WithGoGeneration(), + cosmosgen.WithOpenAPIGeneration("./api/openapi.yml", nil), + ) + if err != nil { + log.Fatal(err) + } +} +``` diff --git a/docs/versioned_docs/version-v29/07-packages/cosmostxcollector.md b/docs/versioned_docs/version-v29/07-packages/cosmostxcollector.md new file mode 100644 index 0000000..7014cff --- /dev/null +++ b/docs/versioned_docs/version-v29/07-packages/cosmostxcollector.md @@ -0,0 +1,200 @@ +--- +sidebar_position: 0 +title: Indexer (cosmostxcollector) +slug: /packages/cosmostxcollector +--- + +# Indexer (cosmostxcollector) + +The package implements support for collecting transactions and events from Cosmos blockchains +into a data backend and it also adds support for querying the collected data. + +## Transaction and event data collecting + +Transactions and events can be collected using the `cosmostxcollector.Collector` type. This +type uses a `cosmosclient.Client` instance to fetch the data from each block and a data backend +adapter to save the data. + +### Data backend adapters + +Data backend adapters are used to query and save the collected data into different types of data +backends and must implement the `cosmostxcollector.adapter.Adapter` interface. 
+ +An adapter for PostgreSQL is already implemented in `cosmostxcollector.adapter.postgres.Adapter`. +This is the one used in the examples. + +### Example: Data collection + +The data collection example assumes that there is a PostgreSQL database running in the local +environment containing an empty database named "cosmos". + +The required database tables will be created automatically by the collector the first time it is run. + +When the application is run it will fetch all the transactions and events starting from one of the +recent blocks until the current block height and populate the database: + +```go +package main + +import ( + "context" + "log" + + "github.com/ignite/cli/v29/ignite/pkg/clictx" + "github.com/ignite/cli/v29/ignite/pkg/cosmosclient" + "github.com/ignite/cli/v29/ignite/pkg/cosmostxcollector" + "github.com/ignite/cli/v29/ignite/pkg/cosmostxcollector/adapter/postgres" +) + +const ( + // Name of a local PostgreSQL database + dbName = "cosmos" + + // Cosmos RPC address + rpcAddr = "https://rpc.cosmos.directory:443/cosmoshub" +) + +func collect(ctx context.Context, db postgres.Adapter) error { + // Make sure that the data backend schema is up to date + if err := db.Init(ctx); err != nil { + return err + } + + // Init the Cosmos client + client, err := cosmosclient.New(ctx, cosmosclient.WithNodeAddress(rpcAddr)) + if err != nil { + return err + } + + // Get the latest block height + latestHeight, err := client.LatestBlockHeight(ctx) + if err != nil { + return err + } + + // Collect transactions and events starting from a block height. + // The collector stops at the latest height available at the time of the call. 
+ collector := cosmostxcollector.New(db, client) + if err := collector.Collect(ctx, latestHeight-50); err != nil { + return err + } + + return nil +} + +func main() { + ctx := clictx.From(context.Background()) + + // Init an adapter for a local PostgreSQL database running with the default values + params := map[string]string{"sslmode": "disable"} + db, err := postgres.NewAdapter(dbName, postgres.WithParams(params)) + if err != nil { + log.Fatal(err) + } + + if err := collect(ctx, db); err != nil { + log.Fatal(err) + } +} +``` + +## Queries + +Collected data can be queried through the data backend adapters using event queries or +cursor-based queries. + +Queries support sorting, paging and filtering by using different options during creation. +The cursor-based ones also support the selection of specific fields or properties and also +passing arguments in cases where the query is a function. + +By default no sorting, filtering nor paging is applied to the queries. + +### Event queries + +The event queries return events and their attributes as `[]cosmostxcollector.query.Event`. + +### Example: Query events + +The example reads transfer events from Cosmos' bank module and paginates the results. 
+
+```go
+import (
+	"context"
+
+	banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
+	"github.com/ignite/cli/ignite/pkg/cosmostxcollector/adapter/postgres"
+	"github.com/ignite/cli/ignite/pkg/cosmostxcollector/query"
+)
+
+func queryBankTransferEvents(ctx context.Context, db postgres.Adapter) ([]query.Event, error) {
+	// Create an event query that returns events of type "transfer"
+	qry := query.NewEventQuery(
+		query.WithFilters(
+			// Filter transfer events from Cosmos' bank module
+			postgres.FilterByEventType(banktypes.EventTypeTransfer),
+		),
+		query.WithPageSize(10),
+		query.AtPage(1),
+	)
+
+	// Execute the query
+	return db.QueryEvents(ctx, qry)
+}
+```
+
+### Cursor-based queries
+
+This type of query is meant to be used in contexts where the Event queries are not
+useful.
+
+Cursor-based queries can query a single "entity" which can be a table, view or function
+in relational databases or a collection or function in non-relational data backends.
+
+The result of these types of queries is a cursor that implements the `cosmostxcollector.query.Cursor`
+interface. 
+ +### Example: Query events using cursors + +```go +import ( + "context" + + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/adapter/postgres" + "github.com/ignite/cli/ignite/pkg/cosmostxcollector/query" +) + +func queryBankTransferEventIDs(ctx context.Context, db postgres.Adapter) (ids []int64, err error) { + // Create a query that returns the IDs for events of type "transfer" + qry := query.New( + "event", + query.Fields("id"), + query.WithFilters( + // Filter transfer events from Cosmos' bank module + postgres.NewFilter("type", banktypes.EventTypeTransfer), + ), + query.WithPageSize(10), + query.AtPage(1), + query.SortByFields(query.SortOrderAsc, "id"), + ) + + // Execute the query + cr, err := db.Query(ctx, qry) + if err != nil { + return nil, err + } + + // Read the results + for cr.Next() { + var eventID int64 + + if err := cr.Scan(&eventID); err != nil { + return nil, err + } + + ids = append(ids, eventID) + } + + return ids, nil +} +``` diff --git a/docs/versioned_docs/version-v29/07-packages/cosmosver.md b/docs/versioned_docs/version-v29/07-packages/cosmosver.md new file mode 100644 index 0000000..7de0697 --- /dev/null +++ b/docs/versioned_docs/version-v29/07-packages/cosmosver.md @@ -0,0 +1,41 @@ +--- +sidebar_position: 11 +title: Cosmos SDK Versions (cosmosver) +slug: /packages/cosmosver +--- + +# Cosmos SDK Versions (cosmosver) + +The `cosmosver` package parses, compares, and detects Cosmos SDK versions used by a chain project. + +For full API details, see the +[`cosmosver` Go package documentation](https://pkg.go.dev/github.com/ignite/cli/v29/ignite/pkg/cosmosver). + +## When to use + +- Detect the Cosmos SDK version from a project before scaffolding or migrations. +- Compare versions to enable/disable version-specific features. +- Access Ignite's known SDK version set and latest supported baseline. 
+ +## Key APIs + +- `Detect(appPath string) (version Version, err error)` +- `Parse(version string) (v Version, err error)` +- `var Versions = []Version{ ... }` +- `var Latest = Versions[len(Versions)-1]` +- `(Version) Is(version Version) bool` +- `(Version) LT(version Version) bool` +- `(Version) LTE(version Version) bool` +- `(Version) GTE(version Version) bool` + +## Common Tasks + +- Use `Detect` against a chain root to gate generation paths by SDK version. +- Parse user-provided versions with `Parse` before comparisons. +- Branch behavior with `LT`/`GTE` checks against well-known constants. + +## Basic import + +```go +import "github.com/ignite/cli/v29/ignite/pkg/cosmosver" +``` diff --git a/docs/versioned_docs/version-v29/08-configuration/01-config.md b/docs/versioned_docs/version-v29/08-configuration/01-config.md new file mode 100644 index 0000000..8163dcb --- /dev/null +++ b/docs/versioned_docs/version-v29/08-configuration/01-config.md @@ -0,0 +1,380 @@ +--- +sidebar_position: 1 +description: Primary configuration file to describe the development environment for your blockchain. +title: Configuration File Documentation +--- + +# Configuration File Reference + +After scaffolding a blockchain with IGNITE® CLI, you will find a configuration file at the root of your newly created directory. + +The `config.yml` file generated in your blockchain folder uses key-value pairs +to describe the development environment for your blockchain. + +Only a default set of parameters is provided. If more nuanced configuration is +required, you can add these parameters to the `config.yml` file. + +## Genesis + +The genesis file is the initial block of your blockchain. It is required to launch a chain because it contains important +information such as token balances and modules' state. +By default, genesis is stored at `$DATA_DIR/config/genesis.json`. 
+
+Since the genesis file is frequently reinitialized during development, you can persistently set options using the
+`genesis` property in your `config.yml`:
+
+```yml
+genesis:
+  app_state:
+    staking:
+      params:
+        bond_denom: "denom"
+```
+
+To discover which properties the genesis file supports, initialize a chain and inspect the generated genesis file in the
+data directory.
+
+### Overriding Genesis Parameters (e.g., chain_id, balances, etc.)
+
+You may need to customize specific parameters in the genesis file, such as `chain_id`, token balances, module
+parameters, or custom state.
+
+To override genesis values with IGNITE® CLI, persistently set overrides in the `genesis` property of your `config.yml`.
+Any YAML structure under `genesis` will be merged into the generated `genesis.json` during initialization.
+
+E.g., changing `chain_id` and `staking` parameters:
+
+```yml
+genesis:
+  chain_id: "my-custom-chain"
+  app_state:
+    staking:
+      params:
+        bond_denom: "mytoken"
+    bank:
+      balances:
+        - address: "cosmos1..."
+          coins:
+            - denom: "mytoken"
+              amount: "1000000"
+```
+
+- `chain_id`: Sets the chain ID for your blockchain.
+- `app_state`: Allows you to modify module states (e.g., staking, bank, etc.).
+
+> ⚠️ If you set `chain_id` in the `genesis`, it will persist across `ignite chain init` or `ignite chain serve` runs.
+
+The `genesis` property supports deep merging and can override any field present in the generated genesis file.
+For more complex setups, you can use the `include` field in `config.yml` to split overrides into multiple files.
+
+## Validation
+
+IGNITE® uses the `validation` field to determine the kind of validation
+of your blockchain. There are currently two supported kinds of validation:
+
+- `sovereign` which is the standard kind of validation where your blockchain
+  has its own validator set. This is the default value when this field is not
+  in the config file.
+
+- `consumer` indicates your blockchain is a consumer chain, in the sense of
+  Replicated Security. That means it doesn't have a validator set, but
+  inherits that of a provider chain.
+
+While the `sovereign` chain is the default validation when you run the `ignite scaffold
+chain`, to scaffold a consumer chain, you have to run `ignite scaffold chain
+--consumer`.
+
+This field is, at this time of writing, only used by IGNITE® at the genesis
+generation step, because the genesis of a sovereign chain and a consumer chain
+are different.
+
+## Accounts
+
+A list of user accounts created during genesis of the blockchain.
+
+```yml
+accounts:
+  - name: alice
+    coins: [ '20000token', '200000000stake' ]
+  - name: bob
+    coins: [ '10000token', '100000000stake' ]
+```
+
+IGNITE® uses information from `accounts` when initializing the chain with `ignite
+chain init` and `ignite chain start`. In the example above IGNITE® will add two
+accounts to the `genesis.json` file of the chain.
+
+`name` is a local name of a key pair associated with an account. Once the chain
+is initialized and started, you will be able to use `name` when signing
+transactions. With the configuration above, you'd be able to sign transactions
+both with Alice's and Bob's accounts like so `exampled tx bank send ... --from
+alice`.
+
+`coins` is a list of token balances for the account. If a token denomination is
+in this list, it will exist in the genesis balance and will be a valid token.
+When initialized with the config file above, a chain will only have two accounts
+at genesis (Alice and Bob) and two native tokens (with denominations `token` and
+`stake`).
+
+By default, every time a chain is re-initialized, IGNITE® will create a new key
+pair for each account. So even though the account name can remain the same
+(`bob`), after every chain reinitialization it will have a different mnemonic and address.
+ +If you want an account to have a specific address, provide the `address` field +with a valid bech32 address. The prefix (by default, `cosmos`) should match the +one expected by your chain. When an account is provided with an `address` a key +pair will not be generated, because it's impossible to derive a key from an +address. An account with a given address will be added to the genesis file (with +an associated token balance), but because there is no key pair, you will not be +able to broadcast transactions from that address. This is useful when you have +generated a key pair outside of IGNITE® (for example, using your chain's CLI or +in an extension wallet) and want to have a token balance associated with the +address of this key pair. + +```yml +accounts: + - name: bob + coins: [ '20000token', '200000000stake' ] + address: cosmos1s39200s6v4c96ml2xzuh389yxpd0guk2mzn3mz +``` + +If you want an account to be initialized from a specific mnemonic, provide the +`mnemonic` field with a valid mnemonic. A private key, a public key and an +address will be derived from a mnemonic. + +```yml +accounts: + - name: bob + coins: [ '20000token', '200000000stake' ] + mnemonic: cargo ramp supreme review change various throw air figure humble soft steel slam pole betray inhale already dentist enough away office apple sample glue +``` + +You cannot have both `address` and `mnemonic` defined for a single account. + +Some accounts are used as validator accounts (see `validators` section). +Validator accounts cannot have an `address` field, because IGNITE® needs to be +able to derive a private key (either from a random mnemonic or from a specific +one provided in the `mnemonic` field). Validator accounts should have enough +tokens of the staking denomination for self-delegation. 
+ +By default, the `alice` account is used as a validator account, its key is +derived from a mnemonic generated randomly at genesis, the staking denomination +is `stake`, and this account has enough `stake` for self-delegation. + +If your chain is using its own +[cointype](https://github.com/satoshilabs/slips/blob/master/slip-0044.md), you +can use the `cointype` field to provide the integer value + +```yml +accounts: + - name: bob + coins: [ '20000token', '200000000stake' ] + cointype: 7777777 +``` + +## Validators + +Commands like `ignite chain init` and `ignite chain serve` initialize and launch +a validator node for development purposes. + +```yml +validators: + - name: alice + bonded: '100000000stake' +``` + +`name` refers to key name in the `accounts` list. + +`bonded` is the self-delegation amount of a validator. The `bonded` amount +should not be lower than `1000000` nor higher than the account's +balance in the `account` list. + +Validators store their node configuration files in the data directory. By +default, IGNITE® uses the name of the project as the name of the data directory, +for example, `$HOME/.example/`. To use a different path for the data directory +you can customize the `home` property. + +Configuration in the data directory is reset frequently by IGNITE®. To persist +some changes to configuration files you can use `app`, `config` and `client` +properties that correspond to `$HOME/.example/config/app.toml`, +`$HOME/.example/config/config.toml` and `$HOME/.example/config/client.toml`. + +```yml +validators: + - name: alice + bonded: '100000000stake' + home: "~/.mychain" + app: + pruning: "nothing" + config: + moniker: "mychain" + client: + output: "json" +``` + +To see which properties are available for `config.toml`, `app.toml` and +`client.toml`, initialize a chain with `ignite chain init` and open the file you +want to know more about. 
+
+Currently, IGNITE® starts only one validator node, so the first item in the
+`validators` list is used (the rest is ignored). Support for multiple validators
+is in progress.
+
+## Build
+
+The `build` property lets you customize how IGNITE® builds your chain's binary.
+
+By default, IGNITE® builds the `main` package from `cmd/PROJECT_NAME/main.go`. If
+you have more than one `main` package in your project, or you have renamed the
+directory, use the `main` property to provide the path to the `main` Go package:
+
+```yml
+build:
+  main: cmd/hello/cmd
+```
+
+IGNITE® compiles your project into a binary and uses the project's name with a
+`d` suffix as the name for the binary. To customize the binary name use the `binary`
+property:
+
+```yml
+build:
+  binary: "helloworldd"
+```
+
+To customize the linker flags used in the build process:
+
+```yml
+build:
+  ldflags: [ "-X main.Version=development", "-X main.Date=01/05/2022T19:54" ]
+```
+
+By default, custom protocol buffer (proto) files are located in the `proto`
+directory. If your project keeps proto files in a different directory, you
+should tell IGNITE® about this:
+
+```yml
+build:
+  proto:
+    path: "myproto"
+```
+
+## Faucet
+
+The faucet service sends tokens to addresses.
+
+```yml
+faucet:
+  name: bob
+  coins: [ "5token", "100000stake" ]
+```
+
+`name` refers to a key name in the `accounts` list. This is a required property.
+
+`coins` is the amount of tokens that will be sent to a user by the faucet. This
+is a required property.
+
+`coins_max` is a maximum amount of tokens that can be sent to a single address.
+To reset the token limit use the `rate_limit_window` property (in seconds).
+
+By default, the faucet works on port `4500`. To use a different port number use
+the `port` property.
+
+```yml
+faucet:
+  name: faucet
+  coins: [ "100token", "5foo" ]
+  coins_max: [ "2000token", "1000foo" ]
+  port: 4500
+  rate_limit_window: 3600
+```
+
+## Genesis
+
+Genesis file is the initial block in the blockchain. 
It is required to launch a +blockchain, because it contains important information like token balances, and +modules' state. Genesis is stored in `$DATA_DIR/config/genesis.json`. + +Since the genesis file is reinitialized frequently during development, you can +set persistent options in the `genesis` property: + +```yml +genesis: + app_state: + staking: + params: + bond_denom: "denom" +``` + +To know which properties a genesis file supports, initialize a chain and look up +the genesis file in the data directory. + +## Client code generation + +IGNITE® can generate client-side code for interacting with your chain with the +`ignite generate` set of commands. Use the following properties to customize the +paths where the client-side code is generated. + +```yml +client: + openapi: + path: "docs/static/openapi.json" + typescript: + path: "ts-client" + composables: + path: "vue/src/composables" + hooks: + path: "react/src/hooks" +``` + +## Include + +In your main `config.yml`, use the `include` field to reference other local or remote YAML files. +It allows you to split your chain configuration across multiple files, making it easier to manage and reuse configuration parts. 
+
+```yml
+version: 1
+include:
+  - "./accounts.yml"
+  - "./validators.yml"
+```
+
+Including remote files via URL or server path is also valid:
+
+```yml
+version: 1
+include:
+  - "localhost:3045/accounts.yml"
+  - "https://ignite.com/config/validators.yml"
+```
+
+#### Common Use Cases:
+
+Split your config into a base setup and an external `accounts.yml` for better separation of concerns:
+
+- `config.yml`
+```yml
+version: 1
+include:
+  - "./accounts.yml"
+client:
+  typescript:
+    path: ts-client
+```
+
+- `accounts.yml`
+```yml
+accounts:
+  - name: alice
+    coins:
+      - 20000token
+      - 200000000stake
+  - name: bob
+    coins:
+      - 20000token
+      - 200000000stake
+faucet:
+  name: alice
+  coins:
+    - 5token
+    - 100000stake
+```
diff --git a/docs/versioned_docs/version-v29/08-configuration/02-config_example.md b/docs/versioned_docs/version-v29/08-configuration/02-config_example.md
new file mode 100644
index 0000000..0063cc3
--- /dev/null
+++ b/docs/versioned_docs/version-v29/08-configuration/02-config_example.md
@@ -0,0 +1,87 @@
+---
+sidebar_position: 2
+description: Configuration File Example.
+title: Configuration File Example
+---
+
+## Configuration File Example
+
+```yaml title="config.yml"
+include: (string list) # Include incorporates a separate config.yml file directly in your current config file.
+validation: (string) # Specifies the type of validation the blockchain uses (e.g., sovereign).
+version: (uint) # Defines the configuration version number.
+build: # Contains build configuration options.
+  main: (string) # Path to the main build file.
+  binary: (string) # Path to the binary file.
+  ldflags: (string list) # List of custom linker flags for building the binary.
+  proto: # Contains proto build configuration options.
+    path: (string) # Relative path where the application's proto files are located.
+accounts: (list) # Lists the options for setting up Cosmos Accounts.
+  name: (string) # Local name associated with the Account's key pair.
+ coins: (string list) # List of token balances for the account. + mnemonic: (string) # Mnemonic phrase for the account. + address: (string) # Address of the account. + cointype: (string) # Coin type number for HD derivation (default is 118). + account_number: (string) # Account number for HD derivation (must be ≤ 2147483647). + address_index: (string) # Address index number for HD derivation (must be ≤ 2147483647). +faucet: # Configuration for the faucet. + name: (string) # Name of the faucet account. + coins: (string list) # Types and amounts of coins the faucet distributes. + coins_max: (string list) # Maximum amounts of coins that can be transferred to a single user. + rate_limit_window: (string) # Timeframe after which the limit will be refreshed. + host: (string) # Host address of the faucet server. + port: (uint) # Port number for the faucet server. + tx_fee: (string) # Tx fee the faucet needs to pay for each transaction. +client: # Configures client code generation. + typescript: # Relative path where the application's Typescript files are located. + path: (string) # Relative path where the application's Typescript files are located. + composables: # Configures Vue 3 composables code generation. + path: (string) # Relative path where the application's composable files are located. + openapi: # Configures OpenAPI spec generation for the API. + path: (string) # Relative path where the application's OpenAPI files are located. +genesis: (key/value) # Custom genesis block modifications. Follow the nesting of the genesis file here to access all the parameters. +default_denom: (string) # Default staking denom (default is stake). +validators: (list) # Contains information related to the list of validators and settings. + name: (string) # Name of the validator. + bonded: (string) # Amount staked by the validator. + app: (key/value) # Overwrites the appd's config/app.toml configurations. + config: (key/value) # Overwrites the appd's config/config.toml configurations. 
+ client: (key/value) # Overwrites the appd's config/client.toml configurations. + home: (string) # Overwrites the default home directory used for the application. + gentx: # Overwrites the appd's config/gentx.toml configurations. + amount: (string) # Amount for the current Gentx. + moniker: (string) # Optional moniker for the validator. + keyring-backend: (string) # Backend for the keyring. + chain-id: (string) # Network chain ID. + commission-max-change-rate: (string) # Maximum commission change rate percentage per day. + commission-max-rate: (string) # Maximum commission rate percentage (e.g., 0.01 = 1%). + commission-rate: (string) # Initial commission rate percentage (e.g., 0.01 = 1%). + details: (string) # Optional details about the validator. + security-contact: (string) # Optional security contact email for the validator. + website: (string) # Optional website for the validator. + account-number: (int) # Account number of the signing account (offline mode only). + broadcast-mode: (string) # Transaction broadcasting mode (sync|async|block) (default is 'sync'). + dry-run: (bool) # Simulates the transaction without actually performing it, ignoring the --gas flag. + fee-account: (string) # Account that pays the transaction fees instead of the signer. + fee: (string) # Fee to pay with the transaction (e.g.: 10uatom). + from: (string) # Name or address of the private key used to sign the transaction. + gas: (string) # Gas limit per transaction; set to 'auto' to calculate sufficient gas automatically (default is 200000). + gas-adjustment: (string) # Factor to multiply against the estimated gas (default is 1). + gas-prices: (string) # Gas prices in decimal format to determine the transaction fee (e.g., 0.1uatom). + generate-only: (bool) # Creates an unsigned transaction and writes it to STDOUT. + identity: (string) # Identity signature (e.g., UPort or Keybase). + ip: (string) # Node's public IP address (default is '192.168.1.64'). 
+ keyring-dir: (string) # Directory for the client keyring; defaults to the 'home' directory if omitted. + ledger: (bool) # Uses a connected Ledger device if true. + min-self-delegation: (string) # Minimum self-delegation required for the validator. + node: (string) # <host>:<port> for the Tendermint RPC interface (default 'tcp://localhost:26657') + node-id: (string) # Node's NodeID + note: (string) # Adds a description to the transaction (formerly --memo). + offline: (bool) # Operates in offline mode, disallowing any online functionality. + output: (string) # Output format (text|json) (default 'json'). + output-document: (string) # Writes the genesis transaction JSON document to the specified file instead of the default location. + pubkey: (string) # Protobuf JSON encoded public key of the validator. + sequence: (uint) # Sequence number of the signing account (offline mode only). + sign-mode: (string) # Chooses sign mode (direct|amino-json), an advanced feature. + timeout-height: (uint) # Sets a block timeout height to prevent the transaction from being committed past a certain height. +``` \ No newline at end of file diff --git a/docs/versioned_docs/version-v29/08-configuration/_category_.json b/docs/versioned_docs/version-v29/08-configuration/_category_.json new file mode 100644 index 0000000..e7e77b5 --- /dev/null +++ b/docs/versioned_docs/version-v29/08-configuration/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Configuration", + "link": null, + "collapsed": false +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v29/apps/01-using-apps.md b/docs/versioned_docs/version-v29/apps/01-using-apps.md new file mode 100644 index 0000000..391cba9 --- /dev/null +++ b/docs/versioned_docs/version-v29/apps/01-using-apps.md @@ -0,0 +1,44 @@ +--- +description: Using and Developing IGNITE® Apps +--- + +# Using IGNITE® Apps + +Apps offer a way to extend the functionality of the IGNITE® CLI. There are two +core concepts within apps: `Commands` and `Hooks`. 
`Commands` extend the CLI's +functionality and `Hooks` extend existing CLI command functionality. + +Apps are registered in an IGNITE® scaffolded blockchain project through the +`igniteapps.yml`, or globally through `$HOME/.ignite/apps/igniteapps.yml`. + +To use an app within your project execute the following command inside the +project directory: + +```sh +ignite app install github.com/project/cli-app +``` + +The app will be available only when running `ignite` inside the project +directory. + +To use an app globally on the other hand, execute the following command: + +```sh +ignite app install -g github.com/project/cli-app +``` + +The command will compile the app and make it immediately available to the +`ignite` command lists. + +Discover recommended Apps in the [IGNITE® Apps Marketplace](https://ignite.com/marketplace). + +## Listing installed apps + +When in an ignite scaffolded blockchain you can use the command `ignite app +list` to list all IGNITE® Apps and their statuses. + +## Updating apps + +When an app in a remote repository releases updates, running `ignite app +update <path/to/app>` will update an specific app declared in your +project's `config.yml`. diff --git a/docs/versioned_docs/version-v29/apps/02-developing-apps.md b/docs/versioned_docs/version-v29/apps/02-developing-apps.md new file mode 100644 index 0000000..0a7cbfc --- /dev/null +++ b/docs/versioned_docs/version-v29/apps/02-developing-apps.md @@ -0,0 +1,258 @@ +--- +description: Using and Developing IGNITE® Apps +--- + +# Developing IGNITE® Apps + +It's easy to create an app and use it immediately in your project. First +choose a directory outside your project and run: + +```sh +$ ignite app scaffold my-app +``` + +This will create a new directory `my-app` that contains the app's code +and will output some instructions about how to use your app with the +`ignite` command. 
An app path can be a local directory which has several +benefits: + +- You don't need to use a Git repository during the development of your app. +- The app is recompiled each time you run the `ignite` binary in your + project if the source files are older than the app binary. + +Thus, app development workflow is as simple as: + +1. Scaffold an app with `ignite app scaffold my-app` +2. Add it to your config via `ignite app install -g /path/to/my-app` +3. Update app code +4. Run `ignite my-app` binary to compile and run the app +5. Go back to 3 + +Once your app is ready you can publish it to a Git repository and the +community can use it by calling `ignite app install github.com/foo/my-app`. + +Now let's detail how to update your app's code. + +## App interface + +Under the hood IGNITE® Apps are implemented using a plugin system based on +`github.com/hashicorp/go-plugin`. + +All apps must implement a predefined interface: + +```go title=ignite/services/plugin/interface.go +type Interface interface { + // Manifest declares app's Command(s) and Hook(s). + Manifest(context.Context) (*Manifest, error) + + // Execute will be invoked by ignite when an app Command is executed. + // It is global for all commands declared in Manifest, if you have declared + // multiple commands, use cmd.Path to distinguish them. + // The ClientAPI argument can be used by plugins to get chain app analysis info. + Execute(context.Context, *ExecutedCommand, ClientAPI) error + + // ExecuteHookPre is invoked by ignite when a command specified by the Hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + // The ClientAPI argument can be used by plugins to get chain app analysis info. + ExecuteHookPre(context.Context, *ExecutedHook, ClientAPI) error + + // ExecuteHookPost is invoked by ignite when a command specified by the hook + // path is invoked. 
+ // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + // The ClientAPI argument can be used by plugins to get chain app analysis info. + ExecuteHookPost(context.Context, *ExecutedHook, ClientAPI) error + + // ExecuteHookCleanUp is invoked by ignite when a command specified by the + // hook path is invoked. Unlike ExecuteHookPost, it is invoked regardless of + // execution status of the command and hooks. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + // The ClientAPI argument can be used by plugins to get chain app analysis info. + ExecuteHookCleanUp(context.Context, *ExecutedHook, ClientAPI) error +} +``` + +The scaffolded code already implements this interface, you just need to update +the method's body. + +## Defining app's manifest + +Here is the `Manifest` proto message definition: + +```protobuf title=proto/ignite/services/plugin/grpc/v1/types.proto +message Manifest { + // App name. + string name = 1; + + // Commands contains the commands that will be added to the list of ignite commands. + // Each commands are independent, for nested commands use the inner Commands field. + bool shared_host = 2; + + // Hooks contains the hooks that will be attached to the existing ignite commands. + repeated Command commands = 3; + + // Enables sharing a single app server across all running instances of an IGNITE® App. + // Useful if an app adds or extends long running commands. + // + // Example: if an app defines a hook on `ignite chain serve`, a server is instantiated + // when the command is run. Now if you want to interact with that instance + // from commands defined in that app, you need to enable shared host, or else the + // commands will just instantiate separate app servers. 
+	//
+	// When enabled, all apps of the same path loaded from the same configuration will
+	// attach its RPC client to an existing RPC server.
+	//
+	// If an app instance has no other running app servers, it will create one and it
+	// will be the host.
+	repeated Hook hooks = 4;
+}
+```
+
+In your app's code the `Manifest` method already returns a predefined
+`Manifest` struct as an example. You must adapt it according to your need.
+
+If your app adds one or more new commands to `ignite`, add them to the
+`Commands` field.
+
+If your app adds features to existing commands, add them to the `Hooks` field.
+
+Of course an app can declare both `Commands` *and* `Hooks`.
+
+An app may also share a host process by setting `SharedHost` to `true`.
+`SharedHost` is desirable if an app hooks into, or declares long running commands.
+Commands executed from the same app context interact with the same app server.
+Allowing all executing commands to share the same server instance, giving shared execution context.
+
+## Adding new commands
+
+App commands are custom commands added to IGNITE® CLI by an installed app.
+Commands can use any path not defined already by the CLI.
+
+For instance, let's say your app adds a new `oracle` command to `ignite
+scaffold`, then the `Manifest` method will look like:
+
+```go
+func (app) Manifest(context.Context) (*plugin.Manifest, error) {
+	return &plugin.Manifest{
+		Name: "oracle",
+		Commands: []*plugin.Command{
+			{
+				Use:   "oracle [name]",
+				Short: "Scaffold an oracle module",
+				Long:  "Long description goes here...",
+				// Optional flags is required
+				Flags: []*plugin.Flag{
+					{Name: "source", Type: plugin.FlagTypeString, Usage: "the oracle source"},
+				},
+				// Attach the command to `scaffold`
+				PlaceCommandUnder: "ignite scaffold",
+			},
+		},
+	}, nil
+}
+```
+
+To update the app execution, you have to change the `Execute` command. 
For
+example:
+
+```go
+func (app) Execute(_ context.Context, cmd *plugin.ExecutedCommand, _ plugin.ClientAPI) error {
+	if len(cmd.Args) == 0 {
+		return fmt.Errorf("oracle name missing")
+	}
+
+	flags, err := cmd.NewFlags()
+	if err != nil {
+		return err
+	}
+
+	var (
+		name      = cmd.Args[0]
+		source, _ = flags.GetString("source")
+	)
+
+	// Read chain information
+	c, err := getChain(cmd)
+	if err != nil {
+		return err
+	}
+
+	//...
+}
+```
+
+Then, run `ignite scaffold oracle` to execute the app.
+
+## Adding hooks
+
+App `Hooks` allow existing CLI commands to be extended with new
+functionality. Hooks are useful when you want to streamline functionality
+without needing to run custom scripts after or before a command has been run.
+This can streamline processes that were once error-prone or forgotten
+altogether.
+
+The following are hooks defined which will run on a registered `ignite`
+command:
+
+| Name     | Description                                                                                                                                            |
+| -------- | ------------------------------------------------------------------------------------------------------------------------------------------------------ |
+| Pre      | Runs before a command's main functionality is invoked in the `PreRun` scope                                                                            |
+| Post     | Runs after a command's main functionality is invoked in the `PostRun` scope                                                                            |
+| Clean Up | Runs after a command's main functionality is invoked. If the command returns an error it will run before the error is returned to guarantee execution. |
+
+*Note*: If a hook causes an error in the pre step the command will not run
+resulting in `post` and `clean up` not executing.
+
+The following is an example of a `hook` definition. 
+ +```go +func (app) Manifest(context.Context) (*plugin.Manifest, error) { + return &plugin.Manifest{ + Name: "oracle", + Hooks: []*plugin.Hook{ + { + Name: "my-hook", + PlaceHookOn: "ignite chain build", + }, + }, + }, nil +} + +func (app) ExecuteHookPre(_ context.Context, h *plugin.ExecutedHook, _ plugin.ClientAPI) error { + switch h.Hook.GetName() { + case "my-hook": + fmt.Println("I'm executed before ignite chain build") + default: + return fmt.Errorf("hook not defined") + } + return nil +} + +func (app) ExecuteHookPost(_ context.Context, h *plugin.ExecutedHook, _ plugin.ClientAPI) error { + switch h.Hook.GetName() { + case "my-hook": + fmt.Println("I'm executed after ignite chain build (if no error)") + default: + return fmt.Errorf("hook not defined") + } + return nil +} + +func (app) ExecuteHookCleanUp(_ context.Context, h *plugin.ExecutedHook, _ plugin.ClientAPI) error { + switch h.Hook.GetName() { + case "my-hook": + fmt.Println("I'm executed after ignite chain build (regardless errors)") + default: + return fmt.Errorf("hook not defined") + } + return nil +} +``` + +Above we can see a similar definition to `Command` where a hook has a `Name` +and a `PlaceHookOn`. You'll notice that the `Execute*` methods map directly to +each life cycle of the hook. All hooks defined within the app will invoke these +methods. diff --git a/docs/versioned_docs/version-v29/apps/_category_.json b/docs/versioned_docs/version-v29/apps/_category_.json new file mode 100644 index 0000000..0c4b5b3 --- /dev/null +++ b/docs/versioned_docs/version-v29/apps/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "IGNITE® Apps", + "position": 7, + "link": null +} diff --git a/docs/versioned_sidebars/version-v0.25-sidebars.json b/docs/versioned_sidebars/version-v0.25-sidebars.json new file mode 100644 index 0000000..b50f4f2 --- /dev/null +++ b/docs/versioned_sidebars/version-v0.25-sidebars.json @@ -0,0 +1,25 @@ +{ + "tutorialSidebar": [ + { + "type": "autogenerated", + "dirName": "." 
+ }, + { + "type": "category", + "label": "Resources", + "collapsed": false, + "items": [ + { + "type": "link", + "label": "Ignite CLI on Github", + "href": "https://github.com/ignite/cli" + }, + { + "type": "link", + "label": "Cosmos SDK Docs", + "href": "https://docs.cosmos.network/" + } + ] + } + ] +} diff --git a/docs/versioned_sidebars/version-v0.26-sidebars.json b/docs/versioned_sidebars/version-v0.26-sidebars.json new file mode 100644 index 0000000..2eeb2e3 --- /dev/null +++ b/docs/versioned_sidebars/version-v0.26-sidebars.json @@ -0,0 +1,25 @@ +{ + "tutorialSidebar": [ + { + "type": "autogenerated", + "dirName": "." + }, + { + "type": "category", + "label": "Resources", + "collapsed": true, + "items": [ + { + "type": "link", + "label": "Ignite CLI on Github", + "href": "https://github.com/ignite/cli" + }, + { + "type": "link", + "label": "Cosmos SDK Docs", + "href": "https://docs.cosmos.network/" + } + ] + } + ] +} diff --git a/docs/versioned_sidebars/version-v0.27-sidebars.json b/docs/versioned_sidebars/version-v0.27-sidebars.json new file mode 100644 index 0000000..2eeb2e3 --- /dev/null +++ b/docs/versioned_sidebars/version-v0.27-sidebars.json @@ -0,0 +1,25 @@ +{ + "tutorialSidebar": [ + { + "type": "autogenerated", + "dirName": "." + }, + { + "type": "category", + "label": "Resources", + "collapsed": true, + "items": [ + { + "type": "link", + "label": "Ignite CLI on Github", + "href": "https://github.com/ignite/cli" + }, + { + "type": "link", + "label": "Cosmos SDK Docs", + "href": "https://docs.cosmos.network/" + } + ] + } + ] +} diff --git a/docs/versioned_sidebars/version-v28-sidebars.json b/docs/versioned_sidebars/version-v28-sidebars.json new file mode 100644 index 0000000..2eeb2e3 --- /dev/null +++ b/docs/versioned_sidebars/version-v28-sidebars.json @@ -0,0 +1,25 @@ +{ + "tutorialSidebar": [ + { + "type": "autogenerated", + "dirName": "." 
+ }, + { + "type": "category", + "label": "Resources", + "collapsed": true, + "items": [ + { + "type": "link", + "label": "Ignite CLI on Github", + "href": "https://github.com/ignite/cli" + }, + { + "type": "link", + "label": "Cosmos SDK Docs", + "href": "https://docs.cosmos.network/" + } + ] + } + ] +} diff --git a/docs/versioned_sidebars/version-v29-sidebars.json b/docs/versioned_sidebars/version-v29-sidebars.json new file mode 100644 index 0000000..be071cc --- /dev/null +++ b/docs/versioned_sidebars/version-v29-sidebars.json @@ -0,0 +1,30 @@ +{ + "tutorialSidebar": [ + { + "type": "autogenerated", + "dirName": "." + }, + { + "type": "category", + "label": "Resources", + "collapsed": true, + "items": [ + { + "type": "link", + "label": "IGNITE® CLI on Github", + "href": "https://github.com/ignite/cli" + }, + { + "type": "link", + "label": "IGNITE® Tutorials", + "href": "https://tutorials.ignite.com/" + }, + { + "type": "link", + "label": "Cosmos SDK Docs", + "href": "https://docs.cosmos.network/" + } + ] + } + ] +} diff --git a/docs/versions.json b/docs/versions.json new file mode 100644 index 0000000..e42083d --- /dev/null +++ b/docs/versions.json @@ -0,0 +1,7 @@ +[ + "v29", + "v28", + "v0.27", + "v0.26", + "v0.25" +] diff --git a/docs/yarn.lock b/docs/yarn.lock new file mode 100644 index 0000000..826fa5d --- /dev/null +++ b/docs/yarn.lock @@ -0,0 +1,8101 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@algolia/autocomplete-core@1.7.4": + version "1.7.4" + resolved "https://registry.npmjs.org/@algolia/autocomplete-core/-/autocomplete-core-1.7.4.tgz" + integrity sha512-daoLpQ3ps/VTMRZDEBfU8ixXd+amZcNJ4QSP3IERGyzqnL5Ch8uSRFt/4G8pUvW9c3o6GA4vtVv4I4lmnkdXyg== + dependencies: + "@algolia/autocomplete-shared" "1.7.4" + +"@algolia/autocomplete-preset-algolia@1.7.4": + version "1.7.4" + resolved "https://registry.npmjs.org/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.7.4.tgz" + integrity sha512-s37hrvLEIfcmKY8VU9LsAXgm2yfmkdHT3DnA3SgHaY93yjZ2qL57wzb5QweVkYuEBZkT2PIREvRoLXC2sxTbpQ== + dependencies: + "@algolia/autocomplete-shared" "1.7.4" + +"@algolia/autocomplete-shared@1.7.4": + version "1.7.4" + resolved "https://registry.npmjs.org/@algolia/autocomplete-shared/-/autocomplete-shared-1.7.4.tgz" + integrity sha512-2VGCk7I9tA9Ge73Km99+Qg87w0wzW4tgUruvWAn/gfey1ZXgmxZtyIRBebk35R1O8TbK77wujVtCnpsGpRy1kg== + +"@algolia/cache-browser-local-storage@4.16.0": + version "4.16.0" + resolved "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.16.0.tgz" + integrity sha512-jVrk0YB3tjOhD5/lhBtYCVCeLjZmVpf2kdi4puApofytf/R0scjWz0GdozlW4HhU+Prxmt/c9ge4QFjtv5OAzQ== + dependencies: + "@algolia/cache-common" "4.16.0" + +"@algolia/cache-common@4.16.0": + version "4.16.0" + resolved "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.16.0.tgz" + integrity sha512-4iHjkSYQYw46pITrNQgXXhvUmcekI8INz1m+SzmqLX8jexSSy4Ky4zfGhZzhhhLHXUP3+x/PK/c0qPjxEvRwKQ== + +"@algolia/cache-in-memory@4.16.0": + version "4.16.0" + resolved "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.16.0.tgz" + integrity sha512-p7RYykvA6Ip6QENxrh99nOD77otVh1sJRivcgcVpnjoZb5sIN3t33eUY1DpB9QSBizcrW+qk19rNkdnZ43a+PQ== + dependencies: + "@algolia/cache-common" "4.16.0" + +"@algolia/client-account@4.16.0": + version "4.16.0" + resolved 
"https://registry.npmjs.org/@algolia/client-account/-/client-account-4.16.0.tgz" + integrity sha512-eydcfpdIyuWoKgUSz5iZ/L0wE/Wl7958kACkvTHLDNXvK/b8Z1zypoJavh6/km1ZNQmFpeYS2jrmq0kUSFn02w== + dependencies: + "@algolia/client-common" "4.16.0" + "@algolia/client-search" "4.16.0" + "@algolia/transporter" "4.16.0" + +"@algolia/client-analytics@4.16.0": + version "4.16.0" + resolved "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.16.0.tgz" + integrity sha512-cONWXH3BfilgdlCofUm492bJRWtpBLVW/hsUlfoFtiX1u05xoBP7qeiDwh9RR+4pSLHLodYkHAf5U4honQ55Qg== + dependencies: + "@algolia/client-common" "4.16.0" + "@algolia/client-search" "4.16.0" + "@algolia/requester-common" "4.16.0" + "@algolia/transporter" "4.16.0" + +"@algolia/client-common@4.16.0": + version "4.16.0" + resolved "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.16.0.tgz" + integrity sha512-QVdR4019ukBH6f5lFr27W60trRxQF1SfS1qo0IP6gjsKhXhUVJuHxOCA6ArF87jrNkeuHEoRoDU+GlvaecNo8g== + dependencies: + "@algolia/requester-common" "4.16.0" + "@algolia/transporter" "4.16.0" + +"@algolia/client-personalization@4.16.0": + version "4.16.0" + resolved "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.16.0.tgz" + integrity sha512-irtLafssDGPuhYqIwxqOxiWlVYvrsBD+EMA1P9VJtkKi3vSNBxiWeQ0f0Tn53cUNdSRNEssfoEH84JL97SV2SQ== + dependencies: + "@algolia/client-common" "4.16.0" + "@algolia/requester-common" "4.16.0" + "@algolia/transporter" "4.16.0" + +"@algolia/client-search@4.16.0": + version "4.16.0" + resolved "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.16.0.tgz" + integrity sha512-xsfrAE1jO/JDh1wFrRz+alVyW+aA6qnkzmbWWWZWEgVF3EaFqzIf9r1l/aDtDdBtNTNhX9H3Lg31+BRtd5izQA== + dependencies: + "@algolia/client-common" "4.16.0" + "@algolia/requester-common" "4.16.0" + "@algolia/transporter" "4.16.0" + +"@algolia/events@^4.0.1": + version "4.0.1" + resolved "https://registry.npmjs.org/@algolia/events/-/events-4.0.1.tgz" + integrity 
sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ== + +"@algolia/logger-common@4.16.0": + version "4.16.0" + resolved "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.16.0.tgz" + integrity sha512-U9H8uCzSDuePJmbnjjTX21aPDRU6x74Tdq3dJmdYu2+pISx02UeBJm4kSgc9RW5jcR5j35G9gnjHY9Q3ngWbyQ== + +"@algolia/logger-console@4.16.0": + version "4.16.0" + resolved "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.16.0.tgz" + integrity sha512-+qymusiM+lPZKrkf0tDjCQA158eEJO2IU+Nr/sJ9TFyI/xkFPjNPzw/Qbc8Iy/xcOXGlc6eMgmyjtVQqAWq6UA== + dependencies: + "@algolia/logger-common" "4.16.0" + +"@algolia/requester-browser-xhr@4.16.0": + version "4.16.0" + resolved "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.16.0.tgz" + integrity sha512-gK+kvs6LHl/PaOJfDuwjkopNbG1djzFLsVBklGBsSU6h6VjFkxIpo6Qq80IK14p9cplYZfhfaL12va6Q9p3KVQ== + dependencies: + "@algolia/requester-common" "4.16.0" + +"@algolia/requester-common@4.16.0": + version "4.16.0" + resolved "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.16.0.tgz" + integrity sha512-3Zmcs/iMubcm4zqZ3vZG6Zum8t+hMWxGMzo0/uY2BD8o9q5vMxIYI0c4ocdgQjkXcix189WtZNkgjSOBzSbkdw== + +"@algolia/requester-node-http@4.16.0": + version "4.16.0" + resolved "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.16.0.tgz" + integrity sha512-L8JxM2VwZzh8LJ1Zb8TFS6G3icYsCKZsdWW+ahcEs1rGWmyk9SybsOe1MLnjonGBaqPWJkn9NjS7mRdjEmBtKA== + dependencies: + "@algolia/requester-common" "4.16.0" + +"@algolia/transporter@4.16.0": + version "4.16.0" + resolved "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.16.0.tgz" + integrity sha512-H9BVB2EAjT65w7XGBNf5drpsW39x2aSZ942j4boSAAJPPlLmjtj5IpAP7UAtsV8g9Beslonh0bLa1XGmE/P0BA== + dependencies: + "@algolia/cache-common" "4.16.0" + "@algolia/logger-common" "4.16.0" + "@algolia/requester-common" "4.16.0" + +"@ampproject/remapping@^2.2.0": + version 
"2.2.0" + resolved "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz" + integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== + dependencies: + "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.26.2", "@babel/code-frame@^7.8.3": + version "7.26.2" + resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz" + integrity sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ== + dependencies: + "@babel/helper-validator-identifier" "^7.25.9" + js-tokens "^4.0.0" + picocolors "^1.0.0" + +"@babel/compat-data@^7.17.7", "@babel/compat-data@^7.20.1", "@babel/compat-data@^7.20.5", "@babel/compat-data@^7.26.8": + version "7.26.8" + resolved "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.8.tgz" + integrity sha512-oH5UPLMWR3L2wEFLnFJ1TZXqHufiTKAiLfqw5zkhS4dKXLJ10yVztfil/twG8EDTA4F/tvVNw9nOl4ZMslB8rQ== + +"@babel/core@7.12.9": + version "7.12.9" + resolved "https://registry.npmjs.org/@babel/core/-/core-7.12.9.tgz" + integrity sha512-gTXYh3M5wb7FRXQy+FErKFAv90BnlOuNn1QkCK2lREoPAjrQCO49+HVSrFoe5uakFAF5eenS75KbO2vQiLrTMQ== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/generator" "^7.12.5" + "@babel/helper-module-transforms" "^7.12.1" + "@babel/helpers" "^7.12.5" + "@babel/parser" "^7.12.7" + "@babel/template" "^7.12.7" + "@babel/traverse" "^7.12.9" + "@babel/types" "^7.12.7" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.1" + json5 "^2.1.2" + lodash "^4.17.19" + resolve "^1.3.2" + semver "^5.4.1" + source-map "^0.5.0" + +"@babel/core@^7.18.6", "@babel/core@^7.19.6": + version "7.26.10" + resolved "https://registry.npmjs.org/@babel/core/-/core-7.26.10.tgz" + integrity sha512-vMqyb7XCDMPvJFFOaT9kxtiRh42GwlZEg1/uIgtZshS5a/8OaduUfCi7kynKgc3Tw/6Uo2D+db9qBttghhmxwQ== + 
dependencies: + "@ampproject/remapping" "^2.2.0" + "@babel/code-frame" "^7.26.2" + "@babel/generator" "^7.26.10" + "@babel/helper-compilation-targets" "^7.26.5" + "@babel/helper-module-transforms" "^7.26.0" + "@babel/helpers" "^7.26.10" + "@babel/parser" "^7.26.10" + "@babel/template" "^7.26.9" + "@babel/traverse" "^7.26.10" + "@babel/types" "^7.26.10" + convert-source-map "^2.0.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.3" + semver "^6.3.1" + +"@babel/generator@^7.12.5", "@babel/generator@^7.18.7", "@babel/generator@^7.26.10", "@babel/generator@^7.27.0": + version "7.27.0" + resolved "https://registry.npmjs.org/@babel/generator/-/generator-7.27.0.tgz" + integrity sha512-VybsKvpiN1gU1sdMZIp7FcqphVVKEwcuj02x73uvcHE0PTihx1nlBcowYWhDwjpoAXRv43+gDzyggGnn1XZhVw== + dependencies: + "@babel/parser" "^7.27.0" + "@babel/types" "^7.27.0" + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.25" + jsesc "^3.0.2" + +"@babel/helper-annotate-as-pure@^7.16.0", "@babel/helper-annotate-as-pure@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz" + integrity sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-builder-binary-assignment-operator-visitor@^7.18.6": + version "7.18.9" + resolved "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.9.tgz" + integrity sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw== + dependencies: + "@babel/helper-explode-assignable-expression" "^7.18.6" + "@babel/types" "^7.18.9" + +"@babel/helper-compilation-targets@^7.17.7", "@babel/helper-compilation-targets@^7.18.9", "@babel/helper-compilation-targets@^7.20.0", "@babel/helper-compilation-targets@^7.20.7", 
"@babel/helper-compilation-targets@^7.26.5": + version "7.27.0" + resolved "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.0.tgz" + integrity sha512-LVk7fbXml0H2xH34dFzKQ7TDZ2G4/rVTOrq9V+icbbadjbVxxeFeDsNHv2SrZeWoA+6ZiTyWYWtScEIW07EAcA== + dependencies: + "@babel/compat-data" "^7.26.8" + "@babel/helper-validator-option" "^7.25.9" + browserslist "^4.24.0" + lru-cache "^5.1.1" + semver "^6.3.1" + +"@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.21.0": + version "7.21.0" + resolved "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.21.0.tgz" + integrity sha512-Q8wNiMIdwsv5la5SPxNYzzkPnjgC0Sy0i7jLkVOCdllu/xcVNkr3TeZzbHBJrj+XXRqzX5uCyCoV9eu6xUG7KQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.21.0" + "@babel/helper-member-expression-to-functions" "^7.21.0" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/helper-replace-supers" "^7.20.7" + "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" + "@babel/helper-split-export-declaration" "^7.18.6" + +"@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.20.5": + version "7.21.0" + resolved "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.21.0.tgz" + integrity sha512-N+LaFW/auRSWdx7SHD/HiARwXQju1vXTW4fKr4u5SgBUTm51OKEjKgj+cs00ggW3kEvNqwErnlwuq7Y3xBe4eg== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + regexpu-core "^5.3.1" + +"@babel/helper-define-polyfill-provider@^0.3.3": + version "0.3.3" + resolved "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.3.tgz" + integrity sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww== + 
dependencies: + "@babel/helper-compilation-targets" "^7.17.7" + "@babel/helper-plugin-utils" "^7.16.7" + debug "^4.1.1" + lodash.debounce "^4.0.8" + resolve "^1.14.2" + semver "^6.1.2" + +"@babel/helper-environment-visitor@^7.18.9": + version "7.24.7" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.24.7.tgz#4b31ba9551d1f90781ba83491dd59cf9b269f7d9" + integrity sha512-DoiN84+4Gnd0ncbBOM9AZENV4a5ZiL39HYMyZJGZ/AZEykHYdJw0wW3kdcsh9/Kn+BRXHLkkklZ51ecPKmI1CQ== + dependencies: + "@babel/types" "^7.24.7" + +"@babel/helper-explode-assignable-expression@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz" + integrity sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-function-name@^7.18.9", "@babel/helper-function-name@^7.19.0", "@babel/helper-function-name@^7.21.0": + version "7.24.7" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.24.7.tgz#75f1e1725742f39ac6584ee0b16d94513da38dd2" + integrity sha512-FyoJTsj/PEUWu1/TYRiXTIHc8lbw+TDYkZuoE43opPS5TrI7MyONBE1oNvfguEXAD9yhQRrVBnXdXzSLQl9XnA== + dependencies: + "@babel/template" "^7.24.7" + "@babel/types" "^7.24.7" + +"@babel/helper-hoist-variables@^7.18.6": + version "7.24.7" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.24.7.tgz#b4ede1cde2fd89436397f30dc9376ee06b0f25ee" + integrity sha512-MJJwhkoGy5c4ehfoRyrJ/owKeMl19U54h27YYftT0o2teQ3FJ3nQUf/I3LlJsX4l3qlw7WRXUmiyajvHXoTubQ== + dependencies: + "@babel/types" "^7.24.7" + +"@babel/helper-member-expression-to-functions@^7.20.7", "@babel/helper-member-expression-to-functions@^7.21.0": + version "7.21.0" + resolved 
"https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.21.0.tgz" + integrity sha512-Muu8cdZwNN6mRRNG6lAYErJ5X3bRevgYR2O8wN0yn7jJSnGDu6eG59RfT29JHxGUovyfrh6Pj0XzmR7drNVL3Q== + dependencies: + "@babel/types" "^7.21.0" + +"@babel/helper-module-imports@^7.0.0", "@babel/helper-module-imports@^7.16.0", "@babel/helper-module-imports@^7.18.6", "@babel/helper-module-imports@^7.25.9": + version "7.25.9" + resolved "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz" + integrity sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw== + dependencies: + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.25.9" + +"@babel/helper-module-transforms@^7.12.1", "@babel/helper-module-transforms@^7.18.6", "@babel/helper-module-transforms@^7.20.11", "@babel/helper-module-transforms@^7.21.2", "@babel/helper-module-transforms@^7.26.0": + version "7.26.0" + resolved "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz" + integrity sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw== + dependencies: + "@babel/helper-module-imports" "^7.25.9" + "@babel/helper-validator-identifier" "^7.25.9" + "@babel/traverse" "^7.25.9" + +"@babel/helper-optimise-call-expression@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz" + integrity sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-plugin-utils@7.10.4": + version "7.10.4" + resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz" + integrity sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg== + +"@babel/helper-plugin-utils@^7.0.0", 
"@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.18.9", "@babel/helper-plugin-utils@^7.19.0", "@babel/helper-plugin-utils@^7.20.2", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": + version "7.20.2" + resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.20.2.tgz" + integrity sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ== + +"@babel/helper-remap-async-to-generator@^7.18.9": + version "7.18.9" + resolved "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.9.tgz" + integrity sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-wrap-function" "^7.18.9" + "@babel/types" "^7.18.9" + +"@babel/helper-replace-supers@^7.18.6", "@babel/helper-replace-supers@^7.20.7": + version "7.20.7" + resolved "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.20.7.tgz" + integrity sha512-vujDMtB6LVfNW13jhlCrp48QNslK6JXi7lQG736HVbHz/mbf4Dc7tIRh1Xf5C0rF7BP8iiSxGMCmY6Ci1ven3A== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-member-expression-to-functions" "^7.20.7" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/template" "^7.20.7" + "@babel/traverse" "^7.20.7" + "@babel/types" "^7.20.7" + +"@babel/helper-simple-access@^7.20.2": + version "7.20.2" + resolved "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.20.2.tgz" + integrity sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA== + dependencies: + "@babel/types" "^7.20.2" + 
+"@babel/helper-skip-transparent-expression-wrappers@^7.20.0": + version "7.20.0" + resolved "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.20.0.tgz" + integrity sha512-5y1JYeNKfvnT8sZcK9DVRtpTbGiomYIHviSP3OQWmDPU3DeH4a1ZlT/N2lyQ5P8egjcRaT/Y9aNqUxK0WsnIIg== + dependencies: + "@babel/types" "^7.20.0" + +"@babel/helper-split-export-declaration@^7.18.6": + version "7.24.7" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.7.tgz#83949436890e07fa3d6873c61a96e3bbf692d856" + integrity sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA== + dependencies: + "@babel/types" "^7.24.7" + +"@babel/helper-string-parser@^7.25.9": + version "7.25.9" + resolved "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz" + integrity sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA== + +"@babel/helper-validator-identifier@^7.19.1", "@babel/helper-validator-identifier@^7.25.9": + version "7.25.9" + resolved "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz" + integrity sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ== + +"@babel/helper-validator-option@^7.18.6", "@babel/helper-validator-option@^7.21.0", "@babel/helper-validator-option@^7.25.9": + version "7.25.9" + resolved "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz" + integrity sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw== + +"@babel/helper-wrap-function@^7.18.9": + version "7.20.5" + resolved "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.20.5.tgz" + integrity 
sha512-bYMxIWK5mh+TgXGVqAtnu5Yn1un+v8DDZtqyzKRLUzrh70Eal2O3aZ7aPYiMADO4uKlkzOiRiZ6GX5q3qxvW9Q== + dependencies: + "@babel/helper-function-name" "^7.19.0" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.20.5" + "@babel/types" "^7.20.5" + +"@babel/helpers@^7.12.5", "@babel/helpers@^7.26.10": + version "7.27.0" + resolved "https://registry.npmjs.org/@babel/helpers/-/helpers-7.27.0.tgz" + integrity sha512-U5eyP/CTFPuNE3qk+WZMxFkp/4zUzdceQlfzf7DdGdhp+Fezd7HD+i8Y24ZuTMKX3wQBld449jijbGq6OdGNQg== + dependencies: + "@babel/template" "^7.27.0" + "@babel/types" "^7.27.0" + +"@babel/parser@^7.12.7", "@babel/parser@^7.18.8", "@babel/parser@^7.26.10", "@babel/parser@^7.27.0": + version "7.27.0" + resolved "https://registry.npmjs.org/@babel/parser/-/parser-7.27.0.tgz" + integrity sha512-iaepho73/2Pz7w2eMS0Q5f83+0RKI7i4xmiYeBmDzfRVbQtTOG7Ts0S4HzJVsTMGI9keU8rNfuZr8DKfSt7Yyg== + dependencies: + "@babel/types" "^7.27.0" + +"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz" + integrity sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.18.9": + version "7.20.7" + resolved "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.20.7.tgz" + integrity sha512-sbr9+wNE5aXMBBFBICk01tt7sBf2Oc9ikRFEcem/ZORup9IMUdNhW7/wVLEbbtlWOsEubJet46mHAL2C8+2jKQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" + "@babel/plugin-proposal-optional-chaining" "^7.20.7" + 
+"@babel/plugin-proposal-async-generator-functions@^7.20.1": + version "7.20.7" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.20.7.tgz" + integrity sha512-xMbiLsn/8RK7Wq7VeVytytS2L6qE69bXPB10YCmMdDZbKF4okCqY74pI/jJQ/8U0b/F6NrT2+14b8/P9/3AMGA== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-remap-async-to-generator" "^7.18.9" + "@babel/plugin-syntax-async-generators" "^7.8.4" + +"@babel/plugin-proposal-class-properties@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz" + integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-class-static-block@^7.18.6": + version "7.21.0" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.21.0.tgz" + integrity sha512-XP5G9MWNUskFuP30IfFSEFB0Z6HzLIUcjYM4bYOPHXl7eiJ9HFv8tWj6TXTN5QODiEhDZAeI4hLok2iHFFV4hw== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.21.0" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + +"@babel/plugin-proposal-dynamic-import@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz" + integrity sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + +"@babel/plugin-proposal-export-namespace-from@^7.18.9": + version "7.18.9" + resolved 
"https://registry.npmjs.org/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz" + integrity sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + +"@babel/plugin-proposal-json-strings@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz" + integrity sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-json-strings" "^7.8.3" + +"@babel/plugin-proposal-logical-assignment-operators@^7.18.9": + version "7.20.7" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.20.7.tgz" + integrity sha512-y7C7cZgpMIjWlKE5T7eJwp+tnRYM89HmRvWM5EQuB5BoHEONjmQ8lSNmBUwOyy/GFRsohJED51YBF79hE1djug== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + +"@babel/plugin-proposal-nullish-coalescing-operator@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz" + integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + +"@babel/plugin-proposal-numeric-separator@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz" + integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q== + dependencies: + 
"@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + +"@babel/plugin-proposal-object-rest-spread@7.12.1": + version "7.12.1" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.12.1.tgz" + integrity sha512-s6SowJIjzlhx8o7lsFx5zmY4At6CTtDvgNQDdPzkBQucle58A6b/TTeEBYtyDgmcXjUTM+vE8YOGHZzzbc/ioA== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-object-rest-spread" "^7.8.0" + "@babel/plugin-transform-parameters" "^7.12.1" + +"@babel/plugin-proposal-object-rest-spread@^7.20.2": + version "7.20.7" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.20.7.tgz" + integrity sha512-d2S98yCiLxDVmBmE8UjGcfPvNEUbA1U5q5WxaWFUGRzJSVAZqm5W6MbPct0jxnegUZ0niLeNX+IOzEs7wYg9Dg== + dependencies: + "@babel/compat-data" "^7.20.5" + "@babel/helper-compilation-targets" "^7.20.7" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.20.7" + +"@babel/plugin-proposal-optional-catch-binding@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz" + integrity sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + +"@babel/plugin-proposal-optional-chaining@^7.18.9", "@babel/plugin-proposal-optional-chaining@^7.20.7": + version "7.21.0" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.21.0.tgz" + integrity sha512-p4zeefM72gpmEe2fkUr/OnOXpWEf8nAgk7ZYVqqfFiyIG7oFfVZcCrU64hWn5xp4tQ9LkV4bTIa5rD0KANpKNA== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + 
"@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + +"@babel/plugin-proposal-private-methods@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz" + integrity sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-private-property-in-object@^7.18.6": + version "7.21.0" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0.tgz" + integrity sha512-ha4zfehbJjc5MmXBlHec1igel5TJXXLDDRbuJ4+XT2TJcyD9/V1919BA8gMvsdHcNMBy4WBUBiRb3nw/EQUtBw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-create-class-features-plugin" "^7.21.0" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + +"@babel/plugin-proposal-unicode-property-regex@^7.18.6", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz" + integrity sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-async-generators@^7.8.4": + version "7.8.4" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.12.13": + 
version "7.12.13" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz" + integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== + dependencies: + "@babel/helper-plugin-utils" "^7.12.13" + +"@babel/plugin-syntax-class-static-block@^7.14.5": + version "7.14.5" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz" + integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-dynamic-import@^7.8.3": + version "7.8.3" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz" + integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-export-namespace-from@^7.8.3": + version "7.8.3" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz" + integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.3" + +"@babel/plugin-syntax-import-assertions@^7.20.0": + version "7.20.0" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.20.0.tgz" + integrity sha512-IUh1vakzNoWalR8ch/areW7qFopR2AEw03JlG7BbrDqmQ4X3q9uuipQwSGrUn7oGiemKjtSLDhNtQHzMHr1JdQ== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/plugin-syntax-json-strings@^7.8.3": + version "7.8.3" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz" + integrity 
sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-jsx@7.12.1": + version "7.12.1" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.1.tgz" + integrity sha512-1yRi7yAtB0ETgxdY9ti/p2TivUxJkTdhu/ZbF9MshVGqOx1TdB3b7xCXs49Fupgg50N45KcAsRP/ZqWjs9SRjg== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-jsx@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz" + integrity sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-logical-assignment-operators@^7.10.4": + version "7.10.4" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + version "7.8.3" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.10.4": + version "7.10.4" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@7.8.3", 
"@babel/plugin-syntax-object-rest-spread@^7.8.0", "@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + version "7.8.3" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.3": + version "7.8.3" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-private-property-in-object@^7.14.5": + version "7.14.5" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz" + integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-top-level-await@^7.14.5": + version "7.14.5" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz" + integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-typescript@^7.20.0": + version "7.20.0" + resolved 
"https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.20.0.tgz" + integrity sha512-rd9TkG+u1CExzS4SM1BlMEhMXwFLKVjOAFFCDx9PbX5ycJWDoWMcwdJH9RhkPu1dOgn5TrxLot/Gx6lWFuAUNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/plugin-transform-arrow-functions@^7.18.6": + version "7.20.7" + resolved "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.20.7.tgz" + integrity sha512-3poA5E7dzDomxj9WXWwuD6A5F3kc7VXwIJO+E+J8qtDtS+pXPAhrgEyh+9GBwBgPq1Z+bB+/JD60lp5jsN7JPQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-async-to-generator@^7.18.6": + version "7.20.7" + resolved "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.20.7.tgz" + integrity sha512-Uo5gwHPT9vgnSXQxqGtpdufUiWp96gk7yiP4Mp5bm1QMkEmLXBO7PAGYbKoJ6DhAwiNkcHFBol/x5zZZkL/t0Q== + dependencies: + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-remap-async-to-generator" "^7.18.9" + +"@babel/plugin-transform-block-scoped-functions@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz" + integrity sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-block-scoping@^7.20.2": + version "7.21.0" + resolved "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.21.0.tgz" + integrity sha512-Mdrbunoh9SxwFZapeHVrwFmri16+oYotcZysSzhNIVDwIAb1UV+kvnxULSYq9J3/q5MDG+4X6w8QVgD1zhBXNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-classes@^7.20.2": + version "7.21.0" + resolved 
"https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.21.0.tgz" + integrity sha512-RZhbYTCEUAe6ntPehC4hlslPWosNHDox+vAs4On/mCLRLfoDVHf6hVEd7kuxr1RnHwJmxFfUM3cZiZRmPxJPXQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-compilation-targets" "^7.20.7" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.21.0" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-replace-supers" "^7.20.7" + "@babel/helper-split-export-declaration" "^7.18.6" + globals "^11.1.0" + +"@babel/plugin-transform-computed-properties@^7.18.9": + version "7.20.7" + resolved "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.20.7.tgz" + integrity sha512-Lz7MvBK6DTjElHAmfu6bfANzKcxpyNPeYBGEafyA6E5HtRpjpZwU+u7Qrgz/2OR0z+5TvKYbPdphfSaAcZBrYQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/template" "^7.20.7" + +"@babel/plugin-transform-destructuring@^7.20.2": + version "7.21.3" + resolved "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.21.3.tgz" + integrity sha512-bp6hwMFzuiE4HqYEyoGJ/V2LeIWn+hLVKc4pnj++E5XQptwhtcGmSayM029d/j2X1bPKGTlsyPwAubuU22KhMA== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-dotall-regex@^7.18.6", "@babel/plugin-transform-dotall-regex@^7.4.4": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz" + integrity sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-duplicate-keys@^7.18.9": + version "7.18.9" + resolved 
"https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.9.tgz" + integrity sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-exponentiation-operator@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz" + integrity sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw== + dependencies: + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-for-of@^7.18.8": + version "7.21.0" + resolved "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.21.0.tgz" + integrity sha512-LlUYlydgDkKpIY7mcBWvyPPmMcOphEyYA27Ef4xpbh1IiDNLr0kZsos2nf92vz3IccvJI25QUwp86Eo5s6HmBQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-function-name@^7.18.9": + version "7.18.9" + resolved "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.18.9.tgz" + integrity sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ== + dependencies: + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-function-name" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-literals@^7.18.9": + version "7.18.9" + resolved "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.18.9.tgz" + integrity sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-member-expression-literals@^7.18.6": + version "7.18.6" + resolved 
"https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz" + integrity sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-modules-amd@^7.19.6": + version "7.20.11" + resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.20.11.tgz" + integrity sha512-NuzCt5IIYOW0O30UvqktzHYR2ud5bOWbY0yaxWZ6G+aFzOMJvrs5YHNikrbdaT15+KNO31nPOy5Fim3ku6Zb5g== + dependencies: + "@babel/helper-module-transforms" "^7.20.11" + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-modules-commonjs@^7.19.6": + version "7.21.2" + resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.21.2.tgz" + integrity sha512-Cln+Yy04Gxua7iPdj6nOV96smLGjpElir5YwzF0LBPKoPlLDNJePNlrGGaybAJkd0zKRnOVXOgizSqPYMNYkzA== + dependencies: + "@babel/helper-module-transforms" "^7.21.2" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-simple-access" "^7.20.2" + +"@babel/plugin-transform-modules-systemjs@^7.19.6": + version "7.20.11" + resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.20.11.tgz" + integrity sha512-vVu5g9BPQKSFEmvt2TA4Da5N+QVS66EX21d8uoOihC+OCpUoGvzVsXeqFdtAEfVa5BILAeFt+U7yVmLbQnAJmw== + dependencies: + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-module-transforms" "^7.20.11" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-validator-identifier" "^7.19.1" + +"@babel/plugin-transform-modules-umd@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz" + integrity sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ== + dependencies: + 
"@babel/helper-module-transforms" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-named-capturing-groups-regex@^7.19.1": + version "7.20.5" + resolved "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.20.5.tgz" + integrity sha512-mOW4tTzi5iTLnw+78iEq3gr8Aoq4WNRGpmSlrogqaiCBoR1HFhpU4JkpQFOHfeYx3ReVIFWOQJS4aZBRvuZ6mA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.20.5" + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-new-target@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz" + integrity sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-object-super@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz" + integrity sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-replace-supers" "^7.18.6" + +"@babel/plugin-transform-parameters@^7.12.1", "@babel/plugin-transform-parameters@^7.20.1", "@babel/plugin-transform-parameters@^7.20.7": + version "7.21.3" + resolved "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.21.3.tgz" + integrity sha512-Wxc+TvppQG9xWFYatvCGPvZ6+SIUxQ2ZdiBP+PHYMIjnPXD+uThCshaz4NZOnODAtBjjcVQQ/3OKs9LW28purQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-property-literals@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz" + integrity 
sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-react-constant-elements@^7.18.12": + version "7.21.3" + resolved "https://registry.npmjs.org/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.21.3.tgz" + integrity sha512-4DVcFeWe/yDYBLp0kBmOGFJ6N2UYg7coGid1gdxb4co62dy/xISDMaYBXBVXEDhfgMk7qkbcYiGtwd5Q/hwDDQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-react-display-name@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.18.6.tgz" + integrity sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-react-jsx-development@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.18.6.tgz" + integrity sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA== + dependencies: + "@babel/plugin-transform-react-jsx" "^7.18.6" + +"@babel/plugin-transform-react-jsx@^7.18.6": + version "7.21.0" + resolved "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.21.0.tgz" + integrity sha512-6OAWljMvQrZjR2DaNhVfRz6dkCAVV+ymcLUmaf8bccGOHn2v5rHJK3tTpij0BuhdYWP4LLaqj5lwcdlpAAPuvg== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-jsx" "^7.18.6" + "@babel/types" "^7.21.0" + +"@babel/plugin-transform-react-pure-annotations@^7.18.6": + version "7.18.6" + resolved 
"https://registry.npmjs.org/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.18.6.tgz" + integrity sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-regenerator@^7.18.6": + version "7.20.5" + resolved "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.20.5.tgz" + integrity sha512-kW/oO7HPBtntbsahzQ0qSE3tFvkFwnbozz3NWFhLGqH75vLEg+sCGngLlhVkePlCs3Jv0dBBHDzCHxNiFAQKCQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + regenerator-transform "^0.15.1" + +"@babel/plugin-transform-reserved-words@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz" + integrity sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-runtime@^7.18.6": + version "7.21.0" + resolved "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.21.0.tgz" + integrity sha512-ReY6pxwSzEU0b3r2/T/VhqMKg/AkceBT19X0UptA3/tYi5Pe2eXgEUH+NNMC5nok6c6XQz5tyVTUpuezRfSMSg== + dependencies: + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.20.2" + babel-plugin-polyfill-corejs2 "^0.3.3" + babel-plugin-polyfill-corejs3 "^0.6.0" + babel-plugin-polyfill-regenerator "^0.4.1" + semver "^6.3.0" + +"@babel/plugin-transform-shorthand-properties@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz" + integrity sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + 
+"@babel/plugin-transform-spread@^7.19.0": + version "7.20.7" + resolved "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.20.7.tgz" + integrity sha512-ewBbHQ+1U/VnH1fxltbJqDeWBU1oNLG8Dj11uIv3xVf7nrQu0bPGe5Rf716r7K5Qz+SqtAOVswoVunoiBtGhxw== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" + +"@babel/plugin-transform-sticky-regex@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz" + integrity sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-template-literals@^7.18.9": + version "7.18.9" + resolved "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.18.9.tgz" + integrity sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-typeof-symbol@^7.18.9": + version "7.18.9" + resolved "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.9.tgz" + integrity sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-typescript@^7.21.0": + version "7.21.3" + resolved "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.21.3.tgz" + integrity sha512-RQxPz6Iqt8T0uw/WsJNReuBpWpBqs/n7mNo18sKLoTbMp+UrEekhH+pKSVC7gWz+DNjo9gryfV8YzCiT45RgMw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-create-class-features-plugin" "^7.21.0" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-typescript" "^7.20.0" + 
+"@babel/plugin-transform-unicode-escapes@^7.18.10": + version "7.18.10" + resolved "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.10.tgz" + integrity sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-unicode-regex@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz" + integrity sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/preset-env@^7.18.6", "@babel/preset-env@^7.19.4": + version "7.20.2" + resolved "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.20.2.tgz" + integrity sha512-1G0efQEWR1EHkKvKHqbG+IN/QdgwfByUpM5V5QroDzGV2t3S/WXNQd693cHiHTlCFMpr9B6FkPFXDA2lQcKoDg== + dependencies: + "@babel/compat-data" "^7.20.1" + "@babel/helper-compilation-targets" "^7.20.0" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.18.6" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-async-generator-functions" "^7.20.1" + "@babel/plugin-proposal-class-properties" "^7.18.6" + "@babel/plugin-proposal-class-static-block" "^7.18.6" + "@babel/plugin-proposal-dynamic-import" "^7.18.6" + "@babel/plugin-proposal-export-namespace-from" "^7.18.9" + "@babel/plugin-proposal-json-strings" "^7.18.6" + "@babel/plugin-proposal-logical-assignment-operators" "^7.18.9" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.18.6" + "@babel/plugin-proposal-numeric-separator" "^7.18.6" + "@babel/plugin-proposal-object-rest-spread" "^7.20.2" + 
"@babel/plugin-proposal-optional-catch-binding" "^7.18.6" + "@babel/plugin-proposal-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-private-methods" "^7.18.6" + "@babel/plugin-proposal-private-property-in-object" "^7.18.6" + "@babel/plugin-proposal-unicode-property-regex" "^7.18.6" + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-class-properties" "^7.12.13" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + "@babel/plugin-syntax-import-assertions" "^7.20.0" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + "@babel/plugin-syntax-top-level-await" "^7.14.5" + "@babel/plugin-transform-arrow-functions" "^7.18.6" + "@babel/plugin-transform-async-to-generator" "^7.18.6" + "@babel/plugin-transform-block-scoped-functions" "^7.18.6" + "@babel/plugin-transform-block-scoping" "^7.20.2" + "@babel/plugin-transform-classes" "^7.20.2" + "@babel/plugin-transform-computed-properties" "^7.18.9" + "@babel/plugin-transform-destructuring" "^7.20.2" + "@babel/plugin-transform-dotall-regex" "^7.18.6" + "@babel/plugin-transform-duplicate-keys" "^7.18.9" + "@babel/plugin-transform-exponentiation-operator" "^7.18.6" + "@babel/plugin-transform-for-of" "^7.18.8" + "@babel/plugin-transform-function-name" "^7.18.9" + "@babel/plugin-transform-literals" "^7.18.9" + "@babel/plugin-transform-member-expression-literals" "^7.18.6" + "@babel/plugin-transform-modules-amd" "^7.19.6" + "@babel/plugin-transform-modules-commonjs" "^7.19.6" + 
"@babel/plugin-transform-modules-systemjs" "^7.19.6" + "@babel/plugin-transform-modules-umd" "^7.18.6" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.19.1" + "@babel/plugin-transform-new-target" "^7.18.6" + "@babel/plugin-transform-object-super" "^7.18.6" + "@babel/plugin-transform-parameters" "^7.20.1" + "@babel/plugin-transform-property-literals" "^7.18.6" + "@babel/plugin-transform-regenerator" "^7.18.6" + "@babel/plugin-transform-reserved-words" "^7.18.6" + "@babel/plugin-transform-shorthand-properties" "^7.18.6" + "@babel/plugin-transform-spread" "^7.19.0" + "@babel/plugin-transform-sticky-regex" "^7.18.6" + "@babel/plugin-transform-template-literals" "^7.18.9" + "@babel/plugin-transform-typeof-symbol" "^7.18.9" + "@babel/plugin-transform-unicode-escapes" "^7.18.10" + "@babel/plugin-transform-unicode-regex" "^7.18.6" + "@babel/preset-modules" "^0.1.5" + "@babel/types" "^7.20.2" + babel-plugin-polyfill-corejs2 "^0.3.3" + babel-plugin-polyfill-corejs3 "^0.6.0" + babel-plugin-polyfill-regenerator "^0.4.1" + core-js-compat "^3.25.1" + semver "^6.3.0" + +"@babel/preset-modules@^0.1.5": + version "0.1.5" + resolved "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.5.tgz" + integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" + "@babel/plugin-transform-dotall-regex" "^7.4.4" + "@babel/types" "^7.4.4" + esutils "^2.0.2" + +"@babel/preset-react@^7.18.6": + version "7.18.6" + resolved "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.18.6.tgz" + integrity sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-transform-react-display-name" "^7.18.6" + "@babel/plugin-transform-react-jsx" "^7.18.6" + 
"@babel/plugin-transform-react-jsx-development" "^7.18.6" + "@babel/plugin-transform-react-pure-annotations" "^7.18.6" + +"@babel/preset-typescript@^7.18.6": + version "7.21.0" + resolved "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.21.0.tgz" + integrity sha512-myc9mpoVA5m1rF8K8DgLEatOYFDpwC+RkMkjZ0Du6uI62YvDe8uxIEYVs/VCdSJ097nlALiU/yBC7//3nI+hNg== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-validator-option" "^7.21.0" + "@babel/plugin-transform-typescript" "^7.21.0" + +"@babel/regjsgen@^0.8.0": + version "0.8.0" + resolved "https://registry.npmjs.org/@babel/regjsgen/-/regjsgen-0.8.0.tgz" + integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA== + +"@babel/runtime-corejs3@^7.18.6": + version "7.21.0" + resolved "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.21.0.tgz" + integrity sha512-TDD4UJzos3JJtM+tHX+w2Uc+KWj7GV+VKKFdMVd2Rx8sdA19hcc3P3AHFYd5LVOw+pYuSd5lICC3gm52B6Rwxw== + dependencies: + core-js-pure "^3.25.1" + regenerator-runtime "^0.13.11" + +"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.18.6", "@babel/runtime@^7.20.13", "@babel/runtime@^7.8.4": + version "7.21.0" + resolved "https://registry.npmjs.org/@babel/runtime/-/runtime-7.21.0.tgz" + integrity sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw== + dependencies: + regenerator-runtime "^0.13.11" + +"@babel/template@^7.12.7", "@babel/template@^7.18.10", "@babel/template@^7.20.7", "@babel/template@^7.24.7", "@babel/template@^7.26.9", "@babel/template@^7.27.0": + version "7.27.0" + resolved "https://registry.npmjs.org/@babel/template/-/template-7.27.0.tgz" + integrity sha512-2ncevenBqXI6qRMukPlXwHKHchC7RyMuu4xv5JBXRfOGVcTy1mXCD12qrp7Jsoxll1EV3+9sE4GugBVRjT2jFA== + dependencies: + "@babel/code-frame" "^7.26.2" + "@babel/parser" "^7.27.0" + "@babel/types" 
"^7.27.0" + +"@babel/traverse@^7.12.9", "@babel/traverse@^7.18.8", "@babel/traverse@^7.20.5", "@babel/traverse@^7.20.7", "@babel/traverse@^7.25.9", "@babel/traverse@^7.26.10", "@babel/traverse@^7.4.5": + version "7.27.0" + resolved "https://registry.npmjs.org/@babel/traverse/-/traverse-7.27.0.tgz" + integrity sha512-19lYZFzYVQkkHkl4Cy4WrAVcqBkgvV2YM2TU3xG6DIwO7O3ecbDPfW3yM3bjAGcqcQHi+CCtjMR3dIEHxsd6bA== + dependencies: + "@babel/code-frame" "^7.26.2" + "@babel/generator" "^7.27.0" + "@babel/parser" "^7.27.0" + "@babel/template" "^7.27.0" + "@babel/types" "^7.27.0" + debug "^4.3.1" + globals "^11.1.0" + +"@babel/types@^7.12.7", "@babel/types@^7.18.6", "@babel/types@^7.18.9", "@babel/types@^7.20.0", "@babel/types@^7.20.2", "@babel/types@^7.20.5", "@babel/types@^7.20.7", "@babel/types@^7.21.0", "@babel/types@^7.24.7", "@babel/types@^7.25.9", "@babel/types@^7.26.10", "@babel/types@^7.27.0", "@babel/types@^7.4.4": + version "7.27.0" + resolved "https://registry.npmjs.org/@babel/types/-/types-7.27.0.tgz" + integrity sha512-H45s8fVLYjbhFH62dIJ3WtmJ6RSPt/3DRO0ZcT2SUiYiQyz3BLVb9ADEnLl91m74aQPS3AzzeajZHYOalWe3bg== + dependencies: + "@babel/helper-string-parser" "^7.25.9" + "@babel/helper-validator-identifier" "^7.25.9" + +"@colors/colors@1.5.0": + version "1.5.0" + resolved "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz" + integrity sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ== + +"@discoveryjs/json-ext@0.5.7": + version "0.5.7" + resolved "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz" + integrity sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw== + +"@docsearch/css@3.3.3": + version "3.3.3" + resolved "https://registry.npmjs.org/@docsearch/css/-/css-3.3.3.tgz" + integrity sha512-6SCwI7P8ao+se1TUsdZ7B4XzL+gqeQZnBc+2EONZlcVa0dVrk0NjETxozFKgMv0eEGH8QzP1fkN+A1rH61l4eg== + +"@docsearch/react@^3.1.1": + version "3.3.3" + resolved 
"https://registry.npmjs.org/@docsearch/react/-/react-3.3.3.tgz" + integrity sha512-pLa0cxnl+G0FuIDuYlW+EBK6Rw2jwLw9B1RHIeS4N4s2VhsfJ/wzeCi3CWcs5yVfxLd5ZK50t//TMA5e79YT7Q== + dependencies: + "@algolia/autocomplete-core" "1.7.4" + "@algolia/autocomplete-preset-algolia" "1.7.4" + "@docsearch/css" "3.3.3" + algoliasearch "^4.0.0" + +"@docusaurus/core@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/core/-/core-2.4.0.tgz" + integrity sha512-J55/WEoIpRcLf3afO5POHPguVZosKmJEQWKBL+K7TAnfuE7i+Y0NPLlkKtnWCehagGsgTqClfQEexH/UT4kELA== + dependencies: + "@babel/core" "^7.18.6" + "@babel/generator" "^7.18.7" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/plugin-transform-runtime" "^7.18.6" + "@babel/preset-env" "^7.18.6" + "@babel/preset-react" "^7.18.6" + "@babel/preset-typescript" "^7.18.6" + "@babel/runtime" "^7.18.6" + "@babel/runtime-corejs3" "^7.18.6" + "@babel/traverse" "^7.18.8" + "@docusaurus/cssnano-preset" "2.4.0" + "@docusaurus/logger" "2.4.0" + "@docusaurus/mdx-loader" "2.4.0" + "@docusaurus/react-loadable" "5.5.2" + "@docusaurus/utils" "2.4.0" + "@docusaurus/utils-common" "2.4.0" + "@docusaurus/utils-validation" "2.4.0" + "@slorber/static-site-generator-webpack-plugin" "^4.0.7" + "@svgr/webpack" "^6.2.1" + autoprefixer "^10.4.7" + babel-loader "^8.2.5" + babel-plugin-dynamic-import-node "^2.3.3" + boxen "^6.2.1" + chalk "^4.1.2" + chokidar "^3.5.3" + clean-css "^5.3.0" + cli-table3 "^0.6.2" + combine-promises "^1.1.0" + commander "^5.1.0" + copy-webpack-plugin "^11.0.0" + core-js "^3.23.3" + css-loader "^6.7.1" + css-minimizer-webpack-plugin "^4.0.0" + cssnano "^5.1.12" + del "^6.1.1" + detect-port "^1.3.0" + escape-html "^1.0.3" + eta "^2.0.0" + file-loader "^6.2.0" + fs-extra "^10.1.0" + html-minifier-terser "^6.1.0" + html-tags "^3.2.0" + html-webpack-plugin "^5.5.0" + import-fresh "^3.3.0" + leven "^3.1.0" + lodash "^4.17.21" + mini-css-extract-plugin "^2.6.1" + postcss "^8.4.14" + postcss-loader "^7.0.0" + prompts 
"^2.4.2" + react-dev-utils "^12.0.1" + react-helmet-async "^1.3.0" + react-loadable "npm:@docusaurus/react-loadable@5.5.2" + react-loadable-ssr-addon-v5-slorber "^1.0.1" + react-router "^5.3.3" + react-router-config "^5.1.1" + react-router-dom "^5.3.3" + rtl-detect "^1.0.4" + semver "^7.3.7" + serve-handler "^6.1.3" + shelljs "^0.8.5" + terser-webpack-plugin "^5.3.3" + tslib "^2.4.0" + update-notifier "^5.1.0" + url-loader "^4.1.1" + wait-on "^6.0.1" + webpack "^5.73.0" + webpack-bundle-analyzer "^4.5.0" + webpack-dev-server "^4.9.3" + webpack-merge "^5.8.0" + webpackbar "^5.0.2" + +"@docusaurus/cssnano-preset@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-2.4.0.tgz" + integrity sha512-RmdiA3IpsLgZGXRzqnmTbGv43W4OD44PCo+6Q/aYjEM2V57vKCVqNzuafE94jv0z/PjHoXUrjr69SaRymBKYYw== + dependencies: + cssnano-preset-advanced "^5.3.8" + postcss "^8.4.14" + postcss-sort-media-queries "^4.2.1" + tslib "^2.4.0" + +"@docusaurus/logger@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/logger/-/logger-2.4.0.tgz" + integrity sha512-T8+qR4APN+MjcC9yL2Es+xPJ2923S9hpzDmMtdsOcUGLqpCGBbU1vp3AAqDwXtVgFkq+NsEk7sHdVsfLWR/AXw== + dependencies: + chalk "^4.1.2" + tslib "^2.4.0" + +"@docusaurus/mdx-loader@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-2.4.0.tgz" + integrity sha512-GWoH4izZKOmFoC+gbI2/y8deH/xKLvzz/T5BsEexBye8EHQlwsA7FMrVa48N063bJBH4FUOiRRXxk5rq9cC36g== + dependencies: + "@babel/parser" "^7.18.8" + "@babel/traverse" "^7.18.8" + "@docusaurus/logger" "2.4.0" + "@docusaurus/utils" "2.4.0" + "@mdx-js/mdx" "^1.6.22" + escape-html "^1.0.3" + file-loader "^6.2.0" + fs-extra "^10.1.0" + image-size "^1.0.1" + mdast-util-to-string "^2.0.0" + remark-emoji "^2.2.0" + stringify-object "^3.3.0" + tslib "^2.4.0" + unified "^9.2.2" + unist-util-visit "^2.0.3" + url-loader "^4.1.1" + webpack "^5.73.0" + +"@docusaurus/module-type-aliases@2.4.0": + version 
"2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-2.4.0.tgz" + integrity sha512-YEQO2D3UXs72qCn8Cr+RlycSQXVGN9iEUyuHwTuK4/uL/HFomB2FHSU0vSDM23oLd+X/KibQ3Ez6nGjQLqXcHg== + dependencies: + "@docusaurus/react-loadable" "5.5.2" + "@docusaurus/types" "2.4.0" + "@types/history" "^4.7.11" + "@types/react" "*" + "@types/react-router-config" "*" + "@types/react-router-dom" "*" + react-helmet-async "*" + react-loadable "npm:@docusaurus/react-loadable@5.5.2" + +"@docusaurus/plugin-client-redirects@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/plugin-client-redirects/-/plugin-client-redirects-2.4.0.tgz" + integrity sha512-HsS+Dc2ZLWhfpjYJ5LIrOB/XfXZcElcC7o1iA4yIVtiFz+LHhwP863fhqbwSJ1c6tNDOYBH3HwbskHrc/PIn7Q== + dependencies: + "@docusaurus/core" "2.4.0" + "@docusaurus/logger" "2.4.0" + "@docusaurus/utils" "2.4.0" + "@docusaurus/utils-common" "2.4.0" + "@docusaurus/utils-validation" "2.4.0" + eta "^2.0.0" + fs-extra "^10.1.0" + lodash "^4.17.21" + tslib "^2.4.0" + +"@docusaurus/plugin-content-blog@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-2.4.0.tgz" + integrity sha512-YwkAkVUxtxoBAIj/MCb4ohN0SCtHBs4AS75jMhPpf67qf3j+U/4n33cELq7567hwyZ6fMz2GPJcVmctzlGGThQ== + dependencies: + "@docusaurus/core" "2.4.0" + "@docusaurus/logger" "2.4.0" + "@docusaurus/mdx-loader" "2.4.0" + "@docusaurus/types" "2.4.0" + "@docusaurus/utils" "2.4.0" + "@docusaurus/utils-common" "2.4.0" + "@docusaurus/utils-validation" "2.4.0" + cheerio "^1.0.0-rc.12" + feed "^4.2.2" + fs-extra "^10.1.0" + lodash "^4.17.21" + reading-time "^1.5.0" + tslib "^2.4.0" + unist-util-visit "^2.0.3" + utility-types "^3.10.0" + webpack "^5.73.0" + +"@docusaurus/plugin-content-docs@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-2.4.0.tgz" + integrity 
sha512-ic/Z/ZN5Rk/RQo+Io6rUGpToOtNbtPloMR2JcGwC1xT2riMu6zzfSwmBi9tHJgdXH6CB5jG+0dOZZO8QS5tmDg== + dependencies: + "@docusaurus/core" "2.4.0" + "@docusaurus/logger" "2.4.0" + "@docusaurus/mdx-loader" "2.4.0" + "@docusaurus/module-type-aliases" "2.4.0" + "@docusaurus/types" "2.4.0" + "@docusaurus/utils" "2.4.0" + "@docusaurus/utils-validation" "2.4.0" + "@types/react-router-config" "^5.0.6" + combine-promises "^1.1.0" + fs-extra "^10.1.0" + import-fresh "^3.3.0" + js-yaml "^4.1.0" + lodash "^4.17.21" + tslib "^2.4.0" + utility-types "^3.10.0" + webpack "^5.73.0" + +"@docusaurus/plugin-content-pages@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-2.4.0.tgz" + integrity sha512-Pk2pOeOxk8MeU3mrTU0XLIgP9NZixbdcJmJ7RUFrZp1Aj42nd0RhIT14BGvXXyqb8yTQlk4DmYGAzqOfBsFyGw== + dependencies: + "@docusaurus/core" "2.4.0" + "@docusaurus/mdx-loader" "2.4.0" + "@docusaurus/types" "2.4.0" + "@docusaurus/utils" "2.4.0" + "@docusaurus/utils-validation" "2.4.0" + fs-extra "^10.1.0" + tslib "^2.4.0" + webpack "^5.73.0" + +"@docusaurus/plugin-debug@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-2.4.0.tgz" + integrity sha512-KC56DdYjYT7Txyux71vXHXGYZuP6yYtqwClvYpjKreWIHWus5Zt6VNi23rMZv3/QKhOCrN64zplUbdfQMvddBQ== + dependencies: + "@docusaurus/core" "2.4.0" + "@docusaurus/types" "2.4.0" + "@docusaurus/utils" "2.4.0" + fs-extra "^10.1.0" + react-json-view "^1.21.3" + tslib "^2.4.0" + +"@docusaurus/plugin-google-analytics@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-2.4.0.tgz" + integrity sha512-uGUzX67DOAIglygdNrmMOvEp8qG03X20jMWadeqVQktS6nADvozpSLGx4J0xbkblhJkUzN21WiilsP9iVP+zkw== + dependencies: + "@docusaurus/core" "2.4.0" + "@docusaurus/types" "2.4.0" + "@docusaurus/utils-validation" "2.4.0" + tslib "^2.4.0" + +"@docusaurus/plugin-google-gtag@2.4.0": + version "2.4.0" + resolved 
"https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-2.4.0.tgz" + integrity sha512-adj/70DANaQs2+TF/nRdMezDXFAV/O/pjAbUgmKBlyOTq5qoMe0Tk4muvQIwWUmiUQxFJe+sKlZGM771ownyOg== + dependencies: + "@docusaurus/core" "2.4.0" + "@docusaurus/types" "2.4.0" + "@docusaurus/utils-validation" "2.4.0" + tslib "^2.4.0" + +"@docusaurus/plugin-google-tag-manager@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-2.4.0.tgz" + integrity sha512-E66uGcYs4l7yitmp/8kMEVQftFPwV9iC62ORh47Veqzs6ExwnhzBkJmwDnwIysHBF1vlxnzET0Fl2LfL5fRR3A== + dependencies: + "@docusaurus/core" "2.4.0" + "@docusaurus/types" "2.4.0" + "@docusaurus/utils-validation" "2.4.0" + tslib "^2.4.0" + +"@docusaurus/plugin-sitemap@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-2.4.0.tgz" + integrity sha512-pZxh+ygfnI657sN8a/FkYVIAmVv0CGk71QMKqJBOfMmDHNN1FeDeFkBjWP49ejBqpqAhjufkv5UWq3UOu2soCw== + dependencies: + "@docusaurus/core" "2.4.0" + "@docusaurus/logger" "2.4.0" + "@docusaurus/types" "2.4.0" + "@docusaurus/utils" "2.4.0" + "@docusaurus/utils-common" "2.4.0" + "@docusaurus/utils-validation" "2.4.0" + fs-extra "^10.1.0" + sitemap "^7.1.1" + tslib "^2.4.0" + +"@docusaurus/preset-classic@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-2.4.0.tgz" + integrity sha512-/5z5o/9bc6+P5ool2y01PbJhoGddEGsC0ej1MF6mCoazk8A+kW4feoUd68l7Bnv01rCnG3xy7kHUQP97Y0grUA== + dependencies: + "@docusaurus/core" "2.4.0" + "@docusaurus/plugin-content-blog" "2.4.0" + "@docusaurus/plugin-content-docs" "2.4.0" + "@docusaurus/plugin-content-pages" "2.4.0" + "@docusaurus/plugin-debug" "2.4.0" + "@docusaurus/plugin-google-analytics" "2.4.0" + "@docusaurus/plugin-google-gtag" "2.4.0" + "@docusaurus/plugin-google-tag-manager" "2.4.0" + "@docusaurus/plugin-sitemap" "2.4.0" + "@docusaurus/theme-classic" "2.4.0" + 
"@docusaurus/theme-common" "2.4.0" + "@docusaurus/theme-search-algolia" "2.4.0" + "@docusaurus/types" "2.4.0" + +"@docusaurus/react-loadable@5.5.2": + version "5.5.2" + resolved "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz" + integrity sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ== + dependencies: + "@types/react" "*" + prop-types "^15.6.2" + +"@docusaurus/theme-classic@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-2.4.0.tgz" + integrity sha512-GMDX5WU6Z0OC65eQFgl3iNNEbI9IMJz9f6KnOyuMxNUR6q0qVLsKCNopFUDfFNJ55UU50o7P7o21yVhkwpfJ9w== + dependencies: + "@docusaurus/core" "2.4.0" + "@docusaurus/mdx-loader" "2.4.0" + "@docusaurus/module-type-aliases" "2.4.0" + "@docusaurus/plugin-content-blog" "2.4.0" + "@docusaurus/plugin-content-docs" "2.4.0" + "@docusaurus/plugin-content-pages" "2.4.0" + "@docusaurus/theme-common" "2.4.0" + "@docusaurus/theme-translations" "2.4.0" + "@docusaurus/types" "2.4.0" + "@docusaurus/utils" "2.4.0" + "@docusaurus/utils-common" "2.4.0" + "@docusaurus/utils-validation" "2.4.0" + "@mdx-js/react" "^1.6.22" + clsx "^1.2.1" + copy-text-to-clipboard "^3.0.1" + infima "0.2.0-alpha.43" + lodash "^4.17.21" + nprogress "^0.2.0" + postcss "^8.4.14" + prism-react-renderer "^1.3.5" + prismjs "^1.28.0" + react-router-dom "^5.3.3" + rtlcss "^3.5.0" + tslib "^2.4.0" + utility-types "^3.10.0" + +"@docusaurus/theme-common@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-2.4.0.tgz" + integrity sha512-IkG/l5f/FLY6cBIxtPmFnxpuPzc5TupuqlOx+XDN+035MdQcAh8wHXXZJAkTeYDeZ3anIUSUIvWa7/nRKoQEfg== + dependencies: + "@docusaurus/mdx-loader" "2.4.0" + "@docusaurus/module-type-aliases" "2.4.0" + "@docusaurus/plugin-content-blog" "2.4.0" + "@docusaurus/plugin-content-docs" "2.4.0" + "@docusaurus/plugin-content-pages" "2.4.0" + "@docusaurus/utils" "2.4.0" + 
"@docusaurus/utils-common" "2.4.0" + "@types/history" "^4.7.11" + "@types/react" "*" + "@types/react-router-config" "*" + clsx "^1.2.1" + parse-numeric-range "^1.3.0" + prism-react-renderer "^1.3.5" + tslib "^2.4.0" + use-sync-external-store "^1.2.0" + utility-types "^3.10.0" + +"@docusaurus/theme-search-algolia@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-2.4.0.tgz" + integrity sha512-pPCJSCL1Qt4pu/Z0uxBAuke0yEBbxh0s4fOvimna7TEcBLPq0x06/K78AaABXrTVQM6S0vdocFl9EoNgU17hqA== + dependencies: + "@docsearch/react" "^3.1.1" + "@docusaurus/core" "2.4.0" + "@docusaurus/logger" "2.4.0" + "@docusaurus/plugin-content-docs" "2.4.0" + "@docusaurus/theme-common" "2.4.0" + "@docusaurus/theme-translations" "2.4.0" + "@docusaurus/utils" "2.4.0" + "@docusaurus/utils-validation" "2.4.0" + algoliasearch "^4.13.1" + algoliasearch-helper "^3.10.0" + clsx "^1.2.1" + eta "^2.0.0" + fs-extra "^10.1.0" + lodash "^4.17.21" + tslib "^2.4.0" + utility-types "^3.10.0" + +"@docusaurus/theme-translations@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-2.4.0.tgz" + integrity sha512-kEoITnPXzDPUMBHk3+fzEzbopxLD3fR5sDoayNH0vXkpUukA88/aDL1bqkhxWZHA3LOfJ3f0vJbOwmnXW5v85Q== + dependencies: + fs-extra "^10.1.0" + tslib "^2.4.0" + +"@docusaurus/types@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/types/-/types-2.4.0.tgz" + integrity sha512-xaBXr+KIPDkIaef06c+i2HeTqVNixB7yFut5fBXPGI2f1rrmEV2vLMznNGsFwvZ5XmA3Quuefd4OGRkdo97Dhw== + dependencies: + "@types/history" "^4.7.11" + "@types/react" "*" + commander "^5.1.0" + joi "^17.6.0" + react-helmet-async "^1.3.0" + utility-types "^3.10.0" + webpack "^5.73.0" + webpack-merge "^5.8.0" + +"@docusaurus/utils-common@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-2.4.0.tgz" + integrity 
sha512-zIMf10xuKxddYfLg5cS19x44zud/E9I7lj3+0bv8UIs0aahpErfNrGhijEfJpAfikhQ8tL3m35nH3hJ3sOG82A== + dependencies: + tslib "^2.4.0" + +"@docusaurus/utils-validation@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-2.4.0.tgz" + integrity sha512-IrBsBbbAp6y7mZdJx4S4pIA7dUyWSA0GNosPk6ZJ0fX3uYIEQgcQSGIgTeSC+8xPEx3c16o03en1jSDpgQgz/w== + dependencies: + "@docusaurus/logger" "2.4.0" + "@docusaurus/utils" "2.4.0" + joi "^17.6.0" + js-yaml "^4.1.0" + tslib "^2.4.0" + +"@docusaurus/utils@2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@docusaurus/utils/-/utils-2.4.0.tgz" + integrity sha512-89hLYkvtRX92j+C+ERYTuSUK6nF9bGM32QThcHPg2EDDHVw6FzYQXmX6/p+pU5SDyyx5nBlE4qXR92RxCAOqfg== + dependencies: + "@docusaurus/logger" "2.4.0" + "@svgr/webpack" "^6.2.1" + escape-string-regexp "^4.0.0" + file-loader "^6.2.0" + fs-extra "^10.1.0" + github-slugger "^1.4.0" + globby "^11.1.0" + gray-matter "^4.0.3" + js-yaml "^4.1.0" + lodash "^4.17.21" + micromatch "^4.0.5" + resolve-pathname "^3.0.0" + shelljs "^0.8.5" + tslib "^2.4.0" + url-loader "^4.1.1" + webpack "^5.73.0" + +"@emotion/is-prop-valid@^1.1.0": + version "1.2.0" + resolved "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.2.0.tgz" + integrity sha512-3aDpDprjM0AwaxGE09bOPkNxHpBd+kA6jty3RnaEXdweX1DF1U3VQpPYb0g1IStAuK7SVQ1cy+bNBBKp4W3Fjg== + dependencies: + "@emotion/memoize" "^0.8.0" + +"@emotion/memoize@^0.8.0": + version "0.8.0" + resolved "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.8.0.tgz" + integrity sha512-G/YwXTkv7Den9mXDO7AhLWkE3q+I92B+VqAE+dYG4NGPaHZGvt3G8Q0p9vmE+sq7rTGphUbAvmQ9YpbfMQGGlA== + +"@emotion/stylis@^0.8.4": + version "0.8.5" + resolved "https://registry.npmjs.org/@emotion/stylis/-/stylis-0.8.5.tgz" + integrity sha512-h6KtPihKFn3T9fuIrwvXXUOwlx3rfUvfZIcP5a6rh8Y7zjE3O06hT5Ss4S/YI1AYhuZ1kjaE/5EaOOI2NqSylQ== + +"@emotion/unitless@^0.7.4": + version "0.7.5" + resolved 
"https://registry.npmjs.org/@emotion/unitless/-/unitless-0.7.5.tgz" + integrity sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg== + +"@hapi/hoek@^9.0.0": + version "9.3.0" + resolved "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz" + integrity sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ== + +"@hapi/topo@^5.0.0": + version "5.1.0" + resolved "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz" + integrity sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg== + dependencies: + "@hapi/hoek" "^9.0.0" + +"@jest/schemas@^29.4.3": + version "29.4.3" + resolved "https://registry.npmjs.org/@jest/schemas/-/schemas-29.4.3.tgz" + integrity sha512-VLYKXQmtmuEz6IxJsrZwzG9NvtkQsWNnWMsKxqWNu3+CnfzJQhp0WDDKWLVV9hLKr0l3SLLFRqcYHjhtyuDVxg== + dependencies: + "@sinclair/typebox" "^0.25.16" + +"@jest/types@^29.5.0": + version "29.5.0" + resolved "https://registry.npmjs.org/@jest/types/-/types-29.5.0.tgz" + integrity sha512-qbu7kN6czmVRc3xWFQcAN03RAUamgppVUdXrvl1Wr3jlNF93o9mJbGcDWrwGB6ht44u7efB1qCFgVQmca24Uog== + dependencies: + "@jest/schemas" "^29.4.3" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" + +"@jridgewell/gen-mapping@^0.1.0": + version "0.1.1" + resolved "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz" + integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.5": + version "0.3.8" + resolved "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz" + integrity sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA== + dependencies: + 
"@jridgewell/set-array" "^1.2.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.24" + +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.0" + resolved "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.2.1": + version "1.2.1" + resolved "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz" + integrity sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A== + +"@jridgewell/source-map@^0.3.2": + version "0.3.2" + resolved "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.2.tgz" + integrity sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw== + dependencies: + "@jridgewell/gen-mapping" "^0.3.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/source-map@^0.3.3": + version "0.3.6" + resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.6.tgz#9d71ca886e32502eb9362c9a74a46787c36df81a" + integrity sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ== + dependencies: + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.25" + +"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14": + version "1.4.14" + resolved "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== + +"@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25", "@jridgewell/trace-mapping@^0.3.9": + version "0.3.25" + resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz" + integrity 
sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + +"@leichtgewicht/ip-codec@^2.0.1": + version "2.0.4" + resolved "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz" + integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== + +"@mdx-js/mdx@^1.6.22": + version "1.6.22" + resolved "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-1.6.22.tgz" + integrity sha512-AMxuLxPz2j5/6TpF/XSdKpQP1NlG0z11dFOlq+2IP/lSgl11GY8ji6S/rgsViN/L0BDvHvUMruRb7ub+24LUYA== + dependencies: + "@babel/core" "7.12.9" + "@babel/plugin-syntax-jsx" "7.12.1" + "@babel/plugin-syntax-object-rest-spread" "7.8.3" + "@mdx-js/util" "1.6.22" + babel-plugin-apply-mdx-type-prop "1.6.22" + babel-plugin-extract-import-names "1.6.22" + camelcase-css "2.0.1" + detab "2.0.4" + hast-util-raw "6.0.1" + lodash.uniq "4.5.0" + mdast-util-to-hast "10.0.1" + remark-footnotes "2.0.0" + remark-mdx "1.6.22" + remark-parse "8.0.3" + remark-squeeze-paragraphs "4.0.0" + style-to-object "0.3.0" + unified "9.2.0" + unist-builder "2.0.3" + unist-util-visit "2.0.3" + +"@mdx-js/react@^1.6.22": + version "1.6.22" + resolved "https://registry.npmjs.org/@mdx-js/react/-/react-1.6.22.tgz" + integrity sha512-TDoPum4SHdfPiGSAaRBw7ECyI8VaHpK8GJugbJIJuqyh6kzw9ZLJZW3HGL3NNrJGxcAixUvqROm+YuQOo5eXtg== + +"@mdx-js/util@1.6.22": + version "1.6.22" + resolved "https://registry.npmjs.org/@mdx-js/util/-/util-1.6.22.tgz" + integrity sha512-H1rQc1ZOHANWBvPcW+JpGwr+juXSxM8Q8YCkm3GhZd8REu1fHR3z99CErO1p9pkcfcxZnMdIZdIsXkOHY0NilA== + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + 
+"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@polka/url@^1.0.0-next.20": + version "1.0.0-next.21" + resolved "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.21.tgz" + integrity sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g== + +"@sideway/address@^4.1.3": + version "4.1.4" + resolved "https://registry.npmjs.org/@sideway/address/-/address-4.1.4.tgz" + integrity sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw== + dependencies: + "@hapi/hoek" "^9.0.0" + +"@sideway/formula@^3.0.1": + version "3.0.1" + resolved "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.1.tgz" + integrity sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg== + +"@sideway/pinpoint@^2.0.0": + version "2.0.0" + resolved "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz" + integrity sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ== + +"@sinclair/typebox@^0.25.16": + version "0.25.24" + resolved "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.25.24.tgz" + integrity sha512-XJfwUVUKDHF5ugKwIcxEgc9k8b7HbznCp6eUfWgu710hMPNIO4aw4/zB5RogDQz8nd6gyCDpU9O/m6qYEWY6yQ== + +"@sindresorhus/is@^0.14.0": + version "0.14.0" + resolved "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz" + integrity 
sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== + +"@slorber/static-site-generator-webpack-plugin@^4.0.7": + version "4.0.7" + resolved "https://registry.npmjs.org/@slorber/static-site-generator-webpack-plugin/-/static-site-generator-webpack-plugin-4.0.7.tgz" + integrity sha512-Ug7x6z5lwrz0WqdnNFOMYrDQNTPAprvHLSh6+/fmml3qUiz6l5eq+2MzLKWtn/q5K5NpSiFsZTP/fck/3vjSxA== + dependencies: + eval "^0.1.8" + p-map "^4.0.0" + webpack-sources "^3.2.2" + +"@svgr/babel-plugin-add-jsx-attribute@^6.5.1": + version "6.5.1" + resolved "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-6.5.1.tgz" + integrity sha512-9PYGcXrAxitycIjRmZB+Q0JaN07GZIWaTBIGQzfaZv+qr1n8X1XUEJ5rZ/vx6OVD9RRYlrNnXWExQXcmZeD/BQ== + +"@svgr/babel-plugin-remove-jsx-attribute@*": + version "7.0.0" + resolved "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-7.0.0.tgz" + integrity sha512-iiZaIvb3H/c7d3TH2HBeK91uI2rMhZNwnsIrvd7ZwGLkFw6mmunOCoVnjdYua662MqGFxlN9xTq4fv9hgR4VXQ== + +"@svgr/babel-plugin-remove-jsx-empty-expression@*": + version "7.0.0" + resolved "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-7.0.0.tgz" + integrity sha512-sQQmyo+qegBx8DfFc04PFmIO1FP1MHI1/QEpzcIcclo5OAISsOJPW76ZIs0bDyO/DBSJEa/tDa1W26pVtt0FRw== + +"@svgr/babel-plugin-replace-jsx-attribute-value@^6.5.1": + version "6.5.1" + resolved "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-6.5.1.tgz" + integrity sha512-8DPaVVE3fd5JKuIC29dqyMB54sA6mfgki2H2+swh+zNJoynC8pMPzOkidqHOSc6Wj032fhl8Z0TVn1GiPpAiJg== + +"@svgr/babel-plugin-svg-dynamic-title@^6.5.1": + version "6.5.1" + resolved "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-6.5.1.tgz" + integrity 
sha512-FwOEi0Il72iAzlkaHrlemVurgSQRDFbk0OC8dSvD5fSBPHltNh7JtLsxmZUhjYBZo2PpcU/RJvvi6Q0l7O7ogw== + +"@svgr/babel-plugin-svg-em-dimensions@^6.5.1": + version "6.5.1" + resolved "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-6.5.1.tgz" + integrity sha512-gWGsiwjb4tw+ITOJ86ndY/DZZ6cuXMNE/SjcDRg+HLuCmwpcjOktwRF9WgAiycTqJD/QXqL2f8IzE2Rzh7aVXA== + +"@svgr/babel-plugin-transform-react-native-svg@^6.5.1": + version "6.5.1" + resolved "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-6.5.1.tgz" + integrity sha512-2jT3nTayyYP7kI6aGutkyfJ7UMGtuguD72OjeGLwVNyfPRBD8zQthlvL+fAbAKk5n9ZNcvFkp/b1lZ7VsYqVJg== + +"@svgr/babel-plugin-transform-svg-component@^6.5.1": + version "6.5.1" + resolved "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-6.5.1.tgz" + integrity sha512-a1p6LF5Jt33O3rZoVRBqdxL350oge54iZWHNI6LJB5tQ7EelvD/Mb1mfBiZNAan0dt4i3VArkFRjA4iObuNykQ== + +"@svgr/babel-preset@^6.5.1": + version "6.5.1" + resolved "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-6.5.1.tgz" + integrity sha512-6127fvO/FF2oi5EzSQOAjo1LE3OtNVh11R+/8FXa+mHx1ptAaS4cknIjnUA7e6j6fwGGJ17NzaTJFUwOV2zwCw== + dependencies: + "@svgr/babel-plugin-add-jsx-attribute" "^6.5.1" + "@svgr/babel-plugin-remove-jsx-attribute" "*" + "@svgr/babel-plugin-remove-jsx-empty-expression" "*" + "@svgr/babel-plugin-replace-jsx-attribute-value" "^6.5.1" + "@svgr/babel-plugin-svg-dynamic-title" "^6.5.1" + "@svgr/babel-plugin-svg-em-dimensions" "^6.5.1" + "@svgr/babel-plugin-transform-react-native-svg" "^6.5.1" + "@svgr/babel-plugin-transform-svg-component" "^6.5.1" + +"@svgr/core@^6.5.1": + version "6.5.1" + resolved "https://registry.npmjs.org/@svgr/core/-/core-6.5.1.tgz" + integrity sha512-/xdLSWxK5QkqG524ONSjvg3V/FkNyCv538OIBdQqPNaAta3AsXj/Bd2FbvR87yMbXO2hFSWiAe/Q6IkVPDw+mw== + dependencies: + "@babel/core" "^7.19.6" + "@svgr/babel-preset" 
"^6.5.1" + "@svgr/plugin-jsx" "^6.5.1" + camelcase "^6.2.0" + cosmiconfig "^7.0.1" + +"@svgr/hast-util-to-babel-ast@^6.5.1": + version "6.5.1" + resolved "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-6.5.1.tgz" + integrity sha512-1hnUxxjd83EAxbL4a0JDJoD3Dao3hmjvyvyEV8PzWmLK3B9m9NPlW7GKjFyoWE8nM7HnXzPcmmSyOW8yOddSXw== + dependencies: + "@babel/types" "^7.20.0" + entities "^4.4.0" + +"@svgr/plugin-jsx@^6.5.1": + version "6.5.1" + resolved "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-6.5.1.tgz" + integrity sha512-+UdQxI3jgtSjCykNSlEMuy1jSRQlGC7pqBCPvkG/2dATdWo082zHTTK3uhnAju2/6XpE6B5mZ3z4Z8Ns01S8Gw== + dependencies: + "@babel/core" "^7.19.6" + "@svgr/babel-preset" "^6.5.1" + "@svgr/hast-util-to-babel-ast" "^6.5.1" + svg-parser "^2.0.4" + +"@svgr/plugin-svgo@^6.5.1": + version "6.5.1" + resolved "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-6.5.1.tgz" + integrity sha512-omvZKf8ixP9z6GWgwbtmP9qQMPX4ODXi+wzbVZgomNFsUIlHA1sf4fThdwTWSsZGgvGAG6yE+b/F5gWUkcZ/iQ== + dependencies: + cosmiconfig "^7.0.1" + deepmerge "^4.2.2" + svgo "^2.8.0" + +"@svgr/webpack@^6.2.1": + version "6.5.1" + resolved "https://registry.npmjs.org/@svgr/webpack/-/webpack-6.5.1.tgz" + integrity sha512-cQ/AsnBkXPkEK8cLbv4Dm7JGXq2XrumKnL1dRpJD9rIO2fTIlJI9a1uCciYG1F2aUsox/hJQyNGbt3soDxSRkA== + dependencies: + "@babel/core" "^7.19.6" + "@babel/plugin-transform-react-constant-elements" "^7.18.12" + "@babel/preset-env" "^7.19.4" + "@babel/preset-react" "^7.18.6" + "@babel/preset-typescript" "^7.18.6" + "@svgr/core" "^6.5.1" + "@svgr/plugin-jsx" "^6.5.1" + "@svgr/plugin-svgo" "^6.5.1" + +"@szmarczak/http-timer@^1.1.2": + version "1.1.2" + resolved "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-1.1.2.tgz" + integrity sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA== + dependencies: + defer-to-connect "^1.0.1" + +"@types/body-parser@*": + version "1.19.2" + resolved 
"https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz" + integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g== + dependencies: + "@types/connect" "*" + "@types/node" "*" + +"@types/bonjour@^3.5.9": + version "3.5.10" + resolved "https://registry.npmjs.org/@types/bonjour/-/bonjour-3.5.10.tgz" + integrity sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw== + dependencies: + "@types/node" "*" + +"@types/connect-history-api-fallback@^1.3.5": + version "1.3.5" + resolved "https://registry.npmjs.org/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz" + integrity sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw== + dependencies: + "@types/express-serve-static-core" "*" + "@types/node" "*" + +"@types/connect@*": + version "3.4.35" + resolved "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz" + integrity sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ== + dependencies: + "@types/node" "*" + +"@types/eslint-scope@^3.7.7": + version "3.7.7" + resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.7.tgz#3108bd5f18b0cdb277c867b3dd449c9ed7079ac5" + integrity sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg== + dependencies: + "@types/eslint" "*" + "@types/estree" "*" + +"@types/eslint@*": + version "9.6.1" + resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-9.6.1.tgz#d5795ad732ce81715f27f75da913004a56751584" + integrity sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag== + dependencies: + "@types/estree" "*" + "@types/json-schema" "*" + +"@types/estree@*": + version "1.0.7" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.7.tgz#4158d3105276773d5b7695cd4834b1722e4f37a8" + integrity 
sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ== + +"@types/estree@^1.0.8": + version "1.0.8" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.8.tgz#958b91c991b1867ced318bedea0e215ee050726e" + integrity sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w== + +"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.33": + version "4.17.33" + resolved "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.33.tgz" + integrity sha512-TPBqmR/HRYI3eC2E5hmiivIzv+bidAfXofM+sbonAGvyDhySGw9/PQZFt2BLOrjUUR++4eJVpx6KnLQK1Fk9tA== + dependencies: + "@types/node" "*" + "@types/qs" "*" + "@types/range-parser" "*" + +"@types/express@*", "@types/express@^4.17.13": + version "4.17.17" + resolved "https://registry.npmjs.org/@types/express/-/express-4.17.17.tgz" + integrity sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q== + dependencies: + "@types/body-parser" "*" + "@types/express-serve-static-core" "^4.17.33" + "@types/qs" "*" + "@types/serve-static" "*" + +"@types/hast@^2.0.0": + version "2.3.4" + resolved "https://registry.npmjs.org/@types/hast/-/hast-2.3.4.tgz" + integrity sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g== + dependencies: + "@types/unist" "*" + +"@types/history@^4.7.11": + version "4.7.11" + resolved "https://registry.npmjs.org/@types/history/-/history-4.7.11.tgz" + integrity sha512-qjDJRrmvBMiTx+jyLxvLfJU7UznFuokDv4f3WRuriHKERccVpFU+8XMQUAbDzoiJCsmexxRExQeMwwCdamSKDA== + +"@types/html-minifier-terser@^6.0.0": + version "6.1.0" + resolved "https://registry.npmjs.org/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz" + integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== + +"@types/http-proxy@^1.17.8": + version "1.17.10" + resolved 
"https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.10.tgz" + integrity sha512-Qs5aULi+zV1bwKAg5z1PWnDXWmsn+LxIvUGv6E2+OOMYhclZMO+OXd9pYVf2gLykf2I7IV2u7oTHwChPNsvJ7g== + dependencies: + "@types/node" "*" + +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0": + version "2.0.4" + resolved "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz" + integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== + +"@types/istanbul-lib-report@*": + version "3.0.0" + resolved "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz" + integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + dependencies: + "@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.1" + resolved "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz" + integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== + dependencies: + "@types/istanbul-lib-report" "*" + +"@types/json-schema@*", "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": + version "7.0.11" + resolved "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz" + integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== + +"@types/json-schema@^7.0.15": + version "7.0.15" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841" + integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA== + +"@types/mdast@^3.0.0": + version "3.0.11" + resolved "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.11.tgz" + integrity 
sha512-Y/uImid8aAwrEA24/1tcRZwpxX3pIFTSilcNDKSPn+Y2iDywSEachzRuvgAYYLR3wpGXAsMbv5lvKLDZLeYPAw== + dependencies: + "@types/unist" "*" + +"@types/mime@*": + version "3.0.1" + resolved "https://registry.npmjs.org/@types/mime/-/mime-3.0.1.tgz" + integrity sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA== + +"@types/node@*": + version "18.15.10" + resolved "https://registry.npmjs.org/@types/node/-/node-18.15.10.tgz" + integrity sha512-9avDaQJczATcXgfmMAW3MIWArOO7A+m90vuCFLr8AotWf8igO/mRoYukrk2cqZVtv38tHs33retzHEilM7FpeQ== + +"@types/node@^17.0.5": + version "17.0.45" + resolved "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz" + integrity sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw== + +"@types/parse-json@^4.0.0": + version "4.0.0" + resolved "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz" + integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== + +"@types/parse5@^5.0.0": + version "5.0.3" + resolved "https://registry.npmjs.org/@types/parse5/-/parse5-5.0.3.tgz" + integrity sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw== + +"@types/prop-types@*": + version "15.7.5" + resolved "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz" + integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== + +"@types/qs@*": + version "6.9.7" + resolved "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz" + integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw== + +"@types/range-parser@*": + version "1.2.4" + resolved "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz" + integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== + +"@types/react-router-config@*", 
"@types/react-router-config@^5.0.6": + version "5.0.6" + resolved "https://registry.npmjs.org/@types/react-router-config/-/react-router-config-5.0.6.tgz" + integrity sha512-db1mx37a1EJDf1XeX8jJN7R3PZABmJQXR8r28yUjVMFSjkmnQo6X6pOEEmNl+Tp2gYQOGPdYbFIipBtdElZ3Yg== + dependencies: + "@types/history" "^4.7.11" + "@types/react" "*" + "@types/react-router" "*" + +"@types/react-router-dom@*": + version "5.3.3" + resolved "https://registry.npmjs.org/@types/react-router-dom/-/react-router-dom-5.3.3.tgz" + integrity sha512-kpqnYK4wcdm5UaWI3fLcELopqLrHgLqNsdpHauzlQktfkHL3npOSwtj1Uz9oKBAzs7lFtVkV8j83voAz2D8fhw== + dependencies: + "@types/history" "^4.7.11" + "@types/react" "*" + "@types/react-router" "*" + +"@types/react-router@*": + version "5.1.20" + resolved "https://registry.npmjs.org/@types/react-router/-/react-router-5.1.20.tgz" + integrity sha512-jGjmu/ZqS7FjSH6owMcD5qpq19+1RS9DeVRqfl1FeBMxTDQAGwlMWOcs52NDoXaNKyG3d1cYQFMs9rCrb88o9Q== + dependencies: + "@types/history" "^4.7.11" + "@types/react" "*" + +"@types/react@*": + version "18.0.30" + resolved "https://registry.npmjs.org/@types/react/-/react-18.0.30.tgz" + integrity sha512-AnME2cHDH11Pxt/yYX6r0w448BfTwQOLEhQEjCdwB7QskEI7EKtxhGUsExTQe/MsY3D9D5rMtu62WRocw9A8FA== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/retry@0.12.0": + version "0.12.0" + resolved "https://registry.npmjs.org/@types/retry/-/retry-0.12.0.tgz" + integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA== + +"@types/sax@^1.2.1": + version "1.2.4" + resolved "https://registry.npmjs.org/@types/sax/-/sax-1.2.4.tgz" + integrity sha512-pSAff4IAxJjfAXUG6tFkO7dsSbTmf8CtUpfhhZ5VhkRpC4628tJhh3+V6H1E+/Gs9piSzYKT5yzHO5M4GG9jkw== + dependencies: + "@types/node" "*" + +"@types/scheduler@*": + version "0.16.3" + resolved "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.3.tgz" + integrity 
sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ== + +"@types/serve-index@^1.9.1": + version "1.9.1" + resolved "https://registry.npmjs.org/@types/serve-index/-/serve-index-1.9.1.tgz" + integrity sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg== + dependencies: + "@types/express" "*" + +"@types/serve-static@*", "@types/serve-static@^1.13.10": + version "1.15.1" + resolved "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.1.tgz" + integrity sha512-NUo5XNiAdULrJENtJXZZ3fHtfMolzZwczzBbnAeBbqBwG+LaG6YaJtuwzwGSQZ2wsCrxjEhNNjAkKigy3n8teQ== + dependencies: + "@types/mime" "*" + "@types/node" "*" + +"@types/sockjs@^0.3.33": + version "0.3.33" + resolved "https://registry.npmjs.org/@types/sockjs/-/sockjs-0.3.33.tgz" + integrity sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw== + dependencies: + "@types/node" "*" + +"@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2", "@types/unist@^2.0.3": + version "2.0.6" + resolved "https://registry.npmjs.org/@types/unist/-/unist-2.0.6.tgz" + integrity sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ== + +"@types/ws@^8.5.1": + version "8.5.4" + resolved "https://registry.npmjs.org/@types/ws/-/ws-8.5.4.tgz" + integrity sha512-zdQDHKUgcX/zBc4GrwsE/7dVdAD8JR4EuiAXiiUhhfyIJXXb2+PrGshFyeXWQPMmmZ2XxgaqclgpIC7eTXc1mg== + dependencies: + "@types/node" "*" + +"@types/yargs-parser@*": + version "21.0.0" + resolved "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.0.tgz" + integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== + +"@types/yargs@^17.0.8": + version "17.0.24" + resolved "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.24.tgz" + integrity sha512-6i0aC7jV6QzQB8ne1joVZ0eSFIstHsCrobmOtghM11yGlH0j43FKL2UhWdELkyps0zuf7qVTUVCCR+tgSlyLLw== + dependencies: + 
"@types/yargs-parser" "*" + +"@webassemblyjs/ast@1.14.1", "@webassemblyjs/ast@^1.14.1": + version "1.14.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.14.1.tgz#a9f6a07f2b03c95c8d38c4536a1fdfb521ff55b6" + integrity sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ== + dependencies: + "@webassemblyjs/helper-numbers" "1.13.2" + "@webassemblyjs/helper-wasm-bytecode" "1.13.2" + +"@webassemblyjs/floating-point-hex-parser@1.13.2": + version "1.13.2" + resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.13.2.tgz#fcca1eeddb1cc4e7b6eed4fc7956d6813b21b9fb" + integrity sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA== + +"@webassemblyjs/helper-api-error@1.13.2": + version "1.13.2" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.13.2.tgz#e0a16152248bc38daee76dd7e21f15c5ef3ab1e7" + integrity sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ== + +"@webassemblyjs/helper-buffer@1.14.1": + version "1.14.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.14.1.tgz#822a9bc603166531f7d5df84e67b5bf99b72b96b" + integrity sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA== + +"@webassemblyjs/helper-numbers@1.13.2": + version "1.13.2" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.13.2.tgz#dbd932548e7119f4b8a7877fd5a8d20e63490b2d" + integrity sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA== + dependencies: + "@webassemblyjs/floating-point-hex-parser" "1.13.2" + "@webassemblyjs/helper-api-error" "1.13.2" + "@xtuc/long" "4.2.2" + +"@webassemblyjs/helper-wasm-bytecode@1.13.2": + version "1.13.2" + resolved 
"https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.13.2.tgz#e556108758f448aae84c850e593ce18a0eb31e0b" + integrity sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA== + +"@webassemblyjs/helper-wasm-section@1.14.1": + version "1.14.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.14.1.tgz#9629dda9c4430eab54b591053d6dc6f3ba050348" + integrity sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw== + dependencies: + "@webassemblyjs/ast" "1.14.1" + "@webassemblyjs/helper-buffer" "1.14.1" + "@webassemblyjs/helper-wasm-bytecode" "1.13.2" + "@webassemblyjs/wasm-gen" "1.14.1" + +"@webassemblyjs/ieee754@1.13.2": + version "1.13.2" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.13.2.tgz#1c5eaace1d606ada2c7fd7045ea9356c59ee0dba" + integrity sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw== + dependencies: + "@xtuc/ieee754" "^1.2.0" + +"@webassemblyjs/leb128@1.13.2": + version "1.13.2" + resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.13.2.tgz#57c5c3deb0105d02ce25fa3fd74f4ebc9fd0bbb0" + integrity sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw== + dependencies: + "@xtuc/long" "4.2.2" + +"@webassemblyjs/utf8@1.13.2": + version "1.13.2" + resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.13.2.tgz#917a20e93f71ad5602966c2d685ae0c6c21f60f1" + integrity sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ== + +"@webassemblyjs/wasm-edit@^1.14.1": + version "1.14.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.14.1.tgz#ac6689f502219b59198ddec42dcd496b1004d597" + integrity sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ== + 
dependencies: + "@webassemblyjs/ast" "1.14.1" + "@webassemblyjs/helper-buffer" "1.14.1" + "@webassemblyjs/helper-wasm-bytecode" "1.13.2" + "@webassemblyjs/helper-wasm-section" "1.14.1" + "@webassemblyjs/wasm-gen" "1.14.1" + "@webassemblyjs/wasm-opt" "1.14.1" + "@webassemblyjs/wasm-parser" "1.14.1" + "@webassemblyjs/wast-printer" "1.14.1" + +"@webassemblyjs/wasm-gen@1.14.1": + version "1.14.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.14.1.tgz#991e7f0c090cb0bb62bbac882076e3d219da9570" + integrity sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg== + dependencies: + "@webassemblyjs/ast" "1.14.1" + "@webassemblyjs/helper-wasm-bytecode" "1.13.2" + "@webassemblyjs/ieee754" "1.13.2" + "@webassemblyjs/leb128" "1.13.2" + "@webassemblyjs/utf8" "1.13.2" + +"@webassemblyjs/wasm-opt@1.14.1": + version "1.14.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.14.1.tgz#e6f71ed7ccae46781c206017d3c14c50efa8106b" + integrity sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw== + dependencies: + "@webassemblyjs/ast" "1.14.1" + "@webassemblyjs/helper-buffer" "1.14.1" + "@webassemblyjs/wasm-gen" "1.14.1" + "@webassemblyjs/wasm-parser" "1.14.1" + +"@webassemblyjs/wasm-parser@1.14.1", "@webassemblyjs/wasm-parser@^1.14.1": + version "1.14.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.14.1.tgz#b3e13f1893605ca78b52c68e54cf6a865f90b9fb" + integrity sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ== + dependencies: + "@webassemblyjs/ast" "1.14.1" + "@webassemblyjs/helper-api-error" "1.13.2" + "@webassemblyjs/helper-wasm-bytecode" "1.13.2" + "@webassemblyjs/ieee754" "1.13.2" + "@webassemblyjs/leb128" "1.13.2" + "@webassemblyjs/utf8" "1.13.2" + +"@webassemblyjs/wast-printer@1.14.1": + version "1.14.1" + resolved 
"https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.14.1.tgz#3bb3e9638a8ae5fdaf9610e7a06b4d9f9aa6fe07" + integrity sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw== + dependencies: + "@webassemblyjs/ast" "1.14.1" + "@xtuc/long" "4.2.2" + +"@xtuc/ieee754@^1.2.0": + version "1.2.0" + resolved "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz" + integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== + +"@xtuc/long@4.2.2": + version "4.2.2" + resolved "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz" + integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== + +accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: + version "1.3.8" + resolved "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz" + integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== + dependencies: + mime-types "~2.1.34" + negotiator "0.6.3" + +acorn-import-phases@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/acorn-import-phases/-/acorn-import-phases-1.0.4.tgz#16eb850ba99a056cb7cbfe872ffb8972e18c8bd7" + integrity sha512-wKmbr/DDiIXzEOiWrTTUcDm24kQ2vGfZQvM2fwg2vXqR5uW6aapr7ObPtj1th32b9u90/Pf4AItvdTh42fBmVQ== + +acorn-walk@^8.0.0: + version "8.2.0" + resolved "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz" + integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== + +acorn@^8.0.4, acorn@^8.5.0: + version "8.8.2" + resolved "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz" + integrity sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw== + +acorn@^8.15.0: + version "8.15.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.15.0.tgz#a360898bc415edaac46c8241f6383975b930b816" + integrity 
sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg== + +acorn@^8.8.2: + version "8.14.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.14.1.tgz#721d5dc10f7d5b5609a891773d47731796935dfb" + integrity sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg== + +address@^1.0.1, address@^1.1.2: + version "1.2.2" + resolved "https://registry.npmjs.org/address/-/address-1.2.2.tgz" + integrity sha512-4B/qKCfeE/ODUaAUpSwfzazo5x29WD4r3vXiWsB7I2mSDAihwEqKO+g8GELZUQSSAo5e1XTYh3ZVfLyxBc12nA== + +aggregate-error@^3.0.0: + version "3.1.0" + resolved "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz" + integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== + dependencies: + clean-stack "^2.0.0" + indent-string "^4.0.0" + +ajv-formats@^2.1.1: + version "2.1.1" + resolved "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz" + integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== + dependencies: + ajv "^8.0.0" + +ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: + version "3.5.2" + resolved "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz" + integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== + +ajv-keywords@^5.0.0, ajv-keywords@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz" + integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== + dependencies: + fast-deep-equal "^3.1.3" + +ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: + version "6.12.6" + resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + 
json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ajv@^8.0.0, ajv@^8.8.0: + version "8.12.0" + resolved "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz" + integrity sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== + dependencies: + fast-deep-equal "^3.1.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + uri-js "^4.2.2" + +ajv@^8.9.0: + version "8.17.1" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.17.1.tgz#37d9a5c776af6bc92d7f4f9510eba4c0a60d11a6" + integrity sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g== + dependencies: + fast-deep-equal "^3.1.3" + fast-uri "^3.0.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + +algoliasearch-helper@^3.10.0: + version "3.12.0" + resolved "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.12.0.tgz" + integrity sha512-/j1U3PEwdan0n6P/QqSnSpNSLC5+cEMvyljd5CnmNmUjDlGrys+vFEOwjVEnqELIiAGMHEA/Nl3CiKVFBUYqyQ== + dependencies: + "@algolia/events" "^4.0.1" + +algoliasearch@^4.0.0, algoliasearch@^4.13.1: + version "4.16.0" + resolved "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.16.0.tgz" + integrity sha512-HAjKJ6bBblaXqO4dYygF4qx251GuJ6zCZt+qbJ+kU7sOC+yc84pawEjVpJByh+cGP2APFCsao2Giz50cDlKNPA== + dependencies: + "@algolia/cache-browser-local-storage" "4.16.0" + "@algolia/cache-common" "4.16.0" + "@algolia/cache-in-memory" "4.16.0" + "@algolia/client-account" "4.16.0" + "@algolia/client-analytics" "4.16.0" + "@algolia/client-common" "4.16.0" + "@algolia/client-personalization" "4.16.0" + "@algolia/client-search" "4.16.0" + "@algolia/logger-common" "4.16.0" + "@algolia/logger-console" "4.16.0" + "@algolia/requester-browser-xhr" "4.16.0" + "@algolia/requester-common" "4.16.0" + "@algolia/requester-node-http" "4.16.0" + "@algolia/transporter" "4.16.0" + +ansi-align@^3.0.0, ansi-align@^3.0.1: + version "3.0.1" + resolved 
"https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz" + integrity sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w== + dependencies: + string-width "^4.1.0" + +ansi-html-community@^0.0.8: + version "0.0.8" + resolved "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz" + integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-regex@^6.0.1: + version "6.0.1" + resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz" + integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansi-styles@^6.1.0: + version "6.2.1" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz" + integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== + +any-promise@^1.0.0: + version "1.3.0" + resolved "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz" + integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== + +anymatch@~3.1.2: + version "3.1.3" + resolved "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz" + integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +arg@^5.0.0, arg@^5.0.2: + version "5.0.2" + resolved 
"https://registry.npmjs.org/arg/-/arg-5.0.2.tgz" + integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== + +argparse@^1.0.7: + version "1.0.10" + resolved "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +array-flatten@1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz" + integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== + +array-flatten@^2.1.2: + version "2.1.2" + resolved "https://registry.npmjs.org/array-flatten/-/array-flatten-2.1.2.tgz" + integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== + +array-union@^2.1.0: + version "2.1.0" + resolved "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +asap@~2.0.3: + version "2.0.6" + resolved "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz" + integrity sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA== + +at-least-node@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz" + integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== + +autoprefixer@^10.4.12, autoprefixer@^10.4.14, autoprefixer@^10.4.7: + version "10.4.14" + resolved "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.14.tgz" + integrity 
sha512-FQzyfOsTlwVzjHxKEqRIAdJx9niO6VCBCoEwax/VLSoQF29ggECcPuBqUMZ+u8jCZOPSy8b8/8KnuFbp0SaFZQ== + dependencies: + browserslist "^4.21.5" + caniuse-lite "^1.0.30001464" + fraction.js "^4.2.0" + normalize-range "^0.1.2" + picocolors "^1.0.0" + postcss-value-parser "^4.2.0" + +axios@^0.25.0: + version "0.25.0" + resolved "https://registry.npmjs.org/axios/-/axios-0.25.0.tgz" + integrity sha512-cD8FOb0tRH3uuEe6+evtAbgJtfxr7ly3fQjYcMcuPlgkwVS9xboaVIpcDV+cYQe+yGykgwZCs1pzjntcGa6l5g== + dependencies: + follow-redirects "^1.14.7" + +babel-loader@^8.2.5: + version "8.3.0" + resolved "https://registry.npmjs.org/babel-loader/-/babel-loader-8.3.0.tgz" + integrity sha512-H8SvsMF+m9t15HNLMipppzkC+Y2Yq+v3SonZyU70RBL/h1gxPkH08Ot8pEE9Z4Kd+czyWJClmFS8qzIP9OZ04Q== + dependencies: + find-cache-dir "^3.3.1" + loader-utils "^2.0.0" + make-dir "^3.1.0" + schema-utils "^2.6.5" + +babel-plugin-apply-mdx-type-prop@1.6.22: + version "1.6.22" + resolved "https://registry.npmjs.org/babel-plugin-apply-mdx-type-prop/-/babel-plugin-apply-mdx-type-prop-1.6.22.tgz" + integrity sha512-VefL+8o+F/DfK24lPZMtJctrCVOfgbqLAGZSkxwhazQv4VxPg3Za/i40fu22KR2m8eEda+IfSOlPLUSIiLcnCQ== + dependencies: + "@babel/helper-plugin-utils" "7.10.4" + "@mdx-js/util" "1.6.22" + +babel-plugin-dynamic-import-node@^2.3.3: + version "2.3.3" + resolved "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz" + integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ== + dependencies: + object.assign "^4.1.0" + +babel-plugin-extract-import-names@1.6.22: + version "1.6.22" + resolved "https://registry.npmjs.org/babel-plugin-extract-import-names/-/babel-plugin-extract-import-names-1.6.22.tgz" + integrity sha512-yJ9BsJaISua7d8zNT7oRG1ZLBJCIdZ4PZqmH8qa9N5AK01ifk3fnkc98AXhtzE7UkfCsEumvoQWgoYLhOnJ7jQ== + dependencies: + "@babel/helper-plugin-utils" "7.10.4" + +babel-plugin-polyfill-corejs2@^0.3.3: + version "0.3.3" + resolved 
"https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.3.tgz" + integrity sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q== + dependencies: + "@babel/compat-data" "^7.17.7" + "@babel/helper-define-polyfill-provider" "^0.3.3" + semver "^6.1.1" + +babel-plugin-polyfill-corejs3@^0.6.0: + version "0.6.0" + resolved "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.6.0.tgz" + integrity sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.3" + core-js-compat "^3.25.1" + +babel-plugin-polyfill-regenerator@^0.4.1: + version "0.4.1" + resolved "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.4.1.tgz" + integrity sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.3" + +"babel-plugin-styled-components@>= 1.12.0": + version "2.0.7" + resolved "https://registry.npmjs.org/babel-plugin-styled-components/-/babel-plugin-styled-components-2.0.7.tgz" + integrity sha512-i7YhvPgVqRKfoQ66toiZ06jPNA3p6ierpfUuEWxNF+fV27Uv5gxBkf8KZLHUCc1nFA9j6+80pYoIpqCeyW3/bA== + dependencies: + "@babel/helper-annotate-as-pure" "^7.16.0" + "@babel/helper-module-imports" "^7.16.0" + babel-plugin-syntax-jsx "^6.18.0" + lodash "^4.17.11" + picomatch "^2.3.0" + +babel-plugin-syntax-jsx@^6.18.0: + version "6.18.0" + resolved "https://registry.npmjs.org/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz" + integrity sha512-qrPaCSo9c8RHNRHIotaufGbuOBN8rtdC4QrrFFc43vyWCCz7Kl7GL1PGaXtMGQZUXrkCjNEgxDfmAuAabr/rlw== + +bail@^1.0.0: + version "1.0.5" + resolved "https://registry.npmjs.org/bail/-/bail-1.0.5.tgz" + integrity sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ== + 
+balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +base16@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/base16/-/base16-1.0.0.tgz" + integrity sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ== + +baseline-browser-mapping@^2.9.0: + version "2.9.19" + resolved "https://registry.yarnpkg.com/baseline-browser-mapping/-/baseline-browser-mapping-2.9.19.tgz#3e508c43c46d961eb4d7d2e5b8d1dd0f9ee4f488" + integrity sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg== + +batch@0.6.1: + version "0.6.1" + resolved "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz" + integrity sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw== + +big.js@^5.2.2: + version "5.2.2" + resolved "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz" + integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== + +binary-extensions@^2.0.0: + version "2.2.0" + resolved "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz" + integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== + +body-parser@1.20.3: + version "1.20.3" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.3.tgz#1953431221c6fb5cd63c4b36d53fab0928e548c6" + integrity sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g== + dependencies: + bytes "3.1.2" + content-type "~1.0.5" + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + http-errors "2.0.0" + iconv-lite "0.4.24" + on-finished "2.4.1" + qs "6.13.0" + raw-body "2.5.2" + type-is "~1.6.18" + unpipe "1.0.0" + +bonjour-service@^1.0.11: + version "1.1.1" + resolved 
"https://registry.npmjs.org/bonjour-service/-/bonjour-service-1.1.1.tgz" + integrity sha512-Z/5lQRMOG9k7W+FkeGTNjh7htqn/2LMnfOvBZ8pynNZCM9MwkQkI3zeI4oz09uWdcgmgHugVvBqxGg4VQJ5PCg== + dependencies: + array-flatten "^2.1.2" + dns-equal "^1.0.0" + fast-deep-equal "^3.1.3" + multicast-dns "^7.2.5" + +boolbase@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz" + integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== + +boxen@^5.0.0: + version "5.1.2" + resolved "https://registry.npmjs.org/boxen/-/boxen-5.1.2.tgz" + integrity sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ== + dependencies: + ansi-align "^3.0.0" + camelcase "^6.2.0" + chalk "^4.1.0" + cli-boxes "^2.2.1" + string-width "^4.2.2" + type-fest "^0.20.2" + widest-line "^3.1.0" + wrap-ansi "^7.0.0" + +boxen@^6.2.1: + version "6.2.1" + resolved "https://registry.npmjs.org/boxen/-/boxen-6.2.1.tgz" + integrity sha512-H4PEsJXfFI/Pt8sjDWbHlQPx4zL/bvSQjcilJmaulGt5mLDorHOHpmdXAJcBcmru7PhYSp/cDMWRko4ZUMFkSw== + dependencies: + ansi-align "^3.0.1" + camelcase "^6.2.0" + chalk "^4.1.2" + cli-boxes "^3.0.0" + string-width "^5.0.1" + type-fest "^2.5.0" + widest-line "^4.0.1" + wrap-ansi "^8.0.1" + +brace-expansion@^1.1.7: + version "1.1.13" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.13.tgz#d37875c01dc9eff988dd49d112a57cb67b54efe6" + integrity sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^3.0.3, braces@~3.0.2: + version "3.0.3" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789" + integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA== + dependencies: + fill-range "^7.1.1" + +browserslist@^4.0.0, browserslist@^4.18.1, 
browserslist@^4.21.4, browserslist@^4.21.5, browserslist@^4.24.0: + version "4.24.4" + resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.24.4.tgz" + integrity sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A== + dependencies: + caniuse-lite "^1.0.30001688" + electron-to-chromium "^1.5.73" + node-releases "^2.0.19" + update-browserslist-db "^1.1.1" + +browserslist@^4.28.1: + version "4.28.1" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.28.1.tgz#7f534594628c53c63101079e27e40de490456a95" + integrity sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA== + dependencies: + baseline-browser-mapping "^2.9.0" + caniuse-lite "^1.0.30001759" + electron-to-chromium "^1.5.263" + node-releases "^2.0.27" + update-browserslist-db "^1.2.0" + +buffer-from@^1.0.0: + version "1.1.2" + resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz" + integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== + +bytes@3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz" + integrity sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw== + +bytes@3.1.2: + version "3.1.2" + resolved "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz" + integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== + +cacheable-request@^6.0.0: + version "6.1.0" + resolved "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz" + integrity sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg== + dependencies: + clone-response "^1.0.2" + get-stream "^5.1.0" + http-cache-semantics "^4.0.0" + keyv "^3.0.0" + lowercase-keys "^2.0.0" + normalize-url "^4.1.0" + responselike "^1.0.2" + +call-bind-apply-helpers@^1.0.1, call-bind-apply-helpers@^1.0.2: + 
version "1.0.2" + resolved "https://registry.yarnpkg.com/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz#4b5428c222be985d79c3d82657479dbe0b59b2d6" + integrity sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ== + dependencies: + es-errors "^1.3.0" + function-bind "^1.1.2" + +call-bind@^1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +call-bound@^1.0.2: + version "1.0.4" + resolved "https://registry.yarnpkg.com/call-bound/-/call-bound-1.0.4.tgz#238de935d2a2a692928c538c7ccfa91067fd062a" + integrity sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg== + dependencies: + call-bind-apply-helpers "^1.0.2" + get-intrinsic "^1.3.0" + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camel-case@^4.1.2: + version "4.1.2" + resolved "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz" + integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw== + dependencies: + pascal-case "^3.1.2" + tslib "^2.0.3" + +camelcase-css@2.0.1, camelcase-css@^2.0.1: + version "2.0.1" + resolved "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz" + integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== + +camelcase@^6.2.0: + version "6.3.0" + resolved "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== + +camelize@^1.0.0: + version "1.0.1" + resolved 
"https://registry.npmjs.org/camelize/-/camelize-1.0.1.tgz" + integrity sha512-dU+Tx2fsypxTgtLoE36npi3UqcjSSMNYfkqgmoEhtZrraP5VWq0K7FkWVTYa8eMPtnU/G2txVsfdCJTn9uzpuQ== + +caniuse-api@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz" + integrity sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw== + dependencies: + browserslist "^4.0.0" + caniuse-lite "^1.0.0" + lodash.memoize "^4.1.2" + lodash.uniq "^4.5.0" + +caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001464, caniuse-lite@^1.0.30001688: + version "1.0.30001707" + resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001707.tgz" + integrity sha512-3qtRjw/HQSMlDWf+X79N206fepf4SOOU6SQLMaq/0KkZLmSjPxAkBOQQ+FxbHKfHmYLZFfdWsO3KA90ceHPSnw== + +caniuse-lite@^1.0.30001759: + version "1.0.30001769" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001769.tgz#1ad91594fad7dc233777c2781879ab5409f7d9c2" + integrity sha512-BCfFL1sHijQlBGWBMuJyhZUhzo7wer5sVj9hqekB/7xn0Ypy+pER/edCYQm4exbXj4WiySGp40P8UuTh6w1srg== + +ccount@^1.0.0: + version "1.1.0" + resolved "https://registry.npmjs.org/ccount/-/ccount-1.1.0.tgz" + integrity sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg== + +chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.2: + version "4.1.2" + resolved "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +character-entities-legacy@^1.0.0: + version "1.1.4" + resolved "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz" + integrity sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA== + +character-entities@^1.0.0: + version "1.2.4" + resolved 
"https://registry.npmjs.org/character-entities/-/character-entities-1.2.4.tgz" + integrity sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw== + +character-reference-invalid@^1.0.0: + version "1.1.4" + resolved "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz" + integrity sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg== + +cheerio-select@^2.1.0: + version "2.1.0" + resolved "https://registry.npmjs.org/cheerio-select/-/cheerio-select-2.1.0.tgz" + integrity sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g== + dependencies: + boolbase "^1.0.0" + css-select "^5.1.0" + css-what "^6.1.0" + domelementtype "^2.3.0" + domhandler "^5.0.3" + domutils "^3.0.1" + +cheerio@^1.0.0-rc.12: + version "1.0.0-rc.12" + resolved "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz" + integrity sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q== + dependencies: + cheerio-select "^2.1.0" + dom-serializer "^2.0.0" + domhandler "^5.0.3" + domutils "^3.0.1" + htmlparser2 "^8.0.1" + parse5 "^7.0.0" + parse5-htmlparser2-tree-adapter "^7.0.0" + +chokidar@^3.4.2, chokidar@^3.5.3: + version "3.5.3" + resolved "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz" + integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== + dependencies: + anymatch "~3.1.2" + braces "~3.0.2" + glob-parent "~5.1.2" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.6.0" + optionalDependencies: + fsevents "~2.3.2" + +chrome-trace-event@^1.0.2: + version "1.0.3" + resolved "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz" + integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== + +ci-info@^2.0.0: + version "2.0.0" + 
resolved "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz" + integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== + +ci-info@^3.2.0: + version "3.8.0" + resolved "https://registry.npmjs.org/ci-info/-/ci-info-3.8.0.tgz" + integrity sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw== + +clean-css@^5.2.2, clean-css@^5.3.0: + version "5.3.2" + resolved "https://registry.npmjs.org/clean-css/-/clean-css-5.3.2.tgz" + integrity sha512-JVJbM+f3d3Q704rF4bqQ5UUyTtuJ0JRKNbTKVEeujCCBoMdkEi+V+e8oktO9qGQNSvHrFTM6JZRXrUvGR1czww== + dependencies: + source-map "~0.6.0" + +clean-stack@^2.0.0: + version "2.2.0" + resolved "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz" + integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== + +cli-boxes@^2.2.1: + version "2.2.1" + resolved "https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.1.tgz" + integrity sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw== + +cli-boxes@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/cli-boxes/-/cli-boxes-3.0.0.tgz" + integrity sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g== + +cli-table3@^0.6.2: + version "0.6.3" + resolved "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.3.tgz" + integrity sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg== + dependencies: + string-width "^4.2.0" + optionalDependencies: + "@colors/colors" "1.5.0" + +clone-deep@^4.0.1: + version "4.0.1" + resolved "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz" + integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== + dependencies: + is-plain-object "^2.0.4" + kind-of "^6.0.2" + shallow-clone "^3.0.0" + +clone-response@^1.0.2: + version "1.0.3" + 
resolved "https://registry.npmjs.org/clone-response/-/clone-response-1.0.3.tgz" + integrity sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA== + dependencies: + mimic-response "^1.0.0" + +clsx@^1.2.1: + version "1.2.1" + resolved "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz" + integrity sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg== + +collapse-white-space@^1.0.2: + version "1.0.6" + resolved "https://registry.npmjs.org/collapse-white-space/-/collapse-white-space-1.0.6.tgz" + integrity sha512-jEovNnrhMuqyCcjfEJA56v0Xq8SkIoPKDyaHahwo3POf4qcSXqMYuwNcOTzp74vTsR9Tn08z4MxWqAhcekogkQ== + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@^1.1.4, color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +colord@^2.9.1: + version "2.9.3" + resolved "https://registry.npmjs.org/colord/-/colord-2.9.3.tgz" + integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== + +colorette@^2.0.10: + version "2.0.19" + resolved "https://registry.npmjs.org/colorette/-/colorette-2.0.19.tgz" + integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ== + +combine-promises@^1.1.0: + version "1.1.0" + resolved "https://registry.npmjs.org/combine-promises/-/combine-promises-1.1.0.tgz" + integrity sha512-ZI9jvcLDxqwaXEixOhArm3r7ReIivsXkpbyEWyeOhzz1QS0iSgBPnWvEqvIQtYyamGCYA88gFhmUrs9hrrQ0pg== + +comma-separated-tokens@^1.0.0: + version "1.0.8" + resolved 
"https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz" + integrity sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw== + +commander@^2.20.0: + version "2.20.3" + resolved "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz" + integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== + +commander@^4.0.0: + version "4.1.1" + resolved "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz" + integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA== + +commander@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz" + integrity sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg== + +commander@^7.2.0: + version "7.2.0" + resolved "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz" + integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== + +commander@^8.3.0: + version "8.3.0" + resolved "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz" + integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== + +commondir@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz" + integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg== + +compressible@~2.0.16: + version "2.0.18" + resolved "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz" + integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== + dependencies: + mime-db ">= 1.43.0 < 2" + +compression@^1.7.4: + version "1.7.4" + resolved "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz" + integrity 
sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== + dependencies: + accepts "~1.3.5" + bytes "3.0.0" + compressible "~2.0.16" + debug "2.6.9" + on-headers "~1.0.2" + safe-buffer "5.1.2" + vary "~1.1.2" + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +configstore@^5.0.1: + version "5.0.1" + resolved "https://registry.npmjs.org/configstore/-/configstore-5.0.1.tgz" + integrity sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA== + dependencies: + dot-prop "^5.2.0" + graceful-fs "^4.1.2" + make-dir "^3.0.0" + unique-string "^2.0.0" + write-file-atomic "^3.0.0" + xdg-basedir "^4.0.0" + +connect-history-api-fallback@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz" + integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== + +consola@^2.15.3: + version "2.15.3" + resolved "https://registry.npmjs.org/consola/-/consola-2.15.3.tgz" + integrity sha512-9vAdYbHj6x2fLKC4+oPH0kFzY/orMZyG2Aj+kNylHxKGJ/Ed4dpNyAQYwJOdqO4zdM7XpVHmyejQDcQHrnuXbw== + +content-disposition@0.5.2: + version "0.5.2" + resolved "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz" + integrity sha512-kRGRZw3bLlFISDBgwTSA1TMBFN6J6GWDeubmDE3AF+3+yXL8hTWv8r5rkLbqYXY4RjPk/EzHnClI3zQf1cFmHA== + +content-disposition@0.5.4: + version "0.5.4" + resolved "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz" + integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== + dependencies: + safe-buffer "5.2.1" + +content-type@~1.0.4, content-type@~1.0.5: + version "1.0.5" + resolved 
"https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz" + integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== + +convert-source-map@^1.7.0: + version "1.9.0" + resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz" + integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== + +convert-source-map@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz" + integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== + +cookie-signature@1.0.6: + version "1.0.6" + resolved "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz" + integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== + +cookie@0.7.1: + version "0.7.1" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.7.1.tgz#2f73c42142d5d5cf71310a74fc4ae61670e5dbc9" + integrity sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w== + +copy-text-to-clipboard@^3.0.1: + version "3.1.0" + resolved "https://registry.npmjs.org/copy-text-to-clipboard/-/copy-text-to-clipboard-3.1.0.tgz" + integrity sha512-PFM6BnjLnOON/lB3ta/Jg7Ywsv+l9kQGD4TWDCSlRBGmqnnTM5MrDkhAFgw+8HZt0wW6Q2BBE4cmy9sq+s9Qng== + +copy-webpack-plugin@^11.0.0: + version "11.0.0" + resolved "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-11.0.0.tgz" + integrity sha512-fX2MWpamkW0hZxMEg0+mYnA40LTosOSa5TqZ9GYIBzyJa9C3QUaMPSE2xAi/buNr8u89SfD9wHSQVBzrRa/SOQ== + dependencies: + fast-glob "^3.2.11" + glob-parent "^6.0.1" + globby "^13.1.1" + normalize-path "^3.0.0" + schema-utils "^4.0.0" + serialize-javascript "^6.0.0" + +core-js-compat@^3.25.1: + version "3.29.1" + resolved "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.29.1.tgz" + integrity 
sha512-QmchCua884D8wWskMX8tW5ydINzd8oSJVx38lx/pVkFGqztxt73GYre3pm/hyYq8bPf+MW5In4I/uRShFDsbrA== + dependencies: + browserslist "^4.21.5" + +core-js-pure@^3.25.1: + version "3.29.1" + resolved "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.29.1.tgz" + integrity sha512-4En6zYVi0i0XlXHVz/bi6l1XDjCqkKRq765NXuX+SnaIatlE96Odt5lMLjdxUiNI1v9OXI5DSLWYPlmTfkTktg== + +core-js@^3.23.3: + version "3.29.1" + resolved "https://registry.npmjs.org/core-js/-/core-js-3.29.1.tgz" + integrity sha512-+jwgnhg6cQxKYIIjGtAHq2nwUOolo9eoFZ4sHfUH09BLXBgxnH4gA0zEd+t+BO2cNB8idaBtZFcFTRjQJRJmAw== + +core-util-is@~1.0.0: + version "1.0.3" + resolved "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz" + integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== + +cosmiconfig@^6.0.0: + version "6.0.0" + resolved "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-6.0.0.tgz" + integrity sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.1.0" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.7.2" + +cosmiconfig@^7.0.1: + version "7.1.0" + resolved "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.1.0.tgz" + integrity sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.2.1" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.10.0" + +cosmiconfig@^8.0.0: + version "8.1.3" + resolved "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.1.3.tgz" + integrity sha512-/UkO2JKI18b5jVMJUp0lvKFMpa/Gye+ZgZjKD+DGEN9y7NRcf/nK1A0sp67ONmKtnDCNMS44E6jrk0Yc3bDuUw== + dependencies: + import-fresh "^3.2.1" + js-yaml "^4.1.0" + parse-json "^5.0.0" + path-type "^4.0.0" + +cross-fetch@^3.1.5: + version "3.1.5" + resolved "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.1.5.tgz" + integrity 
sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw== + dependencies: + node-fetch "2.6.7" + +cross-spawn@^7.0.3: + version "7.0.6" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f" + integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +crypto-random-string@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz" + integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== + +css-color-keywords@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/css-color-keywords/-/css-color-keywords-1.0.0.tgz" + integrity sha512-FyyrDHZKEjXDpNJYvVsV960FiqQyXc/LlYmsxl2BcdMb2WPx0OGRVgTg55rPSyLSNMqP52R9r8geSp7apN3Ofg== + +css-declaration-sorter@^6.3.1: + version "6.4.0" + resolved "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-6.4.0.tgz" + integrity sha512-jDfsatwWMWN0MODAFuHszfjphEXfNw9JUAhmY4pLu3TyTU+ohUpsbVtbU+1MZn4a47D9kqh03i4eyOm+74+zew== + +css-loader@^6.7.1: + version "6.7.3" + resolved "https://registry.npmjs.org/css-loader/-/css-loader-6.7.3.tgz" + integrity sha512-qhOH1KlBMnZP8FzRO6YCH9UHXQhVMcEGLyNdb7Hv2cpcmJbW0YrddO+tG1ab5nT41KpHIYGsbeHqxB9xPu1pKQ== + dependencies: + icss-utils "^5.1.0" + postcss "^8.4.19" + postcss-modules-extract-imports "^3.0.0" + postcss-modules-local-by-default "^4.0.0" + postcss-modules-scope "^3.0.0" + postcss-modules-values "^4.0.0" + postcss-value-parser "^4.2.0" + semver "^7.3.8" + +css-minimizer-webpack-plugin@^4.0.0: + version "4.2.2" + resolved "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-4.2.2.tgz" + integrity sha512-s3Of/4jKfw1Hj9CxEO1E5oXhQAxlayuHO2y/ML+C6I9sQ7FdzfEV6QgMLN3vI+qFsjJGIAFLKtQK7t8BOXAIyA== + 
dependencies: + cssnano "^5.1.8" + jest-worker "^29.1.2" + postcss "^8.4.17" + schema-utils "^4.0.0" + serialize-javascript "^6.0.0" + source-map "^0.6.1" + +css-select@^4.1.3: + version "4.3.0" + resolved "https://registry.npmjs.org/css-select/-/css-select-4.3.0.tgz" + integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ== + dependencies: + boolbase "^1.0.0" + css-what "^6.0.1" + domhandler "^4.3.1" + domutils "^2.8.0" + nth-check "^2.0.1" + +css-select@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz" + integrity sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg== + dependencies: + boolbase "^1.0.0" + css-what "^6.1.0" + domhandler "^5.0.2" + domutils "^3.0.1" + nth-check "^2.0.1" + +css-to-react-native@^3.0.0: + version "3.2.0" + resolved "https://registry.npmjs.org/css-to-react-native/-/css-to-react-native-3.2.0.tgz" + integrity sha512-e8RKaLXMOFii+02mOlqwjbD00KSEKqblnpO9e++1aXS1fPQOpS1YoqdVHBqPjHNoxeF2mimzVqawm2KCbEdtHQ== + dependencies: + camelize "^1.0.0" + css-color-keywords "^1.0.0" + postcss-value-parser "^4.0.2" + +css-tree@^1.1.2, css-tree@^1.1.3: + version "1.1.3" + resolved "https://registry.npmjs.org/css-tree/-/css-tree-1.1.3.tgz" + integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== + dependencies: + mdn-data "2.0.14" + source-map "^0.6.1" + +css-what@^6.0.1, css-what@^6.1.0: + version "6.1.0" + resolved "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz" + integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== + +cssesc@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz" + integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== + +cssnano-preset-advanced@^5.3.8: + version "5.3.10" + resolved 
"https://registry.npmjs.org/cssnano-preset-advanced/-/cssnano-preset-advanced-5.3.10.tgz" + integrity sha512-fnYJyCS9jgMU+cmHO1rPSPf9axbQyD7iUhLO5Df6O4G+fKIOMps+ZbU0PdGFejFBBZ3Pftf18fn1eG7MAPUSWQ== + dependencies: + autoprefixer "^10.4.12" + cssnano-preset-default "^5.2.14" + postcss-discard-unused "^5.1.0" + postcss-merge-idents "^5.1.1" + postcss-reduce-idents "^5.2.0" + postcss-zindex "^5.1.0" + +cssnano-preset-default@^5.2.14: + version "5.2.14" + resolved "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-5.2.14.tgz" + integrity sha512-t0SFesj/ZV2OTylqQVOrFgEh5uanxbO6ZAdeCrNsUQ6fVuXwYTxJPNAGvGTxHbD68ldIJNec7PyYZDBrfDQ+6A== + dependencies: + css-declaration-sorter "^6.3.1" + cssnano-utils "^3.1.0" + postcss-calc "^8.2.3" + postcss-colormin "^5.3.1" + postcss-convert-values "^5.1.3" + postcss-discard-comments "^5.1.2" + postcss-discard-duplicates "^5.1.0" + postcss-discard-empty "^5.1.1" + postcss-discard-overridden "^5.1.0" + postcss-merge-longhand "^5.1.7" + postcss-merge-rules "^5.1.4" + postcss-minify-font-values "^5.1.0" + postcss-minify-gradients "^5.1.1" + postcss-minify-params "^5.1.4" + postcss-minify-selectors "^5.2.1" + postcss-normalize-charset "^5.1.0" + postcss-normalize-display-values "^5.1.0" + postcss-normalize-positions "^5.1.1" + postcss-normalize-repeat-style "^5.1.1" + postcss-normalize-string "^5.1.0" + postcss-normalize-timing-functions "^5.1.0" + postcss-normalize-unicode "^5.1.1" + postcss-normalize-url "^5.1.0" + postcss-normalize-whitespace "^5.1.1" + postcss-ordered-values "^5.1.3" + postcss-reduce-initial "^5.1.2" + postcss-reduce-transforms "^5.1.0" + postcss-svgo "^5.1.0" + postcss-unique-selectors "^5.1.1" + +cssnano-utils@^3.1.0: + version "3.1.0" + resolved "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-3.1.0.tgz" + integrity sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA== + +cssnano@^5.1.12, cssnano@^5.1.8: + version "5.1.15" + resolved 
"https://registry.npmjs.org/cssnano/-/cssnano-5.1.15.tgz" + integrity sha512-j+BKgDcLDQA+eDifLx0EO4XSA56b7uut3BQFH+wbSaSTuGLuiyTa/wbRYthUXX8LC9mLg+WWKe8h+qJuwTAbHw== + dependencies: + cssnano-preset-default "^5.2.14" + lilconfig "^2.0.3" + yaml "^1.10.2" + +csso@^4.2.0: + version "4.2.0" + resolved "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz" + integrity sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA== + dependencies: + css-tree "^1.1.2" + +csstype@^3.0.2: + version "3.1.1" + resolved "https://registry.npmjs.org/csstype/-/csstype-3.1.1.tgz" + integrity sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw== + +debug@2.6.9, debug@^2.6.0: + version "2.6.9" + resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1: + version "4.3.4" + resolved "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +decompress-response@^3.3.0: + version "3.3.0" + resolved "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz" + integrity sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA== + dependencies: + mimic-response "^1.0.0" + +deep-extend@^0.6.0: + version "0.6.0" + resolved "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz" + integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== + +deepmerge@^4.2.2: + version "4.3.1" + resolved "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz" + integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== + +default-gateway@^6.0.3: + 
version "6.0.3" + resolved "https://registry.npmjs.org/default-gateway/-/default-gateway-6.0.3.tgz" + integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== + dependencies: + execa "^5.0.0" + +defer-to-connect@^1.0.1: + version "1.1.3" + resolved "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-1.1.3.tgz" + integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== + +define-lazy-prop@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz" + integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== + +define-properties@^1.1.4: + version "1.2.0" + resolved "https://registry.npmjs.org/define-properties/-/define-properties-1.2.0.tgz" + integrity sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA== + dependencies: + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" + +del@^6.1.1: + version "6.1.1" + resolved "https://registry.npmjs.org/del/-/del-6.1.1.tgz" + integrity sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg== + dependencies: + globby "^11.0.1" + graceful-fs "^4.2.4" + is-glob "^4.0.1" + is-path-cwd "^2.2.0" + is-path-inside "^3.0.2" + p-map "^4.0.0" + rimraf "^3.0.2" + slash "^3.0.0" + +depd@2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + +depd@~1.1.2: + version "1.1.2" + resolved "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz" + integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== + +destroy@1.2.0: + version "1.2.0" + resolved "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz" + integrity 
sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== + +detab@2.0.4: + version "2.0.4" + resolved "https://registry.npmjs.org/detab/-/detab-2.0.4.tgz" + integrity sha512-8zdsQA5bIkoRECvCrNKPla84lyoR7DSAyf7p0YgXzBO9PDJx8KntPUay7NS6yp+KdxdVtiE5SpHKtbp2ZQyA9g== + dependencies: + repeat-string "^1.5.4" + +detect-node@^2.0.4: + version "2.1.0" + resolved "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz" + integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== + +detect-port-alt@^1.1.6: + version "1.1.6" + resolved "https://registry.npmjs.org/detect-port-alt/-/detect-port-alt-1.1.6.tgz" + integrity sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q== + dependencies: + address "^1.0.1" + debug "^2.6.0" + +detect-port@^1.3.0: + version "1.5.1" + resolved "https://registry.npmjs.org/detect-port/-/detect-port-1.5.1.tgz" + integrity sha512-aBzdj76lueB6uUst5iAs7+0H/oOjqI5D16XUWxlWMIMROhcM0rfsNVk93zTngq1dDNpoXRr++Sus7ETAExppAQ== + dependencies: + address "^1.0.1" + debug "4" + +didyoumean@^1.2.2: + version "1.2.2" + resolved "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz" + integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== + +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +dlv@^1.1.3: + version "1.1.3" + resolved "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz" + integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== + +dns-equal@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz" + integrity 
sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg== + +dns-packet@^5.2.2: + version "5.5.0" + resolved "https://registry.npmjs.org/dns-packet/-/dns-packet-5.5.0.tgz" + integrity sha512-USawdAUzRkV6xrqTjiAEp6M9YagZEzWcSUaZTcIFAiyQWW1SoI6KyId8y2+/71wbgHKQAKd+iupLv4YvEwYWvA== + dependencies: + "@leichtgewicht/ip-codec" "^2.0.1" + +docusaurus-plugin-image-zoom@^0.1.1: + version "0.1.1" + resolved "https://registry.npmjs.org/docusaurus-plugin-image-zoom/-/docusaurus-plugin-image-zoom-0.1.1.tgz" + integrity sha512-cJXo5TKh9OR1gE4B5iS5ovLWYYDFwatqRm00iXFPOaShZG99l5tgkDKgbQPAwSL9wg4I+wz3aMwkOtDhMIpKDQ== + dependencies: + medium-zoom "^1.0.6" + +dom-converter@^0.2.0: + version "0.2.0" + resolved "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz" + integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== + dependencies: + utila "~0.4" + +dom-serializer@^1.0.1: + version "1.4.1" + resolved "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz" + integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.2.0" + entities "^2.0.0" + +dom-serializer@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz" + integrity sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg== + dependencies: + domelementtype "^2.3.0" + domhandler "^5.0.2" + entities "^4.2.0" + +domelementtype@^2.0.1, domelementtype@^2.2.0, domelementtype@^2.3.0: + version "2.3.0" + resolved "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz" + integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== + +domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: + version "4.3.1" + resolved 
"https://registry.npmjs.org/domhandler/-/domhandler-4.3.1.tgz" + integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== + dependencies: + domelementtype "^2.2.0" + +domhandler@^5.0.1, domhandler@^5.0.2, domhandler@^5.0.3: + version "5.0.3" + resolved "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz" + integrity sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w== + dependencies: + domelementtype "^2.3.0" + +domutils@^2.5.2, domutils@^2.8.0: + version "2.8.0" + resolved "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz" + integrity sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== + dependencies: + dom-serializer "^1.0.1" + domelementtype "^2.2.0" + domhandler "^4.2.0" + +domutils@^3.0.1: + version "3.0.1" + resolved "https://registry.npmjs.org/domutils/-/domutils-3.0.1.tgz" + integrity sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q== + dependencies: + dom-serializer "^2.0.0" + domelementtype "^2.3.0" + domhandler "^5.0.1" + +dot-case@^3.0.4: + version "3.0.4" + resolved "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz" + integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + +dot-prop@^5.2.0: + version "5.3.0" + resolved "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz" + integrity sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q== + dependencies: + is-obj "^2.0.0" + +dunder-proto@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/dunder-proto/-/dunder-proto-1.0.1.tgz#d7ae667e1dc83482f8b70fd0f6eefc50da30f58a" + integrity sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A== + dependencies: + call-bind-apply-helpers "^1.0.1" + es-errors 
"^1.3.0" + gopd "^1.2.0" + +duplexer3@^0.1.4: + version "0.1.5" + resolved "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.5.tgz" + integrity sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA== + +duplexer@^0.1.2: + version "0.1.2" + resolved "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz" + integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== + +eastasianwidth@^0.2.0: + version "0.2.0" + resolved "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz" + integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== + +ee-first@1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz" + integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== + +electron-to-chromium@^1.5.263: + version "1.5.286" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.5.286.tgz#142be1ab5e1cd5044954db0e5898f60a4960384e" + integrity sha512-9tfDXhJ4RKFNerfjdCcZfufu49vg620741MNs26a9+bhLThdB+plgMeou98CAaHu/WATj2iHOOHTp1hWtABj2A== + +electron-to-chromium@^1.5.73: + version "1.5.129" + resolved "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.129.tgz" + integrity sha512-JlXUemX4s0+9f8mLqib/bHH8gOHf5elKS6KeWG3sk3xozb/JTq/RLXIv8OKUWiK4Ah00Wm88EFj5PYkFr4RUPA== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +emoji-regex@^9.2.2: + version "9.2.2" + resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz" + integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== + +emojis-list@^3.0.0: + version "3.0.0" + resolved 
"https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz" + integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== + +emoticon@^3.2.0: + version "3.2.0" + resolved "https://registry.npmjs.org/emoticon/-/emoticon-3.2.0.tgz" + integrity sha512-SNujglcLTTg+lDAcApPNgEdudaqQFiAbJCqzjNxJkvN9vAwCGi0uu8IUVvx+f16h+V44KCY6Y2yboroc9pilHg== + +encodeurl@~1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz" + integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== + +encodeurl@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-2.0.0.tgz#7b8ea898077d7e409d3ac45474ea38eaf0857a58" + integrity sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg== + +end-of-stream@^1.1.0: + version "1.4.4" + resolved "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz" + integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== + dependencies: + once "^1.4.0" + +enhanced-resolve@^5.19.0: + version "5.19.0" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.19.0.tgz#6687446a15e969eaa63c2fa2694510e17ae6d97c" + integrity sha512-phv3E1Xl4tQOShqSte26C7Fl84EwUdZsyOuSSk9qtAGyyQs2s3jJzComh+Abf4g187lUUAvH+H26omrqia2aGg== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.3.0" + +entities@^2.0.0: + version "2.2.0" + resolved "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz" + integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== + +entities@^4.2.0, entities@^4.4.0: + version "4.4.0" + resolved "https://registry.npmjs.org/entities/-/entities-4.4.0.tgz" + integrity sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA== + +error-ex@^1.3.1: + version "1.3.2" + resolved 
"https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + is-arrayish "^0.2.1" + +es-define-property@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.1.tgz#983eb2f9a6724e9303f61addf011c72e09e0b0fa" + integrity sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g== + +es-errors@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f" + integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== + +es-module-lexer@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-2.0.0.tgz#f657cd7a9448dcdda9c070a3cb75e5dc1e85f5b1" + integrity sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw== + +es-object-atoms@^1.0.0, es-object-atoms@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/es-object-atoms/-/es-object-atoms-1.1.1.tgz#1c4f2c4837327597ce69d2ca190a7fdd172338c1" + integrity sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA== + dependencies: + es-errors "^1.3.0" + +escalade@^3.2.0: + version "3.2.0" + resolved "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz" + integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA== + +escape-goat@^2.0.0: + version "2.1.1" + resolved "https://registry.npmjs.org/escape-goat/-/escape-goat-2.1.1.tgz" + integrity sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q== + +escape-html@^1.0.3, escape-html@~1.0.3: + version "1.0.3" + resolved "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz" + integrity 
sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== + +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +eslint-scope@5.1.1: + version "5.1.1" + resolved "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz" + integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +esprima@^4.0.0: + version "4.0.1" + resolved "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^4.1.1: + version "4.3.0" + resolved "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estraverse@^5.2.0: + version "5.3.0" + resolved "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +eta@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/eta/-/eta-2.0.1.tgz" + integrity sha512-46E2qDPDm7QA+usjffUWz9KfXsxVZclPOuKsXs4ZWZdI/X1wpDF7AO424pt7fdYohCzWsIkXAhNGXSlwo5naAg== + +etag@~1.8.1: + version "1.8.1" + 
resolved "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz" + integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== + +eval@^0.1.8: + version "0.1.8" + resolved "https://registry.npmjs.org/eval/-/eval-0.1.8.tgz" + integrity sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw== + dependencies: + "@types/node" "*" + require-like ">= 0.1.1" + +eventemitter3@^4.0.0: + version "4.0.7" + resolved "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz" + integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== + +events@^3.2.0: + version "3.3.0" + resolved "https://registry.npmjs.org/events/-/events-3.3.0.tgz" + integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== + +execa@^5.0.0: + version "5.1.1" + resolved "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" + strip-final-newline "^2.0.0" + +express@^4.17.3: + version "4.21.2" + resolved "https://registry.yarnpkg.com/express/-/express-4.21.2.tgz#cf250e48362174ead6cea4a566abef0162c1ec32" + integrity sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA== + dependencies: + accepts "~1.3.8" + array-flatten "1.1.1" + body-parser "1.20.3" + content-disposition "0.5.4" + content-type "~1.0.4" + cookie "0.7.1" + cookie-signature "1.0.6" + debug "2.6.9" + depd "2.0.0" + encodeurl "~2.0.0" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "1.3.1" + fresh "0.5.2" + http-errors "2.0.0" + merge-descriptors "1.0.3" + methods "~1.1.2" + on-finished "2.4.1" + parseurl "~1.3.3" + 
path-to-regexp "0.1.12" + proxy-addr "~2.0.7" + qs "6.13.0" + range-parser "~1.2.1" + safe-buffer "5.2.1" + send "0.19.0" + serve-static "1.16.2" + setprototypeof "1.2.0" + statuses "2.0.1" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + +extend-shallow@^2.0.1: + version "2.0.1" + resolved "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz" + integrity sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug== + dependencies: + is-extendable "^0.1.0" + +extend@^3.0.0: + version "3.0.2" + resolved "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== + +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-glob@^3.2.11, fast-glob@^3.2.12, fast-glob@^3.2.9: + version "3.2.12" + resolved "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz" + integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-uri@^3.0.1: + version "3.0.6" + resolved "https://registry.yarnpkg.com/fast-uri/-/fast-uri-3.0.6.tgz#88f130b77cfaea2378d56bf970dea21257a68748" + integrity sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw== + +fast-url-parser@1.1.3: + version "1.1.3" + resolved 
"https://registry.npmjs.org/fast-url-parser/-/fast-url-parser-1.1.3.tgz" + integrity sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ== + dependencies: + punycode "^1.3.2" + +fastq@^1.6.0: + version "1.15.0" + resolved "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz" + integrity sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw== + dependencies: + reusify "^1.0.4" + +faye-websocket@^0.11.3: + version "0.11.4" + resolved "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz" + integrity sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g== + dependencies: + websocket-driver ">=0.5.1" + +fbemitter@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/fbemitter/-/fbemitter-3.0.0.tgz" + integrity sha512-KWKaceCwKQU0+HPoop6gn4eOHk50bBv/VxjJtGMfwmJt3D29JpN4H4eisCtIPA+a8GVBam+ldMMpMjJUvpDyHw== + dependencies: + fbjs "^3.0.0" + +fbjs-css-vars@^1.0.0: + version "1.0.2" + resolved "https://registry.npmjs.org/fbjs-css-vars/-/fbjs-css-vars-1.0.2.tgz" + integrity sha512-b2XGFAFdWZWg0phtAWLHCk836A1Xann+I+Dgd3Gk64MHKZO44FfoD1KxyvbSh0qZsIoXQGGlVztIY+oitJPpRQ== + +fbjs@^3.0.0, fbjs@^3.0.1: + version "3.0.4" + resolved "https://registry.npmjs.org/fbjs/-/fbjs-3.0.4.tgz" + integrity sha512-ucV0tDODnGV3JCnnkmoszb5lf4bNpzjv80K41wd4k798Etq+UYD0y0TIfalLjZoKgjive6/adkRnszwapiDgBQ== + dependencies: + cross-fetch "^3.1.5" + fbjs-css-vars "^1.0.0" + loose-envify "^1.0.0" + object-assign "^4.1.0" + promise "^7.1.1" + setimmediate "^1.0.5" + ua-parser-js "^0.7.30" + +feed@^4.2.2: + version "4.2.2" + resolved "https://registry.npmjs.org/feed/-/feed-4.2.2.tgz" + integrity sha512-u5/sxGfiMfZNtJ3OvQpXcvotFpYkL0n9u9mM2vkui2nGo8b4wvDkJ8gAkYqbA8QpGyFCv3RK0Z+Iv+9veCS9bQ== + dependencies: + xml-js "^1.6.11" + +file-loader@^6.2.0: + version "6.2.0" + resolved "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz" + integrity 
sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw== + dependencies: + loader-utils "^2.0.0" + schema-utils "^3.0.0" + +filesize@^8.0.6: + version "8.0.7" + resolved "https://registry.npmjs.org/filesize/-/filesize-8.0.7.tgz" + integrity sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ== + +fill-range@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.1.1.tgz#44265d3cac07e3ea7dc247516380643754a05292" + integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg== + dependencies: + to-regex-range "^5.0.1" + +finalhandler@1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.3.1.tgz#0c575f1d1d324ddd1da35ad7ece3df7d19088019" + integrity sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ== + dependencies: + debug "2.6.9" + encodeurl "~2.0.0" + escape-html "~1.0.3" + on-finished "2.4.1" + parseurl "~1.3.3" + statuses "2.0.1" + unpipe "~1.0.0" + +find-cache-dir@^3.3.1: + version "3.3.2" + resolved "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz" + integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== + dependencies: + commondir "^1.0.1" + make-dir "^3.0.2" + pkg-dir "^4.1.0" + +find-up@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz" + integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== + dependencies: + locate-path "^3.0.0" + +find-up@^4.0.0: + version "4.1.0" + resolved "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +find-up@^5.0.0: + version "5.0.0" + resolved 
"https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz" + integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== + dependencies: + locate-path "^6.0.0" + path-exists "^4.0.0" + +flux@^4.0.1: + version "4.0.4" + resolved "https://registry.npmjs.org/flux/-/flux-4.0.4.tgz" + integrity sha512-NCj3XlayA2UsapRpM7va6wU1+9rE5FIL7qoMcmxWHRzbp0yujihMBm9BBHZ1MDIk5h5o2Bl6eGiCe8rYELAmYw== + dependencies: + fbemitter "^3.0.0" + fbjs "^3.0.1" + +follow-redirects@^1.0.0, follow-redirects@^1.14.7: + version "1.16.0" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.16.0.tgz#28474a159d3b9d11ef62050a14ed60e4df6d61bc" + integrity sha512-y5rN/uOsadFT/JfYwhxRS5R7Qce+g3zG97+JrtFZlC9klX/W5hD7iiLzScI4nZqUS7DNUdhPgw4xI8W2LuXlUw== + +fork-ts-checker-webpack-plugin@^6.5.0: + version "6.5.3" + resolved "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.3.tgz" + integrity sha512-SbH/l9ikmMWycd5puHJKTkZJKddF4iRLyW3DeZ08HTI7NGyLS38MXd/KGgeWumQO7YNQbW2u/NtPT2YowbPaGQ== + dependencies: + "@babel/code-frame" "^7.8.3" + "@types/json-schema" "^7.0.5" + chalk "^4.1.0" + chokidar "^3.4.2" + cosmiconfig "^6.0.0" + deepmerge "^4.2.2" + fs-extra "^9.0.0" + glob "^7.1.6" + memfs "^3.1.2" + minimatch "^3.0.4" + schema-utils "2.7.0" + semver "^7.3.2" + tapable "^1.0.0" + +forwarded@0.2.0: + version "0.2.0" + resolved "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz" + integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== + +fraction.js@^4.2.0: + version "4.2.0" + resolved "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz" + integrity sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA== + +fresh@0.5.2: + version "0.5.2" + resolved "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz" + integrity 
sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== + +fs-extra@^10.1.0: + version "10.1.0" + resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz" + integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs-extra@^9.0.0: + version "9.1.0" + resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz" + integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== + dependencies: + at-least-node "^1.0.0" + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs-monkey@^1.0.3: + version "1.0.3" + resolved "https://registry.npmjs.org/fs-monkey/-/fs-monkey-1.0.3.tgz" + integrity sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q== + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== + +fsevents@~2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + +gensync@^1.0.0-beta.1, gensync@^1.0.0-beta.2: + version 
"1.0.0-beta.2" + resolved "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.1: + version "1.2.0" + resolved "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz" + integrity sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.3" + +get-intrinsic@^1.2.5, get-intrinsic@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.3.0.tgz#743f0e3b6964a93a5491ed1bffaae054d7f98d01" + integrity sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ== + dependencies: + call-bind-apply-helpers "^1.0.2" + es-define-property "^1.0.1" + es-errors "^1.3.0" + es-object-atoms "^1.1.1" + function-bind "^1.1.2" + get-proto "^1.0.1" + gopd "^1.2.0" + has-symbols "^1.1.0" + hasown "^2.0.2" + math-intrinsics "^1.1.0" + +get-own-enumerable-property-symbols@^3.0.0: + version "3.0.2" + resolved "https://registry.npmjs.org/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz" + integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g== + +get-proto@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/get-proto/-/get-proto-1.0.1.tgz#150b3f2743869ef3e851ec0c49d15b1d14d00ee1" + integrity sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g== + dependencies: + dunder-proto "^1.0.1" + es-object-atoms "^1.0.0" + +get-stream@^4.1.0: + version "4.1.0" + resolved "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz" + integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== + dependencies: + pump "^3.0.0" + +get-stream@^5.1.0: + version 
"5.2.0" + resolved "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz" + integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== + dependencies: + pump "^3.0.0" + +get-stream@^6.0.0: + version "6.0.1" + resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + +github-slugger@^1.4.0: + version "1.5.0" + resolved "https://registry.npmjs.org/github-slugger/-/github-slugger-1.5.0.tgz" + integrity sha512-wIh+gKBI9Nshz2o46B0B3f5k/W+WI9ZAv6y5Dn5WJ5SK1t0TnDimB4WE5rmTD05ZAIn8HALCZVmCsvj0w0v0lw== + +glob-parent@^5.1.2, glob-parent@~5.1.2: + version "5.1.2" + resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob-parent@^6.0.1, glob-parent@^6.0.2: + version "6.0.2" + resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== + dependencies: + is-glob "^4.0.3" + +glob-to-regexp@^0.4.1: + version "0.4.1" + resolved "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz" + integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== + +glob@7.1.6: + version "7.1.6" + resolved "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz" + integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +glob@^7.0.0, glob@^7.1.3, glob@^7.1.6: + version "7.2.3" + resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz" + integrity 
sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.1.1" + once "^1.3.0" + path-is-absolute "^1.0.0" + +global-dirs@^3.0.0: + version "3.0.1" + resolved "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.1.tgz" + integrity sha512-NBcGGFbBA9s1VzD41QXDG+3++t9Mn5t1FpLdhESY6oKY4gYTFpX4wO3sqGUa0Srjtbfj3szX0RnemmrVRUdULA== + dependencies: + ini "2.0.0" + +global-modules@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/global-modules/-/global-modules-2.0.0.tgz" + integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== + dependencies: + global-prefix "^3.0.0" + +global-prefix@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/global-prefix/-/global-prefix-3.0.0.tgz" + integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== + dependencies: + ini "^1.3.5" + kind-of "^6.0.2" + which "^1.3.1" + +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globby@^11.0.1, globby@^11.0.4, globby@^11.1.0: + version "11.1.0" + resolved "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + +globby@^13.1.1: + version "13.1.3" + resolved "https://registry.npmjs.org/globby/-/globby-13.1.3.tgz" + integrity sha512-8krCNHXvlCgHDpegPzleMq07yMYTO2sXKASmZmquEYWEmCx6J5UTRbp5RwMJkTJGtcQ44YpiUYUiN0b9mzy8Bw== + dependencies: + dir-glob "^3.0.1" + fast-glob "^3.2.11" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^4.0.0" + +gopd@^1.2.0: 
+ version "1.2.0" + resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.2.0.tgz#89f56b8217bdbc8802bd299df6d7f1081d7e51a1" + integrity sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg== + +got@^9.6.0: + version "9.6.0" + resolved "https://registry.npmjs.org/got/-/got-9.6.0.tgz" + integrity sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q== + dependencies: + "@sindresorhus/is" "^0.14.0" + "@szmarczak/http-timer" "^1.1.2" + cacheable-request "^6.0.0" + decompress-response "^3.3.0" + duplexer3 "^0.1.4" + get-stream "^4.1.0" + lowercase-keys "^1.0.1" + mimic-response "^1.0.1" + p-cancelable "^1.0.0" + to-readable-stream "^1.0.0" + url-parse-lax "^3.0.0" + +graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.11, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: + version "4.2.11" + resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== + +gray-matter@^4.0.3: + version "4.0.3" + resolved "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz" + integrity sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q== + dependencies: + js-yaml "^3.13.1" + kind-of "^6.0.2" + section-matter "^1.0.0" + strip-bom-string "^1.0.0" + +gzip-size@^6.0.0: + version "6.0.0" + resolved "https://registry.npmjs.org/gzip-size/-/gzip-size-6.0.0.tgz" + integrity sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q== + dependencies: + duplexer "^0.1.2" + +handle-thing@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz" + integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== + +has-flag@^3.0.0: + version "3.0.0" + resolved 
"https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" + integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-property-descriptors@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz" + integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== + dependencies: + get-intrinsic "^1.1.1" + +has-symbols@^1.0.3: + version "1.0.3" + resolved "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +has-symbols@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.1.0.tgz#fc9c6a783a084951d0b971fe1018de813707a338" + integrity sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ== + +has-yarn@^2.1.0: + version "2.1.0" + resolved "https://registry.npmjs.org/has-yarn/-/has-yarn-2.1.0.tgz" + integrity sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw== + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.npmjs.org/has/-/has-1.0.3.tgz" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +hasown@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003" + integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ== + dependencies: + function-bind "^1.1.2" + +hast-to-hyperscript@^9.0.0: + version "9.0.1" + resolved 
"https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz" + integrity sha512-zQgLKqF+O2F72S1aa4y2ivxzSlko3MAvxkwG8ehGmNiqd98BIN3JM1rAJPmplEyLmGLO2QZYJtIneOSZ2YbJuA== + dependencies: + "@types/unist" "^2.0.3" + comma-separated-tokens "^1.0.0" + property-information "^5.3.0" + space-separated-tokens "^1.0.0" + style-to-object "^0.3.0" + unist-util-is "^4.0.0" + web-namespaces "^1.0.0" + +hast-util-from-parse5@^6.0.0: + version "6.0.1" + resolved "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-6.0.1.tgz" + integrity sha512-jeJUWiN5pSxW12Rh01smtVkZgZr33wBokLzKLwinYOUfSzm1Nl/c3GUGebDyOKjdsRgMvoVbV0VpAcpjF4NrJA== + dependencies: + "@types/parse5" "^5.0.0" + hastscript "^6.0.0" + property-information "^5.0.0" + vfile "^4.0.0" + vfile-location "^3.2.0" + web-namespaces "^1.0.0" + +hast-util-parse-selector@^2.0.0: + version "2.2.5" + resolved "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz" + integrity sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ== + +hast-util-raw@6.0.1: + version "6.0.1" + resolved "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-6.0.1.tgz" + integrity sha512-ZMuiYA+UF7BXBtsTBNcLBF5HzXzkyE6MLzJnL605LKE8GJylNjGc4jjxazAHUtcwT5/CEt6afRKViYB4X66dig== + dependencies: + "@types/hast" "^2.0.0" + hast-util-from-parse5 "^6.0.0" + hast-util-to-parse5 "^6.0.0" + html-void-elements "^1.0.0" + parse5 "^6.0.0" + unist-util-position "^3.0.0" + vfile "^4.0.0" + web-namespaces "^1.0.0" + xtend "^4.0.0" + zwitch "^1.0.0" + +hast-util-to-parse5@^6.0.0: + version "6.0.0" + resolved "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-6.0.0.tgz" + integrity sha512-Lu5m6Lgm/fWuz8eWnrKezHtVY83JeRGaNQ2kn9aJgqaxvVkFCZQBEhgodZUDUvoodgyROHDb3r5IxAEdl6suJQ== + dependencies: + hast-to-hyperscript "^9.0.0" + property-information "^5.0.0" + web-namespaces "^1.0.0" + xtend "^4.0.0" + zwitch "^1.0.0" + +hastscript@^6.0.0: + 
version "6.0.0" + resolved "https://registry.npmjs.org/hastscript/-/hastscript-6.0.0.tgz" + integrity sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w== + dependencies: + "@types/hast" "^2.0.0" + comma-separated-tokens "^1.0.0" + hast-util-parse-selector "^2.0.0" + property-information "^5.0.0" + space-separated-tokens "^1.0.0" + +he@^1.2.0: + version "1.2.0" + resolved "https://registry.npmjs.org/he/-/he-1.2.0.tgz" + integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== + +history@^4.9.0: + version "4.10.1" + resolved "https://registry.npmjs.org/history/-/history-4.10.1.tgz" + integrity sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew== + dependencies: + "@babel/runtime" "^7.1.2" + loose-envify "^1.2.0" + resolve-pathname "^3.0.0" + tiny-invariant "^1.0.2" + tiny-warning "^1.0.0" + value-equal "^1.0.1" + +hoist-non-react-statics@^3.0.0, hoist-non-react-statics@^3.1.0: + version "3.3.2" + resolved "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz" + integrity sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw== + dependencies: + react-is "^16.7.0" + +hpack.js@^2.1.6: + version "2.1.6" + resolved "https://registry.npmjs.org/hpack.js/-/hpack.js-2.1.6.tgz" + integrity sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ== + dependencies: + inherits "^2.0.1" + obuf "^1.0.0" + readable-stream "^2.0.1" + wbuf "^1.1.0" + +html-entities@^2.3.2: + version "2.3.3" + resolved "https://registry.npmjs.org/html-entities/-/html-entities-2.3.3.tgz" + integrity sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA== + +html-minifier-terser@^6.0.2, html-minifier-terser@^6.1.0: + version "6.1.0" + resolved 
"https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz" + integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw== + dependencies: + camel-case "^4.1.2" + clean-css "^5.2.2" + commander "^8.3.0" + he "^1.2.0" + param-case "^3.0.4" + relateurl "^0.2.7" + terser "^5.10.0" + +html-tags@^3.2.0: + version "3.2.0" + resolved "https://registry.npmjs.org/html-tags/-/html-tags-3.2.0.tgz" + integrity sha512-vy7ClnArOZwCnqZgvv+ddgHgJiAFXe3Ge9ML5/mBctVJoUoYPCdxVucOywjDARn6CVoh3dRSFdPHy2sX80L0Wg== + +html-void-elements@^1.0.0: + version "1.0.5" + resolved "https://registry.npmjs.org/html-void-elements/-/html-void-elements-1.0.5.tgz" + integrity sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w== + +html-webpack-plugin@^5.5.0: + version "5.5.0" + resolved "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz" + integrity sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw== + dependencies: + "@types/html-minifier-terser" "^6.0.0" + html-minifier-terser "^6.0.2" + lodash "^4.17.21" + pretty-error "^4.0.0" + tapable "^2.0.0" + +htmlparser2@^6.1.0: + version "6.1.0" + resolved "https://registry.npmjs.org/htmlparser2/-/htmlparser2-6.1.0.tgz" + integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.0.0" + domutils "^2.5.2" + entities "^2.0.0" + +htmlparser2@^8.0.1: + version "8.0.2" + resolved "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz" + integrity sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA== + dependencies: + domelementtype "^2.3.0" + domhandler "^5.0.3" + domutils "^3.0.1" + entities "^4.4.0" + +http-cache-semantics@^4.0.0: + version "4.1.1" + resolved 
"https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz" + integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ== + +http-deceiver@^1.2.7: + version "1.2.7" + resolved "https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz" + integrity sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw== + +http-errors@2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz" + integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== + dependencies: + depd "2.0.0" + inherits "2.0.4" + setprototypeof "1.2.0" + statuses "2.0.1" + toidentifier "1.0.1" + +http-errors@~1.6.2: + version "1.6.3" + resolved "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz" + integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== + dependencies: + depd "~1.1.2" + inherits "2.0.3" + setprototypeof "1.1.0" + statuses ">= 1.4.0 < 2" + +http-parser-js@>=0.5.1: + version "0.5.8" + resolved "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz" + integrity sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q== + +http-proxy-middleware@^2.0.3: + version "2.0.9" + resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-2.0.9.tgz#e9e63d68afaa4eee3d147f39149ab84c0c2815ef" + integrity sha512-c1IyJYLYppU574+YI7R4QyX2ystMtVXZwIdzazUIPIJsHuWNd+mho2j+bKoHftndicGj9yh+xjd+l0yj7VeT1Q== + dependencies: + "@types/http-proxy" "^1.17.8" + http-proxy "^1.18.1" + is-glob "^4.0.1" + is-plain-obj "^3.0.0" + micromatch "^4.0.2" + +http-proxy@^1.18.1: + version "1.18.1" + resolved "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz" + integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== + dependencies: 
+ eventemitter3 "^4.0.0" + follow-redirects "^1.0.0" + requires-port "^1.0.0" + +human-signals@^2.1.0: + version "2.1.0" + resolved "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + +iconv-lite@0.4.24: + version "0.4.24" + resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +icss-utils@^5.0.0, icss-utils@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/icss-utils/-/icss-utils-5.1.0.tgz" + integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== + +ignore@^5.2.0: + version "5.2.4" + resolved "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz" + integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== + +image-size@^1.0.1: + version "1.0.2" + resolved "https://registry.npmjs.org/image-size/-/image-size-1.0.2.tgz" + integrity sha512-xfOoWjceHntRb3qFCrh5ZFORYH8XCdYpASltMhZ/Q0KZiOwjdE/Yl2QCiWdwD+lygV5bMCvauzgu5PxBX/Yerg== + dependencies: + queue "6.0.2" + +immer@^9.0.7: + version "9.0.21" + resolved "https://registry.npmjs.org/immer/-/immer-9.0.21.tgz" + integrity sha512-bc4NBHqOqSfRW7POMkHd51LvClaeMXpm8dx0e8oE2GORbq5aRK7Bxl4FyzVLdGtLmvLKL7BTDBG5ACQm4HWjTA== + +import-fresh@^3.1.0, import-fresh@^3.2.1, import-fresh@^3.3.0: + version "3.3.0" + resolved "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +import-lazy@^2.1.0: + version "2.1.0" + resolved "https://registry.npmjs.org/import-lazy/-/import-lazy-2.1.0.tgz" + integrity 
sha512-m7ZEHgtw69qOGw+jwxXkHlrlIPdTGkyh66zXZ1ajZbxkDBNjSY/LGbmjc7h0s2ELsUDTAhFr55TrPSSqJGPG0A== + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz" + integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== + +indent-string@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz" + integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== + +infima@0.2.0-alpha.43: + version "0.2.0-alpha.43" + resolved "https://registry.npmjs.org/infima/-/infima-0.2.0-alpha.43.tgz" + integrity sha512-2uw57LvUqW0rK/SWYnd/2rRfxNA5DDNOh33jxF7fy46VWoNhGxiUQyVZHbBMjQ33mQem0cjdDVwgWVAmlRfgyQ== + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@2.0.4, inherits@^2.0.0, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3: + version "2.0.4" + resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inherits@2.0.3: + version "2.0.3" + resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== + +ini@2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz" + integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA== + +ini@^1.3.5, ini@~1.3.0: + version "1.3.8" + resolved "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== + 
+inline-style-parser@0.1.1: + version "0.1.1" + resolved "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz" + integrity sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q== + +interpret@^1.0.0: + version "1.4.0" + resolved "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz" + integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA== + +invariant@^2.2.4: + version "2.2.4" + resolved "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz" + integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== + dependencies: + loose-envify "^1.0.0" + +ipaddr.js@1.9.1: + version "1.9.1" + resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + +ipaddr.js@^2.0.1: + version "2.0.1" + resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.0.1.tgz" + integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== + +is-alphabetical@1.0.4, is-alphabetical@^1.0.0: + version "1.0.4" + resolved "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz" + integrity sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg== + +is-alphanumerical@^1.0.0: + version "1.0.4" + resolved "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz" + integrity sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A== + dependencies: + is-alphabetical "^1.0.0" + is-decimal "^1.0.0" + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz" + integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== + +is-binary-path@~2.1.0: + version "2.1.0" + 
resolved "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz" + integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + dependencies: + binary-extensions "^2.0.0" + +is-buffer@^2.0.0: + version "2.0.5" + resolved "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz" + integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ== + +is-ci@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz" + integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w== + dependencies: + ci-info "^2.0.0" + +is-core-module@^2.9.0: + version "2.11.0" + resolved "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz" + integrity sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw== + dependencies: + has "^1.0.3" + +is-decimal@^1.0.0: + version "1.0.4" + resolved "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz" + integrity sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw== + +is-docker@^2.0.0, is-docker@^2.1.1: + version "2.2.1" + resolved "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz" + integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== + +is-extendable@^0.1.0: + version "0.1.1" + resolved "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz" + integrity sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw== + +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved 
"https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: + version "4.0.3" + resolved "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-hexadecimal@^1.0.0: + version "1.0.4" + resolved "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz" + integrity sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw== + +is-installed-globally@^0.4.0: + version "0.4.0" + resolved "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz" + integrity sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ== + dependencies: + global-dirs "^3.0.0" + is-path-inside "^3.0.2" + +is-npm@^5.0.0: + version "5.0.0" + resolved "https://registry.npmjs.org/is-npm/-/is-npm-5.0.0.tgz" + integrity sha512-WW/rQLOazUq+ST/bCAVBp/2oMERWLsR7OrKyt052dNDk4DHcDE0/7QSXITlmi+VBcV13DfIbysG3tZJm5RfdBA== + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-obj@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz" + integrity sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg== + +is-obj@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz" + integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w== + +is-path-cwd@^2.2.0: + version "2.2.0" + resolved "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz" 
+ integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== + +is-path-inside@^3.0.2: + version "3.0.3" + resolved "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz" + integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== + +is-plain-obj@^2.0.0: + version "2.1.0" + resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz" + integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== + +is-plain-obj@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz" + integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA== + +is-plain-object@^2.0.4: + version "2.0.4" + resolved "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz" + integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== + dependencies: + isobject "^3.0.1" + +is-regexp@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz" + integrity sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA== + +is-root@^2.1.0: + version "2.1.0" + resolved "https://registry.npmjs.org/is-root/-/is-root-2.1.0.tgz" + integrity sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg== + +is-stream@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +is-typedarray@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz" + integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== + +is-whitespace-character@^1.0.0: + version 
"1.0.4" + resolved "https://registry.npmjs.org/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz" + integrity sha512-SDweEzfIZM0SJV0EUga669UTKlmL0Pq8Lno0QDQsPnvECB3IM2aP0gdx5TrU0A01MAPfViaZiI2V1QMZLaKK5w== + +is-word-character@^1.0.0: + version "1.0.4" + resolved "https://registry.npmjs.org/is-word-character/-/is-word-character-1.0.4.tgz" + integrity sha512-5SMO8RVennx3nZrqtKwCGyyetPE9VDba5ugvKLaD4KopPG5kR4mQ7tNt/r7feL5yt5h3lpuBbIUmCOG2eSzXHA== + +is-wsl@^2.2.0: + version "2.2.0" + resolved "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz" + integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== + dependencies: + is-docker "^2.0.0" + +is-yarn-global@^0.3.0: + version "0.3.0" + resolved "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.3.0.tgz" + integrity sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw== + +isarray@0.0.1: + version "0.0.1" + resolved "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== + +isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +isobject@^3.0.1: + version "3.0.1" + resolved "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz" + integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== + +jest-util@^29.5.0: + version "29.5.0" + resolved "https://registry.npmjs.org/jest-util/-/jest-util-29.5.0.tgz" + integrity 
sha512-RYMgG/MTadOr5t8KdhejfvUU82MxsCu5MF6KuDUHl+NuwzUt+Sm6jJWxTJVrDR1j5M/gJVCPKQEpWXY+yIQ6lQ== + dependencies: + "@jest/types" "^29.5.0" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-worker@^27.4.5: + version "27.5.1" + resolved "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz" + integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest-worker@^29.1.2: + version "29.5.0" + resolved "https://registry.npmjs.org/jest-worker/-/jest-worker-29.5.0.tgz" + integrity sha512-NcrQnevGoSp4b5kg+akIpthoAFHxPBcb5P6mYPY0fUNT+sSvmtu6jlkEle3anczUKIKEbMxFimk9oTP/tpIPgA== + dependencies: + "@types/node" "*" + jest-util "^29.5.0" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jiti@^1.17.2: + version "1.18.2" + resolved "https://registry.npmjs.org/jiti/-/jiti-1.18.2.tgz" + integrity sha512-QAdOptna2NYiSSpv0O/BwoHBSmz4YhpzJHyi+fnMRTXFjp7B8i/YG5Z8IfusxB1ufjcD2Sre1F3R+nX3fvy7gg== + +joi@^17.6.0: + version "17.9.1" + resolved "https://registry.npmjs.org/joi/-/joi-17.9.1.tgz" + integrity sha512-FariIi9j6QODKATGBrEX7HZcja8Bsh3rfdGYy/Sb65sGlZWK/QWesU1ghk7aJWDj95knjXlQfSmzFSPPkLVsfw== + dependencies: + "@hapi/hoek" "^9.0.0" + "@hapi/topo" "^5.0.0" + "@sideway/address" "^4.1.3" + "@sideway/formula" "^3.0.1" + "@sideway/pinpoint" "^2.0.0" + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^3.13.1: + version "3.14.1" + resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +js-yaml@^4.1.0: + 
version "4.1.0" + resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +jsesc@^3.0.2: + version "3.1.0" + resolved "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz" + integrity sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA== + +jsesc@~0.5.0: + version "0.5.0" + resolved "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz" + integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== + +json-buffer@3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz" + integrity sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ== + +json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: + version "2.3.1" + resolved "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz" + integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema-traverse@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz" + integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== + +json5@^2.1.2, json5@^2.2.3: + version "2.2.3" + resolved "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz" + integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== + +jsonfile@^6.0.1: + version "6.1.0" + resolved 
"https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz" + integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== + dependencies: + universalify "^2.0.0" + optionalDependencies: + graceful-fs "^4.1.6" + +keyv@^3.0.0: + version "3.1.0" + resolved "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz" + integrity sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA== + dependencies: + json-buffer "3.0.0" + +kind-of@^6.0.0, kind-of@^6.0.2: + version "6.0.3" + resolved "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz" + integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== + +kleur@^3.0.3: + version "3.0.3" + resolved "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + +klona@^2.0.6: + version "2.0.6" + resolved "https://registry.npmjs.org/klona/-/klona-2.0.6.tgz" + integrity sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA== + +latest-version@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/latest-version/-/latest-version-5.1.0.tgz" + integrity sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA== + dependencies: + package-json "^6.3.0" + +launch-editor@^2.6.0: + version "2.6.0" + resolved "https://registry.npmjs.org/launch-editor/-/launch-editor-2.6.0.tgz" + integrity sha512-JpDCcQnyAAzZZaZ7vEiSqL690w7dAEyLao+KC96zBplnYbJS7TYNjvM3M7y3dGz+v7aIsJk3hllWuc0kWAjyRQ== + dependencies: + picocolors "^1.0.0" + shell-quote "^1.7.3" + +leven@^3.1.0: + version "3.1.0" + resolved "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + +lilconfig@^2.0.3, lilconfig@^2.0.5, lilconfig@^2.0.6: + version "2.1.0" + resolved 
"https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz" + integrity sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ== + +lines-and-columns@^1.1.6: + version "1.2.4" + resolved "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== + +loader-runner@^4.3.1: + version "4.3.1" + resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.3.1.tgz#6c76ed29b0ccce9af379208299f07f876de737e3" + integrity sha512-IWqP2SCPhyVFTBtRcgMHdzlf9ul25NwaFx4wCEH/KjAXuuHY4yNjvPXsBokp8jCB936PyWRaPKUNh8NvylLp2Q== + +loader-utils@^2.0.0: + version "2.0.4" + resolved "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz" + integrity sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw== + dependencies: + big.js "^5.2.2" + emojis-list "^3.0.0" + json5 "^2.1.2" + +loader-utils@^3.2.0: + version "3.2.1" + resolved "https://registry.npmjs.org/loader-utils/-/loader-utils-3.2.1.tgz" + integrity sha512-ZvFw1KWS3GVyYBYb7qkmRM/WwL2TQQBxgCK62rlvm4WpVQ23Nb4tYjApUlfjrEGvOs7KHEsmyUn75OHZrJMWPw== + +locate-path@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz" + integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== + dependencies: + p-locate "^3.0.0" + path-exists "^3.0.0" + +locate-path@^5.0.0: + version "5.0.0" + resolved "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz" + integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +locate-path@^6.0.0: + version "6.0.0" + resolved "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz" + integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== + 
dependencies: + p-locate "^5.0.0" + +lodash.curry@^4.0.1: + version "4.1.1" + resolved "https://registry.npmjs.org/lodash.curry/-/lodash.curry-4.1.1.tgz" + integrity sha512-/u14pXGviLaweY5JI0IUzgzF2J6Ne8INyzAZjImcryjgkZ+ebruBxy2/JaOOkTqScddcYtakjhSaeemV8lR0tA== + +lodash.debounce@^4.0.8: + version "4.0.8" + resolved "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz" + integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== + +lodash.flow@^3.3.0: + version "3.5.0" + resolved "https://registry.npmjs.org/lodash.flow/-/lodash.flow-3.5.0.tgz" + integrity sha512-ff3BX/tSioo+XojX4MOsOMhJw0nZoUEF011LX8g8d3gvjVbxd89cCio4BCXronjxcTUIJUoqKEUA+n4CqvvRPw== + +lodash.memoize@^4.1.2: + version "4.1.2" + resolved "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz" + integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== + +lodash.uniq@4.5.0, lodash.uniq@^4.5.0: + version "4.5.0" + resolved "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz" + integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== + +lodash@^4.17.11, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21: + version "4.18.1" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.18.1.tgz#ff2b66c1f6326d59513de2407bf881439812771c" + integrity sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q== + +loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.2.0, loose-envify@^1.3.1, loose-envify@^1.4.0: + version "1.4.0" + resolved "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +lower-case@^2.0.2: + version "2.0.2" + resolved "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz" + integrity 
sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== + dependencies: + tslib "^2.0.3" + +lowercase-keys@^1.0.0, lowercase-keys@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz" + integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== + +lowercase-keys@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz" + integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== + +lru-cache@^5.1.1: + version "5.1.1" + resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz" + integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== + dependencies: + yallist "^3.0.2" + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: + version "3.1.0" + resolved "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + +markdown-escapes@^1.0.0: + version "1.0.4" + resolved "https://registry.npmjs.org/markdown-escapes/-/markdown-escapes-1.0.4.tgz" + integrity sha512-8z4efJYk43E0upd0NbVXwgSTQs6cT3T06etieCMEg7dRbzCbxUCK/GHlX8mhHRDcp+OLlHkPKsvqQTCvsRl2cg== + +math-intrinsics@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/math-intrinsics/-/math-intrinsics-1.1.0.tgz#a0dd74be81e2aa5c2f27e65ce283605ee4e2b7f9" + integrity sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g== + +mdast-squeeze-paragraphs@^4.0.0: + version "4.0.0" + resolved 
"https://registry.npmjs.org/mdast-squeeze-paragraphs/-/mdast-squeeze-paragraphs-4.0.0.tgz" + integrity sha512-zxdPn69hkQ1rm4J+2Cs2j6wDEv7O17TfXTJ33tl/+JPIoEmtV9t2ZzBM5LPHE8QlHsmVD8t3vPKCyY3oH+H8MQ== + dependencies: + unist-util-remove "^2.0.0" + +mdast-util-definitions@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-4.0.0.tgz" + integrity sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ== + dependencies: + unist-util-visit "^2.0.0" + +mdast-util-to-hast@10.0.1: + version "10.0.1" + resolved "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-10.0.1.tgz" + integrity sha512-BW3LM9SEMnjf4HXXVApZMt8gLQWVNXc3jryK0nJu/rOXPOnlkUjmdkDlmxMirpbU9ILncGFIwLH/ubnWBbcdgA== + dependencies: + "@types/mdast" "^3.0.0" + "@types/unist" "^2.0.0" + mdast-util-definitions "^4.0.0" + mdurl "^1.0.0" + unist-builder "^2.0.0" + unist-util-generated "^1.0.0" + unist-util-position "^3.0.0" + unist-util-visit "^2.0.0" + +mdast-util-to-string@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-2.0.0.tgz" + integrity sha512-AW4DRS3QbBayY/jJmD8437V1Gombjf8RSOUCMFBuo5iHi58AGEgVCKQ+ezHkZZDpAQS75hcBMpLqjpJTjtUL7w== + +mdn-data@2.0.14: + version "2.0.14" + resolved "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz" + integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== + +mdurl@^1.0.0: + version "1.0.1" + resolved "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz" + integrity sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g== + +media-typer@0.3.0: + version "0.3.0" + resolved "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz" + integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== + +medium-zoom@^1.0.6: + version "1.0.8" + resolved 
"https://registry.npmjs.org/medium-zoom/-/medium-zoom-1.0.8.tgz" + integrity sha512-CjFVuFq/IfrdqesAXfg+hzlDKu6A2n80ZIq0Kl9kWjoHh9j1N9Uvk5X0/MmN0hOfm5F9YBswlClhcwnmtwz7gA== + +memfs@^3.1.2, memfs@^3.4.3: + version "3.4.13" + resolved "https://registry.npmjs.org/memfs/-/memfs-3.4.13.tgz" + integrity sha512-omTM41g3Skpvx5dSYeZIbXKcXoAVc/AoMNwn9TKx++L/gaen/+4TTttmu8ZSch5vfVJ8uJvGbroTsIlslRg6lg== + dependencies: + fs-monkey "^1.0.3" + +merge-descriptors@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.3.tgz#d80319a65f3c7935351e5cfdac8f9318504dbed5" + integrity sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ== + +merge-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + +merge2@^1.3.0, merge2@^1.4.1: + version "1.4.1" + resolved "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +methods@~1.1.2: + version "1.1.2" + resolved "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" + integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== + +micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5: + version "4.0.8" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.8.tgz#d66fa18f3a47076789320b9b1af32bd86d9fa202" + integrity sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA== + dependencies: + braces "^3.0.3" + picomatch "^2.3.1" + +mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": + version "1.52.0" + resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + 
+mime-db@~1.33.0: + version "1.33.0" + resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz" + integrity sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ== + +mime-types@2.1.18: + version "2.1.18" + resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz" + integrity sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ== + dependencies: + mime-db "~1.33.0" + +mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.24, mime-types@~2.1.34: + version "2.1.35" + resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime@1.6.0: + version "1.6.0" + resolved "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +mimic-fn@^2.1.0: + version "2.1.0" + resolved "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +mimic-response@^1.0.0, mimic-response@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz" + integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== + +mini-css-extract-plugin@^2.6.1: + version "2.7.5" + resolved "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.7.5.tgz" + integrity sha512-9HaR++0mlgom81s95vvNjxkg52n2b5s//3ZTI1EtzFb98awsLSivs2LMsVqnQ3ay0PVhqWcGNyDaTE961FOcjQ== + dependencies: + schema-utils "^4.0.0" + +minimalistic-assert@^1.0.0: + version "1.0.1" + resolved "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz" + integrity 
sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== + +minimatch@3.1.2, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1: + version "3.1.2" + resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimist@^1.2.0, minimist@^1.2.5: + version "1.2.8" + resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz" + integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== + +mrmime@^1.0.0: + version "1.0.1" + resolved "https://registry.npmjs.org/mrmime/-/mrmime-1.0.1.tgz" + integrity sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw== + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" + integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@2.1.3: + version "2.1.3" + resolved "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +multicast-dns@^7.2.5: + version "7.2.5" + resolved "https://registry.npmjs.org/multicast-dns/-/multicast-dns-7.2.5.tgz" + integrity sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg== + dependencies: + dns-packet "^5.2.2" + thunky "^1.0.2" + +mz@^2.7.0: + version "2.7.0" + resolved "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz" + integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== + dependencies: + any-promise "^1.0.0" + object-assign "^4.0.1" + 
thenify-all "^1.0.0" + +nanoid@^3.3.6: + version "3.3.11" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.11.tgz#4f4f112cefbe303202f2199838128936266d185b" + integrity sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w== + +negotiator@0.6.3: + version "0.6.3" + resolved "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz" + integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== + +neo-async@^2.6.2: + version "2.6.2" + resolved "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz" + integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== + +no-case@^3.0.4: + version "3.0.4" + resolved "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz" + integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== + dependencies: + lower-case "^2.0.2" + tslib "^2.0.3" + +node-emoji@^1.10.0: + version "1.11.0" + resolved "https://registry.npmjs.org/node-emoji/-/node-emoji-1.11.0.tgz" + integrity sha512-wo2DpQkQp7Sjm2A0cq+sN7EHKO6Sl0ctXeBdFZrL9T9+UywORbufTcTZxom8YqpLQt/FqNMUkOpkZrJVYSKD3A== + dependencies: + lodash "^4.17.21" + +node-fetch@2.6.7: + version "2.6.7" + resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz" + integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== + dependencies: + whatwg-url "^5.0.0" + +node-forge@^1: + version "1.4.0" + resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.4.0.tgz#1c7b7d8bdc2d078739f58287d589d903a11b2fc2" + integrity sha512-LarFH0+6VfriEhqMMcLX2F7SwSXeWwnEAJEsYm5QKWchiVYVvJyV9v7UDvUv+w5HO23ZpQTXDv/GxdDdMyOuoQ== + +node-releases@^2.0.19: + version "2.0.19" + resolved "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz" + integrity sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw== 
+ +node-releases@^2.0.27: + version "2.0.27" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.27.tgz#eedca519205cf20f650f61d56b070db111231e4e" + integrity sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA== + +normalize-path@^3.0.0, normalize-path@~3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +normalize-range@^0.1.2: + version "0.1.2" + resolved "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz" + integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== + +normalize-url@^4.1.0: + version "4.5.1" + resolved "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.1.tgz" + integrity sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA== + +normalize-url@^6.0.1: + version "6.1.0" + resolved "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz" + integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== + +npm-run-path@^4.0.1: + version "4.0.1" + resolved "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + +nprogress@^0.2.0: + version "0.2.0" + resolved "https://registry.npmjs.org/nprogress/-/nprogress-0.2.0.tgz" + integrity sha512-I19aIingLgR1fmhftnbWWO3dXc0hSxqHQHQb3H8m+K3TnEn/iSeTZZOyvKXWqQESMwuUVnatlCnZdLBZZt2VSA== + +nth-check@^2.0.1: + version "2.1.1" + resolved "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz" + integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== + dependencies: + boolbase "^1.0.0" + +object-assign@^4.0.1, 
object-assign@^4.1.0, object-assign@^4.1.1: + version "4.1.1" + resolved "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +object-hash@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz" + integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== + +object-inspect@^1.13.3: + version "1.13.4" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.4.tgz#8375265e21bc20d0fa582c22e1b13485d6e00213" + integrity sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew== + +object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object.assign@^4.1.0: + version "4.1.4" + resolved "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz" + integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + has-symbols "^1.0.3" + object-keys "^1.1.1" + +obuf@^1.0.0, obuf@^1.1.2: + version "1.1.2" + resolved "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz" + integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== + +on-finished@2.4.1: + version "2.4.1" + resolved "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz" + integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== + dependencies: + ee-first "1.1.1" + +on-headers@~1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz" + integrity 
sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== + +once@^1.3.0, once@^1.3.1, once@^1.4.0: + version "1.4.0" + resolved "https://registry.npmjs.org/once/-/once-1.4.0.tgz" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +onetime@^5.1.2: + version "5.1.2" + resolved "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + dependencies: + mimic-fn "^2.1.0" + +open@^8.0.9, open@^8.4.0: + version "8.4.2" + resolved "https://registry.npmjs.org/open/-/open-8.4.2.tgz" + integrity sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ== + dependencies: + define-lazy-prop "^2.0.0" + is-docker "^2.1.1" + is-wsl "^2.2.0" + +opener@^1.5.2: + version "1.5.2" + resolved "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz" + integrity sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A== + +p-cancelable@^1.0.0: + version "1.1.0" + resolved "https://registry.npmjs.org/p-cancelable/-/p-cancelable-1.1.0.tgz" + integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw== + +p-limit@^2.0.0, p-limit@^2.2.0: + version "2.3.0" + resolved "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-limit@^3.0.2: + version "3.1.0" + resolved "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz" + integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== + dependencies: + yocto-queue "^0.1.0" + +p-locate@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz" + integrity 
sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== + dependencies: + p-limit "^2.0.0" + +p-locate@^4.1.0: + version "4.1.0" + resolved "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + +p-locate@^5.0.0: + version "5.0.0" + resolved "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz" + integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== + dependencies: + p-limit "^3.0.2" + +p-map@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz" + integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== + dependencies: + aggregate-error "^3.0.0" + +p-retry@^4.5.0: + version "4.6.2" + resolved "https://registry.npmjs.org/p-retry/-/p-retry-4.6.2.tgz" + integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ== + dependencies: + "@types/retry" "0.12.0" + retry "^0.13.1" + +p-try@^2.0.0: + version "2.2.0" + resolved "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +package-json@^6.3.0: + version "6.5.0" + resolved "https://registry.npmjs.org/package-json/-/package-json-6.5.0.tgz" + integrity sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ== + dependencies: + got "^9.6.0" + registry-auth-token "^4.0.0" + registry-url "^5.0.0" + semver "^6.2.0" + +param-case@^3.0.4: + version "3.0.4" + resolved "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz" + integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== + dependencies: + dot-case "^3.0.4" + tslib "^2.0.3" + +parent-module@^1.0.0: + 
version "1.0.1" + resolved "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parse-entities@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz" + integrity sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ== + dependencies: + character-entities "^1.0.0" + character-entities-legacy "^1.0.0" + character-reference-invalid "^1.0.0" + is-alphanumerical "^1.0.0" + is-decimal "^1.0.0" + is-hexadecimal "^1.0.0" + +parse-json@^5.0.0: + version "5.2.0" + resolved "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz" + integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== + dependencies: + "@babel/code-frame" "^7.0.0" + error-ex "^1.3.1" + json-parse-even-better-errors "^2.3.0" + lines-and-columns "^1.1.6" + +parse-numeric-range@^1.3.0: + version "1.3.0" + resolved "https://registry.npmjs.org/parse-numeric-range/-/parse-numeric-range-1.3.0.tgz" + integrity sha512-twN+njEipszzlMJd4ONUYgSfZPDxgHhT9Ahed5uTigpQn90FggW4SA/AIPq/6a149fTbE9qBEcSwE3FAEp6wQQ== + +parse5-htmlparser2-tree-adapter@^7.0.0: + version "7.0.0" + resolved "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.0.0.tgz" + integrity sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g== + dependencies: + domhandler "^5.0.2" + parse5 "^7.0.0" + +parse5@^6.0.0: + version "6.0.1" + resolved "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz" + integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== + +parse5@^7.0.0: + version "7.1.2" + resolved "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz" + integrity 
sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw== + dependencies: + entities "^4.4.0" + +parseurl@~1.3.2, parseurl@~1.3.3: + version "1.3.3" + resolved "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + +pascal-case@^3.1.2: + version "3.1.2" + resolved "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz" + integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + +path-exists@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz" + integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ== + +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-is-inside@1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz" + integrity sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w== + +path-key@^3.0.0, path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz" + integrity 
sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-to-regexp@0.1.12: + version "0.1.12" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.12.tgz#d5e1a12e478a976d432ef3c58d534b9923164bb7" + integrity sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ== + +path-to-regexp@2.2.1: + version "2.2.1" + resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.2.1.tgz" + integrity sha512-gu9bD6Ta5bwGrrU8muHzVOBFFREpp2iRkVfhBJahwJ6p6Xw20SjT0MxLnwkjOibQmGSYhiUnf2FLe7k+jcFmGQ== + +path-to-regexp@^1.7.0: + version "1.8.0" + resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz" + integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== + dependencies: + isarray "0.0.1" + +path-type@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +picocolors@^1.0.0, picocolors@^1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz" + integrity sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA== + +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3, picomatch@^2.3.0, picomatch@^2.3.1: + version "2.3.2" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.2.tgz#5a942915e26b372dc0f0e6753149a16e6b1c5601" + integrity sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA== + +pify@^2.3.0: + version "2.3.0" + resolved "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz" + integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== + +pirates@^4.0.1: + version "4.0.5" + resolved "https://registry.npmjs.org/pirates/-/pirates-4.0.5.tgz" + integrity 
sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== + +pkg-dir@^4.1.0: + version "4.2.0" + resolved "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + +pkg-up@^3.1.0: + version "3.1.0" + resolved "https://registry.npmjs.org/pkg-up/-/pkg-up-3.1.0.tgz" + integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== + dependencies: + find-up "^3.0.0" + +postcss-calc@^8.2.3: + version "8.2.4" + resolved "https://registry.npmjs.org/postcss-calc/-/postcss-calc-8.2.4.tgz" + integrity sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q== + dependencies: + postcss-selector-parser "^6.0.9" + postcss-value-parser "^4.2.0" + +postcss-colormin@^5.3.1: + version "5.3.1" + resolved "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-5.3.1.tgz" + integrity sha512-UsWQG0AqTFQmpBegeLLc1+c3jIqBNB0zlDGRWR+dQ3pRKJL1oeMzyqmH3o2PIfn9MBdNrVPWhDbT769LxCTLJQ== + dependencies: + browserslist "^4.21.4" + caniuse-api "^3.0.0" + colord "^2.9.1" + postcss-value-parser "^4.2.0" + +postcss-convert-values@^5.1.3: + version "5.1.3" + resolved "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-5.1.3.tgz" + integrity sha512-82pC1xkJZtcJEfiLw6UXnXVXScgtBrjlO5CBmuDQc+dlb88ZYheFsjTn40+zBVi3DkfF7iezO0nJUPLcJK3pvA== + dependencies: + browserslist "^4.21.4" + postcss-value-parser "^4.2.0" + +postcss-discard-comments@^5.1.2: + version "5.1.2" + resolved "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz" + integrity sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ== + +postcss-discard-duplicates@^5.1.0: + version "5.1.0" + resolved 
"https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz" + integrity sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw== + +postcss-discard-empty@^5.1.1: + version "5.1.1" + resolved "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz" + integrity sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A== + +postcss-discard-overridden@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz" + integrity sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw== + +postcss-discard-unused@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/postcss-discard-unused/-/postcss-discard-unused-5.1.0.tgz" + integrity sha512-KwLWymI9hbwXmJa0dkrzpRbSJEh0vVUd7r8t0yOGPcfKzyJJxFM8kLyC5Ev9avji6nY95pOp1W6HqIrfT+0VGw== + dependencies: + postcss-selector-parser "^6.0.5" + +postcss-import@^14.1.0: + version "14.1.0" + resolved "https://registry.npmjs.org/postcss-import/-/postcss-import-14.1.0.tgz" + integrity sha512-flwI+Vgm4SElObFVPpTIT7SU7R3qk2L7PyduMcokiaVKuWv9d/U+Gm/QAd8NDLuykTWTkcrjOeD2Pp1rMeBTGw== + dependencies: + postcss-value-parser "^4.0.0" + read-cache "^1.0.0" + resolve "^1.1.7" + +postcss-import@^15.1.0: + version "15.1.0" + resolved "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz" + integrity sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew== + dependencies: + postcss-value-parser "^4.0.0" + read-cache "^1.0.0" + resolve "^1.1.7" + +postcss-js@^4.0.0: + version "4.0.1" + resolved "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.1.tgz" + integrity sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw== + dependencies: + camelcase-css "^2.0.1" + +postcss-load-config@^3.1.4: + version 
"3.1.4" + resolved "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-3.1.4.tgz" + integrity sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg== + dependencies: + lilconfig "^2.0.5" + yaml "^1.10.2" + +postcss-loader@^7.0.0: + version "7.1.0" + resolved "https://registry.npmjs.org/postcss-loader/-/postcss-loader-7.1.0.tgz" + integrity sha512-vTD2DJ8vJD0Vr1WzMQkRZWRjcynGh3t7NeoLg+Sb1TeuK7etiZfL/ZwHbaVa3M+Qni7Lj/29voV9IggnIUjlIw== + dependencies: + cosmiconfig "^8.0.0" + klona "^2.0.6" + semver "^7.3.8" + +postcss-merge-idents@^5.1.1: + version "5.1.1" + resolved "https://registry.npmjs.org/postcss-merge-idents/-/postcss-merge-idents-5.1.1.tgz" + integrity sha512-pCijL1TREiCoog5nQp7wUe+TUonA2tC2sQ54UGeMmryK3UFGIYKqDyjnqd6RcuI4znFn9hWSLNN8xKE/vWcUQw== + dependencies: + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-merge-longhand@^5.1.7: + version "5.1.7" + resolved "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-5.1.7.tgz" + integrity sha512-YCI9gZB+PLNskrK0BB3/2OzPnGhPkBEwmwhfYk1ilBHYVAZB7/tkTHFBAnCrvBBOmeYyMYw3DMjT55SyxMBzjQ== + dependencies: + postcss-value-parser "^4.2.0" + stylehacks "^5.1.1" + +postcss-merge-rules@^5.1.4: + version "5.1.4" + resolved "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-5.1.4.tgz" + integrity sha512-0R2IuYpgU93y9lhVbO/OylTtKMVcHb67zjWIfCiKR9rWL3GUk1677LAqD/BcHizukdZEjT8Ru3oHRoAYoJy44g== + dependencies: + browserslist "^4.21.4" + caniuse-api "^3.0.0" + cssnano-utils "^3.1.0" + postcss-selector-parser "^6.0.5" + +postcss-minify-font-values@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz" + integrity sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-minify-gradients@^5.1.1: + version "5.1.1" + resolved 
"https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz" + integrity sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw== + dependencies: + colord "^2.9.1" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-minify-params@^5.1.4: + version "5.1.4" + resolved "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-5.1.4.tgz" + integrity sha512-+mePA3MgdmVmv6g+30rn57USjOGSAyuxUmkfiWpzalZ8aiBkdPYjXWtHuwJGm1v5Ojy0Z0LaSYhHaLJQB0P8Jw== + dependencies: + browserslist "^4.21.4" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-minify-selectors@^5.2.1: + version "5.2.1" + resolved "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz" + integrity sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg== + dependencies: + postcss-selector-parser "^6.0.5" + +postcss-modules-extract-imports@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz" + integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== + +postcss-modules-local-by-default@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz" + integrity sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ== + dependencies: + icss-utils "^5.0.0" + postcss-selector-parser "^6.0.2" + postcss-value-parser "^4.1.0" + +postcss-modules-scope@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz" + integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== + dependencies: + postcss-selector-parser "^6.0.4" + +postcss-modules-values@^4.0.0: + version "4.0.0" + 
resolved "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz" + integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== + dependencies: + icss-utils "^5.0.0" + +postcss-nested@6.0.0: + version "6.0.0" + resolved "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.0.0.tgz" + integrity sha512-0DkamqrPcmkBDsLn+vQDIrtkSbNkv5AD/M322ySo9kqFkCIYklym2xEmWkwo+Y3/qZo34tzEPNUw4y7yMCdv5w== + dependencies: + postcss-selector-parser "^6.0.10" + +postcss-normalize-charset@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz" + integrity sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg== + +postcss-normalize-display-values@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz" + integrity sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-positions@^5.1.1: + version "5.1.1" + resolved "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz" + integrity sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-repeat-style@^5.1.1: + version "5.1.1" + resolved "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz" + integrity sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-string@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz" + integrity 
sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-timing-functions@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz" + integrity sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-unicode@^5.1.1: + version "5.1.1" + resolved "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.1.tgz" + integrity sha512-qnCL5jzkNUmKVhZoENp1mJiGNPcsJCs1aaRmURmeJGES23Z/ajaln+EPTD+rBeNkSryI+2WTdW+lwcVdOikrpA== + dependencies: + browserslist "^4.21.4" + postcss-value-parser "^4.2.0" + +postcss-normalize-url@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz" + integrity sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew== + dependencies: + normalize-url "^6.0.1" + postcss-value-parser "^4.2.0" + +postcss-normalize-whitespace@^5.1.1: + version "5.1.1" + resolved "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz" + integrity sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-ordered-values@^5.1.3: + version "5.1.3" + resolved "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz" + integrity sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ== + dependencies: + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-reduce-idents@^5.2.0: + version "5.2.0" + resolved "https://registry.npmjs.org/postcss-reduce-idents/-/postcss-reduce-idents-5.2.0.tgz" + integrity 
sha512-BTrLjICoSB6gxbc58D5mdBK8OhXRDqud/zodYfdSi52qvDHdMwk+9kB9xsM8yJThH/sZU5A6QVSmMmaN001gIg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-reduce-initial@^5.1.2: + version "5.1.2" + resolved "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-5.1.2.tgz" + integrity sha512-dE/y2XRaqAi6OvjzD22pjTUQ8eOfc6m/natGHgKFBK9DxFmIm69YmaRVQrGgFlEfc1HePIurY0TmDeROK05rIg== + dependencies: + browserslist "^4.21.4" + caniuse-api "^3.0.0" + +postcss-reduce-transforms@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz" + integrity sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-selector-parser@^6.0.10, postcss-selector-parser@^6.0.11, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5, postcss-selector-parser@^6.0.9: + version "6.0.11" + resolved "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.11.tgz" + integrity sha512-zbARubNdogI9j7WY4nQJBiNqQf3sLS3wCP4WfOidu+p28LofJqDH1tcXypGrcmMHhDk2t9wGhCsYe/+szLTy1g== + dependencies: + cssesc "^3.0.0" + util-deprecate "^1.0.2" + +postcss-sort-media-queries@^4.2.1: + version "4.3.0" + resolved "https://registry.npmjs.org/postcss-sort-media-queries/-/postcss-sort-media-queries-4.3.0.tgz" + integrity sha512-jAl8gJM2DvuIJiI9sL1CuiHtKM4s5aEIomkU8G3LFvbP+p8i7Sz8VV63uieTgoewGqKbi+hxBTiOKJlB35upCg== + dependencies: + sort-css-media-queries "2.1.0" + +postcss-svgo@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-5.1.0.tgz" + integrity sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA== + dependencies: + postcss-value-parser "^4.2.0" + svgo "^2.7.0" + +postcss-unique-selectors@^5.1.1: + version "5.1.1" + resolved 
"https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz" + integrity sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA== + dependencies: + postcss-selector-parser "^6.0.5" + +postcss-value-parser@^4.0.0, postcss-value-parser@^4.0.2, postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: + version "4.2.0" + resolved "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz" + integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== + +postcss-zindex@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/postcss-zindex/-/postcss-zindex-5.1.0.tgz" + integrity sha512-fgFMf0OtVSBR1va1JNHYgMxYk73yhn/qb4uQDq1DLGYolz8gHCyr/sesEuGUaYs58E3ZJRcpoGuPVoB7Meiq9A== + +postcss@^8.0.9, postcss@^8.3.11, postcss@^8.4.14, postcss@^8.4.17, postcss@^8.4.19, postcss@^8.4.31: + version "8.4.31" + resolved "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz" + integrity sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ== + dependencies: + nanoid "^3.3.6" + picocolors "^1.0.0" + source-map-js "^1.0.2" + +prepend-http@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz" + integrity sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA== + +pretty-error@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/pretty-error/-/pretty-error-4.0.0.tgz" + integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw== + dependencies: + lodash "^4.17.20" + renderkid "^3.0.0" + +pretty-time@^1.1.0: + version "1.1.0" + resolved "https://registry.npmjs.org/pretty-time/-/pretty-time-1.1.0.tgz" + integrity sha512-28iF6xPQrP8Oa6uxE6a1biz+lWeTOAPKggvjB8HAs6nVMKZwf5bG++632Dx614hIWgUPkgivRfG+a8uAXGTIbA== + +prism-react-renderer@^1.3.5: + version "1.3.5" + 
resolved "https://registry.npmjs.org/prism-react-renderer/-/prism-react-renderer-1.3.5.tgz" + integrity sha512-IJ+MSwBWKG+SM3b2SUfdrhC+gu01QkV2KmRQgREThBfSQRoufqRfxfHUxpG1WcaFjP+kojcFyO9Qqtpgt3qLCg== + +prismjs@^1.28.0: + version "1.30.0" + resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.30.0.tgz#d9709969d9d4e16403f6f348c63553b19f0975a9" + integrity sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw== + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +promise@^7.1.1: + version "7.3.1" + resolved "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz" + integrity sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg== + dependencies: + asap "~2.0.3" + +prompts@^2.4.2: + version "2.4.2" + resolved "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + +prop-types@^15.6.2, prop-types@^15.7.2: + version "15.8.1" + resolved "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz" + integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== + dependencies: + loose-envify "^1.4.0" + object-assign "^4.1.1" + react-is "^16.13.1" + +property-information@^5.0.0, property-information@^5.3.0: + version "5.6.0" + resolved "https://registry.npmjs.org/property-information/-/property-information-5.6.0.tgz" + integrity sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA== + dependencies: + xtend "^4.0.0" + +proxy-addr@~2.0.7: + version "2.0.7" + resolved "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz" + integrity 
sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== + dependencies: + forwarded "0.2.0" + ipaddr.js "1.9.1" + +pump@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz" + integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== + dependencies: + end-of-stream "^1.1.0" + once "^1.3.1" + +punycode@^1.3.2: + version "1.4.1" + resolved "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" + integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== + +punycode@^2.1.0: + version "2.3.0" + resolved "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz" + integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== + +pupa@^2.1.1: + version "2.1.1" + resolved "https://registry.npmjs.org/pupa/-/pupa-2.1.1.tgz" + integrity sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A== + dependencies: + escape-goat "^2.0.0" + +pure-color@^1.2.0: + version "1.3.0" + resolved "https://registry.npmjs.org/pure-color/-/pure-color-1.3.0.tgz" + integrity sha512-QFADYnsVoBMw1srW7OVKEYjG+MbIa49s54w1MA1EDY6r2r/sTcKKYqRX1f4GYvnXP7eN/Pe9HFcX+hwzmrXRHA== + +qs@6.13.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.13.0.tgz#6ca3bd58439f7e245655798997787b0d88a51906" + integrity sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg== + dependencies: + side-channel "^1.0.6" + +queue-microtask@^1.2.2: + version "1.2.3" + resolved "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +queue@6.0.2: + version "6.0.2" + resolved "https://registry.npmjs.org/queue/-/queue-6.0.2.tgz" + integrity 
sha512-iHZWu+q3IdFZFX36ro/lKBkSvfkztY5Y7HMiPlOUjhupPcG2JMfst2KKEpu5XndviX/3UhFbRngUPNKtgvtZiA== + dependencies: + inherits "~2.0.3" + +quick-lru@^5.1.1: + version "5.1.1" + resolved "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz" + integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== + +randombytes@^2.1.0: + version "2.1.0" + resolved "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz" + integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== + dependencies: + safe-buffer "^5.1.0" + +range-parser@1.2.0: + version "1.2.0" + resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz" + integrity sha512-kA5WQoNVo4t9lNx2kQNFCxKeBl5IbbSNBl1M/tLkw9WCn+hxNBAW5Qh8gdhs63CJnhjJ2zQWFoqPJP2sK1AV5A== + +range-parser@^1.2.1, range-parser@~1.2.1: + version "1.2.1" + resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz" + integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +raw-body@2.5.2: + version "2.5.2" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a" + integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA== + dependencies: + bytes "3.1.2" + http-errors "2.0.0" + iconv-lite "0.4.24" + unpipe "1.0.0" + +rc@1.2.8, rc@^1.2.8: + version "1.2.8" + resolved "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz" + integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== + dependencies: + deep-extend "^0.6.0" + ini "~1.3.0" + minimist "^1.2.0" + strip-json-comments "~2.0.1" + +react-base16-styling@^0.6.0: + version "0.6.0" + resolved "https://registry.npmjs.org/react-base16-styling/-/react-base16-styling-0.6.0.tgz" + integrity 
sha512-yvh/7CArceR/jNATXOKDlvTnPKPmGZz7zsenQ3jUwLzHkNUR0CvY3yGYJbWJ/nnxsL8Sgmt5cO3/SILVuPO6TQ== + dependencies: + base16 "^1.0.0" + lodash.curry "^4.0.1" + lodash.flow "^3.3.0" + pure-color "^1.2.0" + +react-dev-utils@^12.0.1: + version "12.0.1" + resolved "https://registry.npmjs.org/react-dev-utils/-/react-dev-utils-12.0.1.tgz" + integrity sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ== + dependencies: + "@babel/code-frame" "^7.16.0" + address "^1.1.2" + browserslist "^4.18.1" + chalk "^4.1.2" + cross-spawn "^7.0.3" + detect-port-alt "^1.1.6" + escape-string-regexp "^4.0.0" + filesize "^8.0.6" + find-up "^5.0.0" + fork-ts-checker-webpack-plugin "^6.5.0" + global-modules "^2.0.0" + globby "^11.0.4" + gzip-size "^6.0.0" + immer "^9.0.7" + is-root "^2.1.0" + loader-utils "^3.2.0" + open "^8.4.0" + pkg-up "^3.1.0" + prompts "^2.4.2" + react-error-overlay "^6.0.11" + recursive-readdir "^2.2.2" + shell-quote "^1.7.3" + strip-ansi "^6.0.1" + text-table "^0.2.0" + +react-dom@^17.0.2: + version "17.0.2" + resolved "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz" + integrity sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + scheduler "^0.20.2" + +react-error-overlay@^6.0.11: + version "6.0.11" + resolved "https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-6.0.11.tgz" + integrity sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg== + +react-fast-compare@^3.2.0: + version "3.2.1" + resolved "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-3.2.1.tgz" + integrity sha512-xTYf9zFim2pEif/Fw16dBiXpe0hoy5PxcD8+OwBnTtNLfIm3g6WxhKNurY+6OmdH1u6Ta/W/Vl6vjbYP1MFnDg== + +react-helmet-async@*, react-helmet-async@^1.3.0: + version "1.3.0" + resolved "https://registry.npmjs.org/react-helmet-async/-/react-helmet-async-1.3.0.tgz" + integrity 
sha512-9jZ57/dAn9t3q6hneQS0wukqC2ENOBgMNVEhb/ZG9ZSxUetzVIw4iAmEU38IaVg3QGYauQPhSeUTuIUtFglWpg== + dependencies: + "@babel/runtime" "^7.12.5" + invariant "^2.2.4" + prop-types "^15.7.2" + react-fast-compare "^3.2.0" + shallowequal "^1.1.0" + +react-is@^16.13.1, react-is@^16.6.0, react-is@^16.7.0: + version "16.13.1" + resolved "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz" + integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== + +react-json-view@^1.21.3: + version "1.21.3" + resolved "https://registry.npmjs.org/react-json-view/-/react-json-view-1.21.3.tgz" + integrity sha512-13p8IREj9/x/Ye4WI/JpjhoIwuzEgUAtgJZNBJckfzJt1qyh24BdTm6UQNGnyTq9dapQdrqvquZTo3dz1X6Cjw== + dependencies: + flux "^4.0.1" + react-base16-styling "^0.6.0" + react-lifecycles-compat "^3.0.4" + react-textarea-autosize "^8.3.2" + +react-lifecycles-compat@^3.0.4: + version "3.0.4" + resolved "https://registry.npmjs.org/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz" + integrity sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA== + +react-loadable-ssr-addon-v5-slorber@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/react-loadable-ssr-addon-v5-slorber/-/react-loadable-ssr-addon-v5-slorber-1.0.1.tgz" + integrity sha512-lq3Lyw1lGku8zUEJPDxsNm1AfYHBrO9Y1+olAYwpUJ2IGFBskM0DMKok97A6LWUpHm+o7IvQBOWu9MLenp9Z+A== + dependencies: + "@babel/runtime" "^7.10.3" + +"react-loadable@npm:@docusaurus/react-loadable@5.5.2": + version "5.5.2" + resolved "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz" + integrity sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ== + dependencies: + "@types/react" "*" + prop-types "^15.6.2" + +react-router-config@^5.1.1: + version "5.1.1" + resolved "https://registry.npmjs.org/react-router-config/-/react-router-config-5.1.1.tgz" + integrity 
sha512-DuanZjaD8mQp1ppHjgnnUnyOlqYXZVjnov/JzFhjLEwd3Z4dYjMSnqrEzzGThH47vpCOqPPwJM2FtthLeJ8Pbg== + dependencies: + "@babel/runtime" "^7.1.2" + +react-router-dom@^5.3.3: + version "5.3.4" + resolved "https://registry.npmjs.org/react-router-dom/-/react-router-dom-5.3.4.tgz" + integrity sha512-m4EqFMHv/Ih4kpcBCONHbkT68KoAeHN4p3lAGoNryfHi0dMy0kCzEZakiKRsvg5wHZ/JLrLW8o8KomWiz/qbYQ== + dependencies: + "@babel/runtime" "^7.12.13" + history "^4.9.0" + loose-envify "^1.3.1" + prop-types "^15.6.2" + react-router "5.3.4" + tiny-invariant "^1.0.2" + tiny-warning "^1.0.0" + +react-router@5.3.4, react-router@^5.3.3: + version "5.3.4" + resolved "https://registry.npmjs.org/react-router/-/react-router-5.3.4.tgz" + integrity sha512-Ys9K+ppnJah3QuaRiLxk+jDWOR1MekYQrlytiXxC1RyfbdsZkS5pvKAzCCr031xHixZwpnsYNT5xysdFHQaYsA== + dependencies: + "@babel/runtime" "^7.12.13" + history "^4.9.0" + hoist-non-react-statics "^3.1.0" + loose-envify "^1.3.1" + path-to-regexp "^1.7.0" + prop-types "^15.6.2" + react-is "^16.6.0" + tiny-invariant "^1.0.2" + tiny-warning "^1.0.0" + +react-textarea-autosize@^8.3.2: + version "8.4.1" + resolved "https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.4.1.tgz" + integrity sha512-aD2C+qK6QypknC+lCMzteOdIjoMbNlgSFmJjCV+DrfTPwp59i/it9mMNf2HDzvRjQgKAyBDPyLJhcrzElf2U4Q== + dependencies: + "@babel/runtime" "^7.20.13" + use-composed-ref "^1.3.0" + use-latest "^1.2.1" + +react@^17.0.2: + version "17.0.2" + resolved "https://registry.npmjs.org/react/-/react-17.0.2.tgz" + integrity sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + +read-cache@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz" + integrity sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA== + dependencies: + pify "^2.3.0" + +readable-stream@^2.0.1: + version "2.3.8" + resolved 
"https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz" + integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@^3.0.6: + version "3.6.2" + resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz" + integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +readdirp@~3.6.0: + version "3.6.0" + resolved "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== + dependencies: + picomatch "^2.2.1" + +reading-time@^1.5.0: + version "1.5.0" + resolved "https://registry.npmjs.org/reading-time/-/reading-time-1.5.0.tgz" + integrity sha512-onYyVhBNr4CmAxFsKS7bz+uTLRakypIe4R+5A824vBSkQy/hB3fZepoVEf8OVAxzLvK+H/jm9TzpI3ETSm64Kg== + +rechoir@^0.6.2: + version "0.6.2" + resolved "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz" + integrity sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw== + dependencies: + resolve "^1.1.6" + +recursive-readdir@^2.2.2: + version "2.2.3" + resolved "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.3.tgz" + integrity sha512-8HrF5ZsXk5FAH9dgsx3BlUer73nIhuj+9OrQwEbLTPOBzGkL1lsFCR01am+v+0m2Cmbs1nP12hLDl5FA7EszKA== + dependencies: + minimatch "^3.0.5" + +regenerate-unicode-properties@^10.1.0: + version "10.1.0" + resolved "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz" + integrity sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ== + dependencies: 
+ regenerate "^1.4.2" + +regenerate@^1.4.2: + version "1.4.2" + resolved "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz" + integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== + +regenerator-runtime@^0.13.11: + version "0.13.11" + resolved "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz" + integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg== + +regenerator-transform@^0.15.1: + version "0.15.1" + resolved "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.1.tgz" + integrity sha512-knzmNAcuyxV+gQCufkYcvOqX/qIIfHLv0u5x79kRxuGojfYVky1f15TzZEu2Avte8QGepvUNTnLskf8E6X6Vyg== + dependencies: + "@babel/runtime" "^7.8.4" + +regexpu-core@^5.3.1: + version "5.3.2" + resolved "https://registry.npmjs.org/regexpu-core/-/regexpu-core-5.3.2.tgz" + integrity sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ== + dependencies: + "@babel/regjsgen" "^0.8.0" + regenerate "^1.4.2" + regenerate-unicode-properties "^10.1.0" + regjsparser "^0.9.1" + unicode-match-property-ecmascript "^2.0.0" + unicode-match-property-value-ecmascript "^2.1.0" + +registry-auth-token@^4.0.0: + version "4.2.2" + resolved "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.2.tgz" + integrity sha512-PC5ZysNb42zpFME6D/XlIgtNGdTl8bBOCw90xQLVMpzuuubJKYDWFAEuUNc+Cn8Z8724tg2SDhDRrkVEsqfDMg== + dependencies: + rc "1.2.8" + +registry-url@^5.0.0: + version "5.1.0" + resolved "https://registry.npmjs.org/registry-url/-/registry-url-5.1.0.tgz" + integrity sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw== + dependencies: + rc "^1.2.8" + +regjsparser@^0.9.1: + version "0.9.1" + resolved "https://registry.npmjs.org/regjsparser/-/regjsparser-0.9.1.tgz" + integrity 
sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ== + dependencies: + jsesc "~0.5.0" + +relateurl@^0.2.7: + version "0.2.7" + resolved "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz" + integrity sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog== + +remark-emoji@^2.2.0: + version "2.2.0" + resolved "https://registry.npmjs.org/remark-emoji/-/remark-emoji-2.2.0.tgz" + integrity sha512-P3cj9s5ggsUvWw5fS2uzCHJMGuXYRb0NnZqYlNecewXt8QBU9n5vW3DUUKOhepS8F9CwdMx9B8a3i7pqFWAI5w== + dependencies: + emoticon "^3.2.0" + node-emoji "^1.10.0" + unist-util-visit "^2.0.3" + +remark-footnotes@2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/remark-footnotes/-/remark-footnotes-2.0.0.tgz" + integrity sha512-3Clt8ZMH75Ayjp9q4CorNeyjwIxHFcTkaektplKGl2A1jNGEUey8cKL0ZC5vJwfcD5GFGsNLImLG/NGzWIzoMQ== + +remark-mdx@1.6.22: + version "1.6.22" + resolved "https://registry.npmjs.org/remark-mdx/-/remark-mdx-1.6.22.tgz" + integrity sha512-phMHBJgeV76uyFkH4rvzCftLfKCr2RZuF+/gmVcaKrpsihyzmhXjA0BEMDaPTXG5y8qZOKPVo83NAOX01LPnOQ== + dependencies: + "@babel/core" "7.12.9" + "@babel/helper-plugin-utils" "7.10.4" + "@babel/plugin-proposal-object-rest-spread" "7.12.1" + "@babel/plugin-syntax-jsx" "7.12.1" + "@mdx-js/util" "1.6.22" + is-alphabetical "1.0.4" + remark-parse "8.0.3" + unified "9.2.0" + +remark-parse@8.0.3: + version "8.0.3" + resolved "https://registry.npmjs.org/remark-parse/-/remark-parse-8.0.3.tgz" + integrity sha512-E1K9+QLGgggHxCQtLt++uXltxEprmWzNfg+MxpfHsZlrddKzZ/hZyWHDbK3/Ap8HJQqYJRXP+jHczdL6q6i85Q== + dependencies: + ccount "^1.0.0" + collapse-white-space "^1.0.2" + is-alphabetical "^1.0.0" + is-decimal "^1.0.0" + is-whitespace-character "^1.0.0" + is-word-character "^1.0.0" + markdown-escapes "^1.0.0" + parse-entities "^2.0.0" + repeat-string "^1.5.4" + state-toggle "^1.0.0" + trim "0.0.1" + trim-trailing-lines "^1.0.0" + unherit "^1.0.4" + unist-util-remove-position 
"^2.0.0" + vfile-location "^3.0.0" + xtend "^4.0.1" + +remark-squeeze-paragraphs@4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/remark-squeeze-paragraphs/-/remark-squeeze-paragraphs-4.0.0.tgz" + integrity sha512-8qRqmL9F4nuLPIgl92XUuxI3pFxize+F1H0e/W3llTk0UsjJaj01+RrirkMw7P21RKe4X6goQhYRSvNWX+70Rw== + dependencies: + mdast-squeeze-paragraphs "^4.0.0" + +renderkid@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/renderkid/-/renderkid-3.0.0.tgz" + integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg== + dependencies: + css-select "^4.1.3" + dom-converter "^0.2.0" + htmlparser2 "^6.1.0" + lodash "^4.17.21" + strip-ansi "^6.0.1" + +repeat-string@^1.5.4: + version "1.6.1" + resolved "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz" + integrity sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w== + +require-from-string@^2.0.2: + version "2.0.2" + resolved "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + +"require-like@>= 0.1.1": + version "0.1.2" + resolved "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz" + integrity sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A== + +requires-port@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz" + integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve-pathname@^3.0.0: + version "3.0.0" + resolved 
"https://registry.npmjs.org/resolve-pathname/-/resolve-pathname-3.0.0.tgz" + integrity sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng== + +resolve@^1.1.6, resolve@^1.1.7, resolve@^1.14.2, resolve@^1.22.1, resolve@^1.3.2: + version "1.22.1" + resolved "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz" + integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +responselike@^1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz" + integrity sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ== + dependencies: + lowercase-keys "^1.0.0" + +retry@^0.13.1: + version "0.13.1" + resolved "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz" + integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +rtl-detect@^1.0.4: + version "1.0.4" + resolved "https://registry.npmjs.org/rtl-detect/-/rtl-detect-1.0.4.tgz" + integrity sha512-EBR4I2VDSSYr7PkBmFy04uhycIpDKp+21p/jARYXlCSjQksTBQcJ0HFUPOO79EPPH5JS6VAhiIQbycf0O3JAxQ== + +rtlcss@^3.5.0: + version "3.5.0" + resolved "https://registry.npmjs.org/rtlcss/-/rtlcss-3.5.0.tgz" + integrity sha512-wzgMaMFHQTnyi9YOwsx9LjOxYXJPzS8sYnFaKm6R5ysvTkwzHiB0vxnbHwchHQT65PTdBjDG21/kQBWI7q9O7A== + dependencies: + find-up "^5.0.0" + picocolors "^1.0.0" + 
postcss "^8.3.11" + strip-json-comments "^3.1.1" + +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +rxjs@^7.5.4: + version "7.8.0" + resolved "https://registry.npmjs.org/rxjs/-/rxjs-7.8.0.tgz" + integrity sha512-F2+gxDshqmIub1KdvZkaEfGDwLNpPvk9Fs6LD/MyQxNgMds/WH9OdDDXOmxUZpME+iSK3rQCctkL0DYyytUqMg== + dependencies: + tslib "^2.1.0" + +safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.2.0: + version "5.2.1" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +"safer-buffer@>= 2.1.2 < 3": + version "2.1.2" + resolved "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +sax@^1.2.4: + version "1.2.4" + resolved "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz" + integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + +sax@^1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.5.0.tgz#b5549b671069b7aa392df55ec7574cf411179eb8" + integrity sha512-21IYA3Q5cQf089Z6tgaUTr7lDAyzoTPx5HRtbhsME8Udispad8dC/+sziTNugOEx54ilvatQ9YCzl4KQLPcRHA== + +scheduler@^0.20.2: + version "0.20.2" + resolved "https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz" + integrity 
sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + +schema-utils@2.7.0: + version "2.7.0" + resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.0.tgz" + integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== + dependencies: + "@types/json-schema" "^7.0.4" + ajv "^6.12.2" + ajv-keywords "^3.4.1" + +schema-utils@^2.6.5: + version "2.7.1" + resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz" + integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== + dependencies: + "@types/json-schema" "^7.0.5" + ajv "^6.12.4" + ajv-keywords "^3.5.2" + +schema-utils@^3.0.0, schema-utils@^3.1.1: + version "3.1.1" + resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz" + integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + +schema-utils@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz" + integrity sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg== + dependencies: + "@types/json-schema" "^7.0.9" + ajv "^8.8.0" + ajv-formats "^2.1.1" + ajv-keywords "^5.0.0" + +schema-utils@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.3.0.tgz#3b669f04f71ff2dfb5aba7ce2d5a9d79b35622c0" + integrity sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g== + dependencies: + "@types/json-schema" "^7.0.9" + ajv "^8.9.0" + ajv-formats "^2.1.1" + ajv-keywords "^5.1.0" + +schema-utils@^4.3.3: + version "4.3.3" + resolved 
"https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.3.3.tgz#5b1850912fa31df90716963d45d9121fdfc09f46" + integrity sha512-eflK8wEtyOE6+hsaRVPxvUKYCpRgzLqDTb8krvAsRIwOGlHoSgYLgBXoubGgLd2fT41/OUYdb48v4k4WWHQurA== + dependencies: + "@types/json-schema" "^7.0.9" + ajv "^8.9.0" + ajv-formats "^2.1.1" + ajv-keywords "^5.1.0" + +section-matter@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz" + integrity sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA== + dependencies: + extend-shallow "^2.0.1" + kind-of "^6.0.0" + +select-hose@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz" + integrity sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg== + +selfsigned@^2.1.1: + version "2.1.1" + resolved "https://registry.npmjs.org/selfsigned/-/selfsigned-2.1.1.tgz" + integrity sha512-GSL3aowiF7wa/WtSFwnUrludWFoNhftq8bUkH9pkzjpN2XSPOAYEgg6e0sS9s0rZwgJzJiQRPU18A6clnoW5wQ== + dependencies: + node-forge "^1" + +semver-diff@^3.1.1: + version "3.1.1" + resolved "https://registry.npmjs.org/semver-diff/-/semver-diff-3.1.1.tgz" + integrity sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg== + dependencies: + semver "^6.3.0" + +semver@^5.4.1: + version "5.7.2" + resolved "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz" + integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== + +semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1: + version "6.3.1" + resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== + +semver@^7.3.2, semver@^7.3.4, semver@^7.3.7, semver@^7.3.8: + version "7.5.4" + resolved 
"https://registry.npmjs.org/semver/-/semver-7.5.4.tgz" + integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== + dependencies: + lru-cache "^6.0.0" + +send@0.19.0: + version "0.19.0" + resolved "https://registry.yarnpkg.com/send/-/send-0.19.0.tgz#bbc5a388c8ea6c048967049dbeac0e4a3f09d7f8" + integrity sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw== + dependencies: + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "2.0.0" + mime "1.6.0" + ms "2.1.3" + on-finished "2.4.1" + range-parser "~1.2.1" + statuses "2.0.1" + +serialize-javascript@^6.0.0, serialize-javascript@^6.0.1, serialize-javascript@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.2.tgz#defa1e055c83bf6d59ea805d8da862254eb6a6c2" + integrity sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g== + dependencies: + randombytes "^2.1.0" + +serve-handler@^6.1.3: + version "6.1.5" + resolved "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.5.tgz" + integrity sha512-ijPFle6Hwe8zfmBxJdE+5fta53fdIY0lHISJvuikXB3VYFafRjMRpOffSPvCYsbKyBA7pvy9oYr/BT1O3EArlg== + dependencies: + bytes "3.0.0" + content-disposition "0.5.2" + fast-url-parser "1.1.3" + mime-types "2.1.18" + minimatch "3.1.2" + path-is-inside "1.0.2" + path-to-regexp "2.2.1" + range-parser "1.2.0" + +serve-index@^1.9.1: + version "1.9.1" + resolved "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz" + integrity sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw== + dependencies: + accepts "~1.3.4" + batch "0.6.1" + debug "2.6.9" + escape-html "~1.0.3" + http-errors "~1.6.2" + mime-types "~2.1.17" + parseurl "~1.3.2" + +serve-static@1.16.2: + version "1.16.2" + resolved 
"https://registry.yarnpkg.com/serve-static/-/serve-static-1.16.2.tgz#b6a5343da47f6bdd2673848bf45754941e803296" + integrity sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw== + dependencies: + encodeurl "~2.0.0" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.19.0" + +setimmediate@^1.0.5: + version "1.0.5" + resolved "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz" + integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA== + +setprototypeof@1.1.0: + version "1.1.0" + resolved "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz" + integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== + +setprototypeof@1.2.0: + version "1.2.0" + resolved "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz" + integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + +shallow-clone@^3.0.0: + version "3.0.1" + resolved "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz" + integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== + dependencies: + kind-of "^6.0.2" + +shallowequal@^1.1.0: + version "1.1.0" + resolved "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz" + integrity sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ== + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz" + integrity 
sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +shell-quote@^1.7.3: + version "1.8.0" + resolved "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.0.tgz" + integrity sha512-QHsz8GgQIGKlRi24yFc6a6lN69Idnx634w49ay6+jA5yFh7a1UY+4Rp6HPx/L/1zcEDPEij8cIsiqR6bQsE5VQ== + +shelljs@^0.8.5: + version "0.8.5" + resolved "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz" + integrity sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow== + dependencies: + glob "^7.0.0" + interpret "^1.0.0" + rechoir "^0.6.2" + +side-channel-list@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/side-channel-list/-/side-channel-list-1.0.0.tgz#10cb5984263115d3b7a0e336591e290a830af8ad" + integrity sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA== + dependencies: + es-errors "^1.3.0" + object-inspect "^1.13.3" + +side-channel-map@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/side-channel-map/-/side-channel-map-1.0.1.tgz#d6bb6b37902c6fef5174e5f533fab4c732a26f42" + integrity sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA== + dependencies: + call-bound "^1.0.2" + es-errors "^1.3.0" + get-intrinsic "^1.2.5" + object-inspect "^1.13.3" + +side-channel-weakmap@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz#11dda19d5368e40ce9ec2bdc1fb0ecbc0790ecea" + integrity sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A== + dependencies: + call-bound "^1.0.2" + es-errors "^1.3.0" + get-intrinsic "^1.2.5" + object-inspect "^1.13.3" + side-channel-map "^1.0.1" + +side-channel@^1.0.6: + version "1.1.0" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.1.0.tgz#c3fcff9c4da932784873335ec9765fa94ff66bc9" + integrity 
sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw== + dependencies: + es-errors "^1.3.0" + object-inspect "^1.13.3" + side-channel-list "^1.0.0" + side-channel-map "^1.0.1" + side-channel-weakmap "^1.0.2" + +signal-exit@^3.0.2, signal-exit@^3.0.3: + version "3.0.7" + resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + +sirv@^1.0.7: + version "1.0.19" + resolved "https://registry.npmjs.org/sirv/-/sirv-1.0.19.tgz" + integrity sha512-JuLThK3TnZG1TAKDwNIqNq6QA2afLOCcm+iE8D1Kj3GA40pSPsxQjjJl0J8X3tsR7T+CP1GavpzLwYkgVLWrZQ== + dependencies: + "@polka/url" "^1.0.0-next.20" + mrmime "^1.0.0" + totalist "^1.0.0" + +sisteransi@^1.0.5: + version "1.0.5" + resolved "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + +sitemap@^7.1.1: + version "7.1.1" + resolved "https://registry.npmjs.org/sitemap/-/sitemap-7.1.1.tgz" + integrity sha512-mK3aFtjz4VdJN0igpIJrinf3EO8U8mxOPsTBzSsy06UtjZQJ3YY3o3Xa7zSc5nMqcMrRwlChHZ18Kxg0caiPBg== + dependencies: + "@types/node" "^17.0.5" + "@types/sax" "^1.2.1" + arg "^5.0.0" + sax "^1.2.4" + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +slash@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz" + integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== + +sockjs@^0.3.24: + version "0.3.24" + resolved "https://registry.npmjs.org/sockjs/-/sockjs-0.3.24.tgz" + integrity sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ== + dependencies: + faye-websocket "^0.11.3" + uuid 
"^8.3.2" + websocket-driver "^0.7.4" + +sort-css-media-queries@2.1.0: + version "2.1.0" + resolved "https://registry.npmjs.org/sort-css-media-queries/-/sort-css-media-queries-2.1.0.tgz" + integrity sha512-IeWvo8NkNiY2vVYdPa27MCQiR0MN0M80johAYFVxWWXQ44KU84WNxjslwBHmc/7ZL2ccwkM7/e6S5aiKZXm7jA== + +source-map-js@^1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz" + integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== + +source-map-support@~0.5.20: + version "0.5.21" + resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz" + integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map@^0.5.0: + version "0.5.7" + resolved "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz" + integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== + +source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0: + version "0.6.1" + resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +space-separated-tokens@^1.0.0: + version "1.1.5" + resolved "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz" + integrity sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA== + +spdy-transport@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/spdy-transport/-/spdy-transport-3.0.0.tgz" + integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== + dependencies: + debug "^4.1.0" + detect-node "^2.0.4" + hpack.js "^2.1.6" + obuf "^1.1.2" + readable-stream "^3.0.6" + wbuf "^1.7.3" + +spdy@^4.0.2: + version "4.0.2" + resolved 
"https://registry.npmjs.org/spdy/-/spdy-4.0.2.tgz" + integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA== + dependencies: + debug "^4.1.0" + handle-thing "^2.0.0" + http-deceiver "^1.2.7" + select-hose "^2.0.0" + spdy-transport "^3.0.0" + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz" + integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== + +stable@^0.1.8: + version "0.1.8" + resolved "https://registry.npmjs.org/stable/-/stable-0.1.8.tgz" + integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== + +state-toggle@^1.0.0: + version "1.0.3" + resolved "https://registry.npmjs.org/state-toggle/-/state-toggle-1.0.3.tgz" + integrity sha512-d/5Z4/2iiCnHw6Xzghyhb+GcmF89bxwgXG60wjIiZaxnymbyOmI8Hk4VqHXiVVp6u2ysaskFfXg3ekCj4WNftQ== + +statuses@2.0.1: + version "2.0.1" + resolved "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + +"statuses@>= 1.4.0 < 2": + version "1.5.0" + resolved "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz" + integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== + +std-env@^3.0.1: + version "3.3.2" + resolved "https://registry.npmjs.org/std-env/-/std-env-3.3.2.tgz" + integrity sha512-uUZI65yrV2Qva5gqE0+A7uVAvO40iPo6jGhs7s8keRfHCmtg+uB2X6EiLGCI9IgL1J17xGhvoOqSz79lzICPTA== + +string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2: + version "4.2.3" + resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + 
+string-width@^5.0.1: + version "5.1.2" + resolved "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz" + integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA== + dependencies: + eastasianwidth "^0.2.0" + emoji-regex "^9.2.2" + strip-ansi "^7.0.1" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +stringify-object@^3.3.0: + version "3.3.0" + resolved "https://registry.npmjs.org/stringify-object/-/stringify-object-3.3.0.tgz" + integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw== + dependencies: + get-own-enumerable-property-symbols "^3.0.0" + is-obj "^1.0.1" + is-regexp "^1.0.0" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^7.0.1: + version "7.0.1" + resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz" + integrity sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw== + dependencies: + ansi-regex "^6.0.1" + +strip-bom-string@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz" + integrity sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g== + 
+strip-final-newline@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + +strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +strip-json-comments@~2.0.1: + version "2.0.1" + resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz" + integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ== + +style-to-object@0.3.0, style-to-object@^0.3.0: + version "0.3.0" + resolved "https://registry.npmjs.org/style-to-object/-/style-to-object-0.3.0.tgz" + integrity sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA== + dependencies: + inline-style-parser "0.1.1" + +styled-components@^5.3.6: + version "5.3.9" + resolved "https://registry.npmjs.org/styled-components/-/styled-components-5.3.9.tgz" + integrity sha512-Aj3kb13B75DQBo2oRwRa/APdB5rSmwUfN5exyarpX+x/tlM/rwZA2vVk2vQgVSP6WKaZJHWwiFrzgHt+CLtB4A== + dependencies: + "@babel/helper-module-imports" "^7.0.0" + "@babel/traverse" "^7.4.5" + "@emotion/is-prop-valid" "^1.1.0" + "@emotion/stylis" "^0.8.4" + "@emotion/unitless" "^0.7.4" + babel-plugin-styled-components ">= 1.12.0" + css-to-react-native "^3.0.0" + hoist-non-react-statics "^3.0.0" + shallowequal "^1.1.0" + supports-color "^5.5.0" + +stylehacks@^5.1.1: + version "5.1.1" + resolved "https://registry.npmjs.org/stylehacks/-/stylehacks-5.1.1.tgz" + integrity sha512-sBpcd5Hx7G6seo7b1LkpttvTz7ikD0LlH5RmdcBNb6fFR0Fl7LQwHDFr300q4cwUqi+IYrFGmsIHieMBfnN/Bw== + dependencies: + browserslist "^4.21.4" + postcss-selector-parser "^6.0.4" + +sucrase@^3.29.0: + version "3.31.0" + resolved 
"https://registry.npmjs.org/sucrase/-/sucrase-3.31.0.tgz" + integrity sha512-6QsHnkqyVEzYcaiHsOKkzOtOgdJcb8i54x6AV2hDwyZcY9ZyykGZVw6L/YN98xC0evwTP6utsWWrKRaa8QlfEQ== + dependencies: + commander "^4.0.0" + glob "7.1.6" + lines-and-columns "^1.1.6" + mz "^2.7.0" + pirates "^4.0.1" + ts-interface-checker "^0.1.9" + +supports-color@^5.5.0: + version "5.5.0" + resolved "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-color@^8.0.0: + version "8.1.1" + resolved "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +svg-parser@^2.0.4: + version "2.0.4" + resolved "https://registry.npmjs.org/svg-parser/-/svg-parser-2.0.4.tgz" + integrity sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ== + +svgo@^2.7.0, svgo@^2.8.0: + version "2.8.2" + resolved "https://registry.yarnpkg.com/svgo/-/svgo-2.8.2.tgz#8e99b7ba5ac9ed7e3a446063865f61e03223fe6b" + integrity sha512-TyzE4NVGLUFy+H/Uy4N6c3G0HEeprsVfge6Lmq+0FdQQ/zqoVYB62IsBZORsiL+o96s6ff/V6/3UQo/C0cgCAA== + dependencies: + commander "^7.2.0" + css-select "^4.1.3" + css-tree "^1.1.3" + csso "^4.2.0" + picocolors 
"^1.0.0" + sax "^1.5.0" + stable "^0.1.8" + +tailwindcss@^3.2.7: + version "3.3.0" + resolved "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.3.0.tgz" + integrity sha512-hOXlFx+YcklJ8kXiCAfk/FMyr4Pm9ck477G0m/us2344Vuj355IpoEDB5UmGAsSpTBmr+4ZhjzW04JuFXkb/fw== + dependencies: + arg "^5.0.2" + chokidar "^3.5.3" + color-name "^1.1.4" + didyoumean "^1.2.2" + dlv "^1.1.3" + fast-glob "^3.2.12" + glob-parent "^6.0.2" + is-glob "^4.0.3" + jiti "^1.17.2" + lilconfig "^2.0.6" + micromatch "^4.0.5" + normalize-path "^3.0.0" + object-hash "^3.0.0" + picocolors "^1.0.0" + postcss "^8.0.9" + postcss-import "^14.1.0" + postcss-js "^4.0.0" + postcss-load-config "^3.1.4" + postcss-nested "6.0.0" + postcss-selector-parser "^6.0.11" + postcss-value-parser "^4.2.0" + quick-lru "^5.1.1" + resolve "^1.22.1" + sucrase "^3.29.0" + +tapable@^1.0.0: + version "1.1.3" + resolved "https://registry.npmjs.org/tapable/-/tapable-1.1.3.tgz" + integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== + +tapable@^2.0.0: + version "2.2.1" + resolved "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz" + integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== + +tapable@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.3.0.tgz#7e3ea6d5ca31ba8e078b560f0d83ce9a14aa8be6" + integrity sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg== + +terser-webpack-plugin@^5.3.16: + version "5.3.16" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.16.tgz#741e448cc3f93d8026ebe4f7ef9e4afacfd56330" + integrity sha512-h9oBFCWrq78NyWWVcSwZarJkZ01c2AyGrzs1crmHZO3QUg9D61Wu4NPjBy69n7JqylFF5y+CsUZYmYEIZ3mR+Q== + dependencies: + "@jridgewell/trace-mapping" "^0.3.25" + jest-worker "^27.4.5" + schema-utils "^4.3.0" + serialize-javascript "^6.0.2" + terser "^5.31.1" + +terser-webpack-plugin@^5.3.3: + 
version "5.3.7" + resolved "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.7.tgz" + integrity sha512-AfKwIktyP7Cu50xNjXF/6Qb5lBNzYaWpU6YfoX3uZicTx0zTy0stDDCsvjDapKsSDvOeWo5MEq4TmdBy2cNoHw== + dependencies: + "@jridgewell/trace-mapping" "^0.3.17" + jest-worker "^27.4.5" + schema-utils "^3.1.1" + serialize-javascript "^6.0.1" + terser "^5.16.5" + +terser@^5.10.0, terser@^5.16.5: + version "5.16.8" + resolved "https://registry.npmjs.org/terser/-/terser-5.16.8.tgz" + integrity sha512-QI5g1E/ef7d+PsDifb+a6nnVgC4F22Bg6T0xrBrz6iloVB4PUkkunp6V8nzoOOZJIzjWVdAGqCdlKlhLq/TbIA== + dependencies: + "@jridgewell/source-map" "^0.3.2" + acorn "^8.5.0" + commander "^2.20.0" + source-map-support "~0.5.20" + +terser@^5.31.1: + version "5.39.0" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.39.0.tgz#0e82033ed57b3ddf1f96708d123cca717d86ca3a" + integrity sha512-LBAhFyLho16harJoWMg/nZsQYgTrg5jXOn2nCYjRUcZZEdE3qa2zb8QEDRUGVZBW4rlazf2fxkg8tztybTaqWw== + dependencies: + "@jridgewell/source-map" "^0.3.3" + acorn "^8.8.2" + commander "^2.20.0" + source-map-support "~0.5.20" + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz" + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== + +thenify-all@^1.0.0: + version "1.6.0" + resolved "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz" + integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA== + dependencies: + thenify ">= 3.1.0 < 4" + +"thenify@>= 3.1.0 < 4": + version "3.3.1" + resolved "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz" + integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw== + dependencies: + any-promise "^1.0.0" + +thunky@^1.0.2: + version "1.1.0" + resolved "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz" + integrity 
sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== + +tiny-invariant@^1.0.2: + version "1.3.1" + resolved "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.1.tgz" + integrity sha512-AD5ih2NlSssTCwsMznbvwMZpJ1cbhkGd2uueNxzv2jDlEeZdU04JQfRnggJQ8DrcVBGjAsCKwFBbDlVNtEMlzw== + +tiny-warning@^1.0.0: + version "1.0.3" + resolved "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz" + integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA== + +to-readable-stream@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-1.0.0.tgz" + integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q== + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +toidentifier@1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz" + integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== + +totalist@^1.0.0: + version "1.1.0" + resolved "https://registry.npmjs.org/totalist/-/totalist-1.1.0.tgz" + integrity sha512-gduQwd1rOdDMGxFG1gEvhV88Oirdo2p+KjoYFU7k2g+i7n6AFFbDQ5kMPUsW0pNbfQsB/cwXvT1i4Bue0s9g5g== + +tr46@~0.0.3: + version "0.0.3" + resolved "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz" + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== + +trim-trailing-lines@^1.0.0: + version "1.1.4" + resolved "https://registry.npmjs.org/trim-trailing-lines/-/trim-trailing-lines-1.1.4.tgz" + integrity sha512-rjUWSqnfTNrjbB9NQWfPMH/xRK1deHeGsHoVfpxJ++XeYXE0d6B1En37AHfw3jtfTU7dzMzZL2jjpe8Qb5gLIQ== + +trim@0.0.1: + version 
"0.0.1" + resolved "https://registry.npmjs.org/trim/-/trim-0.0.1.tgz" + integrity sha512-YzQV+TZg4AxpKxaTHK3c3D+kRDCGVEE7LemdlQZoQXn0iennk10RsIoY6ikzAqJTc9Xjl9C1/waHom/J86ziAQ== + +trough@^1.0.0: + version "1.0.5" + resolved "https://registry.npmjs.org/trough/-/trough-1.0.5.tgz" + integrity sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA== + +ts-interface-checker@^0.1.9: + version "0.1.13" + resolved "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz" + integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA== + +tslib@^2.0.3, tslib@^2.1.0, tslib@^2.4.0: + version "2.5.0" + resolved "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz" + integrity sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg== + +type-fest@^0.20.2: + version "0.20.2" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + +type-fest@^2.5.0: + version "2.19.0" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz" + integrity sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA== + +type-is@~1.6.18: + version "1.6.18" + resolved "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz" + integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + dependencies: + media-typer "0.3.0" + mime-types "~2.1.24" + +typedarray-to-buffer@^3.1.5: + version "3.1.5" + resolved "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz" + integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== + dependencies: + is-typedarray "^1.0.0" + +ua-parser-js@^0.7.30: + version "0.7.34" + resolved 
"https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.34.tgz" + integrity sha512-cJMeh/eOILyGu0ejgTKB95yKT3zOenSe9UGE3vj6WfiOwgGYnmATUsnDixMFvdU+rNMvWih83hrUP8VwhF9yXQ== + +unherit@^1.0.4: + version "1.1.3" + resolved "https://registry.npmjs.org/unherit/-/unherit-1.1.3.tgz" + integrity sha512-Ft16BJcnapDKp0+J/rqFC3Rrk6Y/Ng4nzsC028k2jdDII/rdZ7Wd3pPT/6+vIIxRagwRc9K0IUX0Ra4fKvw+WQ== + dependencies: + inherits "^2.0.0" + xtend "^4.0.0" + +unicode-canonical-property-names-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz" + integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== + +unicode-match-property-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz" + integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== + dependencies: + unicode-canonical-property-names-ecmascript "^2.0.0" + unicode-property-aliases-ecmascript "^2.0.0" + +unicode-match-property-value-ecmascript@^2.1.0: + version "2.1.0" + resolved "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.1.0.tgz" + integrity sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA== + +unicode-property-aliases-ecmascript@^2.0.0: + version "2.1.0" + resolved "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz" + integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w== + +unified@9.2.0: + version "9.2.0" + resolved "https://registry.npmjs.org/unified/-/unified-9.2.0.tgz" + integrity sha512-vx2Z0vY+a3YoTj8+pttM3tiJHCwY5UFbYdiWrwBEbHmK8pvsPj2rtAX2BFfgXen8T39CJWblWRDT4L5WGXtDdg== + 
dependencies: + bail "^1.0.0" + extend "^3.0.0" + is-buffer "^2.0.0" + is-plain-obj "^2.0.0" + trough "^1.0.0" + vfile "^4.0.0" + +unified@^9.2.2: + version "9.2.2" + resolved "https://registry.npmjs.org/unified/-/unified-9.2.2.tgz" + integrity sha512-Sg7j110mtefBD+qunSLO1lqOEKdrwBFBrR6Qd8f4uwkhWNlbkaqwHse6e7QvD3AP/MNoJdEDLaf8OxYyoWgorQ== + dependencies: + bail "^1.0.0" + extend "^3.0.0" + is-buffer "^2.0.0" + is-plain-obj "^2.0.0" + trough "^1.0.0" + vfile "^4.0.0" + +unique-string@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz" + integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== + dependencies: + crypto-random-string "^2.0.0" + +unist-builder@2.0.3, unist-builder@^2.0.0: + version "2.0.3" + resolved "https://registry.npmjs.org/unist-builder/-/unist-builder-2.0.3.tgz" + integrity sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw== + +unist-util-generated@^1.0.0: + version "1.1.6" + resolved "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-1.1.6.tgz" + integrity sha512-cln2Mm1/CZzN5ttGK7vkoGw+RZ8VcUH6BtGbq98DDtRGquAAOXig1mrBQYelOwMXYS8rK+vZDyyojSjp7JX+Lg== + +unist-util-is@^4.0.0: + version "4.1.0" + resolved "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz" + integrity sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg== + +unist-util-position@^3.0.0: + version "3.1.0" + resolved "https://registry.npmjs.org/unist-util-position/-/unist-util-position-3.1.0.tgz" + integrity sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA== + +unist-util-remove-position@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-2.0.1.tgz" + integrity 
sha512-fDZsLYIe2uT+oGFnuZmy73K6ZxOPG/Qcm+w7jbEjaFcJgbQ6cqjs/eSPzXhsmGpAsWPkqZM9pYjww5QTn3LHMA== + dependencies: + unist-util-visit "^2.0.0" + +unist-util-remove@^2.0.0: + version "2.1.0" + resolved "https://registry.npmjs.org/unist-util-remove/-/unist-util-remove-2.1.0.tgz" + integrity sha512-J8NYPyBm4baYLdCbjmf1bhPu45Cr1MWTm77qd9istEkzWpnN6O9tMsEbB2JhNnBCqGENRqEWomQ+He6au0B27Q== + dependencies: + unist-util-is "^4.0.0" + +unist-util-stringify-position@^2.0.0: + version "2.0.3" + resolved "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz" + integrity sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g== + dependencies: + "@types/unist" "^2.0.2" + +unist-util-visit-parents@^3.0.0: + version "3.1.1" + resolved "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz" + integrity sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg== + dependencies: + "@types/unist" "^2.0.0" + unist-util-is "^4.0.0" + +unist-util-visit@2.0.3, unist-util-visit@^2.0.0, unist-util-visit@^2.0.3: + version "2.0.3" + resolved "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz" + integrity sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q== + dependencies: + "@types/unist" "^2.0.0" + unist-util-is "^4.0.0" + unist-util-visit-parents "^3.0.0" + +universalify@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz" + integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== + +unpipe@1.0.0, unpipe@~1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" + integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== + +update-browserslist-db@^1.1.1: + version "1.1.3" + resolved 
"https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz" + integrity sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw== + dependencies: + escalade "^3.2.0" + picocolors "^1.1.1" + +update-browserslist-db@^1.2.0: + version "1.2.3" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz#64d76db58713136acbeb4c49114366cc6cc2e80d" + integrity sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w== + dependencies: + escalade "^3.2.0" + picocolors "^1.1.1" + +update-notifier@^5.1.0: + version "5.1.0" + resolved "https://registry.npmjs.org/update-notifier/-/update-notifier-5.1.0.tgz" + integrity sha512-ItnICHbeMh9GqUy31hFPrD1kcuZ3rpxDZbf4KUDavXwS0bW5m7SLbDQpGX3UYr072cbrF5hFUs3r5tUsPwjfHw== + dependencies: + boxen "^5.0.0" + chalk "^4.1.0" + configstore "^5.0.1" + has-yarn "^2.1.0" + import-lazy "^2.1.0" + is-ci "^2.0.0" + is-installed-globally "^0.4.0" + is-npm "^5.0.0" + is-yarn-global "^0.3.0" + latest-version "^5.1.0" + pupa "^2.1.1" + semver "^7.3.4" + semver-diff "^3.1.1" + xdg-basedir "^4.0.0" + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +url-loader@^4.1.1: + version "4.1.1" + resolved "https://registry.npmjs.org/url-loader/-/url-loader-4.1.1.tgz" + integrity sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA== + dependencies: + loader-utils "^2.0.0" + mime-types "^2.1.27" + schema-utils "^3.0.0" + +url-parse-lax@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz" + integrity sha512-NjFKA0DidqPa5ciFcSrXnAltTtzz84ogy+NebPvfEgAck0+TNg4UJ4IN+fB7zRZfbgUf0syOo9MDxFkDSMuFaQ== + dependencies: + prepend-http "^2.0.0" + 
+use-composed-ref@^1.3.0: + version "1.3.0" + resolved "https://registry.npmjs.org/use-composed-ref/-/use-composed-ref-1.3.0.tgz" + integrity sha512-GLMG0Jc/jiKov/3Ulid1wbv3r54K9HlMW29IWcDFPEqFkSO2nS0MuefWgMJpeHQ9YJeXDL3ZUF+P3jdXlZX/cQ== + +use-isomorphic-layout-effect@^1.1.1: + version "1.1.2" + resolved "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.1.2.tgz" + integrity sha512-49L8yCO3iGT/ZF9QttjwLF/ZD9Iwto5LnH5LmEdk/6cFmXddqi2ulF0edxTwjj+7mqvpVVGQWvbXZdn32wRSHA== + +use-latest@^1.2.1: + version "1.2.1" + resolved "https://registry.npmjs.org/use-latest/-/use-latest-1.2.1.tgz" + integrity sha512-xA+AVm/Wlg3e2P/JiItTziwS7FK92LWrDB0p+hgXloIMuVCeJJ8v6f0eeHyPZaJrM+usM1FkFfbNCrJGs8A/zw== + dependencies: + use-isomorphic-layout-effect "^1.1.1" + +use-sync-external-store@^1.2.0: + version "1.2.0" + resolved "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz" + integrity sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA== + +util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== + +utila@~0.4: + version "0.4.0" + resolved "https://registry.npmjs.org/utila/-/utila-0.4.0.tgz" + integrity sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA== + +utility-types@^3.10.0: + version "3.10.0" + resolved "https://registry.npmjs.org/utility-types/-/utility-types-3.10.0.tgz" + integrity sha512-O11mqxmi7wMKCo6HKFt5AhO4BwY3VV68YU07tgxfz8zJTIxr4BpsezN49Ffwy9j3ZpwwJp4fkRwjRzq3uWE6Rg== + +utils-merge@1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz" + integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== + 
+uuid@^8.3.2: + version "8.3.2" + resolved "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz" + integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== + +value-equal@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/value-equal/-/value-equal-1.0.1.tgz" + integrity sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw== + +vary@~1.1.2: + version "1.1.2" + resolved "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz" + integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== + +vfile-location@^3.0.0, vfile-location@^3.2.0: + version "3.2.0" + resolved "https://registry.npmjs.org/vfile-location/-/vfile-location-3.2.0.tgz" + integrity sha512-aLEIZKv/oxuCDZ8lkJGhuhztf/BW4M+iHdCwglA/eWc+vtuRFJj8EtgceYFX4LRjOhCAAiNHsKGssC6onJ+jbA== + +vfile-message@^2.0.0: + version "2.0.4" + resolved "https://registry.npmjs.org/vfile-message/-/vfile-message-2.0.4.tgz" + integrity sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ== + dependencies: + "@types/unist" "^2.0.0" + unist-util-stringify-position "^2.0.0" + +vfile@^4.0.0: + version "4.2.1" + resolved "https://registry.npmjs.org/vfile/-/vfile-4.2.1.tgz" + integrity sha512-O6AE4OskCG5S1emQ/4gl8zK586RqA3srz3nfK/Viy0UPToBc5Trp9BVFb1u0CjsKrAWwnpr4ifM/KBXPWwJbCA== + dependencies: + "@types/unist" "^2.0.0" + is-buffer "^2.0.0" + unist-util-stringify-position "^2.0.0" + vfile-message "^2.0.0" + +wait-on@^6.0.1: + version "6.0.1" + resolved "https://registry.npmjs.org/wait-on/-/wait-on-6.0.1.tgz" + integrity sha512-zht+KASY3usTY5u2LgaNqn/Cd8MukxLGjdcZxT2ns5QzDmTFc4XoWBgC+C/na+sMRZTuVygQoMYwdcVjHnYIVw== + dependencies: + axios "^0.25.0" + joi "^17.6.0" + lodash "^4.17.21" + minimist "^1.2.5" + rxjs "^7.5.4" + +watchpack@^2.5.1: + version "2.5.1" + resolved 
"https://registry.yarnpkg.com/watchpack/-/watchpack-2.5.1.tgz#dd38b601f669e0cbf567cb802e75cead82cde102" + integrity sha512-Zn5uXdcFNIA1+1Ei5McRd+iRzfhENPCe7LeABkJtNulSxjma+l7ltNx55BWZkRlwRnpOgHqxnjyaDgJnNXnqzg== + dependencies: + glob-to-regexp "^0.4.1" + graceful-fs "^4.1.2" + +wbuf@^1.1.0, wbuf@^1.7.3: + version "1.7.3" + resolved "https://registry.npmjs.org/wbuf/-/wbuf-1.7.3.tgz" + integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== + dependencies: + minimalistic-assert "^1.0.0" + +web-namespaces@^1.0.0: + version "1.1.4" + resolved "https://registry.npmjs.org/web-namespaces/-/web-namespaces-1.1.4.tgz" + integrity sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw== + +webidl-conversions@^3.0.0: + version "3.0.1" + resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz" + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== + +webpack-bundle-analyzer@^4.5.0: + version "4.8.0" + resolved "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.8.0.tgz" + integrity sha512-ZzoSBePshOKhr+hd8u6oCkZVwpVaXgpw23ScGLFpR6SjYI7+7iIWYarjN6OEYOfRt8o7ZyZZQk0DuMizJ+LEIg== + dependencies: + "@discoveryjs/json-ext" "0.5.7" + acorn "^8.0.4" + acorn-walk "^8.0.0" + chalk "^4.1.0" + commander "^7.2.0" + gzip-size "^6.0.0" + lodash "^4.17.20" + opener "^1.5.2" + sirv "^1.0.7" + ws "^7.3.1" + +webpack-dev-middleware@^5.3.1: + version "5.3.4" + resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz#eb7b39281cbce10e104eb2b8bf2b63fce49a3517" + integrity sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q== + dependencies: + colorette "^2.0.10" + memfs "^3.4.3" + mime-types "^2.1.31" + range-parser "^1.2.1" + schema-utils "^4.0.0" + +webpack-dev-server@^4.9.3: + version "4.13.1" + resolved 
"https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-4.13.1.tgz" + integrity sha512-5tWg00bnWbYgkN+pd5yISQKDejRBYGEw15RaEEslH+zdbNDxxaZvEAO2WulaSaFKb5n3YG8JXsGaDsut1D0xdA== + dependencies: + "@types/bonjour" "^3.5.9" + "@types/connect-history-api-fallback" "^1.3.5" + "@types/express" "^4.17.13" + "@types/serve-index" "^1.9.1" + "@types/serve-static" "^1.13.10" + "@types/sockjs" "^0.3.33" + "@types/ws" "^8.5.1" + ansi-html-community "^0.0.8" + bonjour-service "^1.0.11" + chokidar "^3.5.3" + colorette "^2.0.10" + compression "^1.7.4" + connect-history-api-fallback "^2.0.0" + default-gateway "^6.0.3" + express "^4.17.3" + graceful-fs "^4.2.6" + html-entities "^2.3.2" + http-proxy-middleware "^2.0.3" + ipaddr.js "^2.0.1" + launch-editor "^2.6.0" + open "^8.0.9" + p-retry "^4.5.0" + rimraf "^3.0.2" + schema-utils "^4.0.0" + selfsigned "^2.1.1" + serve-index "^1.9.1" + sockjs "^0.3.24" + spdy "^4.0.2" + webpack-dev-middleware "^5.3.1" + ws "^8.13.0" + +webpack-merge@^5.8.0: + version "5.8.0" + resolved "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz" + integrity sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q== + dependencies: + clone-deep "^4.0.1" + wildcard "^2.0.0" + +webpack-sources@^3.2.2: + version "3.2.3" + resolved "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz" + integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== + +webpack-sources@^3.3.3: + version "3.3.3" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.3.3.tgz#d4bf7f9909675d7a070ff14d0ef2a4f3c982c723" + integrity sha512-yd1RBzSGanHkitROoPFd6qsrxt+oFhg/129YzheDGqeustzX0vTZJZsSsQjVQC4yzBQ56K55XU8gaNCtIzOnTg== + +webpack@^5.73.0: + version "5.105.0" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.105.0.tgz#38b5e6c5db8cbe81debbd16e089335ada05ea23a" + integrity 
sha512-gX/dMkRQc7QOMzgTe6KsYFM7DxeIONQSui1s0n/0xht36HvrgbxtM1xBlgx596NbpHuQU8P7QpKwrZYwUX48nw== + dependencies: + "@types/eslint-scope" "^3.7.7" + "@types/estree" "^1.0.8" + "@types/json-schema" "^7.0.15" + "@webassemblyjs/ast" "^1.14.1" + "@webassemblyjs/wasm-edit" "^1.14.1" + "@webassemblyjs/wasm-parser" "^1.14.1" + acorn "^8.15.0" + acorn-import-phases "^1.0.3" + browserslist "^4.28.1" + chrome-trace-event "^1.0.2" + enhanced-resolve "^5.19.0" + es-module-lexer "^2.0.0" + eslint-scope "5.1.1" + events "^3.2.0" + glob-to-regexp "^0.4.1" + graceful-fs "^4.2.11" + json-parse-even-better-errors "^2.3.1" + loader-runner "^4.3.1" + mime-types "^2.1.27" + neo-async "^2.6.2" + schema-utils "^4.3.3" + tapable "^2.3.0" + terser-webpack-plugin "^5.3.16" + watchpack "^2.5.1" + webpack-sources "^3.3.3" + +webpackbar@^5.0.2: + version "5.0.2" + resolved "https://registry.npmjs.org/webpackbar/-/webpackbar-5.0.2.tgz" + integrity sha512-BmFJo7veBDgQzfWXl/wwYXr/VFus0614qZ8i9znqcl9fnEdiVkdbi0TedLQ6xAK92HZHDJ0QmyQ0fmuZPAgCYQ== + dependencies: + chalk "^4.1.0" + consola "^2.15.3" + pretty-time "^1.1.0" + std-env "^3.0.1" + +websocket-driver@>=0.5.1, websocket-driver@^0.7.4: + version "0.7.4" + resolved "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz" + integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== + dependencies: + http-parser-js ">=0.5.1" + safe-buffer ">=5.1.0" + websocket-extensions ">=0.1.1" + +websocket-extensions@>=0.1.1: + version "0.1.4" + resolved "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz" + integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== + +whatwg-url@^5.0.0: + version "5.0.0" + resolved "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz" + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== + dependencies: + tr46 "~0.0.3" + 
webidl-conversions "^3.0.0" + +which@^1.3.1: + version "1.3.1" + resolved "https://registry.npmjs.org/which/-/which-1.3.1.tgz" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: + isexe "^2.0.0" + +which@^2.0.1: + version "2.0.2" + resolved "https://registry.npmjs.org/which/-/which-2.0.2.tgz" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +widest-line@^3.1.0: + version "3.1.0" + resolved "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz" + integrity sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg== + dependencies: + string-width "^4.0.0" + +widest-line@^4.0.1: + version "4.0.1" + resolved "https://registry.npmjs.org/widest-line/-/widest-line-4.0.1.tgz" + integrity sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig== + dependencies: + string-width "^5.0.1" + +wildcard@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz" + integrity sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw== + +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrap-ansi@^8.0.1: + version "8.1.0" + resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz" + integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ== + dependencies: + ansi-styles "^6.1.0" + string-width "^5.0.1" + strip-ansi "^7.0.1" + +wrappy@1: + version "1.0.2" + resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + integrity 
sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +write-file-atomic@^3.0.0: + version "3.0.3" + resolved "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz" + integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== + dependencies: + imurmurhash "^0.1.4" + is-typedarray "^1.0.0" + signal-exit "^3.0.2" + typedarray-to-buffer "^3.1.5" + +ws@^7.3.1: + version "7.5.10" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.10.tgz#58b5c20dc281633f6c19113f39b349bd8bd558d9" + integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ== + +ws@^8.13.0: + version "8.18.1" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.18.1.tgz#ea131d3784e1dfdff91adb0a4a116b127515e3cb" + integrity sha512-RKW2aJZMXeMxVpnZ6bck+RswznaxmzdULiBr6KY7XkTnW8uvt0iT9H5DkHUChXrc+uurzwa0rVI16n/Xzjdz1w== + +xdg-basedir@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz" + integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q== + +xml-js@^1.6.11: + version "1.6.11" + resolved "https://registry.npmjs.org/xml-js/-/xml-js-1.6.11.tgz" + integrity sha512-7rVi2KMfwfWFl+GpPg6m80IVMWXLRjO+PxTq7V2CDhoGak0wzYzFgUY2m4XJ47OGdXd8eLE8EmwfAmdjw7lC1g== + dependencies: + sax "^1.2.4" + +xtend@^4.0.0, xtend@^4.0.1: + version "4.0.2" + resolved "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz" + integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== + +yallist@^3.0.2: + version "3.1.1" + resolved "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz" + integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz" + integrity 
sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: + version "1.10.3" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.3.tgz#76e407ed95c42684fb8e14641e5de62fe65bbcb3" + integrity sha512-vIYeF1u3CjlhAFekPPAk2h/Kv4T3mAkMox5OymRiJQB0spDP10LHvt+K7G9Ny6NuuMAb25/6n1qyUjAcGNf/AA== + +yocto-queue@^0.1.0: + version "0.1.0" + resolved "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz" + integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== + +zwitch@^1.0.0: + version "1.0.5" + resolved "https://registry.npmjs.org/zwitch/-/zwitch-1.0.5.tgz" + integrity sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw== diff --git a/firebase.json b/firebase.json new file mode 100644 index 0000000..1f11310 --- /dev/null +++ b/firebase.json @@ -0,0 +1,45 @@ +{ + "emulators": { + "hosting": { + "host": "localhost", + "port": "3000" + } + }, + "hosting": { + "redirects": [ + { + "source": "/kb/types", + "destination": "/cli#ignite-scaffold-list", + "type": 301 + }, + { + "source": "/kb/scaffold-chain", + "destination": "/cli#ignite-scaffold-chain", + "type": 301 + }, + { + "source": "/kb/serve", + "destination": "/cli#ignite-chain-serve", + "type": 301 + }, + { + "source": "/kb/genesis", + "destination": "/kb/config", + "type": 301 + }, + { + "source": "/kb/params", + "destination": "/cli#ignite-scaffold-module", + "type": 301 + }, + { + "source": "/kb/proto", + "destination": "/kb/config", + "type": 301 + } + ], + "target": "docs.ignite.com", + "public": "docs/build", + "cleanUrls": true + } +} \ No newline at end of file diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..b41f509 --- /dev/null +++ b/go.mod @@ -0,0 +1,508 @@ +module git.cw.tr/mukan-network/mukan-ignite + +go 1.25.4 + +require ( + cosmossdk.io/core v0.11.3 + cosmossdk.io/math v1.5.3 + dario.cat/mergo v1.0.1 + 
github.com/99designs/keyring v1.2.2 + github.com/DATA-DOG/go-sqlmock v1.5.2 + github.com/blang/semver/v4 v4.0.0 + github.com/briandowns/spinner v1.23.2 + github.com/buger/jsonparser v1.1.2 + github.com/cenkalti/backoff v2.2.1+incompatible + github.com/charmbracelet/bubbles v0.21.0 + github.com/charmbracelet/bubbletea v1.3.5 + github.com/charmbracelet/fang v0.4.0 + github.com/charmbracelet/glow v1.5.1 + github.com/charmbracelet/lipgloss v1.1.0 + github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3 + github.com/cockroachdb/errors v1.12.0 + github.com/cometbft/cometbft v0.38.21 + github.com/cosmos/cosmos-sdk v0.53.6 + github.com/cosmos/go-bip39 v1.0.0 + github.com/cosmos/gogoproto v1.7.2 + github.com/emicklei/proto v1.12.2 + github.com/emicklei/proto-contrib v0.15.0 + github.com/ettle/strcase v0.2.0 + github.com/getsentry/sentry-go v0.35.0 + github.com/go-delve/delve v1.24.0 + github.com/go-git/go-git/v5 v5.17.1 + github.com/go-openapi/analysis v0.23.0 + github.com/go-openapi/loads v0.22.0 + github.com/go-openapi/spec v0.21.0 + github.com/gobuffalo/genny/v2 v2.1.0 + github.com/gobuffalo/plush/v4 v4.1.22 + github.com/gobwas/glob v0.2.3 + github.com/goccy/go-yaml v1.15.23 + github.com/google/go-github/v48 v48.2.0 + github.com/google/go-querystring v1.1.0 + github.com/hashicorp/go-hclog v1.6.3 + github.com/hashicorp/go-plugin v1.6.3 + github.com/iancoleman/strcase v0.3.0 + github.com/ignite/web v1.0.8 + github.com/lib/pq v1.10.9 + github.com/mitchellh/mapstructure v1.5.0 + github.com/muesli/reflow v0.3.0 + github.com/nqd/flat v0.2.0 + github.com/otiai10/copy v1.14.1 + github.com/pelletier/go-toml v1.9.5 + github.com/radovskyb/watcher v1.0.7 + github.com/rogpeppe/go-internal v1.14.1 + github.com/rs/cors v1.11.1 + github.com/spf13/cobra v1.10.1 + github.com/spf13/pflag v1.0.10 + github.com/stretchr/testify v1.11.1 + go.etcd.io/bbolt v1.4.0 + golang.org/x/mod v0.30.0 + golang.org/x/sync v0.19.0 + golang.org/x/term v0.38.0 + golang.org/x/text v0.32.0 + golang.org/x/tools 
v0.39.0 + google.golang.org/grpc v1.79.3 + google.golang.org/protobuf v1.36.10 + gopkg.in/yaml.v3 v3.0.1 + sigs.k8s.io/yaml v1.6.0 +) + +require ( + 4d63.com/gocheckcompilerdirectives v1.2.1 // indirect + 4d63.com/gochecknoglobals v0.2.2 // indirect + buf.build/gen/go/bufbuild/bufplugin/protocolbuffers/go v1.36.6-20250718181942-e35f9b667443.1 // indirect + buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.6-20250717185734-6c6e0d3c608e.1 // indirect + buf.build/gen/go/bufbuild/registry/connectrpc/go v1.18.1-20250721151928-2b7ae473b098.1 // indirect + buf.build/gen/go/bufbuild/registry/protocolbuffers/go v1.36.6-20250721151928-2b7ae473b098.1 // indirect + buf.build/gen/go/pluginrpc/pluginrpc/protocolbuffers/go v1.36.6-20241007202033-cf42259fcbfc.1 // indirect + buf.build/go/app v0.1.0 // indirect + buf.build/go/bufplugin v0.9.0 // indirect + buf.build/go/interrupt v1.1.0 // indirect + buf.build/go/protovalidate v0.14.0 // indirect + buf.build/go/protoyaml v0.6.0 // indirect + buf.build/go/spdx v0.2.0 // indirect + buf.build/go/standard v0.1.0 // indirect + cel.dev/expr v0.25.1 // indirect + connectrpc.com/connect v1.18.1 // indirect + connectrpc.com/otelconnect v0.7.2 // indirect + cosmossdk.io/api v0.9.2 // indirect + cosmossdk.io/collections v1.3.1 // indirect + cosmossdk.io/depinject v1.2.1 // indirect + cosmossdk.io/errors v1.0.2 // indirect + cosmossdk.io/log v1.6.1 // indirect + cosmossdk.io/schema v1.1.0 // indirect + cosmossdk.io/store v1.1.2 // indirect + cosmossdk.io/x/tx v0.14.0 // indirect + filippo.io/edwards25519 v1.1.1 // indirect + github.com/4meepo/tagalign v1.4.1 // indirect + github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect + github.com/Abirdcfly/dupword v0.1.3 // indirect + github.com/Antonboom/errname v1.0.0 // indirect + github.com/Antonboom/nilnil v1.0.1 // indirect + github.com/Antonboom/testifylint v1.5.2 // indirect + github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect + 
github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c // indirect + github.com/Crocmagnon/fatcontext v0.7.1 // indirect + github.com/DataDog/datadog-go v3.2.0+incompatible // indirect + github.com/DataDog/zstd v1.5.7 // indirect + github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 // indirect + github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.0 // indirect + github.com/Masterminds/semver/v3 v3.3.1 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/OpenPeeDeeP/depguard/v2 v2.2.0 // indirect + github.com/ProtonMail/go-crypto v1.1.6 // indirect + github.com/alecthomas/chroma v0.10.0 // indirect + github.com/alecthomas/go-check-sumtype v0.3.1 // indirect + github.com/alexkohler/nakedret/v2 v2.0.5 // indirect + github.com/alexkohler/prealloc v1.0.0 // indirect + github.com/alingse/asasalint v0.0.11 // indirect + github.com/alingse/nilnesserr v0.1.2 // indirect + github.com/antlr4-go/antlr/v4 v4.13.1 // indirect + github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect + github.com/ashanbrown/forbidigo v1.6.0 // indirect + github.com/ashanbrown/makezero v1.2.0 // indirect + github.com/atotto/clipboard v0.1.4 // indirect + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect + github.com/aymerick/douceur v0.2.0 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bgentry/speakeasy v0.2.0 // indirect + github.com/bkielbasa/cyclop v1.2.3 // indirect + github.com/blizzy78/varnamelen v0.8.0 // indirect + github.com/bombsimon/wsl/v4 v4.5.0 // indirect + github.com/breml/bidichk v0.3.2 // indirect + github.com/breml/errchkjson v0.4.0 // indirect + github.com/bufbuild/buf v1.56.0 // indirect + github.com/bufbuild/protocompile v0.14.1 // indirect + github.com/bufbuild/protoplugin v0.0.0-20250218205857-750e09ce93e1 // indirect + github.com/butuzov/ireturn v0.3.1 // indirect + github.com/butuzov/mirror v1.3.0 // indirect + github.com/bytedance/gopkg v0.1.3 // indirect + 
github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect + github.com/calmh/randomart v1.1.0 // indirect + github.com/catenacyber/perfsprint v0.8.1 // indirect + github.com/ccojocar/zxcvbn-go v1.0.2 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/charithe/durationcheck v0.0.10 // indirect + github.com/charmbracelet/charm v0.8.7 // indirect + github.com/charmbracelet/colorprofile v0.3.1 // indirect + github.com/charmbracelet/glamour v0.6.0 // indirect + github.com/charmbracelet/x/ansi v0.8.0 // indirect + github.com/charmbracelet/x/cellbuf v0.0.13 // indirect + github.com/charmbracelet/x/exp/charmtone v0.0.0-20250603201427-c31516f43444 // indirect + github.com/charmbracelet/x/term v0.2.1 // indirect + github.com/chavacava/garif v0.1.0 // indirect + github.com/chigopher/pathlib v0.19.1 // indirect + github.com/cilium/ebpf v0.16.0 // indirect + github.com/ckaznocha/intrange v0.3.0 // indirect + github.com/cloudflare/circl v1.6.3 // indirect + github.com/cloudwego/base64x v0.1.6 // indirect + github.com/cockroachdb/fifo v0.0.0-20240606204812-0bbfbd93a7ce // indirect + github.com/cockroachdb/logtags v0.0.0-20241215232642-bb51bb14a506 // indirect + github.com/cockroachdb/pebble v1.1.5 // indirect + github.com/cockroachdb/redact v1.1.6 // indirect + github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 // indirect + github.com/cometbft/cometbft-db v0.14.1 // indirect + github.com/containerd/errdefs v1.0.0 // indirect + github.com/containerd/errdefs/pkg v0.3.0 // indirect + github.com/containerd/stargz-snapshotter/estargz v0.17.0 // indirect + github.com/cosiner/argv v0.1.0 // indirect + github.com/cosmos/btcutil v1.0.5 // indirect + github.com/cosmos/cosmos-db v1.1.3 // indirect + github.com/cosmos/cosmos-proto v1.0.0-beta.5 // indirect + github.com/cosmos/gogogateway v1.2.0 // indirect + github.com/cosmos/iavl v1.2.2 // indirect + 
github.com/cosmos/ics23/go v0.11.0 // indirect + github.com/cosmos/ledger-cosmos-go v1.0.0 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.7 // indirect + github.com/curioswitch/go-reassign v0.3.0 // indirect + github.com/cyphar/filepath-securejoin v0.4.1 // indirect + github.com/daixiang0/gci v0.13.5 // indirect + github.com/danieljoos/wincred v1.2.2 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 // indirect + github.com/denis-tingaikin/go-header v0.5.0 // indirect + github.com/derekparker/trie v0.0.0-20230829180723-39f4de51ef7d // indirect + github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect + github.com/dgraph-io/badger/v4 v4.2.0 // indirect + github.com/dgraph-io/ristretto v0.2.0 // indirect + github.com/dgrijalva/jwt-go v3.2.0+incompatible // indirect + github.com/distribution/reference v0.6.0 // indirect + github.com/dlclark/regexp2 v1.11.0 // indirect + github.com/docker/cli v29.2.0+incompatible // indirect + github.com/docker/distribution v2.8.3+incompatible // indirect + github.com/docker/docker v28.3.3+incompatible // indirect + github.com/docker/docker-credential-helpers v0.9.3 // indirect + github.com/docker/go-connections v0.5.0 // indirect + github.com/docker/go-units v0.5.0 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/dvsekhvalnov/jose2go v1.7.0 // indirect + github.com/emicklei/dot v1.6.2 // indirect + github.com/emirpasic/gods v1.18.1 // indirect + github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect + github.com/fatih/color v1.18.0 // indirect + github.com/fatih/structs v1.1.0 // indirect + github.com/fatih/structtag v1.2.0 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/firefart/nonamedreturns v1.0.5 // indirect + github.com/fsnotify/fsnotify v1.9.0 // indirect + github.com/fzipp/gocyclo v0.6.0 // indirect + github.com/ghostiam/protogetter v0.3.9 
// indirect + github.com/go-chi/chi/v5 v5.2.2 // indirect + github.com/go-critic/go-critic v0.12.0 // indirect + github.com/go-delve/liner v1.2.3-0.20231231155935-4726ab1d7f62 // indirect + github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect + github.com/go-git/go-billy/v5 v5.8.0 // indirect + github.com/go-kit/kit v0.13.0 // indirect + github.com/go-kit/log v0.2.1 // indirect + github.com/go-logfmt/logfmt v0.6.0 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-openapi/errors v0.22.0 // indirect + github.com/go-openapi/jsonpointer v0.21.0 // indirect + github.com/go-openapi/jsonreference v0.21.0 // indirect + github.com/go-openapi/strfmt v0.23.0 // indirect + github.com/go-openapi/swag v0.23.0 // indirect + github.com/go-toolsmith/astcast v1.1.0 // indirect + github.com/go-toolsmith/astcopy v1.1.0 // indirect + github.com/go-toolsmith/astequal v1.2.0 // indirect + github.com/go-toolsmith/astfmt v1.1.0 // indirect + github.com/go-toolsmith/astp v1.1.0 // indirect + github.com/go-toolsmith/strparse v1.1.0 // indirect + github.com/go-toolsmith/typep v1.1.0 // indirect + github.com/go-viper/mapstructure/v2 v2.4.0 // indirect + github.com/go-xmlfmt/xmlfmt v1.1.3 // indirect + github.com/gobuffalo/flect v0.3.0 // indirect + github.com/gobuffalo/github_flavored_markdown v1.1.4 // indirect + github.com/gobuffalo/helpers v0.6.7 // indirect + github.com/gobuffalo/logger v1.0.7 // indirect + github.com/gobuffalo/packd v1.0.2 // indirect + github.com/gobuffalo/tags/v3 v3.1.4 // indirect + github.com/gobuffalo/validate/v3 v3.3.3 // indirect + github.com/gobwas/ws v1.2.1 // indirect + github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect + github.com/gofrs/flock v0.12.1 // indirect + github.com/gofrs/uuid v4.4.0+incompatible // indirect + github.com/gogo/googleapis v1.4.1 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/groupcache 
v0.0.0-20241129210726-2c02b8208cf8 // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a // indirect + github.com/golangci/go-printf-func-name v0.1.0 // indirect + github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d // indirect + github.com/golangci/golangci-lint v1.64.5 // indirect + github.com/golangci/misspell v0.6.0 // indirect + github.com/golangci/plugin-module-register v0.1.1 // indirect + github.com/golangci/revgrep v0.8.0 // indirect + github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed // indirect + github.com/google/btree v1.1.3 // indirect + github.com/google/cel-go v0.26.0 // indirect + github.com/google/flatbuffers v1.12.1 // indirect + github.com/google/go-cmp v0.7.0 // indirect + github.com/google/go-containerregistry v0.20.6 // indirect + github.com/google/go-dap v0.12.0 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/gordonklaus/ineffassign v0.1.0 // indirect + github.com/gorilla/css v1.0.0 // indirect + github.com/gorilla/handlers v1.5.2 // indirect + github.com/gorilla/mux v1.8.1 // indirect + github.com/gorilla/websocket v1.5.3 // indirect + github.com/gostaticanalysis/analysisutil v0.7.1 // indirect + github.com/gostaticanalysis/comment v1.4.2 // indirect + github.com/gostaticanalysis/forcetypeassert v0.2.0 // indirect + github.com/gostaticanalysis/nilerr v0.1.1 // indirect + github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect + github.com/grpc-ecosystem/grpc-gateway v1.16.0 // indirect + github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1 // indirect + github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect + github.com/hashicorp/go-immutable-radix v1.3.1 // indirect + github.com/hashicorp/go-immutable-radix/v2 v2.1.0 // indirect + github.com/hashicorp/go-metrics v0.5.4 // indirect + github.com/hashicorp/go-version v1.7.0 // indirect + 
github.com/hashicorp/golang-lru v1.0.2 // indirect + github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect + github.com/hashicorp/yamux v0.1.2 // indirect + github.com/hdevalence/ed25519consensus v0.2.0 // indirect + github.com/hexops/gotextdiff v1.0.3 // indirect + github.com/huandu/skiplist v1.2.1 // indirect + github.com/huandu/xstrings v1.4.0 // indirect + github.com/imdario/mergo v0.3.15 // indirect + github.com/improbable-eng/grpc-web v0.15.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect + github.com/jdx/go-netrc v1.0.0 // indirect + github.com/jgautheron/goconst v1.7.1 // indirect + github.com/jingyugao/rowserrcheck v1.1.1 // indirect + github.com/jinzhu/copier v0.4.0 // indirect + github.com/jjti/go-spancheck v0.6.4 // indirect + github.com/jmhodges/levigo v1.0.0 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/julz/importas v0.2.0 // indirect + github.com/karamaru-alpha/copyloopvar v1.2.1 // indirect + github.com/kevinburke/ssh_config v1.2.0 // indirect + github.com/kisielk/errcheck v1.8.0 // indirect + github.com/kkHAIKE/contextcheck v1.1.5 // indirect + github.com/klauspost/compress v1.18.0 // indirect + github.com/klauspost/cpuid/v2 v2.2.10 // indirect + github.com/klauspost/pgzip v1.2.6 // indirect + github.com/kr/pretty v0.3.1 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/kulti/thelper v0.6.3 // indirect + github.com/kunwardeep/paralleltest v1.0.10 // indirect + github.com/lasiar/canonicalheader v1.1.2 // indirect + github.com/ldez/exptostd v0.4.1 // indirect + github.com/ldez/gomoddirectives v0.6.1 // indirect + github.com/ldez/grignotin v0.9.0 // indirect + github.com/ldez/tagliatelle v0.7.1 // indirect + github.com/ldez/usetesting v0.4.2 // indirect + github.com/leonklingele/grouper v1.1.2 // indirect + github.com/linxGnu/grocksdb v1.8.14 // indirect + github.com/lucasb-eyer/go-colorful v1.2.0 // indirect 
+ github.com/macabu/inamedparam v0.1.3 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/maratori/testableexamples v1.0.0 // indirect + github.com/maratori/testpackage v1.1.1 // indirect + github.com/matoous/godox v1.1.0 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-localereader v0.0.1 // indirect + github.com/mattn/go-runewidth v0.0.16 // indirect + github.com/meowgorithm/babyenv v1.3.1 // indirect + github.com/mgechev/revive v1.6.1 // indirect + github.com/microcosm-cc/bluemonday v1.0.23 // indirect + github.com/mikesmitty/edkey v0.0.0-20170222072505-3356ea4e686a // indirect + github.com/mitchellh/go-homedir v1.1.0 // indirect + github.com/moby/docker-image-spec v1.3.1 // indirect + github.com/moby/term v0.5.2 // indirect + github.com/moricho/tparallel v0.3.2 // indirect + github.com/morikuni/aec v1.0.0 // indirect + github.com/mtibben/percent v0.2.1 // indirect + github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect + github.com/muesli/cancelreader v0.2.2 // indirect + github.com/muesli/gitcha v0.2.0 // indirect + github.com/muesli/go-app-paths v0.2.2 // indirect + github.com/muesli/mango v0.1.0 // indirect + github.com/muesli/mango-cobra v1.2.0 // indirect + github.com/muesli/mango-pflag v0.1.0 // indirect + github.com/muesli/roff v0.1.0 // indirect + github.com/muesli/sasquatch v0.0.0-20200811221207-66979d92330a // indirect + github.com/muesli/termenv v0.16.0 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect + github.com/nakabonne/nestif v0.3.1 // indirect + github.com/nishanths/exhaustive v0.12.0 // indirect + github.com/nishanths/predeclared v0.2.2 // indirect + github.com/nunnatsa/ginkgolinter v0.19.0 // indirect + github.com/oasisprotocol/curve25519-voi v0.0.0-20230904125328-1f23a7beb09a // indirect + github.com/oklog/run v1.1.0 // indirect + github.com/oklog/ulid v1.3.1 // indirect + 
github.com/olekukonko/tablewriter v0.0.5 // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.1.1 // indirect + github.com/otiai10/mint v1.6.3 // indirect + github.com/pelletier/go-toml/v2 v2.2.4 // indirect + github.com/petermattis/goid v0.0.0-20240813172612-4fcff4a6cae7 // indirect + github.com/pjbgf/sha1cd v0.3.2 // indirect + github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/polyfloyd/go-errorlint v1.7.1 // indirect + github.com/prometheus/client_golang v1.23.0 // indirect + github.com/prometheus/client_model v0.6.2 // indirect + github.com/prometheus/common v0.65.0 // indirect + github.com/prometheus/procfs v0.16.1 // indirect + github.com/quasilyte/go-ruleguard v0.4.3-0.20240823090925-0fe6f58b47b1 // indirect + github.com/quasilyte/go-ruleguard/dsl v0.3.22 // indirect + github.com/quasilyte/gogrep v0.5.0 // indirect + github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect + github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect + github.com/quic-go/qpack v0.6.0 // indirect + github.com/quic-go/quic-go v0.57.0 // indirect + github.com/raeperd/recvcheck v0.2.0 // indirect + github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/rs/zerolog v1.34.0 // indirect + github.com/russross/blackfriday/v2 v2.1.0 // indirect + github.com/ryancurrah/gomodguard v1.3.5 // indirect + github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect + github.com/sabhiram/go-gitignore v0.0.0-20180611051255-d3107576ba94 // indirect + github.com/sagikazarmark/locafero v0.11.0 // indirect + github.com/sahilm/fuzzy v0.1.1 // indirect + github.com/sanposhiho/wastedassign/v2 v2.1.0 // indirect + github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 // indirect + 
github.com/sasha-s/go-deadlock v0.3.5 // indirect + github.com/sashamelentyev/interfacebloat v1.1.0 // indirect + github.com/sashamelentyev/usestdlibvars v1.28.0 // indirect + github.com/securego/gosec/v2 v2.22.1 // indirect + github.com/segmentio/asm v1.2.0 // indirect + github.com/segmentio/encoding v0.5.3 // indirect + github.com/segmentio/ksuid v1.0.4 // indirect + github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect + github.com/sivchari/containedctx v1.0.3 // indirect + github.com/sivchari/tenv v1.12.1 // indirect + github.com/skeema/knownhosts v1.3.1 // indirect + github.com/sonatard/noctx v0.1.0 // indirect + github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d // indirect + github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 // indirect + github.com/sourcegraph/go-diff v0.7.0 // indirect + github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e // indirect + github.com/spf13/afero v1.15.0 // indirect + github.com/spf13/cast v1.10.0 // indirect + github.com/spf13/viper v1.21.0 // indirect + github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect + github.com/stbenjam/no-sprintf-host-port v0.2.0 // indirect + github.com/stoewer/go-strcase v1.3.1 // indirect + github.com/stretchr/objx v0.5.2 // indirect + github.com/subosito/gotenv v1.6.0 // indirect + github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d // indirect + github.com/tbruyelle/mdgofmt v0.1.3 // indirect + github.com/tdakkota/asciicheck v0.4.0 // indirect + github.com/tendermint/go-amino v0.16.0 // indirect + github.com/tetafro/godot v1.4.20 // indirect + github.com/tetratelabs/wazero v1.9.0 // indirect + github.com/tidwall/btree v1.7.0 // indirect + github.com/timakin/bodyclose v0.0.0-20241017074812-ed6a65f985e3 // indirect + github.com/timonwong/loggercheck v0.10.1 // indirect + github.com/tomarrell/wrapcheck/v2 v2.10.0 // indirect + github.com/tommy-muehle/go-mnd/v2 v2.5.1 // 
indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/ultraware/funlen v0.2.0 // indirect + github.com/ultraware/whitespace v0.2.0 // indirect + github.com/uudashr/gocognit v1.2.0 // indirect + github.com/uudashr/iface v1.3.1 // indirect + github.com/vbatts/tar-split v0.12.1 // indirect + github.com/vektra/mockery/v2 v2.53.3 // indirect + github.com/xanzy/ssh-agent v0.3.3 // indirect + github.com/xen0n/gosmopolitan v1.2.2 // indirect + github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect + github.com/yagipy/maintidx v1.0.0 // indirect + github.com/yeya24/promlinter v0.3.0 // indirect + github.com/ykadowak/zerologlint v0.1.5 // indirect + github.com/yuin/goldmark v1.5.2 // indirect + github.com/yuin/goldmark-emoji v1.0.1 // indirect + github.com/zondax/golem v0.27.0 // indirect + github.com/zondax/hid v0.9.2 // indirect + github.com/zondax/ledger-go v1.0.1 // indirect + gitlab.com/bosi/decorder v0.4.2 // indirect + go-simpler.org/musttag v0.13.0 // indirect + go-simpler.org/sloglint v0.9.0 // indirect + go.lsp.dev/jsonrpc2 v0.10.0 // indirect + go.lsp.dev/pkg v0.0.0-20210717090340-384b27a52fb2 // indirect + go.lsp.dev/protocol v0.12.0 // indirect + go.lsp.dev/uri v0.3.0 // indirect + go.mongodb.org/mongo-driver v1.14.0 // indirect + go.opencensus.io v0.24.0 // indirect + go.opentelemetry.io/auto/sdk v1.2.1 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 // indirect + go.opentelemetry.io/otel v1.39.0 // indirect + go.opentelemetry.io/otel/metric v1.39.0 // indirect + go.opentelemetry.io/otel/trace v1.39.0 // indirect + go.starlark.net v0.0.0-20231101134539-556fd59b42f6 // indirect + go.uber.org/automaxprocs v1.6.0 // indirect + go.uber.org/multierr v1.11.0 // indirect + go.uber.org/zap v1.27.0 // indirect + go.yaml.in/yaml/v2 v2.4.2 // indirect + go.yaml.in/yaml/v3 v3.0.4 // indirect + golang.org/x/arch v0.17.0 // indirect + golang.org/x/crypto v0.46.0 // indirect + golang.org/x/exp 
v0.0.0-20250718183923-645b1fa84792 // indirect + golang.org/x/exp/typeparams v0.0.0-20250210185358-939b2ce775ac // indirect + golang.org/x/net v0.48.0 // indirect + golang.org/x/sys v0.39.0 // indirect + golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54 // indirect + golang.org/x/vuln v1.1.4 // indirect + google.golang.org/genproto v0.0.0-20250603155806-513f23925822 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20251202230838-ff82c1b0f217 // indirect + gopkg.in/warnings.v0 v0.1.2 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gotest.tools/v3 v3.5.2 // indirect + honnef.co/go/tools v0.6.0 // indirect + mvdan.cc/gofumpt v0.7.0 // indirect + mvdan.cc/unparam v0.0.0-20240528143540-8a5130ca722f // indirect + nhooyr.io/websocket v1.8.6 // indirect + pgregory.net/rapid v1.2.0 // indirect + pluginrpc.com/pluginrpc v0.5.0 // indirect +) + +tool ( + github.com/bufbuild/buf/cmd/buf + github.com/golangci/golangci-lint/cmd/golangci-lint + github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2 + github.com/tbruyelle/mdgofmt/cmd/mdgofmt + github.com/vektra/mockery/v2 + golang.org/x/tools/cmd/goimports + golang.org/x/vuln/cmd/govulncheck + mvdan.cc/gofumpt +) + +replace ( + github.com/99designs/keyring => github.com/cosmos/keyring v1.2.0 + github.com/dgrijalva/jwt-go => github.com/golang-jwt/jwt/v4 v4.4.2 + // Fix upstream GHSA-h395-qcrw-5vmq vulnerability. + // TODO Remove it: https://github.com/cosmos/cosmos-sdk/issues/10409 + github.com/gin-gonic/gin => github.com/gin-gonic/gin v1.7.0 + // Downgraded to avoid bugs in following commits which caused simulations to fail. 
+ github.com/syndtr/goleveldb => github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..f5e9fea --- /dev/null +++ b/go.sum @@ -0,0 +1,2570 @@ +4d63.com/gocheckcompilerdirectives v1.2.1 h1:AHcMYuw56NPjq/2y615IGg2kYkBdTvOaojYCBcRE7MA= +4d63.com/gocheckcompilerdirectives v1.2.1/go.mod h1:yjDJSxmDTtIHHCqX0ufRYZDL6vQtMG7tJdKVeWwsqvs= +4d63.com/gochecknoglobals v0.2.2 h1:H1vdnwnMaZdQW/N+NrkT1SZMTBmcwHe9Vq8lJcYYTtU= +4d63.com/gochecknoglobals v0.2.2/go.mod h1:lLxwTQjL5eIesRbvnzIP3jZtG140FnTdz+AlMa+ogt0= +buf.build/gen/go/bufbuild/bufplugin/protocolbuffers/go v1.36.6-20250718181942-e35f9b667443.1 h1:8kSz6PsTC64z3itQqwMgswSGR/QpB3ShZGycu+zq+58= +buf.build/gen/go/bufbuild/bufplugin/protocolbuffers/go v1.36.6-20250718181942-e35f9b667443.1/go.mod h1:TsmeaGU5CZAF7zRM05vIKgXh56GgwaoMS8X+a77RV5Q= +buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.6-20250717185734-6c6e0d3c608e.1 h1:Lg6klmCi3v7VvpqeeLEER9/m5S8y9e9DjhqQnSCNy4k= +buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.6-20250717185734-6c6e0d3c608e.1/go.mod h1:avRlCjnFzl98VPaeCtJ24RrV/wwHFzB8sWXhj26+n/U= +buf.build/gen/go/bufbuild/registry/connectrpc/go v1.18.1-20250721151928-2b7ae473b098.1 h1:icgV8NMRNi31JwLZ8OJQK1HNIX3RTBdhjvpRPJF4fyI= +buf.build/gen/go/bufbuild/registry/connectrpc/go v1.18.1-20250721151928-2b7ae473b098.1/go.mod h1:/MMEAJmz7PEmksjkSxhWXell82FXiG7BLUPBJRmKBsA= +buf.build/gen/go/bufbuild/registry/protocolbuffers/go v1.36.6-20250721151928-2b7ae473b098.1 h1:fgiFo9f0jCni7kb5QxQ78CccZv6WLTH5Iea2B+AvSKY= +buf.build/gen/go/bufbuild/registry/protocolbuffers/go v1.36.6-20250721151928-2b7ae473b098.1/go.mod h1:RsJBKYlgzsbl5LAhIu0cNrPPzBNenMLTAAykRxFidtw= +buf.build/gen/go/pluginrpc/pluginrpc/protocolbuffers/go v1.36.6-20241007202033-cf42259fcbfc.1 h1:trcsXBDm8exui7mvndZnvworCyBq1xuMnod2N0j79K8= +buf.build/gen/go/pluginrpc/pluginrpc/protocolbuffers/go v1.36.6-20241007202033-cf42259fcbfc.1/go.mod 
h1:OUbhXurY+VHFGn9FBxcRy8UB7HXk9NvJ2qCgifOMypQ= +buf.build/go/app v0.1.0 h1:nlqD/h0rhIN73ZoiDElprrPiO2N6JV+RmNK34K29Ihg= +buf.build/go/app v0.1.0/go.mod h1:0XVOYemubVbxNXVY0DnsVgWeGkcbbAvjDa1fmhBC+Wo= +buf.build/go/bufplugin v0.9.0 h1:ktZJNP3If7ldcWVqh46XKeiYJVPxHQxCfjzVQDzZ/lo= +buf.build/go/bufplugin v0.9.0/go.mod h1:Z0CxA3sKQ6EPz/Os4kJJneeRO6CjPeidtP1ABh5jPPY= +buf.build/go/interrupt v1.1.0 h1:olBuhgv9Sav4/9pkSLoxgiOsZDgM5VhRhvRpn3DL0lE= +buf.build/go/interrupt v1.1.0/go.mod h1:ql56nXPG1oHlvZa6efNC7SKAQ/tUjS6z0mhJl0gyeRM= +buf.build/go/protovalidate v0.14.0 h1:kr/rC/no+DtRyYX+8KXLDxNnI1rINz0imk5K44ZpZ3A= +buf.build/go/protovalidate v0.14.0/go.mod h1:+F/oISho9MO7gJQNYC2VWLzcO1fTPmaTA08SDYJZncA= +buf.build/go/protoyaml v0.6.0 h1:Nzz1lvcXF8YgNZXk+voPPwdU8FjDPTUV4ndNTXN0n2w= +buf.build/go/protoyaml v0.6.0/go.mod h1:RgUOsBu/GYKLDSIRgQXniXbNgFlGEZnQpRAUdLAFV2Q= +buf.build/go/spdx v0.2.0 h1:IItqM0/cMxvFJJumcBuP8NrsIzMs/UYjp/6WSpq8LTw= +buf.build/go/spdx v0.2.0/go.mod h1:bXdwQFem9Si3nsbNy8aJKGPoaPi5DKwdeEp5/ArZ6w8= +buf.build/go/standard v0.1.0 h1:g98T9IyvAl0vS3Pq8iVk6Cvj2ZiFvoUJRtfyGa0120U= +buf.build/go/standard v0.1.0/go.mod h1:PiqpHz/7ZFq+kqvYhc/SK3lxFIB9N/aiH2CFC2JHIQg= +cel.dev/expr v0.25.1 h1:1KrZg61W6TWSxuNZ37Xy49ps13NUovb66QLprthtwi4= +cel.dev/expr v0.25.1/go.mod h1:hrXvqGP6G6gyx8UAHSHJ5RGk//1Oj5nXQ2NI02Nrsg4= +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod 
h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= +cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= +cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= +cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= +cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= +cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= +cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= +cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= +cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= 
+cloud.google.com/go v0.102.0/go.mod h1:oWcCzKlqJ5zgHQt9YsaeTY9KzIvjyy0ArmiBUgpQ+nc= +cloud.google.com/go v0.102.1/go.mod h1:XZ77E9qnTEnrgEOvr4xzfdX5TRo7fB4T2F4O6+34hIU= +cloud.google.com/go v0.104.0/go.mod h1:OO6xxXdJyvuJPcEPBLN9BJPD+jep5G1+2U5B5gkRYtA= +cloud.google.com/go/aiplatform v1.22.0/go.mod h1:ig5Nct50bZlzV6NvKaTwmplLLddFx0YReh9WfTO5jKw= +cloud.google.com/go/aiplatform v1.24.0/go.mod h1:67UUvRBKG6GTayHKV8DBv2RtR1t93YRu5B1P3x99mYY= +cloud.google.com/go/analytics v0.11.0/go.mod h1:DjEWCu41bVbYcKyvlws9Er60YE4a//bK6mnhWvQeFNI= +cloud.google.com/go/analytics v0.12.0/go.mod h1:gkfj9h6XRf9+TS4bmuhPEShsh3hH8PAZzm/41OOhQd4= +cloud.google.com/go/area120 v0.5.0/go.mod h1:DE/n4mp+iqVyvxHN41Vf1CR602GiHQjFPusMFW6bGR4= +cloud.google.com/go/area120 v0.6.0/go.mod h1:39yFJqWVgm0UZqWTOdqkLhjoC7uFfgXRC8g/ZegeAh0= +cloud.google.com/go/artifactregistry v1.6.0/go.mod h1:IYt0oBPSAGYj/kprzsBjZ/4LnG/zOcHyFHjWPCi6SAQ= +cloud.google.com/go/artifactregistry v1.7.0/go.mod h1:mqTOFOnGZx8EtSqK/ZWcsm/4U8B77rbcLP6ruDU2Ixk= +cloud.google.com/go/asset v1.5.0/go.mod h1:5mfs8UvcM5wHhqtSv8J1CtxxaQq3AdBxxQi2jGW/K4o= +cloud.google.com/go/asset v1.7.0/go.mod h1:YbENsRK4+xTiL+Ofoj5Ckf+O17kJtgp3Y3nn4uzZz5s= +cloud.google.com/go/asset v1.8.0/go.mod h1:mUNGKhiqIdbr8X7KNayoYvyc4HbbFO9URsjbytpUaW0= +cloud.google.com/go/assuredworkloads v1.5.0/go.mod h1:n8HOZ6pff6re5KYfBXcFvSViQjDwxFkAkmUFffJRbbY= +cloud.google.com/go/assuredworkloads v1.6.0/go.mod h1:yo2YOk37Yc89Rsd5QMVECvjaMKymF9OP+QXWlKXUkXw= +cloud.google.com/go/assuredworkloads v1.7.0/go.mod h1:z/736/oNmtGAyU47reJgGN+KVoYoxeLBoj4XkKYscNI= +cloud.google.com/go/automl v1.5.0/go.mod h1:34EjfoFGMZ5sgJ9EoLsRtdPSNZLcfflJR39VbVNS2M0= +cloud.google.com/go/automl v1.6.0/go.mod h1:ugf8a6Fx+zP0D59WLhqgTDsQI9w07o64uf/Is3Nh5p8= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod 
h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/bigquery v1.42.0/go.mod h1:8dRTJxhtG+vwBKzE5OseQn/hiydoQN3EedCaOdYmxRA= +cloud.google.com/go/billing v1.4.0/go.mod h1:g9IdKBEFlItS8bTtlrZdVLWSSdSyFUZKXNS02zKMOZY= +cloud.google.com/go/billing v1.5.0/go.mod h1:mztb1tBc3QekhjSgmpf/CV4LzWXLzCArwpLmP2Gm88s= +cloud.google.com/go/binaryauthorization v1.1.0/go.mod h1:xwnoWu3Y84jbuHa0zd526MJYmtnVXn0syOjaJgy4+dM= +cloud.google.com/go/binaryauthorization v1.2.0/go.mod h1:86WKkJHtRcv5ViNABtYMhhNWRrD1Vpi//uKEy7aYEfI= +cloud.google.com/go/cloudtasks v1.5.0/go.mod h1:fD92REy1x5woxkKEkLdvavGnPJGEn8Uic9nWuLzqCpY= +cloud.google.com/go/cloudtasks v1.6.0/go.mod h1:C6Io+sxuke9/KNRkbQpihnW93SWDU3uXt92nu85HkYI= +cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= +cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= +cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= +cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s= +cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU= +cloud.google.com/go/compute v1.7.0/go.mod h1:435lt8av5oL9P3fv1OEzSbSUe+ybHXGMPQHHZWZxy9U= +cloud.google.com/go/compute v1.10.0/go.mod h1:ER5CLbMxl90o2jtNbGSbtfOpQKR0t15FOtRsugnLrlU= +cloud.google.com/go/compute v1.12.0/go.mod h1:e8yNOBcBONZU1vJKCvCoDw/4JQsA0dpM4x/6PIIOocU= +cloud.google.com/go/compute v1.12.1/go.mod h1:e8yNOBcBONZU1vJKCvCoDw/4JQsA0dpM4x/6PIIOocU= +cloud.google.com/go/compute/metadata v0.1.0/go.mod h1:Z1VN+bulIf6bt4P/C37K4DyZYZEXYonfTBHHFPO/4UU= +cloud.google.com/go/compute/metadata v0.2.1/go.mod 
h1:jgHgmJd2RKBGzXqF5LR2EZMGxBkeanZ9wwa75XHJgOM= +cloud.google.com/go/containeranalysis v0.5.1/go.mod h1:1D92jd8gRR/c0fGMlymRgxWD3Qw9C1ff6/T7mLgVL8I= +cloud.google.com/go/containeranalysis v0.6.0/go.mod h1:HEJoiEIu+lEXM+k7+qLCci0h33lX3ZqoYFdmPcoO7s4= +cloud.google.com/go/datacatalog v1.3.0/go.mod h1:g9svFY6tuR+j+hrTw3J2dNcmI0dzmSiyOzm8kpLq0a0= +cloud.google.com/go/datacatalog v1.5.0/go.mod h1:M7GPLNQeLfWqeIm3iuiruhPzkt65+Bx8dAKvScX8jvs= +cloud.google.com/go/datacatalog v1.6.0/go.mod h1:+aEyF8JKg+uXcIdAmmaMUmZ3q1b/lKLtXCmXdnc0lbc= +cloud.google.com/go/dataflow v0.6.0/go.mod h1:9QwV89cGoxjjSR9/r7eFDqqjtvbKxAK2BaYU6PVk9UM= +cloud.google.com/go/dataflow v0.7.0/go.mod h1:PX526vb4ijFMesO1o202EaUmouZKBpjHsTlCtB4parQ= +cloud.google.com/go/dataform v0.3.0/go.mod h1:cj8uNliRlHpa6L3yVhDOBrUXH+BPAO1+KFMQQNSThKo= +cloud.google.com/go/dataform v0.4.0/go.mod h1:fwV6Y4Ty2yIFL89huYlEkwUPtS7YZinZbzzj5S9FzCE= +cloud.google.com/go/datalabeling v0.5.0/go.mod h1:TGcJ0G2NzcsXSE/97yWjIZO0bXj0KbVlINXMG9ud42I= +cloud.google.com/go/datalabeling v0.6.0/go.mod h1:WqdISuk/+WIGeMkpw/1q7bK/tFEZxsrFJOJdY2bXvTQ= +cloud.google.com/go/dataqna v0.5.0/go.mod h1:90Hyk596ft3zUQ8NkFfvICSIfHFh1Bc7C4cK3vbhkeo= +cloud.google.com/go/dataqna v0.6.0/go.mod h1:1lqNpM7rqNLVgWBJyk5NF6Uen2PHym0jtVJonplVsDA= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/datastream v1.2.0/go.mod h1:i/uTP8/fZwgATHS/XFu0TcNUhuA0twZxxQ3EyCUQMwo= +cloud.google.com/go/datastream v1.3.0/go.mod h1:cqlOX8xlyYF/uxhiKn6Hbv6WjwPPuI9W2M9SAXwaLLQ= +cloud.google.com/go/dialogflow v1.15.0/go.mod h1:HbHDWs33WOGJgn6rfzBW1Kv807BE3O1+xGbn59zZWI4= +cloud.google.com/go/dialogflow v1.16.1/go.mod h1:po6LlzGfK+smoSmTBnbkIZY2w8ffjz/RcGSS+sh1el0= +cloud.google.com/go/dialogflow v1.17.0/go.mod h1:YNP09C/kXA1aZdBgC/VtXX74G/TKn7XVCcVumTflA+8= +cloud.google.com/go/documentai v1.7.0/go.mod 
h1:lJvftZB5NRiFSX4moiye1SMxHx0Bc3x1+p9e/RfXYiU= +cloud.google.com/go/documentai v1.8.0/go.mod h1:xGHNEB7CtsnySCNrCFdCyyMz44RhFEEX2Q7UD0c5IhU= +cloud.google.com/go/domains v0.6.0/go.mod h1:T9Rz3GasrpYk6mEGHh4rymIhjlnIuB4ofT1wTxDeT4Y= +cloud.google.com/go/domains v0.7.0/go.mod h1:PtZeqS1xjnXuRPKE/88Iru/LdfoRyEHYA9nFQf4UKpg= +cloud.google.com/go/edgecontainer v0.1.0/go.mod h1:WgkZ9tp10bFxqO8BLPqv2LlfmQF1X8lZqwW4r1BTajk= +cloud.google.com/go/edgecontainer v0.2.0/go.mod h1:RTmLijy+lGpQ7BXuTDa4C4ssxyXT34NIuHIgKuP4s5w= +cloud.google.com/go/firestore v1.8.0/go.mod h1:r3KB8cAdRIe8znzoPWLw8S6gpDVd9treohhn8b09424= +cloud.google.com/go/functions v1.6.0/go.mod h1:3H1UA3qiIPRWD7PeZKLvHZ9SaQhR26XIJcC0A5GbvAk= +cloud.google.com/go/functions v1.7.0/go.mod h1:+d+QBcWM+RsrgZfV9xo6KfA1GlzJfxcfZcRPEhDDfzg= +cloud.google.com/go/gaming v1.5.0/go.mod h1:ol7rGcxP/qHTRQE/RO4bxkXq+Fix0j6D4LFPzYTIrDM= +cloud.google.com/go/gaming v1.6.0/go.mod h1:YMU1GEvA39Qt3zWGyAVA9bpYz/yAhTvaQ1t2sK4KPUA= +cloud.google.com/go/gkeconnect v0.5.0/go.mod h1:c5lsNAg5EwAy7fkqX/+goqFsU1Da/jQFqArp+wGNr/o= +cloud.google.com/go/gkeconnect v0.6.0/go.mod h1:Mln67KyU/sHJEBY8kFZ0xTeyPtzbq9StAVvEULYK16A= +cloud.google.com/go/gkehub v0.9.0/go.mod h1:WYHN6WG8w9bXU0hqNxt8rm5uxnk8IH+lPY9J2TV7BK0= +cloud.google.com/go/gkehub v0.10.0/go.mod h1:UIPwxI0DsrpsVoWpLB0stwKCP+WFVG9+y977wO+hBH0= +cloud.google.com/go/grafeas v0.2.0/go.mod h1:KhxgtF2hb0P191HlY5besjYm6MqTSTj3LSI+M+ByZHc= +cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= +cloud.google.com/go/language v1.4.0/go.mod h1:F9dRpNFQmJbkaop6g0JhSBXCNlO90e1KWx5iDdxbWic= +cloud.google.com/go/language v1.6.0/go.mod h1:6dJ8t3B+lUYfStgls25GusK04NLh3eDLQnWM3mdEbhI= +cloud.google.com/go/lifesciences v0.5.0/go.mod h1:3oIKy8ycWGPUyZDR/8RNnTOYevhaMLqh5vLUXs9zvT8= +cloud.google.com/go/lifesciences v0.6.0/go.mod h1:ddj6tSX/7BOnhxCSd3ZcETvtNr8NZ6t/iPhY2Tyfu08= +cloud.google.com/go/mediatranslation v0.5.0/go.mod h1:jGPUhGTybqsPQn91pNXw0xVHfuJ3leR1wj37oU3y1f4= 
+cloud.google.com/go/mediatranslation v0.6.0/go.mod h1:hHdBCTYNigsBxshbznuIMFNe5QXEowAuNmmC7h8pu5w= +cloud.google.com/go/memcache v1.4.0/go.mod h1:rTOfiGZtJX1AaFUrOgsMHX5kAzaTQ8azHiuDoTPzNsE= +cloud.google.com/go/memcache v1.5.0/go.mod h1:dk3fCK7dVo0cUU2c36jKb4VqKPS22BTkf81Xq617aWM= +cloud.google.com/go/metastore v1.5.0/go.mod h1:2ZNrDcQwghfdtCwJ33nM0+GrBGlVuh8rakL3vdPY3XY= +cloud.google.com/go/metastore v1.6.0/go.mod h1:6cyQTls8CWXzk45G55x57DVQ9gWg7RiH65+YgPsNh9s= +cloud.google.com/go/networkconnectivity v1.4.0/go.mod h1:nOl7YL8odKyAOtzNX73/M5/mGZgqqMeryi6UPZTk/rA= +cloud.google.com/go/networkconnectivity v1.5.0/go.mod h1:3GzqJx7uhtlM3kln0+x5wyFvuVH1pIBJjhCpjzSt75o= +cloud.google.com/go/networksecurity v0.5.0/go.mod h1:xS6fOCoqpVC5zx15Z/MqkfDwH4+m/61A3ODiDV1xmiQ= +cloud.google.com/go/networksecurity v0.6.0/go.mod h1:Q5fjhTr9WMI5mbpRYEbiexTzROf7ZbDzvzCrNl14nyU= +cloud.google.com/go/notebooks v1.2.0/go.mod h1:9+wtppMfVPUeJ8fIWPOq1UnATHISkGXGqTkxeieQ6UY= +cloud.google.com/go/notebooks v1.3.0/go.mod h1:bFR5lj07DtCPC7YAAJ//vHskFBxA5JzYlH68kXVdk34= +cloud.google.com/go/osconfig v1.7.0/go.mod h1:oVHeCeZELfJP7XLxcBGTMBvRO+1nQ5tFG9VQTmYS2Fs= +cloud.google.com/go/osconfig v1.8.0/go.mod h1:EQqZLu5w5XA7eKizepumcvWx+m8mJUhEwiPqWiZeEdg= +cloud.google.com/go/oslogin v1.4.0/go.mod h1:YdgMXWRaElXz/lDk1Na6Fh5orF7gvmJ0FGLIs9LId4E= +cloud.google.com/go/oslogin v1.5.0/go.mod h1:D260Qj11W2qx/HVF29zBg+0fd6YCSjSqLUkY/qEenQU= +cloud.google.com/go/phishingprotection v0.5.0/go.mod h1:Y3HZknsK9bc9dMi+oE8Bim0lczMU6hrX0UpADuMefr0= +cloud.google.com/go/phishingprotection v0.6.0/go.mod h1:9Y3LBLgy0kDTcYET8ZH3bq/7qni15yVUoAxiFxnlSUA= +cloud.google.com/go/privatecatalog v0.5.0/go.mod h1:XgosMUvvPyxDjAVNDYxJ7wBW8//hLDDYmnsNcMGq1K0= +cloud.google.com/go/privatecatalog v0.6.0/go.mod h1:i/fbkZR0hLN29eEWiiwue8Pb+GforiEIBnV9yrRUOKI= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod 
h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/recaptchaenterprise v1.3.1/go.mod h1:OdD+q+y4XGeAlxRaMn1Y7/GveP6zmq76byL6tjPE7d4= +cloud.google.com/go/recaptchaenterprise/v2 v2.1.0/go.mod h1:w9yVqajwroDNTfGuhmOjPDN//rZGySaf6PtFVcSCa7o= +cloud.google.com/go/recaptchaenterprise/v2 v2.2.0/go.mod h1:/Zu5jisWGeERrd5HnlS3EUGb/D335f9k51B/FVil0jk= +cloud.google.com/go/recaptchaenterprise/v2 v2.3.0/go.mod h1:O9LwGCjrhGHBQET5CA7dd5NwwNQUErSgEDit1DLNTdo= +cloud.google.com/go/recommendationengine v0.5.0/go.mod h1:E5756pJcVFeVgaQv3WNpImkFP8a+RptV6dDLGPILjvg= +cloud.google.com/go/recommendationengine v0.6.0/go.mod h1:08mq2umu9oIqc7tDy8sx+MNJdLG0fUi3vaSVbztHgJ4= +cloud.google.com/go/recommender v1.5.0/go.mod h1:jdoeiBIVrJe9gQjwd759ecLJbxCDED4A6p+mqoqDvTg= +cloud.google.com/go/recommender v1.6.0/go.mod h1:+yETpm25mcoiECKh9DEScGzIRyDKpZ0cEhWGo+8bo+c= +cloud.google.com/go/redis v1.7.0/go.mod h1:V3x5Jq1jzUcg+UNsRvdmsfuFnit1cfe3Z/PGyq/lm4Y= +cloud.google.com/go/redis v1.8.0/go.mod h1:Fm2szCDavWzBk2cDKxrkmWBqoCiL1+Ctwq7EyqBCA/A= +cloud.google.com/go/retail v1.8.0/go.mod h1:QblKS8waDmNUhghY2TI9O3JLlFk8jybHeV4BF19FrE4= +cloud.google.com/go/retail v1.9.0/go.mod h1:g6jb6mKuCS1QKnH/dpu7isX253absFl6iE92nHwlBUY= +cloud.google.com/go/scheduler v1.4.0/go.mod h1:drcJBmxF3aqZJRhmkHQ9b3uSSpQoltBPGPxGAWROx6s= +cloud.google.com/go/scheduler v1.5.0/go.mod h1:ri073ym49NW3AfT6DZi21vLZrG07GXr5p3H1KxN5QlI= +cloud.google.com/go/secretmanager v1.6.0/go.mod h1:awVa/OXF6IiyaU1wQ34inzQNc4ISIDIrId8qE5QGgKA= +cloud.google.com/go/security v1.5.0/go.mod h1:lgxGdyOKKjHL4YG3/YwIL2zLqMFCKs0UbQwgyZmfJl4= +cloud.google.com/go/security v1.7.0/go.mod h1:mZklORHl6Bg7CNnnjLH//0UlAlaXqiG7Lb9PsPXLfD0= +cloud.google.com/go/security v1.8.0/go.mod h1:hAQOwgmaHhztFhiQ41CjDODdWP0+AE1B3sX4OFlq+GU= +cloud.google.com/go/securitycenter 
v1.13.0/go.mod h1:cv5qNAqjY84FCN6Y9z28WlkKXyWsgLO832YiWwkCWcU= +cloud.google.com/go/securitycenter v1.14.0/go.mod h1:gZLAhtyKv85n52XYWt6RmeBdydyxfPeTrpToDPw4Auc= +cloud.google.com/go/servicedirectory v1.4.0/go.mod h1:gH1MUaZCgtP7qQiI+F+A+OpeKF/HQWgtAddhTbhL2bs= +cloud.google.com/go/servicedirectory v1.5.0/go.mod h1:QMKFL0NUySbpZJ1UZs3oFAmdvVxhhxB6eJ/Vlp73dfg= +cloud.google.com/go/speech v1.6.0/go.mod h1:79tcr4FHCimOp56lwC01xnt/WPJZc4v3gzyT7FoBkCM= +cloud.google.com/go/speech v1.7.0/go.mod h1:KptqL+BAQIhMsj1kOP2la5DSEEerPDuOP/2mmkhHhZQ= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= +cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= +cloud.google.com/go/storage v1.23.0/go.mod h1:vOEEDNFnciUMhBeT6hsJIn3ieU5cFRmzeLgDvXzfIXc= +cloud.google.com/go/talent v1.1.0/go.mod h1:Vl4pt9jiHKvOgF9KoZo6Kob9oV4lwd/ZD5Cto54zDRw= +cloud.google.com/go/talent v1.2.0/go.mod h1:MoNF9bhFQbiJ6eFD3uSsg0uBALw4n4gaCaEjBw9zo8g= +cloud.google.com/go/videointelligence v1.6.0/go.mod h1:w0DIDlVRKtwPCn/C4iwZIJdvC69yInhW0cfi+p546uU= +cloud.google.com/go/videointelligence v1.7.0/go.mod h1:k8pI/1wAhjznARtVT9U1llUaFNPh7muw8QyOUpavru4= +cloud.google.com/go/vision v1.2.0/go.mod h1:SmNwgObm5DpFBme2xpyOyasvBc1aPdjvMk2bBk0tKD0= +cloud.google.com/go/vision/v2 v2.2.0/go.mod h1:uCdV4PpN1S0jyCyq8sIM42v2Y6zOLkZs+4R9LrGYwFo= +cloud.google.com/go/vision/v2 v2.3.0/go.mod h1:UO61abBx9QRMFkNBbf1D8B1LXdS2cGiiCRx0vSpZoUo= +cloud.google.com/go/webrisk v1.4.0/go.mod 
h1:Hn8X6Zr+ziE2aNd8SliSDWpEnSS1u4R9+xXZmFiHmGE= +cloud.google.com/go/webrisk v1.5.0/go.mod h1:iPG6fr52Tv7sGk0H6qUFzmL3HHZev1htXuWDEEsqMTg= +cloud.google.com/go/workflows v1.6.0/go.mod h1:6t9F5h/unJz41YqfBmqSASJSXccBLtD1Vwf+KmJENM0= +cloud.google.com/go/workflows v1.7.0/go.mod h1:JhSrZuVZWuiDfKEFxU0/F1PQjmpnpcoISEXH2bcHC3M= +connectrpc.com/connect v1.18.1 h1:PAg7CjSAGvscaf6YZKUefjoih5Z/qYkyaTrBW8xvYPw= +connectrpc.com/connect v1.18.1/go.mod h1:0292hj1rnx8oFrStN7cB4jjVBeqs+Yx5yDIC2prWDO8= +connectrpc.com/otelconnect v0.7.2 h1:WlnwFzaW64dN06JXU+hREPUGeEzpz3Acz2ACOmN8cMI= +connectrpc.com/otelconnect v0.7.2/go.mod h1:JS7XUKfuJs2adhCnXhNHPHLz6oAaZniCJdSF00OZSew= +cosmossdk.io/api v0.9.2 h1:9i9ptOBdmoIEVEVWLtYYHjxZonlF/aOVODLFaxpmNtg= +cosmossdk.io/api v0.9.2/go.mod h1:CWt31nVohvoPMTlPv+mMNCtC0a7BqRdESjCsstHcTkU= +cosmossdk.io/collections v1.3.1 h1:09e+DUId2brWsNOQ4nrk+bprVmMUaDH9xvtZkeqIjVw= +cosmossdk.io/collections v1.3.1/go.mod h1:ynvkP0r5ruAjbmedE+vQ07MT6OtJ0ZIDKrtJHK7Q/4c= +cosmossdk.io/core v0.11.3 h1:mei+MVDJOwIjIniaKelE3jPDqShCc/F4LkNNHh+4yfo= +cosmossdk.io/core v0.11.3/go.mod h1:9rL4RE1uDt5AJ4Tg55sYyHWXA16VmpHgbe0PbJc6N2Y= +cosmossdk.io/depinject v1.2.1 h1:eD6FxkIjlVaNZT+dXTQuwQTKZrFZ4UrfCq1RKgzyhMw= +cosmossdk.io/depinject v1.2.1/go.mod h1:lqQEycz0H2JXqvOgVwTsjEdMI0plswI7p6KX+MVqFOM= +cosmossdk.io/errors v1.0.2 h1:wcYiJz08HThbWxd/L4jObeLaLySopyyuUFB5w4AGpCo= +cosmossdk.io/errors v1.0.2/go.mod h1:0rjgiHkftRYPj//3DrD6y8hcm40HcPv/dR4R/4efr0k= +cosmossdk.io/log v1.6.1 h1:YXNwAgbDwMEKwDlCdH8vPcoggma48MgZrTQXCfmMBeI= +cosmossdk.io/log v1.6.1/go.mod h1:gMwsWyyDBjpdG9u2avCFdysXqxq28WJapJvu+vF1y+E= +cosmossdk.io/math v1.5.3 h1:WH6tu6Z3AUCeHbeOSHg2mt9rnoiUWVWaQ2t6Gkll96U= +cosmossdk.io/math v1.5.3/go.mod h1:uqcZv7vexnhMFJF+6zh9EWdm/+Ylyln34IvPnBauPCQ= +cosmossdk.io/schema v1.1.0 h1:mmpuz3dzouCoyjjcMcA/xHBEmMChN+EHh8EHxHRHhzE= +cosmossdk.io/schema v1.1.0/go.mod h1:Gb7pqO+tpR+jLW5qDcNOSv0KtppYs7881kfzakguhhI= +cosmossdk.io/store v1.1.2 
h1:3HOZG8+CuThREKv6cn3WSohAc6yccxO3hLzwK6rBC7o= +cosmossdk.io/store v1.1.2/go.mod h1:60rAGzTHevGm592kFhiUVkNC9w7gooSEn5iUBPzHQ6A= +cosmossdk.io/x/tx v0.14.0 h1:hB3O25kIcyDW/7kMTLMaO8Ripj3yqs5imceVd6c/heA= +cosmossdk.io/x/tx v0.14.0/go.mod h1:Tn30rSRA1PRfdGB3Yz55W4Sn6EIutr9xtMKSHij+9PM= +dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s= +dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +filippo.io/edwards25519 v1.1.1 h1:YpjwWWlNmGIDyXOn8zLzqiD+9TyIlPhGFG96P39uBpw= +filippo.io/edwards25519 v1.1.1/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= +github.com/4meepo/tagalign v1.4.1 h1:GYTu2FaPGOGb/xJalcqHeD4il5BiCywyEYZOA55P6J4= +github.com/4meepo/tagalign v1.4.1/go.mod h1:2H9Yu6sZ67hmuraFgfZkNcg5Py9Ch/Om9l2K/2W1qS4= +github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMbk2FiG/kXiLl8BRyzTWDw7gX/Hz7Dd5eDMs= +github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4= +github.com/Abirdcfly/dupword v0.1.3 h1:9Pa1NuAsZvpFPi9Pqkd93I7LIYRURj+A//dFd5tgBeE= +github.com/Abirdcfly/dupword v0.1.3/go.mod h1:8VbB2t7e10KRNdwTVoxdBaxla6avbhGzb8sCTygUMhw= +github.com/Antonboom/errname v1.0.0 h1:oJOOWR07vS1kRusl6YRSlat7HFnb3mSfMl6sDMRoTBA= +github.com/Antonboom/errname v1.0.0/go.mod h1:gMOBFzK/vrTiXN9Oh+HFs+e6Ndl0eTFbtsRTSRdXyGI= +github.com/Antonboom/nilnil v1.0.1 h1:C3Tkm0KUxgfO4Duk3PM+ztPncTFlOf0b2qadmS0s4xs= +github.com/Antonboom/nilnil v1.0.1/go.mod h1:CH7pW2JsRNFgEh8B2UaPZTEPhCMuFowP/e8Udp9Nnb0= +github.com/Antonboom/testifylint v1.5.2 h1:4s3Xhuv5AvdIgbd8wOOEeo0uZG7PbDKQyKY5lGoQazk= +github.com/Antonboom/testifylint v1.5.2/go.mod h1:vxy8VJ0bc6NavlYqjZfmp6EfqXMtBgQ4+mhCojwC1P8= +github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c h1:udKWzYgxTojEKWjV8V+WSxDXJ4NFATAsZjh8iIbsQIg= +github.com/Azure/go-ansiterm 
v0.0.0-20250102033503-faa5f7b0171c/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c h1:pxW6RcqyfI9/kWtOwnv/G+AzdKuy2ZrqINhenH4HyNs= +github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/Crocmagnon/fatcontext v0.7.1 h1:SC/VIbRRZQeQWj/TcQBS6JmrXcfA+BU4OGSVUt54PjM= +github.com/Crocmagnon/fatcontext v0.7.1/go.mod h1:1wMvv3NXEBJucFGfwOJBxSVWcoIO6emV215SMkW9MFU= +github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU= +github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU= +github.com/DataDog/datadog-go v3.2.0+incompatible h1:qSG2N4FghB1He/r2mFrWKCaL7dXCilEuNEeAn20fdD4= +github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= +github.com/DataDog/zstd v1.5.7 h1:ybO8RBeh29qrxIhCA9E8gKY6xfONU9T6G6aP9DTKfLE= +github.com/DataDog/zstd v1.5.7/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= +github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 h1:sHglBQTwgx+rWPdisA5ynNEsoARbiCBOyGcJM4/OzsM= +github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs= +github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.0 h1:/fTUt5vmbkAcMBt4YQiuC23cV0kEsN1MVMNqeOW43cU= +github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.0/go.mod h1:ONJg5sxcbsdQQ4pOW8TGdTidT2TMAUy/2Xhr8mrYaao= +github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= +github.com/Masterminds/semver/v3 v3.3.1 h1:QtNSWtVZ3nBfk8mAOu/B6v7FMJ+NHTIgUPi7rj+4nv4= +github.com/Masterminds/semver/v3 v3.3.1/go.mod 
h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= +github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/OpenPeeDeeP/depguard/v2 v2.2.0 h1:vDfG60vDtIuf0MEOhmLlLLSzqaRM8EMcgJPdp74zmpA= +github.com/OpenPeeDeeP/depguard/v2 v2.2.0/go.mod h1:CIzddKRvLBC4Au5aYP/i3nyaWQ+ClszLIuVocRiCYFQ= +github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNxpLfdw= +github.com/ProtonMail/go-crypto v1.1.6/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= +github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= +github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= +github.com/VividCortex/gohistogram v1.0.0 h1:6+hBz+qvs0JOrrNhhmR7lFxo5sINxBCGXrdtl/UvroE= +github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g= +github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= +github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0= +github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k= +github.com/alecthomas/chroma v0.10.0 h1:7XDcGkCQopCNKjZHfYrNLraA+M7e0fMiJ/Mfikbfjek= +github.com/alecthomas/chroma v0.10.0/go.mod h1:jtJATyUxlIORhUOFNA9NZDWGAQ8wpxQQqNSB4rjA/1s= +github.com/alecthomas/go-check-sumtype v0.3.1 h1:u9aUvbGINJxLVXiFvHUlPEaD7VDULsrxJb4Aq31NLkU= +github.com/alecthomas/go-check-sumtype v0.3.1/go.mod h1:A8TSiN3UPRw3laIgWEUOHHLPa6/r9MtoigdlP5h3K/E= +github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc= +github.com/alecthomas/repr v0.4.0/go.mod 
h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/alexkohler/nakedret/v2 v2.0.5 h1:fP5qLgtwbx9EJE8dGEERT02YwS8En4r9nnZ71RK+EVU= +github.com/alexkohler/nakedret/v2 v2.0.5/go.mod h1:bF5i0zF2Wo2o4X4USt9ntUWve6JbFv02Ff4vlkmS/VU= +github.com/alexkohler/prealloc v1.0.0 h1:Hbq0/3fJPQhNkN0dR95AVrr6R7tou91y0uHG5pOcUuw= +github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cvfHR56wJVlKE= +github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw= +github.com/alingse/asasalint v0.0.11/go.mod h1:nCaoMhw7a9kSJObvQyVzNTPBDbNpdocqrSP7t/cW5+I= +github.com/alingse/nilnesserr v0.1.2 h1:Yf8Iwm3z2hUUrP4muWfW83DF4nE3r1xZ26fGWUKCZlo= +github.com/alingse/nilnesserr v0.1.2/go.mod h1:1xJPrXonEtX7wyTq8Dytns5P2hNzoWymVUIaKm4HNFg= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYWrPrQ= +github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= +github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/apache/thrift 
v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= +github.com/armon/go-metrics v0.4.0/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4= +github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= +github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/ashanbrown/forbidigo v1.6.0 h1:D3aewfM37Yb3pxHujIPSpTf6oQk9sc9WZi8gerOIVIY= +github.com/ashanbrown/forbidigo v1.6.0/go.mod h1:Y8j9jy9ZYAEHXdu723cUlraTqbzjKF1MUyfOKL+AjcU= +github.com/ashanbrown/makezero v1.2.0 h1:/2Lp1bypdmK9wDIq7uWBlDF1iMUpIIS4A+pF6C9IEUU= +github.com/ashanbrown/makezero v1.2.0/go.mod h1:dxlPhHbDMC6N6xICzFBSK+4njQDdK8euNO0qjQMtGY4= +github.com/atotto/clipboard v0.1.2/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI= +github.com/atotto/clipboard v0.1.4 
h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4= +github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI= +github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU= +github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g= +github.com/aymanbagabas/go-osc52 v1.0.3/go.mod h1:zT8H+Rk4VSabYN90pWyugflM3ZhpTZNC7cASDfUCdT4= +github.com/aymanbagabas/go-osc52 v1.2.1/go.mod h1:zT8H+Rk4VSabYN90pWyugflM3ZhpTZNC7cASDfUCdT4= +github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= +github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= +github.com/aymanbagabas/go-udiff v0.2.0 h1:TK0fH4MteXUDspT88n8CKzvK0X9O2xu9yQjWpi6yML8= +github.com/aymanbagabas/go-udiff v0.2.0/go.mod h1:RE4Ex0qsGkTAJoQdQQCA0uG+nAzJO/pI/QwceO5fgrA= +github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= +github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= +github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bgentry/speakeasy v0.2.0 h1:tgObeVOf8WAvtuAX6DhJ4xks4CFNwPDZiqzGqIHE51E= +github.com/bgentry/speakeasy v0.2.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bits-and-blooms/bitset v1.24.3 h1:Bte86SlO3lwPQqww+7BE9ZuUCKIjfqnG5jtEyqA9y9Y= 
+github.com/bits-and-blooms/bitset v1.24.3/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/bkielbasa/cyclop v1.2.3 h1:faIVMIGDIANuGPWH031CZJTi2ymOQBULs9H21HSMa5w= +github.com/bkielbasa/cyclop v1.2.3/go.mod h1:kHTwA9Q0uZqOADdupvcFJQtp/ksSnytRMe8ztxG8Fuo= +github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM= +github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ= +github.com/blizzy78/varnamelen v0.8.0 h1:oqSblyuQvFsW1hbBHh1zfwrKe3kcSj0rnXkKzsQ089M= +github.com/blizzy78/varnamelen v0.8.0/go.mod h1:V9TzQZ4fLJ1DSrjVDfl89H7aMnTvKkApdHeyESmyR7k= +github.com/bombsimon/wsl/v4 v4.5.0 h1:iZRsEvDdyhd2La0FVi5k6tYehpOR/R7qIUjmKk7N74A= +github.com/bombsimon/wsl/v4 v4.5.0/go.mod h1:NOQ3aLF4nD7N5YPXMruR6ZXDOAqLoM0GEpLwTdvmOSc= +github.com/breml/bidichk v0.3.2 h1:xV4flJ9V5xWTqxL+/PMFF6dtJPvZLPsyixAoPe8BGJs= +github.com/breml/bidichk v0.3.2/go.mod h1:VzFLBxuYtT23z5+iVkamXO386OB+/sVwZOpIj6zXGos= +github.com/breml/errchkjson v0.4.0 h1:gftf6uWZMtIa/Is3XJgibewBm2ksAQSY/kABDNFTAdk= +github.com/breml/errchkjson v0.4.0/go.mod h1:AuBOSTHyLSaaAFlWsRSuRBIroCh3eh7ZHh5YeelDIk8= +github.com/briandowns/spinner v1.23.2 h1:Zc6ecUnI+YzLmJniCfDNaMbW0Wid1d5+qcTq4L2FW8w= +github.com/briandowns/spinner v1.23.2/go.mod h1:LaZeM4wm2Ywy6vO571mvhQNRcWfRUnXOs0RcKV0wYKM= +github.com/btcsuite/btcd/btcec/v2 v2.3.5 h1:dpAlnAwmT1yIBm3exhT1/8iUSD98RDJM5vqJVQDQLiU= +github.com/btcsuite/btcd/btcec/v2 v2.3.5/go.mod h1:m22FrOAiuxl/tht9wIqAoGHcbnCCaPWyauO8y2LGGtQ= +github.com/btcsuite/btcd/btcutil v1.1.6 h1:zFL2+c3Lb9gEgqKNzowKUPQNb8jV7v5Oaodi/AYFd6c= +github.com/btcsuite/btcd/btcutil v1.1.6/go.mod h1:9dFymx8HpuLqBnsPELrImQeTQfKBQqzqGbbV3jK55aE= +github.com/bufbuild/buf v1.56.0 h1:Z0eK+npK01FB924rtDVMOJtvBh9c421mYLo9QhUP3pM= +github.com/bufbuild/buf v1.56.0/go.mod h1:uDNMYshCJIXL99OQc71SDeFiDqOse9sSHXPpZlrqElw= +github.com/bufbuild/protocompile v0.14.1 h1:iA73zAf/fyljNjQKwYzUHD6AD4R8KMasmwa/FBatYVw= +github.com/bufbuild/protocompile 
v0.14.1/go.mod h1:ppVdAIhbr2H8asPk6k4pY7t9zB1OU5DoEw9xY/FUi1c= +github.com/bufbuild/protoplugin v0.0.0-20250218205857-750e09ce93e1 h1:V1xulAoqLqVg44rY97xOR+mQpD2N+GzhMHVwJ030WEU= +github.com/bufbuild/protoplugin v0.0.0-20250218205857-750e09ce93e1/go.mod h1:c5D8gWRIZ2HLWO3gXYTtUfw/hbJyD8xikv2ooPxnklQ= +github.com/buger/jsonparser v1.1.2 h1:frqHqw7otoVbk5M8LlE/L7HTnIq2v9RX6EJ48i9AxJk= +github.com/buger/jsonparser v1.1.2/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= +github.com/butuzov/ireturn v0.3.1 h1:mFgbEI6m+9W8oP/oDdfA34dLisRFCj2G6o/yiI1yZrY= +github.com/butuzov/ireturn v0.3.1/go.mod h1:ZfRp+E7eJLC0NQmk1Nrm1LOrn/gQlOykv+cVPdiXH5M= +github.com/butuzov/mirror v1.3.0 h1:HdWCXzmwlQHdVhwvsfBb2Au0r3HyINry3bDWLYXiKoc= +github.com/butuzov/mirror v1.3.0/go.mod h1:AEij0Z8YMALaq4yQj9CPPVYOyJQyiexpQEQgihajRfI= +github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= +github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/calmh/randomart v1.1.0 h1:evl+iwc10LXtHdMZhzLxmsCQVmWnkXs44SbC6Uk0Il8= +github.com/calmh/randomart v1.1.0/go.mod h1:DQUbPVyP+7PAs21w/AnfMKG5NioxS3TbZ2F9MSK/jFM= +github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ= +github.com/catenacyber/perfsprint v0.8.1 h1:bGOHuzHe0IkoGeY831RW4aSlt1lPRd3WRAScSWOaV7E= +github.com/catenacyber/perfsprint v0.8.1/go.mod h1:/wclWYompEyjUD2FuIIDVKNkqz7IgBIWXIH3V0Zol50= +github.com/ccojocar/zxcvbn-go v1.0.2 h1:na/czXU8RrhXO4EZme6eQJLR4PzcGsahsBOAwU6I3Vg= +github.com/ccojocar/zxcvbn-go v1.0.2/go.mod h1:g1qkXtUSvHP8lhHp5GrSmTz6uWALGRMQdw6Qnz/hi60= 
+github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= +github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/charithe/durationcheck v0.0.10 h1:wgw73BiocdBDQPik+zcEoBG/ob8uyBHf2iyoHGPf5w4= +github.com/charithe/durationcheck v0.0.10/go.mod h1:bCWXb7gYRysD1CU3C+u4ceO49LoGOY1C1L6uouGNreQ= +github.com/charmbracelet/bubbles v0.7.5/go.mod h1:IRTORFvhEI6OUH7WhN2Ks8Z8miNGimk1BE6cmHijOkM= +github.com/charmbracelet/bubbles v0.15.0/go.mod h1:Y7gSFbBzlMpUDR/XM9MhZI374Q+1p1kluf1uLl8iK74= +github.com/charmbracelet/bubbles v0.21.0 h1:9TdC97SdRVg/1aaXNVWfFH3nnLAwOXr8Fn6u6mfQdFs= +github.com/charmbracelet/bubbles v0.21.0/go.mod h1:HF+v6QUR4HkEpz62dx7ym2xc71/KBHg+zKwJtMw+qtg= +github.com/charmbracelet/bubbletea v0.12.2/go.mod h1:3gZkYELUOiEUOp0bTInkxguucy/xRbGSOcbMs1geLxg= +github.com/charmbracelet/bubbletea v0.23.1/go.mod h1:JAfGK/3/pPKHTnAS8JIE2u9f61BjWTQY57RbT25aMXU= +github.com/charmbracelet/bubbletea v0.23.2/go.mod h1:FaP3WUivcTM0xOKNmhciz60M6I+weYLF76mr1JyI7sM= +github.com/charmbracelet/bubbletea v1.3.5 h1:JAMNLTbqMOhSwoELIr0qyP4VidFq72/6E9j7HHmRKQc= +github.com/charmbracelet/bubbletea v1.3.5/go.mod 
h1:TkCnmH+aBd4LrXhXcqrKiYwRs7qyQx5rBgH5fVY3v54= +github.com/charmbracelet/charm v0.8.7 h1:FJ9b7IxWUWHOPR72zS/QJLEqtudOB2Mwfc+Sir0eZR8= +github.com/charmbracelet/charm v0.8.7/go.mod h1:ApJYwJljEjODkOYJgFDzbUqztLrCWQct9zyPD+xcVr4= +github.com/charmbracelet/colorprofile v0.3.1 h1:k8dTHMd7fgw4bnFd7jXTLZrSU/CQrKnL3m+AxCzDz40= +github.com/charmbracelet/colorprofile v0.3.1/go.mod h1:/GkGusxNs8VB/RSOh3fu0TJmQ4ICMMPApIIVn0KszZ0= +github.com/charmbracelet/fang v0.4.0 h1:boBxmdcFghTeotqkD2itXi7SMBozdIlcslRqjboSJDg= +github.com/charmbracelet/fang v0.4.0/go.mod h1:9gCUAHmVx5BwSafeyNr3GI0GgvlB1WYjL21SkPp1jyU= +github.com/charmbracelet/glamour v0.6.0 h1:wi8fse3Y7nfcabbbDuwolqTqMQPMnVPeZhDM273bISc= +github.com/charmbracelet/glamour v0.6.0/go.mod h1:taqWV4swIMMbWALc0m7AfE9JkPSU8om2538k9ITBxOc= +github.com/charmbracelet/glow v1.5.1 h1:o1mwT4xXXpkfUhJG6euQayNxLZf9yKctOCNHLztrwdE= +github.com/charmbracelet/glow v1.5.1/go.mod h1:rGgop0a2/4gXWiAxUW1iEQseoE+9Ctpb7M4sM9cY9CU= +github.com/charmbracelet/harmonica v0.2.0/go.mod h1:KSri/1RMQOZLbw7AHqgcBycp8pgJnQMYYT8QZRqZ1Ao= +github.com/charmbracelet/lipgloss v0.6.0/go.mod h1:tHh2wr34xcHjC2HCXIlGSG1jaDF0S0atAUvBMP6Ppuk= +github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY= +github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30= +github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3 h1:W6DpZX6zSkZr0iFq6JVh1vItLoxfYtNlaxOJtWp8Kis= +github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3/go.mod h1:65HTtKURcv/ict9ZQhr6zT84JqIjMcJbyrZYHHKNfKA= +github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE= +github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q= +github.com/charmbracelet/x/cellbuf v0.0.13 h1:/KBBKHuVRbq1lYx5BzEHBAFBP8VcQzJejZ/IA3iR28k= +github.com/charmbracelet/x/cellbuf v0.0.13/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs= +github.com/charmbracelet/x/exp/charmtone 
v0.0.0-20250603201427-c31516f43444 h1:IJDiTgVE56gkAGfq0lBEloWgkXMk4hl/bmuPoicI4R0= +github.com/charmbracelet/x/exp/charmtone v0.0.0-20250603201427-c31516f43444/go.mod h1:T9jr8CzFpjhFVHjNjKwbAD7KwBNyFnj2pntAO7F2zw0= +github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91 h1:payRxjMjKgx2PaCWLZ4p3ro9y97+TVLZNaRZgJwSVDQ= +github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U= +github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= +github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= +github.com/chavacava/garif v0.1.0 h1:2JHa3hbYf5D9dsgseMKAmc/MZ109otzgNFk5s87H9Pc= +github.com/chavacava/garif v0.1.0/go.mod h1:XMyYCkEL58DF0oyW4qDjjnPWONs2HBqYKI+UIPD+Gww= +github.com/chigopher/pathlib v0.19.1 h1:RoLlUJc0CqBGwq239cilyhxPNLXTK+HXoASGyGznx5A= +github.com/chigopher/pathlib v0.19.1/go.mod h1:tzC1dZLW8o33UQpWkNkhvPwL5n4yyFRFm/jL1YGWFvY= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/readline v1.5.1 h1:upd/6fQk4src78LMRzh5vItIt361/o4uq553V8B5sGI= +github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObkaSkeBlk= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/cilium/ebpf v0.16.0 h1:+BiEnHL6Z7lXnlGUsXQPPAE7+kenAd4ES8MQ5min0Ok= +github.com/cilium/ebpf v0.16.0/go.mod h1:L7u2Blt2jMM/vLAVgjxluxtBKlz3/GWjB0dMOEngfwE= +github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= +github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= +github.com/ckaznocha/intrange v0.3.0 h1:VqnxtK32pxgkhJgYQEeOArVidIPg+ahLP7WBOXZd5ZY= 
+github.com/ckaznocha/intrange v0.3.0/go.mod h1:+I/o2d2A1FBHgGELbGxzIcyd3/9l9DuwjM8FsbSS3Lo= +github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cloudflare/circl v1.6.3 h1:9GPOhQGF9MCYUeXyMYlqTR6a5gTrgR/fBLXvUgtVcg8= +github.com/cloudflare/circl v1.6.3/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4= +github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= +github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= +github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= +github.com/cockroachdb/datadriven v1.0.3-0.20230413201302-be42291fc80f h1:otljaYPt5hWxV3MUfO5dFPFiOXg9CyG5/kCfayTqsJ4= +github.com/cockroachdb/datadriven 
v1.0.3-0.20230413201302-be42291fc80f/go.mod h1:a9RdTaap04u637JoCzcUoIcDmvwSUtcUFtT/C3kJlTU= +github.com/cockroachdb/errors v1.12.0 h1:d7oCs6vuIMUQRVbi6jWWWEJZahLCfJpnJSVobd1/sUo= +github.com/cockroachdb/errors v1.12.0/go.mod h1:SvzfYNNBshAVbZ8wzNc/UPK3w1vf0dKDUP41ucAIf7g= +github.com/cockroachdb/fifo v0.0.0-20240606204812-0bbfbd93a7ce h1:giXvy4KSc/6g/esnpM7Geqxka4WSqI1SZc7sMJFd3y4= +github.com/cockroachdb/fifo v0.0.0-20240606204812-0bbfbd93a7ce/go.mod h1:9/y3cnZ5GKakj/H4y9r9GTjCvAFta7KLgSHPJJYc52M= +github.com/cockroachdb/logtags v0.0.0-20241215232642-bb51bb14a506 h1:ASDL+UJcILMqgNeV5jiqR4j+sTuvQNHdf2chuKj1M5k= +github.com/cockroachdb/logtags v0.0.0-20241215232642-bb51bb14a506/go.mod h1:Mw7HqKr2kdtu6aYGn3tPmAftiP3QPX63LdK/zcariIo= +github.com/cockroachdb/pebble v1.1.5 h1:5AAWCBWbat0uE0blr8qzufZP5tBjkRyy/jWe1QWLnvw= +github.com/cockroachdb/pebble v1.1.5/go.mod h1:17wO9el1YEigxkP/YtV8NtCivQDgoCyBg5c4VR/eOWo= +github.com/cockroachdb/redact v1.1.6 h1:zXJBwDZ84xJNlHl1rMyCojqyIxv+7YUpQiJLQ7n4314= +github.com/cockroachdb/redact v1.1.6/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= +github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 h1:zuQyyAKVxetITBuuhv3BI9cMrmStnpT18zmgmTxunpo= +github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06/go.mod h1:7nc4anLGjupUW/PeY5qiNYsdNXj7zopG+eqsS7To5IQ= +github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= +github.com/cometbft/cometbft v0.38.21 h1:qcIJSH9LiwU5s6ZgKR5eRbsLNucbubfraDs5bzgjtOI= +github.com/cometbft/cometbft v0.38.21/go.mod h1:UCu8dlHqvkAsmAFmWDRWNZJPlu6ya2fTWZlDrWsivwo= +github.com/cometbft/cometbft-db v0.14.1 h1:SxoamPghqICBAIcGpleHbmoPqy+crij/++eZz3DlerQ= +github.com/cometbft/cometbft-db v0.14.1/go.mod h1:KHP1YghilyGV/xjD5DP3+2hyigWx0WTp9X+0Gnx0RxQ= +github.com/containerd/console v1.0.1/go.mod h1:XUsP6YE/mKtz6bxc+I8UiKKTP04qjQL4qcS3XoQ5xkw= +github.com/containerd/console v1.0.3/go.mod 
h1:7LqA/THxQ86k76b8c/EMSiaJ3h1eZkMkXar0TQ1gf3U= +github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= +github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= +github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/stargz-snapshotter/estargz v0.17.0 h1:+TyQIsR/zSFI1Rm31EQBwpAA1ovYgIKHy7kctL3sLcE= +github.com/containerd/stargz-snapshotter/estargz v0.17.0/go.mod h1:s06tWAiJcXQo9/8AReBCIo/QxcXFZ2n4qfsRnpl71SM= +github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= +github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/cosiner/argv v0.1.0 h1:BVDiEL32lwHukgJKP87btEPenzrrHUjajs/8yzaqcXg= +github.com/cosiner/argv v0.1.0/go.mod h1:EusR6TucWKX+zFgtdUsKT2Cvg45K5rtpCcWz4hK06d8= 
+github.com/cosmos/btcutil v1.0.5 h1:t+ZFcX77LpKtDBhjucvnOH8C2l2ioGsBNEQ3jef8xFk= +github.com/cosmos/btcutil v1.0.5/go.mod h1:IyB7iuqZMJlthe2tkIFL33xPyzbFYP0XVdS8P5lUPis= +github.com/cosmos/cosmos-db v1.1.3 h1:7QNT77+vkefostcKkhrzDK9uoIEryzFrU9eoMeaQOPY= +github.com/cosmos/cosmos-db v1.1.3/go.mod h1:kN+wGsnwUJZYn8Sy5Q2O0vCYA99MJllkKASbs6Unb9U= +github.com/cosmos/cosmos-proto v1.0.0-beta.5 h1:eNcayDLpip+zVLRLYafhzLvQlSmyab+RC5W7ZfmxJLA= +github.com/cosmos/cosmos-proto v1.0.0-beta.5/go.mod h1:hQGLpiIUloJBMdQMMWb/4wRApmI9hjHH05nefC0Ojec= +github.com/cosmos/cosmos-sdk v0.53.6 h1:aJeInld7rbsHtH1qLHu2aZJF9t40mGlqp3ylBLDT0HI= +github.com/cosmos/cosmos-sdk v0.53.6/go.mod h1:N6YuprhAabInbT3YGumGDKONbvPX5dNro7RjHvkQoKE= +github.com/cosmos/go-bip39 v1.0.0 h1:pcomnQdrdH22njcAatO0yWojsUnCO3y2tNoV1cb6hHY= +github.com/cosmos/go-bip39 v1.0.0/go.mod h1:RNJv0H/pOIVgxw6KS7QeX2a0Uo0aKUlfhZ4xuwvCdJw= +github.com/cosmos/gogogateway v1.2.0 h1:Ae/OivNhp8DqBi/sh2A8a1D0y638GpL3tkmLQAiKxTE= +github.com/cosmos/gogogateway v1.2.0/go.mod h1:iQpLkGWxYcnCdz5iAdLcRBSw3h7NXeOkZ4GUkT+tbFI= +github.com/cosmos/gogoproto v1.4.2/go.mod h1:cLxOsn1ljAHSV527CHOtaIP91kK6cCrZETRBrkzItWU= +github.com/cosmos/gogoproto v1.7.2 h1:5G25McIraOC0mRFv9TVO139Uh3OklV2hczr13KKVHCA= +github.com/cosmos/gogoproto v1.7.2/go.mod h1:8S7w53P1Y1cHwND64o0BnArT6RmdgIvsBuco6uTllsk= +github.com/cosmos/iavl v1.2.2 h1:qHhKW3I70w+04g5KdsdVSHRbFLgt3yY3qTMd4Xa4rC8= +github.com/cosmos/iavl v1.2.2/go.mod h1:GiM43q0pB+uG53mLxLDzimxM9l/5N9UuSY3/D0huuVw= +github.com/cosmos/ics23/go v0.11.0 h1:jk5skjT0TqX5e5QJbEnwXIS2yI2vnmLOgpQPeM5RtnU= +github.com/cosmos/ics23/go v0.11.0/go.mod h1:A8OjxPE67hHST4Icw94hOxxFEJMBG031xIGF/JHNIY0= +github.com/cosmos/keyring v1.2.0 h1:8C1lBP9xhImmIabyXW4c3vFjjLiBdGCmfLUfeZlV1Yo= +github.com/cosmos/keyring v1.2.0/go.mod h1:fc+wB5KTk9wQ9sDx0kFXB3A0MaeGHM9AwRStKOQ5vOA= +github.com/cosmos/ledger-cosmos-go v1.0.0 h1:jNKW89nPf0vR0EkjHG8Zz16h6p3zqwYEOxlHArwgYtw= +github.com/cosmos/ledger-cosmos-go v1.0.0/go.mod 
h1:mGaw2wDOf+Z6SfRJsMGxU9DIrBa4du0MAiPlpPhLAOE= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= +github.com/cpuguy83/go-md2man/v2 v2.0.7 h1:zbFlGlXEAKlwXpmvle3d8Oe3YnkKIK4xSRTd3sHPnBo= +github.com/cpuguy83/go-md2man/v2 v2.0.7/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.20 h1:VIPb/a2s17qNeQgDnkfZC35RScx+blkKF8GV68n80J4= +github.com/creack/pty v1.1.20/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= +github.com/curioswitch/go-reassign v0.3.0 h1:dh3kpQHuADL3cobV/sSGETA8DOv457dwl+fbBAhrQPs= +github.com/curioswitch/go-reassign v0.3.0/go.mod h1:nApPCCTtqLJN/s8HfItCcKV0jIPwluBOvZP+dsJGA88= +github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s= +github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= +github.com/daixiang0/gci v0.13.5 h1:kThgmH1yBmZSBCh1EJVxQ7JsHpm5Oms0AMed/0LaH4c= +github.com/daixiang0/gci v0.13.5/go.mod h1:12etP2OniiIdP4q+kjUGrC/rUagga7ODbqsom5Eo5Yk= +github.com/danieljoos/wincred v1.2.2 h1:774zMFJrqaeYCK2W57BgAem/MLi6mtSE47MB6BOJ0i0= +github.com/danieljoos/wincred v1.2.2/go.mod h1:w7w4Utbrz8lqeMbDAK0lkNJUv5sAOkFi7nd/ogr0Uh8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc 
h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/decred/dcrd/crypto/blake256 v1.1.0 h1:zPMNGQCm0g4QTY27fOCorQW7EryeQ/U0x++OzVrdms8= +github.com/decred/dcrd/crypto/blake256 v1.1.0/go.mod h1:2OfgNZ5wDpcsFmHmCK5gZTPcCXqlm2ArzUIkw9czNJo= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 h1:NMZiJj8QnKe1LgsbDayM4UoHwbvwDRwnI3hwNaAHRnc= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0/go.mod h1:ZXNYxsqcloTdSy/rNShjYzMhyjf0LaoftYK0p+A3h40= +github.com/denis-tingaikin/go-header v0.5.0 h1:SRdnP5ZKvcO9KKRP1KJrhFR3RrlGuD+42t4429eC9k8= +github.com/denis-tingaikin/go-header v0.5.0/go.mod h1:mMenU5bWrok6Wl2UsZjy+1okegmwQ3UgWl4V1D8gjlY= +github.com/derekparker/trie v0.0.0-20230829180723-39f4de51ef7d h1:hUWoLdw5kvo2xCsqlsIBMvWUc1QCSsCYD2J2+Fg6YoU= +github.com/derekparker/trie v0.0.0-20230829180723-39f4de51ef7d/go.mod h1:C7Es+DLenIpPc9J6IYw4jrK0h7S9bKj4DNl8+KxGEXU= +github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f h1:U5y3Y5UE0w7amNe7Z5G/twsBW0KEalRQXZzf8ufSh9I= +github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f/go.mod h1:xH/i4TFMt8koVQZ6WFms69WAsDWr2XsYL3Hkl7jkoLE= +github.com/dgraph-io/badger/v4 v4.2.0 h1:kJrlajbXXL9DFTNuhhu9yCx7JJa4qpYWxtE8BzuWsEs= +github.com/dgraph-io/badger/v4 v4.2.0/go.mod h1:qfCqhPoWDFJRx1gp5QwwyGo8xk1lbHUxvK9nK0OGAak= +github.com/dgraph-io/ristretto v0.2.0 h1:XAfl+7cmoUDWW/2Lx8TGZQjjxIQ2Ley9DSf52dru4WE= +github.com/dgraph-io/ristretto v0.2.0/go.mod h1:8uBHCU/PBV4Ag0CJrP47b9Ofby5dqWNh4FicAdoqFNU= +github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 h1:fAjc9m62+UWV/WAFKLNi6ZS0675eEUC9y3AlwSbQu1Y= +github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= +github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= +github.com/distribution/reference v0.6.0 
h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= +github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= +github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/docker/cli v29.2.0+incompatible h1:9oBd9+YM7rxjZLfyMGxjraKBKE4/nVyvVfN4qNl9XRM= +github.com/docker/cli v29.2.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= +github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/docker v28.3.3+incompatible h1:Dypm25kh4rmk49v1eiVbsAtpAsYURjYkaKubwuBdxEI= +github.com/docker/docker v28.3.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker-credential-helpers v0.9.3 h1:gAm/VtF9wgqJMoxzT3Gj5p4AqIjCBS4wrsOh9yRqcz8= +github.com/docker/docker-credential-helpers v0.9.3/go.mod h1:x+4Gbw9aGmChi3qTLZj8Dfn0TD20M/fuWy0E5+WDeCo= +github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/dustin/go-humanize v1.0.1-0.20200219035652-afde56e7acac/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= 
+github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/dvsekhvalnov/jose2go v1.7.0 h1:bnQc8+GMnidJZA8zc6lLEAb4xNrIqHwO+9TzqvtQZPo= +github.com/dvsekhvalnov/jose2go v1.7.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU= +github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= +github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= +github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= +github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M= +github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o= +github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE= +github.com/emicklei/dot v1.6.2 h1:08GN+DD79cy/tzN6uLCT84+2Wk9u+wvqP+Hkx/dIR8A= +github.com/emicklei/dot v1.6.2/go.mod h1:DeV7GvQtIw4h2u73RKBkkFdvVAz0D9fzeJrgPW6gy/s= +github.com/emicklei/proto v1.12.2 h1:ZDyDzrfMt7ncmyor/j07uoOCGLKtU5F87vTPwIzLe/o= +github.com/emicklei/proto v1.12.2/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A= +github.com/emicklei/proto-contrib v0.15.0 h1:5D8JKpV1qekMDFwEJp8NVJGY1We6t14dn9D4G05fpyo= +github.com/emicklei/proto-contrib v0.15.0/go.mod h1:p6zmoy14hFYiwUb35X7nJ4u4l1vfvjc1mWrIt8QB3kw= +github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= +github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= +github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= 
+github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM= +github.com/ettle/strcase v0.2.0 h1:fGNiVF21fHXpX1niBgk0aROov1LagYsOwV/xqKDKR/Q= +github.com/ettle/strcase v0.2.0/go.mod h1:DajmHElDSaX76ITe3/VHVyMin4LWSJN5Z909Wp+ED1A= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= +github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= +github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= +github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= +github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= +github.com/fatih/structtag v1.2.0/go.mod 
h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/firefart/nonamedreturns v1.0.5 h1:tM+Me2ZaXs8tfdDw3X6DOX++wMCOqzYUho6tUTYIdRA= +github.com/firefart/nonamedreturns v1.0.5/go.mod h1:gHJjDqhGM4WyPt639SOZs+G89Ko7QKH5R5BhnO6xJhw= +github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= +github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= +github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= +github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20= +github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps= +github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= +github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= +github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/fzipp/gocyclo v0.6.0 h1:lsblElZG7d3ALtGMx9fmxeTKZaLLpU8mET09yN4BBLo= +github.com/fzipp/gocyclo v0.6.0/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA= +github.com/getsentry/sentry-go v0.35.0 h1:+FJNlnjJsZMG3g0/rmmP7GiKjQoUF5EXfEtBwtPtkzY= +github.com/getsentry/sentry-go v0.35.0/go.mod h1:C55omcY9ChRQIUcVcGcs+Zdy4ZpQGvNJ7JYHIoSWOtE= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= 
+github.com/ghostiam/protogetter v0.3.9 h1:j+zlLLWzqLay22Cz/aYwTHKQ88GE2DQ6GkWSYFOI4lQ= +github.com/ghostiam/protogetter v0.3.9/go.mod h1:WZ0nw9pfzsgxuRsPOFQomgDVSWtDLJRfQJEhsGbmQMA= +github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-gonic/gin v1.7.0 h1:jGB9xAJQ12AIGNB4HguylppmDK1Am9ppF7XnGXXJuoU= +github.com/gin-gonic/gin v1.7.0/go.mod h1:jD2toBW3GZUr5UMcdrwQA10I7RuaFOl/SGeDjXkfUtY= +github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c= +github.com/gliderlabs/ssh v0.3.8/go.mod h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU= +github.com/go-chi/chi/v5 v5.2.2 h1:CMwsvRVTbXVytCk1Wd72Zy1LAsAh9GxMmSNWLHCG618= +github.com/go-chi/chi/v5 v5.2.2/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops= +github.com/go-critic/go-critic v0.12.0 h1:iLosHZuye812wnkEz1Xu3aBwn5ocCPfc9yqmFG9pa6w= +github.com/go-critic/go-critic v0.12.0/go.mod h1:DpE0P6OVc6JzVYzmM5gq5jMU31zLr4am5mB/VfFK64w= +github.com/go-delve/delve v1.24.0 h1:M1auuI7kyfXZm5LMDQEqhqr4koKWOzGKhCgwMxsLQfo= +github.com/go-delve/delve v1.24.0/go.mod h1:yNWXOuo4yslMOOj7O8gIRrf/trDBrFy5ZXwJL4ZzOos= +github.com/go-delve/liner v1.2.3-0.20231231155935-4726ab1d7f62 h1:IGtvsNyIuRjl04XAOFGACozgUD7A82UffYxZt4DWbvA= +github.com/go-delve/liner v1.2.3-0.20231231155935-4726ab1d7f62/go.mod h1:biJCRbqp51wS+I92HMqn5H8/A0PAhxn2vyOT+JqhiGI= +github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= +github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= +github.com/go-git/go-billy/v5 v5.8.0 h1:I8hjc3LbBlXTtVuFNJuwYuMiHvQJDq1AT6u4DwDzZG0= +github.com/go-git/go-billy/v5 v5.8.0/go.mod 
h1:RpvI/rw4Vr5QA+Z60c6d6LXH0rYJo0uD5SqfmrrheCY= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII= +github.com/go-git/go-git/v5 v5.17.1 h1:WnljyxIzSj9BRRUlnmAU35ohDsjRK0EKmL0evDqi5Jk= +github.com/go-git/go-git/v5 v5.17.1/go.mod h1:pW/VmeqkanRFqR6AljLcs7EA7FbZaN5MQqO7oZADXpo= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgOZ7o= +github.com/go-kit/kit v0.13.0 h1:OoneCcHKHQ03LfBpoQCUfCluwd2Vt3ohz+kvbJneZAU= +github.com/go-kit/kit v0.13.0/go.mod h1:phqEHMMUbyrCFCTgH48JueqrM3md2HcAZ8N3XE4FKDg= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-kit/log v0.2.1 h1:MRVx0/zhvdseW+Gza6N9rVzU/IVzaeE1SFI4raAhmBU= +github.com/go-kit/log v0.2.1/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logfmt/logfmt v0.6.0 h1:wGYYu3uicYdqXVgoYbvnkrPVXkuLM1p1ifugDMEdRi4= +github.com/go-logfmt/logfmt v0.6.0/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= +github.com/go-logr/logr v1.2.2/go.mod 
h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-openapi/analysis v0.23.0 h1:aGday7OWupfMs+LbmLZG4k0MYXIANxcuBTYUC03zFCU= +github.com/go-openapi/analysis v0.23.0/go.mod h1:9mz9ZWaSlV8TvjQHLl2mUW2PbZtemkE8yA5v22ohupo= +github.com/go-openapi/errors v0.22.0 h1:c4xY/OLxUBSTiepAg3j/MHuAv5mJhnf53LLMWFB+u/w= +github.com/go-openapi/errors v0.22.0/go.mod h1:J3DmZScxCDufmIMsdOuDHxJbdOGC0xtUynjIx092vXE= +github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= +github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= +github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= +github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4= +github.com/go-openapi/loads v0.22.0 h1:ECPGd4jX1U6NApCGG1We+uEozOAvXvJSF4nnwHZ8Aco= +github.com/go-openapi/loads v0.22.0/go.mod h1:yLsaTCS92mnSAZX5WWoxszLj0u+Ojl+Zs5Stn1oF+rs= +github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY= +github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk= +github.com/go-openapi/strfmt v0.23.0 h1:nlUS6BCqcnAk0pyhi9Y+kdDVZdZMHfEKQiS4HaMgO/c= +github.com/go-openapi/strfmt v0.23.0/go.mod h1:NrtIpfKtWIygRkKVsxh7XQMDQW5HKQl6S5ik2elW+K4= +github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= +github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.13.0 
h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q= +github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= +github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no= +github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= +github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE= +github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= +github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI= +github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow= +github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= +github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= +github.com/go-toolsmith/astcast v1.1.0 h1:+JN9xZV1A+Re+95pgnMgDboWNVnIMMQXwfBwLRPgSC8= +github.com/go-toolsmith/astcast v1.1.0/go.mod h1:qdcuFWeGGS2xX5bLM/c3U9lewg7+Zu4mr+xPwZIB4ZU= +github.com/go-toolsmith/astcopy v1.1.0 h1:YGwBN0WM+ekI/6SS6+52zLDEf8Yvp3n2seZITCUBt5s= +github.com/go-toolsmith/astcopy v1.1.0/go.mod h1:hXM6gan18VA1T/daUEHCFcYiW8Ai1tIwIzHY6srfEAw= +github.com/go-toolsmith/astequal v1.0.3/go.mod h1:9Ai4UglvtR+4up+bAD4+hCj7iTo4m/OXVTSLnCyTAx4= +github.com/go-toolsmith/astequal v1.1.0/go.mod h1:sedf7VIdCL22LD8qIvv7Nn9MuWJruQA/ysswh64lffQ= +github.com/go-toolsmith/astequal v1.2.0 h1:3Fs3CYZ1k9Vo4FzFhwwewC3CHISHDnVUPC4x0bI2+Cw= +github.com/go-toolsmith/astequal v1.2.0/go.mod h1:c8NZ3+kSFtFY/8lPso4v8LuJjdJiUFVnSuU3s0qrrDY= +github.com/go-toolsmith/astfmt v1.1.0 h1:iJVPDPp6/7AaeLJEruMsBUlOYCmvg0MoCfJprsOmcco= +github.com/go-toolsmith/astfmt v1.1.0/go.mod 
h1:OrcLlRwu0CuiIBp/8b5PYF9ktGVZUjlNMV634mhwuQ4= +github.com/go-toolsmith/astp v1.1.0 h1:dXPuCl6u2llURjdPLLDxJeZInAeZ0/eZwFJmqZMnpQA= +github.com/go-toolsmith/astp v1.1.0/go.mod h1:0T1xFGz9hicKs8Z5MfAqSUitoUYS30pDMsRVIDHs8CA= +github.com/go-toolsmith/pkgload v1.2.2 h1:0CtmHq/02QhxcF7E9N5LIFcYFsMR5rdovfqTtRKkgIk= +github.com/go-toolsmith/pkgload v1.2.2/go.mod h1:R2hxLNRKuAsiXCo2i5J6ZQPhnPMOVtU+f0arbFPWCus= +github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= +github.com/go-toolsmith/strparse v1.1.0 h1:GAioeZUK9TGxnLS+qfdqNbA4z0SSm5zVNtCQiyP2Bvw= +github.com/go-toolsmith/strparse v1.1.0/go.mod h1:7ksGy58fsaQkGQlY8WVoBFNyEPMGuJin1rfoPS4lBSQ= +github.com/go-toolsmith/typep v1.1.0 h1:fIRYDyF+JywLfqzyhdiHzRop/GQDxxNhLGQ6gFUNHus= +github.com/go-toolsmith/typep v1.1.0/go.mod h1:fVIw+7zjdsMxDA3ITWnH1yOiw1rnTQKCsF/sk2H/qig= +github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= +github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/go-xmlfmt/xmlfmt v1.1.3 h1:t8Ey3Uy7jDSEisW2K3somuMKIpzktkWptA0iFCnRUWY= +github.com/go-xmlfmt/xmlfmt v1.1.3/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= +github.com/gobuffalo/flect v0.3.0 h1:erfPWM+K1rFNIQeRPdeEXxo8yFr/PO17lhRnS8FUrtk= +github.com/gobuffalo/flect v0.3.0/go.mod h1:5pf3aGnsvqvCj50AVni7mJJF8ICxGZ8HomberC3pXLE= +github.com/gobuffalo/genny/v2 v2.1.0 h1:cCRBbqzo3GfNvj3UetD16zRgUvWFEyyl0qTqquuIqOM= +github.com/gobuffalo/genny/v2 v2.1.0/go.mod h1:4yoTNk4bYuP3BMM6uQKYPvtP6WsXFGm2w2EFYZdRls8= +github.com/gobuffalo/github_flavored_markdown v1.1.3/go.mod h1:IzgO5xS6hqkDmUh91BW/+Qxo/qYnvfzoz3A7uLkg77I= +github.com/gobuffalo/github_flavored_markdown v1.1.4 h1:WacrEGPXUDX+BpU1GM/Y0ADgMzESKNWls9hOTG1MHVs= +github.com/gobuffalo/github_flavored_markdown v1.1.4/go.mod h1:Vl9686qrVVQou4GrHRK/KOG3jCZOKLUqV8MMOAYtlso= +github.com/gobuffalo/helpers v0.6.7 h1:C9CedoRSfgWg2ZoIkVXgjI5kgmSpL34Z3qdnzpfNVd8= 
+github.com/gobuffalo/helpers v0.6.7/go.mod h1:j0u1iC1VqlCaJEEVkZN8Ia3TEzfj/zoXANqyJExTMTA= +github.com/gobuffalo/logger v1.0.7 h1:LTLwWelETXDYyqF/ASf0nxaIcdEOIJNxRokPcfI/xbU= +github.com/gobuffalo/logger v1.0.7/go.mod h1:u40u6Bq3VVvaMcy5sRBclD8SXhBYPS0Qk95ubt+1xJM= +github.com/gobuffalo/packd v1.0.2 h1:Yg523YqnOxGIWCp69W12yYBKsoChwI7mtu6ceM9Bwfw= +github.com/gobuffalo/packd v1.0.2/go.mod h1:sUc61tDqGMXON80zpKGp92lDb86Km28jfvX7IAyxFT8= +github.com/gobuffalo/plush/v4 v4.1.16/go.mod h1:6t7swVsarJ8qSLw1qyAH/KbrcSTwdun2ASEQkOznakg= +github.com/gobuffalo/plush/v4 v4.1.22 h1:bPQr5PsiTg54UGMsfvnIAvFmUfxzD/ri+wbpu7PlmTM= +github.com/gobuffalo/plush/v4 v4.1.22/go.mod h1:WiKHJx3qBvfaDVlrv8zT7NCd3dEMaVR/fVxW4wqV17M= +github.com/gobuffalo/tags/v3 v3.1.4 h1:X/ydLLPhgXV4h04Hp2xlbI2oc5MDaa7eub6zw8oHjsM= +github.com/gobuffalo/tags/v3 v3.1.4/go.mod h1:ArRNo3ErlHO8BtdA0REaZxijuWnWzF6PUXngmMXd2I0= +github.com/gobuffalo/validate/v3 v3.3.3 h1:o7wkIGSvZBYBd6ChQoLxkz2y1pfmhbI4jNJYh6PuNJ4= +github.com/gobuffalo/validate/v3 v3.3.3/go.mod h1:YC7FsbJ/9hW/VjQdmXPvFqvRis4vrRYFxr69WiNZw6g= +github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= +github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= +github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU= +github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM= +github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= +github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og= +github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= +github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= +github.com/gobwas/ws v1.2.1 h1:F2aeBZrm2NDsc7vbovKrWSogd4wvfAxg0FQ89/iqOTk= +github.com/gobwas/ws v1.2.1/go.mod 
h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY= +github.com/goccy/go-yaml v1.15.23 h1:WS0GAX1uNPDLUvLkNU2vXq6oTnsmfVFocjQ/4qA48qo= +github.com/goccy/go-yaml v1.15.23/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= +github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 h1:ZpnhV/YsD2/4cESfV5+Hoeu/iUR3ruzNvZ+yQfO03a0= +github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= +github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= +github.com/gofrs/uuid v4.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gofrs/uuid v4.4.0+incompatible h1:3qXRTX8/NbyulANqlc0lchS1gqAVxRgsuW1YrTJupqA= +github.com/gofrs/uuid v4.4.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= +github.com/gogo/googleapis v1.4.1-0.20201022092350-68b0159b7869/go.mod h1:5YRNX2z1oM5gXdAkurHa942MDgEJyk02w4OecKY87+c= +github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0= +github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= +github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt/v4 v4.0.0 h1:RAqyYixv1p7uEnocuy8P1nru5wprCh/MH2BIlW5z5/o= 
+github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= +github.com/golang-jwt/jwt/v4 v4.4.2 h1:rcc4lwaZgFMCZ5jxF9ABolDcIHdBytAFgqFPbSJQAYs= +github.com/golang-jwt/jwt/v4 v4.4.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.5.0/go.mod 
h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.0/go.mod h1:Qd/q+1AKNOZr9uGQzbzCmRO6sUih6GTPZv6a1/R87v0= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= 
+github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a h1:w8hkcTqaFpzKqonE9uMCefW1WDie15eSP/4MssdenaM= +github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk= +github.com/golangci/go-printf-func-name v0.1.0 h1:dVokQP+NMTO7jwO4bwsRwLWeudOVUPPyAKJuzv8pEJU= +github.com/golangci/go-printf-func-name v0.1.0/go.mod h1:wqhWFH5mUdJQhweRnldEywnR5021wTdZSNgwYceV14s= +github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d h1:viFft9sS/dxoYY0aiOTsLKO2aZQAPT4nlQCsimGcSGE= +github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d/go.mod h1:ivJ9QDg0XucIkmwhzCDsqcnxxlDStoTl89jDMIoNxKY= +github.com/golangci/golangci-lint v1.64.5 h1:5omC86XFBKXZgCrVdUWU+WNHKd+CWCxNx717KXnzKZY= +github.com/golangci/golangci-lint v1.64.5/go.mod h1:WZnwq8TF0z61h3jLQ7Sk5trcP7b3kUFxLD6l1ivtdvU= +github.com/golangci/misspell v0.6.0 h1:JCle2HUTNWirNlDIAUO44hUsKhOFqGPoC4LZxlaSXDs= +github.com/golangci/misspell v0.6.0/go.mod h1:keMNyY6R9isGaSAu+4Q8NMBwMPkh15Gtc8UCVoDtAWo= +github.com/golangci/plugin-module-register v0.1.1 h1:TCmesur25LnyJkpsVrupv1Cdzo+2f7zX0H6Jkw1Ol6c= +github.com/golangci/plugin-module-register v0.1.1/go.mod h1:TTpqoB6KkwOJMV8u7+NyXMrkwwESJLOkfl9TxR1DGFc= +github.com/golangci/revgrep v0.8.0 h1:EZBctwbVd0aMeRnNUsFogoyayvKHyxlV3CdUA46FX2s= +github.com/golangci/revgrep v0.8.0/go.mod h1:U4R/s9dlXZsg8uJmaR1GrloUr14D7qDl8gi2iPXJH8k= +github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed h1:IURFTjxeTfNFP0hTEi1YKjB/ub8zkpaOqFFMApi2EAs= 
+github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed/go.mod h1:XLXN8bNw4CGRPaqgl3bv/lhz7bsGPh4/xSaMTbo2vkQ= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg= +github.com/google/btree v1.1.3/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= +github.com/google/cel-go v0.26.0 h1:DPGjXackMpJWH680oGY4lZhYjIameYmR+/6RBdDGmaI= +github.com/google/cel-go v0.26.0/go.mod h1:A9O8OU9rdvrK5MQyrqfIxo1a0u4g3sF8KB6PUIaryMM= +github.com/google/flatbuffers v1.12.1 h1:MVlul7pQNoDzWRLTw5imwYsl+usrS1TXG2H4jg6ImGw= +github.com/google/flatbuffers v1.12.1/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= +github.com/google/go-cmdtest v0.4.1-0.20220921163831-55ab3332a786 h1:rcv+Ippz6RAtvaGgKxc+8FQIpxHgsF+HBzPyYL2cyVU= +github.com/google/go-cmdtest v0.4.1-0.20220921163831-55ab3332a786/go.mod h1:apVn/GCasLZUVpAJ6oWAuyP7Ne7CEsQbTnc0plM3m+o= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod 
h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/go-containerregistry v0.20.6 h1:cvWX87UxxLgaH76b4hIvya6Dzz9qHB31qAwjAohdSTU= +github.com/google/go-containerregistry v0.20.6/go.mod h1:T0x8MuoAoKX/873bkeSfLD2FAkwCDf9/HZgsFJ02E2Y= +github.com/google/go-dap v0.12.0 h1:rVcjv3SyMIrpaOoTAdFDyHs99CwVOItIJGKLQFQhNeM= +github.com/google/go-dap v0.12.0/go.mod h1:tNjCASCm5cqePi/RVXXWEVqtnNLV1KTWtYOqu6rZNzc= +github.com/google/go-github/v48 v48.2.0 h1:68puzySE6WqUY9KWmpOsDEQfDZsso98rT6pZcz9HqcE= +github.com/google/go-github/v48 v48.2.0/go.mod h1:dDlehKBDo850ZPvCTK0sEqTCVWcrGl2LcDiajkYi89Y= +github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= +github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= +github.com/google/gofuzz v0.0.0-20170612174753-24818f796faf/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= +github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/goterm v0.0.0-20190703233501-fc88cf888a3f/go.mod h1:nOFQdrUlIlx6M6ODdSpBj1NVA+VgLC6kmw60mkw34H4= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= 
+github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= +github.com/google/orderedcode v0.0.1 h1:UzfcAexk9Vhv8+9pNOgRu41f16lHq725vPwnSeiG/Us= +github.com/google/orderedcode v0.0.1/go.mod h1:iVyU4/qPKHY5h/wSd6rZZCDcLJNxiWO6dvsYES2Sb20= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof 
v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad h1:a6HEuzUHeKH6hwfN/ZoQgRgVIWFJljSWa/zetS2WTvg= +github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144= +github.com/google/renameio v0.1.0 h1:GOZbcHa3HfsPKPlmyPyN2KEohoMXOhdMbHrvbpl2QaA= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.0.0-20220520183353-fd19c99a87aa/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= +github.com/googleapis/enterprise-certificate-proxy v0.1.0/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= +github.com/googleapis/enterprise-certificate-proxy v0.2.0/go.mod h1:8C0jb7/mgJe/9KK8Lm7X9ctZC2t60YyIpYEI16jx0Qg= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= +github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= +github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= +github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM= +github.com/googleapis/gax-go/v2 v2.4.0/go.mod 
h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c= +github.com/googleapis/gax-go/v2 v2.5.1/go.mod h1:h6B0KMMFNtI2ddbGJn3T3ZbwkeT6yqEF02fYlzkUCyo= +github.com/googleapis/gax-go/v2 v2.6.0/go.mod h1:1mjbznJAPHFpesgE5ucqfYEscaz5kMdcIDwU/6+DDoY= +github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= +github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gordonklaus/ineffassign v0.1.0 h1:y2Gd/9I7MdY1oEIt+n+rowjBNDcLQq3RsH5hwJd0f9s= +github.com/gordonklaus/ineffassign v0.1.0/go.mod h1:Qcp2HIAYhR7mNUVSIxZww3Guk4it82ghYcEXIAk+QT0= +github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= +github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= +github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= +github.com/gorilla/handlers v1.5.2 h1:cLTUSsNkgcwhgRqvCNmdbRWG0A3N4F+M2nWKdScwyEE= +github.com/gorilla/handlers v1.5.2/go.mod h1:dX+xVpaxdSw+q0Qek8SSsl3dfMk3jNddUkMzo0GtH0w= +github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= +github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= +github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= +github.com/gorilla/websocket v1.5.3/go.mod 
h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gostaticanalysis/analysisutil v0.7.1 h1:ZMCjoue3DtDWQ5WyU16YbjbQEQ3VuzwxALrpYd+HeKk= +github.com/gostaticanalysis/analysisutil v0.7.1/go.mod h1:v21E3hY37WKMGSnbsw2S/ojApNWb6C1//mXO48CXbVc= +github.com/gostaticanalysis/comment v1.4.1/go.mod h1:ih6ZxzTHLdadaiSnF5WY3dxUoXfXAlTaRzuaNDlSado= +github.com/gostaticanalysis/comment v1.4.2 h1:hlnx5+S2fY9Zo9ePo4AhgYsYHbM2+eAv8m/s1JiCd6Q= +github.com/gostaticanalysis/comment v1.4.2/go.mod h1:KLUTGDv6HOCotCH8h2erHKmpci2ZoR8VPu34YA2uzdM= +github.com/gostaticanalysis/forcetypeassert v0.2.0 h1:uSnWrrUEYDr86OCxWa4/Tp2jeYDlogZiZHzGkWFefTk= +github.com/gostaticanalysis/forcetypeassert v0.2.0/go.mod h1:M5iPavzE9pPqWyeiVXSFghQjljW1+l/Uke3PXHS6ILY= +github.com/gostaticanalysis/nilerr v0.1.1 h1:ThE+hJP0fEp4zWLkWHWcRyI2Od0p7DlgYG3Uqrmrcpk= +github.com/gostaticanalysis/nilerr v0.1.1/go.mod h1:wZYb6YI5YAxxq0i1+VJbY0s2YONW0HU0GPE3+5PWN4A= +github.com/gostaticanalysis/testutil v0.3.1-0.20210208050101-bfb5c8eec0e4/go.mod h1:D+FIZ+7OahH3ePw/izIEeH5I06eKs1IKI4Xr64/Am3M= +github.com/gostaticanalysis/testutil v0.5.0 h1:Dq4wT1DdTwTGCQQv3rl3IvD5Ld0E6HiY+3Zh0sUGqw8= +github.com/gostaticanalysis/testutil v0.5.0/go.mod h1:OLQSbuM6zw2EvCcXTz1lVq5unyoNft372msDY0nY5Hs= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-middleware v1.2.2/go.mod h1:EaizFBKfUKtMIF5iaDEhniwNedqGo9FuLFzppDr3uwI= +github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI= +github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vbp88Yd8NsDy6rZz+RcrMPxvld8= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= +github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod 
h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1 h1:X5VWvz21y3gzm9Nw/kaUeku/1+uBhcekkmy4IkffJww= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1/go.mod h1:Zanoh4+gvIgluNqcfMVTJueD4wSS5hT7zTt4Mrutd90= +github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU= +github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0= +github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE= +github.com/hashicorp/consul/api v1.15.3/go.mod h1:/g/qgcoBcEXALCNZgRRisyTW0nY86++L0KbeAMXYCeY= +github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/consul/sdk v0.11.0/go.mod h1:yPkX5Q6CsxTFMjQQDJwzeNmUUF5NUGGbrDsv9wTb8cw= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-hclog v0.14.1/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-hclog v1.2.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= +github.com/hashicorp/go-hclog v1.6.3/go.mod 
h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix/v2 v2.1.0 h1:CUW5RYIcysz+D3B+l1mDeXrQ7fUvGGCwJfdASSzbrfo= +github.com/hashicorp/go-immutable-radix/v2 v2.1.0/go.mod h1:hgdqLXA4f6NIjRVisM1TJ9aOJVNRqKZj+xDGF6m7PBw= +github.com/hashicorp/go-metrics v0.5.4 h1:8mmPiIJkTPPEbAiV97IxdAGNdRdaWwVap1BU6elejKY= +github.com/hashicorp/go-metrics v0.5.4/go.mod h1:CG5yz4NZ/AI/aQt9Ucm/vdBnbh7fvmv4lxZ350i+QQI= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-msgpack v0.5.5/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-plugin v1.6.3 h1:xgHB+ZUSYeuJi96WtxEjzi23uh7YQpznjGh0U0UUrwg= +github.com/hashicorp/go-plugin v1.6.3/go.mod h1:MRobyh+Wc/nYy1V4KAXUiYfzxoYhs7V1mlH1Z7iY2h0= +github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= 
+github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= +github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c= +github.com/hashicorp/golang-lru v1.0.2/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/mdns v1.0.4/go.mod 
h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc= +github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= +github.com/hashicorp/memberlist v0.3.1/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= +github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= +github.com/hashicorp/serf v0.9.7/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= +github.com/hashicorp/serf v0.9.8/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= +github.com/hashicorp/yamux v0.1.2 h1:XtB8kyFOyHXYVFnwT5C3+Bdo8gArse7j2AQ0DA0Uey8= +github.com/hashicorp/yamux v0.1.2/go.mod h1:C+zze2n6e/7wshOZep2A70/aQU6QBRWJO/G6FT1wIns= +github.com/hdevalence/ed25519consensus v0.2.0 h1:37ICyZqdyj0lAZ8P4D1d1id3HqbbG1N3iBb1Tb4rdcU= +github.com/hdevalence/ed25519consensus v0.2.0/go.mod h1:w3BHWjwJbFU29IRHL1Iqkw3sus+7FctEyM4RqDxYNzo= +github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= +github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/huandu/go-assert v1.1.5 h1:fjemmA7sSfYHJD7CUqs9qTwwfdNAx7/j2/ZlHXzNB3c= +github.com/huandu/go-assert v1.1.5/go.mod h1:yOLvuqZwmcHIC5rIzrBhT7D3Q9c3GFnd0JrPVhn/06U= +github.com/huandu/skiplist v1.2.1 h1:dTi93MgjwErA/8idWTzIw4Y1kZsMWx35fmI2c8Rij7w= +github.com/huandu/skiplist v1.2.1/go.mod h1:7v3iFjLcSAzO4fN5B8dvebvo/qsfumiLiDXMrPiHF9w= +github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU= +github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg= +github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= +github.com/iancoleman/strcase v0.3.0/go.mod 
h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ignite/web v1.0.8 h1:St3L6UJj70+h16+No5em8Vn2Hx93tS2G1MyWO/Kt1cc= +github.com/ignite/web v1.0.8/go.mod h1:WZWBaBYF8RazN7dE462BLpvXDY8ScacxcJ07BKwX/jY= +github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM= +github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= +github.com/improbable-eng/grpc-web v0.15.0 h1:BN+7z6uNXZ1tQGcNAuaU1YjsLTApzkjt2tzCixLaUPQ= +github.com/improbable-eng/grpc-web v0.15.0/go.mod h1:1sy9HKV4Jt9aEs9JSnkWlRJPuPtwNr0l57L4f878wP8= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= +github.com/jdx/go-netrc v1.0.0 h1:QbLMLyCZGj0NA8glAhxUpf1zDg6cxnWgMBbjq40W0gQ= +github.com/jdx/go-netrc v1.0.0/go.mod h1:Gh9eFQJnoTNIRHXl2j5bJXA1u84hQWJWgGh569zF3v8= +github.com/jgautheron/goconst v1.7.1 h1:VpdAG7Ca7yvvJk5n8dMwQhfEZJh95kl/Hl9S1OI5Jkk= +github.com/jgautheron/goconst v1.7.1/go.mod 
h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4= +github.com/jhump/protoreflect v1.17.0 h1:qOEr613fac2lOuTgWN4tPAtLL7fUSbuJL5X5XumQh94= +github.com/jhump/protoreflect v1.17.0/go.mod h1:h9+vUUL38jiBzck8ck+6G/aeMX8Z4QUY/NiJPwPNi+8= +github.com/jhump/protoreflect/v2 v2.0.0-beta.2 h1:qZU+rEZUOYTz1Bnhi3xbwn+VxdXkLVeEpAeZzVXLY88= +github.com/jhump/protoreflect/v2 v2.0.0-beta.2/go.mod h1:4tnOYkB/mq7QTyS3YKtVtNrJv4Psqout8HA1U+hZtgM= +github.com/jingyugao/rowserrcheck v1.1.1 h1:zibz55j/MJtLsjP1OF4bSdgXxwL1b+Vn7Tjzq7gFzUs= +github.com/jingyugao/rowserrcheck v1.1.1/go.mod h1:4yvlZSDb3IyDTUZJUmpZfm2Hwok+Dtp+nu2qOq+er9c= +github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8= +github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg= +github.com/jjti/go-spancheck v0.6.4 h1:Tl7gQpYf4/TMU7AT84MN83/6PutY21Nb9fuQjFTpRRc= +github.com/jjti/go-spancheck v0.6.4/go.mod h1:yAEYdKJ2lRkDA8g7X+oKUHXOWVAXSBJRv04OhF+QUjk= +github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmhodges/levigo v1.0.0 h1:q5EC36kV79HWeTBWsod3mG11EgStG3qArTKcvlksN1U= +github.com/jmhodges/levigo v1.0.0/go.mod h1:Q6Qx+uH3RAqyK4rFQroq9RL7mdkABMcfhEI+nNuzMJQ= +github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/josharian/native v1.1.0 h1:uuaP0hAbW7Y4l0ZRQ6C9zfb7Mg1mbFKry/xzDAfmtLA= +github.com/josharian/native v1.1.0/go.mod h1:7X/raswPFr05uY3HiLlYeyQntB6OO7E/d2Cu7qoaN2w= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= +github.com/jsimonetti/rtnetlink/v2 v2.0.1 h1:xda7qaHDSVOsADNouv7ukSuicKZO7GgVUCXxpaIEIlM= +github.com/jsimonetti/rtnetlink/v2 v2.0.1/go.mod h1:7MoNYNbb3UaDHtF8udiJo/RH6VsTKP1pqKLUTVCvToE= 
+github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/julz/importas v0.2.0 h1:y+MJN/UdL63QbFJHws9BVC5RpA2iq0kpjrFajTGivjQ= +github.com/julz/importas v0.2.0/go.mod h1:pThlt589EnCYtMnmhmRYY/qn9lCf/frPOK+WMx3xiJY= +github.com/karamaru-alpha/copyloopvar v1.2.1 h1:wmZaZYIjnJ0b5UoKDjUHrikcV0zuPyyxI4SVplLd2CI= +github.com/karamaru-alpha/copyloopvar v1.2.1/go.mod h1:nFmMlFNlClC2BPvNaHMdkirmTJxVCY0lhxBtlfOypMM= +github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= +github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= +github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= +github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= 
+github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/errcheck v1.8.0 h1:ZX/URYa7ilESY19ik/vBmCn6zdGQLxACwjAcWbHlYlg= +github.com/kisielk/errcheck v1.8.0/go.mod h1:1kLL+jV4e+CFfueBmI1dSK2ADDyQnlrnrY/FqKluHJQ= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kisielk/sqlstruct v0.0.0-20201105191214-5f3e10d3ab46/go.mod h1:yyMNCyc/Ib3bDTKd379tNMpB/7/H5TjM2Y9QJ5THLbE= +github.com/kkHAIKE/contextcheck v1.1.5 h1:CdnJh63tcDe53vG+RebdpdXJTc9atMgGqdx8LXxiilg= +github.com/kkHAIKE/contextcheck v1.1.5/go.mod h1:O930cpht4xb1YQpK+1+AgoM3mFsvxr7uyFptcnWTYUA= +github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.11.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= +github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE= +github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU= +github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty 
v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kulti/thelper v0.6.3 h1:ElhKf+AlItIu+xGnI990no4cE2+XaSu1ULymV2Yulxs= +github.com/kulti/thelper v0.6.3/go.mod h1:DsqKShOvP40epevkFrvIwkCMNYxMeTNjdWL4dqWHZ6I= +github.com/kunwardeep/paralleltest v1.0.10 h1:wrodoaKYzS2mdNVnc4/w31YaXFtsc21PCTdvWJ/lDDs= +github.com/kunwardeep/paralleltest v1.0.10/go.mod h1:2C7s65hONVqY7Q5Efj5aLzRCNLjw2h4eMc9EcypGjcY= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lasiar/canonicalheader v1.1.2 h1:vZ5uqwvDbyJCnMhmFYimgMZnJMjwljN5VGY0VKbMXb4= +github.com/lasiar/canonicalheader v1.1.2/go.mod h1:qJCeLFS0G/QlLQ506T+Fk/fWMa2VmBUiEI2cuMK4djI= +github.com/ldez/exptostd v0.4.1 h1:DIollgQ3LWZMp3HJbSXsdE2giJxMfjyHj3eX4oiD6JU= +github.com/ldez/exptostd v0.4.1/go.mod h1:iZBRYaUmcW5jwCR3KROEZ1KivQQp6PHXbDPk9hqJKCQ= +github.com/ldez/gomoddirectives v0.6.1 h1:Z+PxGAY+217f/bSGjNZr/b2KTXcyYLgiWI6geMBN2Qc= +github.com/ldez/gomoddirectives v0.6.1/go.mod h1:cVBiu3AHR9V31em9u2kwfMKD43ayN5/XDgr+cdaFaKs= +github.com/ldez/grignotin v0.9.0 h1:MgOEmjZIVNn6p5wPaGp/0OKWyvq42KnzAt/DAb8O4Ow= +github.com/ldez/grignotin v0.9.0/go.mod h1:uaVTr0SoZ1KBii33c47O1M8Jp3OP3YDwhZCmzT9GHEk= +github.com/ldez/tagliatelle v0.7.1 h1:bTgKjjc2sQcsgPiT902+aadvMjCeMHrY7ly2XKFORIk= +github.com/ldez/tagliatelle 
v0.7.1/go.mod h1:3zjxUpsNB2aEZScWiZTHrAXOl1x25t3cRmzfK1mlo2I= +github.com/ldez/usetesting v0.4.2 h1:J2WwbrFGk3wx4cZwSMiCQQ00kjGR0+tuuyW0Lqm4lwA= +github.com/ldez/usetesting v0.4.2/go.mod h1:eEs46T3PpQ+9RgN9VjpY6qWdiw2/QmfiDeWmdZdrjIQ= +github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= +github.com/leonklingele/grouper v1.1.2 h1:o1ARBDLOmmasUaNDesWqWCIFH3u7hoFlM84YrjT3mIY= +github.com/leonklingele/grouper v1.1.2/go.mod h1:6D0M/HVkhs2yRKRFZUoGjeDy7EZTfFBE9gl4kjmIGkA= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= +github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= +github.com/linxGnu/grocksdb v1.8.14 h1:HTgyYalNwBSG/1qCQUIott44wU5b2Y9Kr3z7SK5OfGQ= +github.com/linxGnu/grocksdb v1.8.14/go.mod h1:QYiYypR2d4v63Wj1adOOfzglnoII0gLj3PNh4fZkcFA= +github.com/lucasb-eyer/go-colorful v1.0.3/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= +github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= +github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= +github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= +github.com/macabu/inamedparam v0.1.3 h1:2tk/phHkMlEL/1GNe/Yf6kkR/hkcUdAEY3L0hjYV1Mk= +github.com/macabu/inamedparam v0.1.3/go.mod h1:93FLICAIk/quk7eaPPQvbzihUdn/QkGDwIZEoLtpH6I= +github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/magiconair/properties v1.8.10 
h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE= +github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA= +github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg= +github.com/maratori/testableexamples v1.0.0 h1:dU5alXRrD8WKSjOUnmJZuzdxWOEQ57+7s93SLMxb2vI= +github.com/maratori/testableexamples v1.0.0/go.mod h1:4rhjL1n20TUTT4vdh3RDqSizKLyXp7K2u6HgraZCGzE= +github.com/maratori/testpackage v1.1.1 h1:S58XVV5AD7HADMmD0fNnziNHqKvSdDuEKdPD1rNTU04= +github.com/maratori/testpackage v1.1.1/go.mod h1:s4gRK/ym6AMrqpOa/kEbQTV4Q4jb7WeLZzVhVVVOQMc= +github.com/matoous/godox v1.1.0 h1:W5mqwbyWrwZv6OQ5Z1a/DHGMOvXYCBP3+Ht7KMoJhq4= +github.com/matoous/godox v1.1.0/go.mod h1:jgE/3fUXiTurkdHOLT5WEkThTSuE7yxHv5iWPa80afs= +github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= +github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= +github.com/mattn/go-isatty v0.0.3/go.mod 
h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= +github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.18/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4= +github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88= +github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-runewidth v0.0.3/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= +github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= +github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= +github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= 
+github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= +github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/mdlayher/netlink v1.7.2 h1:/UtM3ofJap7Vl4QWCPDGXY8d3GIY2UGSDbK+QWmY8/g= +github.com/mdlayher/netlink v1.7.2/go.mod h1:xraEF7uJbxLhc5fpHL4cPe221LI2bdttWlU+ZGLfQSw= +github.com/mdlayher/socket v0.4.1 h1:eM9y2/jlbs1M615oshPQOHZzj6R6wMT7bX5NPiQvn2U= +github.com/mdlayher/socket v0.4.1/go.mod h1:cAqeGjoufqdxWkD7DkpyS+wcefOtmu5OQ8KuoJGIReA= +github.com/meowgorithm/babyenv v1.3.0/go.mod h1:lwNX+J6AGBFqNrMZ2PTLkM6SO+W4X8DOg9zBDO4j3Ig= +github.com/meowgorithm/babyenv v1.3.1 h1:18ZEYIgbzoFQfRLF9+lxjRfk/ui6w8U0FWl07CgWvvc= +github.com/meowgorithm/babyenv v1.3.1/go.mod h1:lwNX+J6AGBFqNrMZ2PTLkM6SO+W4X8DOg9zBDO4j3Ig= +github.com/mgechev/revive v1.6.1 h1:ncK0ZCMWtb8GXwVAmk+IeWF2ULIDsvRxSRfg5sTwQ2w= +github.com/mgechev/revive v1.6.1/go.mod h1:/2tfHWVO8UQi/hqJsIYNEKELi+DJy/e+PQpLgTB1v88= +github.com/microcosm-cc/bluemonday v1.0.20/go.mod h1:yfBmMi8mxvaZut3Yytv+jTXRY8mxyjJ0/kQBTElld50= +github.com/microcosm-cc/bluemonday v1.0.21/go.mod h1:ytNkv4RrDrLJ2pqlsSI46O6IVXmZOBBD4SaJyDwwTkM= +github.com/microcosm-cc/bluemonday v1.0.22/go.mod h1:ytNkv4RrDrLJ2pqlsSI46O6IVXmZOBBD4SaJyDwwTkM= +github.com/microcosm-cc/bluemonday v1.0.23 h1:SMZe2IGa0NuHvnVNAZ+6B38gsTbi5e4sViiWJyDDqFY= +github.com/microcosm-cc/bluemonday v1.0.23/go.mod h1:mN70sk7UkkF8TUr2IGBpNN0jAgStuPzlK76QuruE/z4= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= +github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= +github.com/mikesmitty/edkey v0.0.0-20170222072505-3356ea4e686a h1:eU8j/ClY2Ty3qdHnn0TyW3ivFoPC/0F1gQZz8yTxbbE= +github.com/mikesmitty/edkey 
v0.0.0-20170222072505-3356ea4e686a/go.mod h1:v8eSC2SMp9/7FTKUncp7fH9IwPfw+ysMObcEz5FWheQ= +github.com/minio/highwayhash v1.0.3 h1:kbnuUMoHYyVl7szWjSxJnxw11k2U709jqFPPmIUyD6Q= +github.com/minio/highwayhash v1.0.3/go.mod h1:GGYsuwP/fPD6Y9hMiXuapVvlIUEhFhMTh0rxU3ik1LQ= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= +github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw= +github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs= 
+github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= +github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= +github.com/moby/term v0.5.2 h1:6qk3FJAFDs6i/q3W/pQ97SX192qKfZgGjCQqfCJkgzQ= +github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/moricho/tparallel v0.3.2 h1:odr8aZVFA3NZrNybggMkYO3rgPRcqjeQUlBBFVxKHTI= +github.com/moricho/tparallel v0.3.2/go.mod h1:OQ+K3b4Ln3l2TZveGCywybl68glfLEwFGqvnjok8b+U= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs= +github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns= +github.com/muesli/ansi v0.0.0-20211018074035-2e021307bc4b/go.mod h1:fQuZ0gauxyBcmsdE3ZT4NasjaRdxmbCS0jRHsrWu3Ho= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo= +github.com/muesli/cancelreader v0.2.2 
h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA= +github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo= +github.com/muesli/gitcha v0.2.0 h1:+wOgT2dI9s2Tznj1t1rb/qkK5e0cb6qD8c4IX2TR/YY= +github.com/muesli/gitcha v0.2.0/go.mod h1:Ri8m9TZS4+ORG4JVmVKUQcWZuxDvUW3UKxMdQfzG2zI= +github.com/muesli/go-app-paths v0.2.1/go.mod h1:SxS3Umca63pcFcLtbjVb+J0oD7cl4ixQWoBKhGEtEho= +github.com/muesli/go-app-paths v0.2.2 h1:NqG4EEZwNIhBq/pREgfBmgDmt3h1Smr1MjZiXbpZUnI= +github.com/muesli/go-app-paths v0.2.2/go.mod h1:SxS3Umca63pcFcLtbjVb+J0oD7cl4ixQWoBKhGEtEho= +github.com/muesli/mango v0.1.0 h1:DZQK45d2gGbql1arsYA4vfg4d7I9Hfx5rX/GCmzsAvI= +github.com/muesli/mango v0.1.0/go.mod h1:5XFpbC8jY5UUv89YQciiXNlbi+iJgt29VDC5xbzrLL4= +github.com/muesli/mango-cobra v1.2.0 h1:DQvjzAM0PMZr85Iv9LIMaYISpTOliMEg+uMFtNbYvWg= +github.com/muesli/mango-cobra v1.2.0/go.mod h1:vMJL54QytZAJhCT13LPVDfkvCUJ5/4jNUKF/8NC2UjA= +github.com/muesli/mango-pflag v0.1.0 h1:UADqbYgpUyRoBja3g6LUL+3LErjpsOwaC9ywvBWe7Sg= +github.com/muesli/mango-pflag v0.1.0/go.mod h1:YEQomTxaCUp8PrbhFh10UfbhbQrM/xJ4i2PB8VTLLW0= +github.com/muesli/reflow v0.2.1-0.20210115123740-9e1d0d53df68/go.mod h1:Xk+z4oIWdQqJzsxyjgl3P22oYZnHdZ8FFTHAQQt5BMQ= +github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s= +github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8= +github.com/muesli/roff v0.1.0 h1:YD0lalCotmYuF5HhZliKWlIx7IEhiXeSfq7hNjFqGF8= +github.com/muesli/roff v0.1.0/go.mod h1:pjAHQM9hdUUwm/krAfrLGgJkXJ+YuhtsfZ42kieB2Ig= +github.com/muesli/sasquatch v0.0.0-20200811221207-66979d92330a h1:Hw/15RYEOUD6T9UCRkUmNBa33kJkH33Fui6hE4sRLKU= +github.com/muesli/sasquatch v0.0.0-20200811221207-66979d92330a/go.mod h1:+XG0ne5zXWBTSbbe7Z3/RWxaT8PZY6zaZ1dX6KjprYY= +github.com/muesli/termenv v0.7.2/go.mod h1:ct2L5N2lmix82RaY3bMWwVu/jUFc9Ule0KGDCiKYPh8= +github.com/muesli/termenv v0.7.4/go.mod h1:pZ7qY9l3F7e5xsAOS0zCew2tME+p7bWeBkotCEcIIcc= +github.com/muesli/termenv 
v0.11.1-0.20220204035834-5ac8409525e0/go.mod h1:Bd5NYQ7pd+SrtBSrSNoBBmXlcY8+Xj4BMJgh8qcZrvs= +github.com/muesli/termenv v0.13.0/go.mod h1:sP1+uffeLaEYpyOTb8pLCUctGcGLnoFjSn4YJK5e2bc= +github.com/muesli/termenv v0.14.0/go.mod h1:kG/pF1E7fh949Xhe156crRUrHNyK221IuGO7Ez60Uc8= +github.com/muesli/termenv v0.15.1/go.mod h1:HeAQPTzpfs016yGtA4g00CsdYnVLJvxsS4ANqrZs2sQ= +github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc= +github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/grpc-proxy v0.0.0-20181017164139-0f1106ef9c76/go.mod h1:x5OoJHDHqxHS801UIuhqGl6QdSAEJvtausosHSdazIo= +github.com/nakabonne/nestif v0.3.1 h1:wm28nZjhQY5HyYPx+weN3Q65k6ilSBxDb8v5S81B81U= +github.com/nakabonne/nestif v0.3.1/go.mod h1:9EtoZochLn5iUprVDmDjqGKPofoUEBL8U4Ngq6aY7OE= +github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= +github.com/nats-io/jwt v0.3.2/go.mod h1:/euKqTS1ZD+zzjYrY7pseZrTtWQSjujC7xjPc8wL6eU= +github.com/nats-io/nats-server/v2 v2.1.2/go.mod h1:Afk+wRZqkMQs/p45uXdrVLuab3gwv3Z8C4HTBu8GD/k= +github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w= +github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= +github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= +github.com/nats-io/nuid 
v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/nishanths/exhaustive v0.12.0 h1:vIY9sALmw6T/yxiASewa4TQcFsVYZQQRUQJhKRf3Swg= +github.com/nishanths/exhaustive v0.12.0/go.mod h1:mEZ95wPIZW+x8kC4TgC+9YCUgiST7ecevsVDTgc2obs= +github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm/w98Vk= +github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c= +github.com/nqd/flat v0.2.0 h1:g6lXtMxsxrz6PZOO+rNnAJUn/GGRrK4FgVEhy/v+cHI= +github.com/nqd/flat v0.2.0/go.mod h1:FOuslZmNY082wVfVUUb7qAGWKl8z8Nor9FMg+Xj2Nss= +github.com/nunnatsa/ginkgolinter v0.19.0 h1:CnHRFAeBS3LdLI9h+Jidbcc5KH71GKOmaBZQk8Srnto= +github.com/nunnatsa/ginkgolinter v0.19.0/go.mod h1:jkQ3naZDmxaZMXPWaS9rblH+i+GWXQCaS/JFIWcOH2s= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.11 h1:8feyoE3OzPrcshW5/MJ4sGESc5cqmGkGCWlco4l0bqY= +github.com/nxadm/tail v1.4.11/go.mod h1:OTaG3NK980DZzxbRq6lEuzgU+mug70nY11sMd4JXXHc= +github.com/oasisprotocol/curve25519-voi v0.0.0-20230904125328-1f23a7beb09a h1:dlRvE5fWabOchtH7znfiFCcOvmIYgOeAS5ifBXBlh9Q= +github.com/oasisprotocol/curve25519-voi v0.0.0-20230904125328-1f23a7beb09a/go.mod h1:hVoHR2EVESiICEMbg137etN/Lx+lSrHPTD39Z/uE+2s= +github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= +github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= +github.com/oklog/run v1.1.0 h1:GEenZ1cK0+q0+wsJew9qUg/DyD8k3JzYsZAi5gYi2mA= +github.com/oklog/run v1.1.0/go.mod h1:sVPdnTZT1zYwAJeCMu2Th4T21pA3FPOQRfWjQlk7DVU= +github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= +github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= +github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod 
h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= +github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= +github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.14.0 h1:2mOpI4JVVPBN+WQRa0WKH2eXR+Ey+uK4n7Zj0aYpIQA= +github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= +github.com/onsi/ginkgo/v2 v2.22.2 h1:/3X8Panh8/WwhU/3Ssa6rCKqPLuAkVY2I0RoyDLySlU= +github.com/onsi/ginkgo/v2 v2.22.2/go.mod h1:oeMosUL+8LtarXBHu/c0bx2D/K9zyQ6uX3cTyztHwsk= +github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8= +github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY= +github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= +github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= +github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis= +github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= 
+github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/openzipkin-contrib/zipkin-go-opentracing v0.4.5/go.mod h1:/wsWhb9smxSfWAKL3wpBW7V8scJMt8N8gnaMCS9E/cA= +github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw= +github.com/openzipkin/zipkin-go v0.2.1/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/openzipkin/zipkin-go v0.2.2/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw= +github.com/otiai10/copy v1.14.1 h1:5/7E6qsUMBaH5AnQ0sSLzzTg1oTECmcCmT6lvF45Na8= +github.com/otiai10/copy v1.14.1/go.mod h1:oQwrEDDOci3IM8dJF0d8+jnbfPDllW6vUjNc3DoZm9I= +github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE= +github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs= +github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo= +github.com/otiai10/mint v1.3.1/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc= +github.com/otiai10/mint v1.6.3 h1:87qsV/aw1F5as1eH1zS/yqHY85ANKVMgkDrf9rcxbQs= +github.com/otiai10/mint v1.6.3/go.mod h1:MJm72SBthJjz8qhefc4z1PYEieWmy8Bku7CjcAqyUSM= +github.com/pact-foundation/pact-go v1.0.4/go.mod h1:uExwJY4kCzNPcHRj+hCR/HBbOOIwwtUjcrb0b5/5kLM= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY= +github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= +github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= 
+github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml/v2 v2.0.5/go.mod h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas= +github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= +github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= +github.com/performancecopilot/speed v3.0.0+incompatible/go.mod h1:/CLtqpZ5gBg1M9iaPbIdPPGyKcA8hKdoy6hAWba7Yac= +github.com/petermattis/goid v0.0.0-20240813172612-4fcff4a6cae7 h1:Dx7Ovyv/SFnMFw3fD4oEoeorXc6saIiQ23LrGLth0Gw= +github.com/petermattis/goid v0.0.0-20240813172612-4fcff4a6cae7/go.mod h1:pxMtw7cyUw6B2bRH0ZBANSPg+AoSud1I1iyJHI69jH4= +github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= +github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= +github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= +github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4= +github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod 
h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= +github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/polyfloyd/go-errorlint v1.7.1 h1:RyLVXIbosq1gBdk/pChWA8zWYLsq9UEw7a1L5TVMCnA= +github.com/polyfloyd/go-errorlint v1.7.1/go.mod h1:aXjNb1x2TNhoLsk26iv1yl7a+zTnXPhwEMtEXukiLR8= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= +github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4g= +github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs= +github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeDPbaTKGT+JTgUa3og= +github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.23.0 
h1:ust4zpdl9r4trLY/gSjlm07PuiBq2ynaXXlptpfy8Uc= +github.com/prometheus/client_golang v1.23.0/go.mod h1:i/o0R9ByOnHX0McrTMTyhYvKE4haaf2mW08I+jGAjEE= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= +github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= +github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt26CguLLsqA= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.15.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16Clt/msog/s= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common v0.65.0 h1:QDwzd+G1twt//Kwj/Ww6E9FQq1iVMmODnILtW1t2VzE= 
+github.com/prometheus/common v0.65.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.3.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg= +github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is= +github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= +github.com/quasilyte/go-ruleguard v0.4.3-0.20240823090925-0fe6f58b47b1 h1:+Wl/0aFp0hpuHM3H//KMft64WQ1yX9LdJY64Qm/gFCo= +github.com/quasilyte/go-ruleguard v0.4.3-0.20240823090925-0fe6f58b47b1/go.mod h1:GJLgqsLeo4qgavUoL8JeGFNS7qcisx3awV/w9eWTmNI= +github.com/quasilyte/go-ruleguard/dsl v0.3.22 h1:wd8zkOhSNr+I+8Qeciml08ivDt1pSXe60+5DqOpCjPE= +github.com/quasilyte/go-ruleguard/dsl v0.3.22/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= +github.com/quasilyte/gogrep v0.5.0 h1:eTKODPXbI8ffJMN+W2aE0+oL0z/nh8/5eNdiO34SOAo= +github.com/quasilyte/gogrep v0.5.0/go.mod h1:Cm9lpz9NZjEoL1tgZ2OgeUKPIxL1meE7eo60Z6Sk+Ng= +github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 h1:TCg2WBOl980XxGFEZSS6KlBGIV0diGdySzxATTWoqaU= +github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727/go.mod 
h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= +github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 h1:M8mH9eK4OUR4lu7Gd+PU1fV2/qnDNfzT635KRSObncs= +github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567/go.mod h1:DWNGW8A4Y+GyBgPuaQJuWiy0XYftx4Xm/y5Jqk9I6VQ= +github.com/quic-go/qpack v0.6.0 h1:g7W+BMYynC1LbYLSqRt8PBg5Tgwxn214ZZR34VIOjz8= +github.com/quic-go/qpack v0.6.0/go.mod h1:lUpLKChi8njB4ty2bFLX2x4gzDqXwUpaO1DP9qMDZII= +github.com/quic-go/quic-go v0.57.0 h1:AsSSrrMs4qI/hLrKlTH/TGQeTMY0ib1pAOX7vA3AdqE= +github.com/quic-go/quic-go v0.57.0/go.mod h1:ly4QBAjHA2VhdnxhojRsCUOeJwKYg+taDlos92xb1+s= +github.com/radovskyb/watcher v1.0.7 h1:AYePLih6dpmS32vlHfhCeli8127LzkIgwJGcwwe8tUE= +github.com/radovskyb/watcher v1.0.7/go.mod h1:78okwvY5wPdzcb1UYnip1pvrZNIVEIh/Cm+ZuvsUYIg= +github.com/raeperd/recvcheck v0.2.0 h1:GnU+NsbiCqdC2XX5+vMZzP+jAJC5fht7rcVTAhX74UI= +github.com/raeperd/recvcheck v0.2.0/go.mod h1:n04eYkwIR0JbgD73wT8wL4JjPC3wm0nFtzBnWNocnYU= +github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5XpJzTSTfLsJV/mx9Q9g7kxmchpfZyxgzM= +github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= 
+github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= +github.com/rs/cors v1.11.1 h1:eU3gRzXLRK57F5rKMGMZURNdIG4EoAmX8k94r9wXWHA= +github.com/rs/cors v1.11.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= +github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0= +github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY= +github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryancurrah/gomodguard v1.3.5 h1:cShyguSwUEeC0jS7ylOiG/idnd1TpJ1LfHGpV3oJmPU= +github.com/ryancurrah/gomodguard v1.3.5/go.mod h1:MXlEPQRxgfPQa62O8wzK3Ozbkv9Rkqr+wKjSxTdsNJE= +github.com/ryanrolds/sqlclosecheck v0.5.1 h1:dibWW826u0P8jNLsLN+En7+RqWWTYrjCB9fJfSfdyCU= +github.com/ryanrolds/sqlclosecheck v0.5.1/go.mod h1:2g3dUjoS6AL4huFdv6wn55WpLIDjY7ZgUR4J8HOO/XQ= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/sabhiram/go-gitignore v0.0.0-20180611051255-d3107576ba94 h1:G04eS0JkAIVZfaJLjla9dNxkJCPiKIGZlw9AfOhzOD0= +github.com/sabhiram/go-gitignore v0.0.0-20180611051255-d3107576ba94/go.mod h1:b18R55ulyQ/h3RaWyloPyER7fWQVZvimKKhnI5OfrJQ= 
+github.com/sagikazarmark/crypt v0.8.0/go.mod h1:TmKwZAo97S4Fy4sfMH/HX/cQP5D+ijra2NyLpNNmttY= +github.com/sagikazarmark/locafero v0.11.0 h1:1iurJgmM9G3PA/I+wWYIOw/5SyBtxapeHDcg+AAIFXc= +github.com/sagikazarmark/locafero v0.11.0/go.mod h1:nVIGvgyzw595SUSUE6tvCp3YYTeHs15MvlmU87WwIik= +github.com/sahilm/fuzzy v0.1.0/go.mod h1:VFvziUEIMCrT6A6tw2RFIXPXXmzXbOsSHF0DOI8ZK9Y= +github.com/sahilm/fuzzy v0.1.1 h1:ceu5RHF8DGgoi+/dR5PsECjCDH1BE3Fnmpo7aVXOdRA= +github.com/sahilm/fuzzy v0.1.1/go.mod h1:VFvziUEIMCrT6A6tw2RFIXPXXmzXbOsSHF0DOI8ZK9Y= +github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E= +github.com/sanposhiho/wastedassign/v2 v2.1.0 h1:crurBF7fJKIORrV85u9UUpePDYGWnwvv3+A96WvwXT0= +github.com/sanposhiho/wastedassign/v2 v2.1.0/go.mod h1:+oSmSC+9bQ+VUAxA66nBb0Z7N8CK7mscKTDYC6aIek4= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 h1:PKK9DyHxif4LZo+uQSgXNqs0jj5+xZwwfKHgph2lxBw= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.1/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU= +github.com/sasha-s/go-deadlock v0.3.5 h1:tNCOEEDG6tBqrNDOX35j/7hL5FcFViG6awUGROb2NsU= +github.com/sasha-s/go-deadlock v0.3.5/go.mod h1:bugP6EGbdGYObIlx7pUZtWqlvo8k9H6vCBBsiChJQ5U= +github.com/sashamelentyev/interfacebloat v1.1.0 h1:xdRdJp0irL086OyW1H/RTZTr1h/tMEOsumirXcOJqAw= +github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84dxiN1iBi9+G8zZIhPVoNjQ= +github.com/sashamelentyev/usestdlibvars v1.28.0 h1:jZnudE2zKCtYlGzLVreNp5pmCdOxXUzwsMDBkR21cyQ= +github.com/sashamelentyev/usestdlibvars v1.28.0/go.mod h1:9nl0jgOfHKWNFS43Ojw0i7aRoS4j6EBye3YBhmAIRF8= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/securego/gosec/v2 v2.22.1 h1:IcBt3TpI5Y9VN1YlwjSpM2cHu0i3Iw52QM+PQeg7jN8= +github.com/securego/gosec/v2 v2.22.1/go.mod h1:4bb95X4Jz7VSEPdVjC0hD7C/yR6kdeUBvCPOy9gDQ0g= +github.com/segmentio/asm v1.2.0 
h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys= +github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs= +github.com/segmentio/encoding v0.5.3 h1:OjMgICtcSFuNvQCdwqMCv9Tg7lEOXGwm1J5RPQccx6w= +github.com/segmentio/encoding v0.5.3/go.mod h1:HS1ZKa3kSN32ZHVZ7ZLPLXWvOVIiZtyJnO1gPH1sKt0= +github.com/segmentio/ksuid v1.0.4 h1:sBo2BdShXjmcugAMwjugoGUdUV0pcxY5mW4xKRn3v4c= +github.com/segmentio/ksuid v1.0.4/go.mod h1:/XUiZBD3kVx5SmUOl55voK5yeAbBNNIed+2O73XgrPE= +github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I= +github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= +github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= +github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= +github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+Wwfd0XE= +github.com/sivchari/containedctx v1.0.3/go.mod 
h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4= +github.com/sivchari/tenv v1.12.1 h1:+E0QzjktdnExv/wwsnnyk4oqZBUfuh89YMQT1cyuvSY= +github.com/sivchari/tenv v1.12.1/go.mod h1:1LjSOUCc25snIr5n3DtGGrENhX3LuWefcplwVGC24mw= +github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8= +github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= +github.com/sonatard/noctx v0.1.0 h1:JjqOc2WN16ISWAjAk8M5ej0RfExEXtkEyExl2hLW+OM= +github.com/sonatard/noctx v0.1.0/go.mod h1:0RvBxqY8D4j9cTTTWE8ylt2vqj2EPI8fHmrxHdsaZ2c= +github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= +github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d h1:yKm7XZV6j9Ev6lojP2XaIshpT4ymkqhMeSghO5Ps00E= +github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d/go.mod h1:UdhH50NIW0fCiwBSr0co2m7BnFLdv4fQTgdqdJTHFeE= +github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 h1:+jumHNA0Wrelhe64i8F6HNlS8pkoyMv5sreGx2Ry5Rw= +github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8/go.mod h1:3n1Cwaq1E1/1lhQhtRK2ts/ZwZEhjcQeJQ1RuC6Q/8U= +github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0= +github.com/sourcegraph/go-diff v0.7.0/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs= +github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e h1:qpG93cPwA5f7s/ZPBJnGOYQNK/vKsaDaseuKT5Asee8= +github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e/go.mod h1:HuIsMU8RRBOtsCgI77wP899iHVBQpCmg4ErYMZB+2IA= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= 
+github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= +github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= +github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I= +github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg= +github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= +github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY= +github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo= +github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE= +github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY= +github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s= +github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0= +github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= +github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= +github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= +github.com/spf13/viper v1.14.0/go.mod h1:WT//axPky3FdvXHzGw33dNdXXXfFQqmEalje+egj8As= 
+github.com/spf13/viper v1.21.0 h1:x5S+0EU27Lbphp4UKm1C+1oQO+rKx36vfCoaVebLFSU= +github.com/spf13/viper v1.21.0/go.mod h1:P0lhsswPGWD/1lZJ9ny3fYnVqxiegrlNrEmgLjbTCAY= +github.com/ssgreg/nlreturn/v2 v2.2.1 h1:X4XDI7jstt3ySqGU86YGAURbxw3oTDPK9sPEi6YEwQ0= +github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I= +github.com/stbenjam/no-sprintf-host-port v0.2.0 h1:i8pxvGrt1+4G0czLr/WnmyH7zbZ8Bg8etvARQ1rpyl4= +github.com/stbenjam/no-sprintf-host-port v0.2.0/go.mod h1:eL0bQ9PasS0hsyTyfTjjG+E80QIyPnBVQbYZyv20Jfk= +github.com/stoewer/go-strcase v1.3.1 h1:iS0MdW+kVTxgMoE1LAZyMiYJFKlOzLooE4MxjirtkAs= +github.com/stoewer/go-strcase v1.3.1/go.mod h1:fAH5hQ5pehh+j3nZfvwdk2RgEgQjAoM8wodgtPmh1xo= +github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5JnDBl6z3cMAg/SywNDC5ABu5ApDIw6lUbRmI= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify 
v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= +github.com/stretchr/testify v1.7.5/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= +github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= +github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= +github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY= +github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc= +github.com/tbruyelle/mdgofmt v0.1.3 h1:ZQo2nbYhK7CG0kApQmgUeMBFugIgIO9tHvyChaMzf30= +github.com/tbruyelle/mdgofmt v0.1.3/go.mod h1:D3fyKvx4oZq99YeQm5j/gnGmc9w4HogvQMujPVzW+zQ= +github.com/tdakkota/asciicheck v0.4.0 h1:VZ13Itw4k1i7d+dpDSNS8Op645XgGHpkCEh/WHicgWw= +github.com/tdakkota/asciicheck v0.4.0/go.mod h1:0k7M3rCfRXb0Z6bwgvkEIMleKH3kXNz9UqJ9Xuqopr8= +github.com/tendermint/go-amino v0.16.0 h1:GyhmgQKvqF82e2oZeuMSp9JTN0N09emoSZlb2lyGa2E= +github.com/tendermint/go-amino v0.16.0/go.mod 
h1:TQU0M1i/ImAo+tYpZi73AU3V/dKeCoMC9Sphe2ZwGME= +github.com/tenntenn/modver v1.0.1 h1:2klLppGhDgzJrScMpkj9Ujy3rXPUspSjAcev9tSEBgA= +github.com/tenntenn/modver v1.0.1/go.mod h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0= +github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3 h1:f+jULpRQGxTSkNYKJ51yaw6ChIqO+Je8UqsTKN/cDag= +github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY= +github.com/tetafro/godot v1.4.20 h1:z/p8Ek55UdNvzt4TFn2zx2KscpW4rWqcnUrdmvWJj7E= +github.com/tetafro/godot v1.4.20/go.mod h1:2oVxTBSftRTh4+MVfUaUXR6bn2GDXCaMcOG4Dk3rfio= +github.com/tetratelabs/wazero v1.9.0 h1:IcZ56OuxrtaEz8UYNRHBrUa9bYeX9oVY93KspZZBf/I= +github.com/tetratelabs/wazero v1.9.0/go.mod h1:TSbcXCfFP0L2FGkRPxHphadXPjo1T6W+CseNNY7EkjM= +github.com/tidwall/btree v1.7.0 h1:L1fkJH/AuEh5zBnnBbmTwQ5Lt+bRJ5A8EWecslvo9iI= +github.com/tidwall/btree v1.7.0/go.mod h1:twD9XRA5jj9VUQGELzDO4HPQTNJsoWWfYEL+EUQ2cKY= +github.com/timakin/bodyclose v0.0.0-20241017074812-ed6a65f985e3 h1:y4mJRFlM6fUyPhoXuFg/Yu02fg/nIPFMOY8tOqppoFg= +github.com/timakin/bodyclose v0.0.0-20241017074812-ed6a65f985e3/go.mod h1:mkjARE7Yr8qU23YcGMSALbIxTQ9r9QBVahQOBRfU460= +github.com/timonwong/loggercheck v0.10.1 h1:uVZYClxQFpw55eh+PIoqM7uAOHMrhVcDoWDery9R8Lg= +github.com/timonwong/loggercheck v0.10.1/go.mod h1:HEAWU8djynujaAVX7QI65Myb8qgfcZ1uKbdpg3ZzKl8= +github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/tomarrell/wrapcheck/v2 v2.10.0 h1:SzRCryzy4IrAH7bVGG4cK40tNUhmVmMDuJujy4XwYDg= +github.com/tomarrell/wrapcheck/v2 v2.10.0/go.mod h1:g9vNIyhb5/9TQgumxQyOEqDHsmGYcGsVMOx/xGkqdMo= +github.com/tommy-muehle/go-mnd/v2 v2.5.1 h1:NowYhSdyE/1zwK9QCLeRb6USWdoif80Ie+v+yU8u1Zw= +github.com/tommy-muehle/go-mnd/v2 v2.5.1/go.mod 
h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= +github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= +github.com/ugorji/go v1.1.7 h1:/68gy2h+1mWMrwZFeD1kQialdSzAb432dtpeJ42ovdo= +github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= +github.com/ugorji/go/codec v1.1.7 h1:2SvQaVZ1ouYrrKKwoSk2pzd4A9evlKJb9oTL+OaLUSs= +github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= +github.com/ultraware/funlen v0.2.0 h1:gCHmCn+d2/1SemTdYMiKLAHFYxTYz7z9VIDRaTGyLkI= +github.com/ultraware/funlen v0.2.0/go.mod h1:ZE0q4TsJ8T1SQcjmkhN/w+MceuatI6pBFSxxyteHIJA= +github.com/ultraware/whitespace v0.2.0 h1:TYowo2m9Nfj1baEQBjuHzvMRbp19i+RCcRYrSWoFa+g= +github.com/ultraware/whitespace v0.2.0/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8= +github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= +github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/uudashr/gocognit v1.2.0 h1:3BU9aMr1xbhPlvJLSydKwdLN3tEUUrzPSSM8S4hDYRA= +github.com/uudashr/gocognit v1.2.0/go.mod h1:k/DdKPI6XBZO1q7HgoV2juESI2/Ofj9AcHPZhBBdrTU= +github.com/uudashr/iface v1.3.1 h1:bA51vmVx1UIhiIsQFSNq6GZ6VPTk3WNMZgRiCe9R29U= +github.com/uudashr/iface v1.3.1/go.mod h1:4QvspiRd3JLPAEXBQ9AiZpLbJlrWWgRChOKDJEuQTdg= +github.com/vbatts/tar-split v0.12.1 h1:CqKoORW7BUWBe7UL/iqTVvkTBOF8UvOMKOIZykxnnbo= +github.com/vbatts/tar-split v0.12.1/go.mod h1:eF6B6i6ftWQcDqEn3/iGFRFRo8cBIMSJVOpnNdfTMFA= +github.com/vektra/mockery/v2 v2.53.3 h1:yBU8XrzntcZdcNRRv+At0anXgSaFtgkyVUNm3f4an3U= +github.com/vektra/mockery/v2 v2.53.3/go.mod 
h1:hIFFb3CvzPdDJJiU7J4zLRblUMv7OuezWsHPmswriwo= +github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= +github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= +github.com/xen0n/gosmopolitan v1.2.2 h1:/p2KTnMzwRexIW8GlKawsTWOxn7UHA+jCMF/V8HHtvU= +github.com/xen0n/gosmopolitan v1.2.2/go.mod h1:7XX7Mj61uLYrj0qmeN0zi7XDon9JRAEhYQqAPLVNTeg= +github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM= +github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= +github.com/yagipy/maintidx v1.0.0 h1:h5NvIsCz+nRDapQ0exNv4aJ0yXSI0420omVANTv3GJM= +github.com/yagipy/maintidx v1.0.0/go.mod h1:0qNf/I/CCZXSMhsRsrEPDZ+DkekpKLXAJfsTACwgXLk= +github.com/yeya24/promlinter v0.3.0 h1:JVDbMp08lVCP7Y6NP3qHroGAO6z2yGKQtS5JsjqtoFs= +github.com/yeya24/promlinter v0.3.0/go.mod h1:cDfJQQYv9uYciW60QT0eeHlFodotkYZlL+YcPQN+mW4= +github.com/ykadowak/zerologlint v0.1.5 h1:Gy/fMz1dFQN9JZTPjv1hxEk+sRWm05row04Yoolgdiw= +github.com/ykadowak/zerologlint v0.1.5/go.mod h1:KaUskqF3e/v59oPmdq1U1DnKcuHokl2/K1U4pmIELKg= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.13/go.mod 
h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yuin/goldmark v1.5.2 h1:ALmeCk/px5FSm1MAcFBAsVKZjDuMVj8Tm7FFIlMJnqU= +github.com/yuin/goldmark v1.5.2/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yuin/goldmark-emoji v1.0.1 h1:ctuWEyzGBwiucEqxzwe0SOYDXPAucOrE9NQC18Wa1os= +github.com/yuin/goldmark-emoji v1.0.1/go.mod h1:2w1E6FEWLcDQkoTE+7HU6QF1F6SLlNGjRIBbIZQFqkQ= +github.com/zondax/golem v0.27.0 h1:IbBjGIXF3SoGOZHsILJvIM/F/ylwJzMcHAcggiqniPw= +github.com/zondax/golem v0.27.0/go.mod h1:AmorCgJPt00L8xN1VrMBe13PSifoZksnQ1Ge906bu4A= +github.com/zondax/hid v0.9.2 h1:WCJFnEDMiqGF64nlZz28E9qLVZ0KSJ7xpc5DLEyma2U= +github.com/zondax/hid v0.9.2/go.mod h1:l5wttcP0jwtdLjqjMMWFVEE7d1zO0jvSPA9OPZxWpEM= +github.com/zondax/ledger-go v1.0.1 h1:Ks/2tz/dOF+dbRynfZ0dEhcdL1lqw43Sa0zMXHpQ3aQ= +github.com/zondax/ledger-go v1.0.1/go.mod h1:j7IgMY39f30apthJYMd1YsHZRqdyu4KbVmUp0nU78X0= +gitlab.com/bosi/decorder v0.4.2 h1:qbQaV3zgwnBZ4zPMhGLW4KZe7A7NwxEhJx39R3shffo= +gitlab.com/bosi/decorder v0.4.2/go.mod h1:muuhHoaJkA9QLcYHq4Mj8FJUwDZ+EirSHRiaTcTf6T8= +go-simpler.org/assert v0.9.0 h1:PfpmcSvL7yAnWyChSjOz6Sp6m9j5lyK8Ok9pEL31YkQ= +go-simpler.org/assert v0.9.0/go.mod h1:74Eqh5eI6vCK6Y5l3PI8ZYFXG4Sa+tkr70OIPJAUr28= +go-simpler.org/musttag v0.13.0 h1:Q/YAW0AHvaoaIbsPj3bvEI5/QFP7w696IMUpnKXQfCE= +go-simpler.org/musttag v0.13.0/go.mod h1:FTzIGeK6OkKlUDVpj0iQUXZLUO1Js9+mvykDQy9C5yM= +go-simpler.org/sloglint v0.9.0 h1:/40NQtjRx9txvsB/RN022KsUJU+zaaSb/9q9BSefSrE= +go-simpler.org/sloglint v0.9.0/go.mod h1:G/OrAF6uxj48sHahCzrbarVMptL2kjWTaUeC8+fOGww= +go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/bbolt v1.4.0 h1:TU77id3TnN/zKr7CO/uk+fBCwF2jGcMuw2B/FMAzYIk= +go.etcd.io/bbolt v1.4.0/go.mod h1:AsD+OCi/qPN1giOX1aiLAha3o1U8rAz65bvN4j0sRuk= +go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg= 
+go.etcd.io/etcd/api/v3 v3.5.5/go.mod h1:KFtNaxGDw4Yx/BA4iPPwevUTAuqcsPxzyX8PHydchN8= +go.etcd.io/etcd/client/pkg/v3 v3.5.5/go.mod h1:ggrwbk069qxpKPq8/FKkQ3Xq9y39kbFR4LnKszpRXeQ= +go.etcd.io/etcd/client/v2 v2.305.5/go.mod h1:zQjKllfqfBVyVStbt4FaosoX2iYd8fV/GRy/PbowgP4= +go.etcd.io/etcd/client/v3 v3.5.5/go.mod h1:aApjR4WGlSumpnJ2kloS75h6aHUmAyaPLjHMxpc7E7c= +go.lsp.dev/jsonrpc2 v0.10.0 h1:Pr/YcXJoEOTMc/b6OTmcR1DPJ3mSWl/SWiU1Cct6VmI= +go.lsp.dev/jsonrpc2 v0.10.0/go.mod h1:fmEzIdXPi/rf6d4uFcayi8HpFP1nBF99ERP1htC72Ac= +go.lsp.dev/pkg v0.0.0-20210717090340-384b27a52fb2 h1:hCzQgh6UcwbKgNSRurYWSqh8MufqRRPODRBblutn4TE= +go.lsp.dev/pkg v0.0.0-20210717090340-384b27a52fb2/go.mod h1:gtSHRuYfbCT0qnbLnovpie/WEmqyJ7T4n6VXiFMBtcw= +go.lsp.dev/protocol v0.12.0 h1:tNprUI9klQW5FAFVM4Sa+AbPFuVQByWhP1ttNUAjIWg= +go.lsp.dev/protocol v0.12.0/go.mod h1:Qb11/HgZQ72qQbeyPfJbu3hZBH23s1sr4st8czGeDMQ= +go.lsp.dev/uri v0.3.0 h1:KcZJmh6nFIBeJzTugn5JTU6OOyG0lDOo3R9KwTxTYbo= +go.lsp.dev/uri v0.3.0/go.mod h1:P5sbO1IQR+qySTWOCnhnK7phBx+W3zbLqSMDJNTw88I= +go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= +go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= +go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.opencensus.io v0.24.0 
h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= +go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= +go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 h1:Hf9xI/XLML9ElpiHVDNwvqI0hIFlzV8dgIr35kV1kRU= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0/go.mod h1:NfchwuyNoMcZ5MLHwPrODwUF1HWCXWrL31s8gSAdIKY= +go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48= +go.opentelemetry.io/otel v1.39.0/go.mod h1:kLlFTywNWrFyEdH0oj2xK0bFYZtHRYUdv1NklR/tgc8= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.37.0 h1:Ahq7pZmv87yiyn3jeFz/LekZmPLLdKejuO3NcK9MssM= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.37.0/go.mod h1:MJTqhM0im3mRLw1i8uGHnCvUEeS7VwRyxlLC78PA18M= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0 h1:wpMfgF8E1rkrT1Z6meFh1NDtownE9Ii3n3X2GJYjsaU= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0/go.mod h1:wAy0T/dUbs468uOlkT31xjvqQgEVXv58BRFWEgn5v/0= +go.opentelemetry.io/otel/metric v1.39.0 h1:d1UzonvEZriVfpNKEVmHXbdf909uGTOQjA0HF0Ls5Q0= +go.opentelemetry.io/otel/metric v1.39.0/go.mod h1:jrZSWL33sD7bBxg1xjrqyDjnuzTUB0x1nBERXd7Ftcs= +go.opentelemetry.io/otel/sdk v1.39.0 h1:nMLYcjVsvdui1B/4FRkwjzoRVsMK8uL/cj0OyhKzt18= +go.opentelemetry.io/otel/sdk v1.39.0/go.mod h1:vDojkC4/jsTJsE+kh+LXYQlbL8CgrEcwmt1ENZszdJE= +go.opentelemetry.io/otel/sdk/metric v1.39.0 h1:cXMVVFVgsIf2YL6QkRF4Urbr/aMInf+2WKg+sEJTtB8= +go.opentelemetry.io/otel/sdk/metric v1.39.0/go.mod h1:xq9HEVH7qeX69/JnwEfp6fVq5wosJsY1mt4lLfYdVew= +go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI= +go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod 
h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.opentelemetry.io/proto/otlp v1.7.0 h1:jX1VolD6nHuFzOYso2E73H85i92Mv8JQYk0K9vz09os= +go.opentelemetry.io/proto/otlp v1.7.0/go.mod h1:fSKjH6YJ7HDlwzltzyMj036AJ3ejJLCgCSHGj4efDDo= +go.starlark.net v0.0.0-20231101134539-556fd59b42f6 h1:+eC0F/k4aBLC4szgOcjd7bDTEnpxADJyWJE0yowgM3E= +go.starlark.net v0.0.0-20231101134539-556fd59b42f6/go.mod h1:LcLNIzVOMp4oV+uusnpk+VU+SzXaJakUuBjoCSWH5dM= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/automaxprocs v1.6.0 h1:O3y2/QNTOdbF+e/dpXNNW7Rx2hZ4sTIPyybbxyNqTUs= +go.uber.org/automaxprocs v1.6.0/go.mod h1:ifeIMSnPZuznNm6jmdzmU3/bfk01Fe2fotchwEFJ8r8= +go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= +go.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y= +go.uber.org/mock v0.6.0/go.mod h1:KiVJ4BqZJaMj4svdfmHM0AUx4NJYO8ZNpPnZn1Z+BBU= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/multierr v1.8.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak= +go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= 
+go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= +go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= +go.uber.org/zap v1.21.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw= +go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= +go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= +go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI= +go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU= +go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= +go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= +golang.org/x/arch v0.17.0 h1:4O3dfLzd+lQewptAHqjewQZQDyEdejz3VwgeYwkZneU= +golang.org/x/arch v0.17.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod 
h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201012173705-84dcc777aaee/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201016220609-9e8e0b390897/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20220525230936-793ad666bf5e/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= +golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU= +golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod 
h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw= +golang.org/x/exp v0.0.0-20250718183923-645b1fa84792 h1:R9PFI6EUdfVKgwKjZef7QIwGcBKu86OEFpJ9nUEP2l4= +golang.org/x/exp v0.0.0-20250718183923-645b1fa84792/go.mod h1:A+z0yzpGtvnG90cToK5n2tu8UJVP2XUATh+r+sfOOOc= +golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= +golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= +golang.org/x/exp/typeparams v0.0.0-20250210185358-939b2ce775ac h1:TSSpLIG4v+p0rPv1pNOQtl1I8knsO4S9trOxNMOLVP4= +golang.org/x/exp/typeparams v0.0.0-20250210185358-939b2ce775ac/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod 
h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= +golang.org/x/mod 
v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk= +golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net 
v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod 
h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= 
+golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211216030914-fe4d6282115f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220617184016-355a448f1bc9/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220624214902-1bab6f366d9e/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.0.0-20220909164309-bea034e7d591/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.0.0-20221002022538-bcab6841153b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.0.0-20221014081412-f15817d10f9b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.2.0/go.mod 
h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= +golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU= +golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod 
h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220608161450-d0670ef3b1eb/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE= +golang.org/x/oauth2 v0.0.0-20220622183110-fd043fe589d2/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE= +golang.org/x/oauth2 v0.0.0-20220822191816-0ebed06d0094/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= +golang.org/x/oauth2 v0.0.0-20220909003341-f21342109be1/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= +golang.org/x/oauth2 v0.0.0-20221014153046-6fdb5e3db783/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= +golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys 
v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200420163511-1957bb5e6d1f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200814200057-3d37ad5750ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200916030750-2334cc1a136f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201009025420-dfb3f7c4e634/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201020230747-6e5568b54d1a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211117180635-dee7805ff2e1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220204135822-1c1b9b1eba6a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220315194320-039c03cc5b86/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220610221304-9f5ed59c137d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220624220833-87e55d714810/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk= +golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54 h1:E2/AqCUMZGgd73TQkxUMcMla25GB9i/5HOdLr+uH7Vo= +golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54/go.mod h1:hKdjCMrbv9skySur+Nek8Hd0uJ0GuxJIoIX2payrIdQ= 
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.0.0-20220722155259-a9ba230a4035/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= +golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= +golang.org/x/term v0.38.0 h1:PQ5pkm/rLO6HnxFR7N2lJHOZX6Kez5Y1gDSJla6jo7Q= +golang.org/x/term v0.38.0/go.mod h1:bSEAKrOT1W+VSu9TSCMtoGEOUcKxOKgl3LE5QEF/xVg= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.7.0/go.mod 
h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU= +golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY= +golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20220609170525-579cf78fd858/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= +golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod 
h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod 
h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200724022722-7017fd6b1305/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod 
h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.1-0.20210205202024-ef80cdb6ec6d/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= +golang.org/x/tools v0.1.1-0.20210302220138-2ac05c832e1a/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg= +golang.org/x/tools v0.39.0 h1:ik4ho21kwuQln40uelmciQPp9SipgNDdrafrYA4TmQQ= +golang.org/x/tools v0.39.0/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ= +golang.org/x/tools/go/expect v0.1.0-deprecated h1:jY2C5HGYR5lqex3gEniOQL0r7Dq5+VGVgY1nudX5lXY= +golang.org/x/tools/go/expect v0.1.0-deprecated/go.mod h1:eihoPOH+FgIqa3FpoTwguz/bVUSGBlGQU67vpBeOrBY= +golang.org/x/tools/go/packages/packagestest v0.1.1-deprecated h1:1h2MnaIAIXISqTFKdENegdpAgUXz6NrPEsbIeWaBRvM= +golang.org/x/tools/go/packages/packagestest v0.1.1-deprecated/go.mod h1:RVAQXBGNv1ib0J382/DPCRS/BPnsGebyM1Gj5VSDpG8= +golang.org/x/vuln v1.1.4 
h1:Ju8QsuyhX3Hk8ma3CesTbO8vfJD9EvUBgHvkxHBzj0I= +golang.org/x/vuln v1.1.4/go.mod h1:F+45wmU18ym/ca5PLTPLsSzr2KppzswxPP603ldA67s= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= +golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= +golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= +gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= +gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= +google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod 
h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= +google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= +google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= +google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= +google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= +google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= +google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= +google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= +google.golang.org/api v0.67.0/go.mod 
h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g= +google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA= +google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8= +google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= +google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= +google.golang.org/api v0.77.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= +google.golang.org/api v0.78.0/go.mod h1:1Sg78yoMLOhlQTeF+ARBoytAcH1NNyyl390YMy6rKmw= +google.golang.org/api v0.80.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg= +google.golang.org/api v0.84.0/go.mod h1:NTsGnUFJMYROtiquksZHBWtHfeMC7iYthki7Eq3pa8o= +google.golang.org/api v0.85.0/go.mod h1:AqZf8Ep9uZ2pyTvgL+x0D3Zt0eoT9b5E8fmzfu6FO2g= +google.golang.org/api v0.90.0/go.mod h1:+Sem1dnrKlrXMR/X0bPnMWyluQe4RsNoYfmNLhOIkzw= +google.golang.org/api v0.93.0/go.mod h1:+Sem1dnrKlrXMR/X0bPnMWyluQe4RsNoYfmNLhOIkzw= +google.golang.org/api v0.95.0/go.mod h1:eADj+UBuxkh5zlrSntJghuNeg8HwQ1w5lTKkuqaETEI= +google.golang.org/api v0.96.0/go.mod h1:w7wJQLTM+wvQpNf5JyEcBoxK0RH7EDrh/L4qfsuJ13s= +google.golang.org/api v0.97.0/go.mod h1:w7wJQLTM+wvQpNf5JyEcBoxK0RH7EDrh/L4qfsuJ13s= +google.golang.org/api v0.98.0/go.mod h1:w7wJQLTM+wvQpNf5JyEcBoxK0RH7EDrh/L4qfsuJ13s= +google.golang.org/api v0.100.0/go.mod h1:ZE3Z2+ZOr87Rx7dqFsdRQkRBk36kDtp/h+QpHbB7a70= +google.golang.org/api v0.102.0/go.mod h1:3VFl6/fzoA+qNuS1N1/VfXY4LjoXN/wzeIp7TweWwGo= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine 
v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod 
h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto 
v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210126160654-44e461bb6506/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210329143202-679c6ae281ee/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= 
+google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= +google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= +google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod 
h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220314164441-57ef72a4c106/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= +google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= +google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220502173005-c8bf987b8c21/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto 
v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220523171625-347a074981d8/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220608133413-ed9918b62aac/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220616135557-88e70c0c3a90/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220617124728-180714bec0ad/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220624142145-8cd45d7dbd1f/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220628213854-d9e0b6570c03/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220722212130-b98a9ff5e252/go.mod h1:GkXuJDJ6aQ7lnJcRF+SJVgFdQhypqgl3LB1C9vabdRE= +google.golang.org/genproto v0.0.0-20220801145646-83ce21fca29f/go.mod h1:iHe1svFLAZg9VWz891+QbRMwUv9O/1Ww+/mngYeThbc= +google.golang.org/genproto v0.0.0-20220815135757-37a418bb8959/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk= +google.golang.org/genproto v0.0.0-20220817144833-d7fd3f11b9b1/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk= +google.golang.org/genproto v0.0.0-20220822174746-9e6da59bd2fc/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk= +google.golang.org/genproto v0.0.0-20220829144015-23454907ede3/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk= +google.golang.org/genproto v0.0.0-20220829175752-36a9c930ecbf/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk= +google.golang.org/genproto v0.0.0-20220913154956-18f8339a66a5/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo= +google.golang.org/genproto v0.0.0-20220914142337-ca0e39ece12f/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo= 
+google.golang.org/genproto v0.0.0-20220915135415-7fd63a7952de/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo= +google.golang.org/genproto v0.0.0-20220916172020-2692e8806bfa/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo= +google.golang.org/genproto v0.0.0-20220919141832-68c03719ef51/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo= +google.golang.org/genproto v0.0.0-20220920201722-2b89144ce006/go.mod h1:ht8XFiar2npT/g4vkk7O0WYS1sHOHbdujxbEp7CJWbw= +google.golang.org/genproto v0.0.0-20220926165614-551eb538f295/go.mod h1:woMGP53BroOrRY3xTxlbr8Y3eB/nzAvvFM83q7kG2OI= +google.golang.org/genproto v0.0.0-20220926220553-6981cbe3cfce/go.mod h1:woMGP53BroOrRY3xTxlbr8Y3eB/nzAvvFM83q7kG2OI= +google.golang.org/genproto v0.0.0-20221010155953-15ba04fc1c0e/go.mod h1:3526vdqwhZAwq4wsRUaVG555sVgsNmIjRtO7t/JH29U= +google.golang.org/genproto v0.0.0-20221014173430-6e2ab493f96b/go.mod h1:1vXfmgAz9N9Jx0QA82PqRVauvCz1SGSz739p0f183jM= +google.golang.org/genproto v0.0.0-20221014213838-99cd37c6964a/go.mod h1:1vXfmgAz9N9Jx0QA82PqRVauvCz1SGSz739p0f183jM= +google.golang.org/genproto v0.0.0-20221024183307-1bc688fe9f3e/go.mod h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s= +google.golang.org/genproto v0.0.0-20250603155806-513f23925822 h1:rHWScKit0gvAPuOnu87KpaYtjK5zBMLcULh7gxkCXu4= +google.golang.org/genproto v0.0.0-20250603155806-513f23925822/go.mod h1:HubltRL7rMh0LfnQPkMH4NPDFEWp0jw3vixw7jEM53s= +google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217 h1:fCvbg86sFXwdrl5LgVcTEvNC+2txB5mgROGmRL5mrls= +google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217/go.mod h1:+rXWjjaukWZun3mLfjmVnQi18E1AsFbDN9QdJ5YXLto= +google.golang.org/genproto/googleapis/rpc v0.0.0-20251202230838-ff82c1b0f217 h1:gRkg/vSppuSQoDjxyiGfN4Upv/h/DQmIR10ZU8dh4Ww= +google.golang.org/genproto/googleapis/rpc v0.0.0-20251202230838-ff82c1b0f217/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk= +google.golang.org/grpc v1.17.0/go.mod 
h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.22.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.32.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc 
v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.41.0/go.mod h1:U3l9uK9J0sini8mHphKoXyaqDA/8VyGnDee1zzIUK6k= +google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= +google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= +google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.47.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.49.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= +google.golang.org/grpc v1.50.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= +google.golang.org/grpc v1.50.1/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= +google.golang.org/grpc v1.79.3 h1:sybAEdRIEtvcD68Gx7dmnwjZKlyfuc61Dyo9pGXXkKE= +google.golang.org/grpc v1.79.3/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod 
h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE= +google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod 
h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= +gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= +gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 
v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q= +gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA= +honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.6.0 h1:TAODvD3knlq75WCp2nyGJtT4LeRV/o7NN9nYPeVJXf8= +honnef.co/go/tools v0.6.0/go.mod h1:3puzxxljPCe8RGJX7BIy1plGbxEOZni5mR2aXe3/uk4= +mvdan.cc/gofumpt v0.7.0 h1:bg91ttqXmi9y2xawvkuMXyvAA/1ZGJqYAEGjXuP0JXU= +mvdan.cc/gofumpt v0.7.0/go.mod h1:txVFJy/Sc/mvaycET54pV8SW8gWxTlUuGHVEcncmNUo= +mvdan.cc/unparam v0.0.0-20240528143540-8a5130ca722f h1:lMpcwN6GxNbWtbpI1+xzFLSW8XzX0u72NttUGVFjO3U= +mvdan.cc/unparam v0.0.0-20240528143540-8a5130ca722f/go.mod h1:RSLa7mKKCNeTTMHBw5Hsy2rfJmd6O2ivt9Dw9ZqCQpQ= +nhooyr.io/websocket v1.8.6 h1:s+C3xAMLwGmlI31Nyn/eAehUlZPwfYZu2JXM621Q5/k= 
+nhooyr.io/websocket v1.8.6/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= +pgregory.net/rapid v1.2.0 h1:keKAYRcjm+e1F0oAuU5F5+YPAWcyxNNRK2wud503Gnk= +pgregory.net/rapid v1.2.0/go.mod h1:PY5XlDGj0+V1FCq0o192FdRhpKHGTRIWBgqjDBTrq04= +pluginrpc.com/pluginrpc v0.5.0 h1:tOQj2D35hOmvHyPu8e7ohW2/QvAnEtKscy2IJYWQ2yo= +pluginrpc.com/pluginrpc v0.5.0/go.mod h1:UNWZ941hcVAoOZUn8YZsMmOZBzbUjQa3XMns8RQLp9o= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= +sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= +sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= +sigs.k8s.io/yaml v1.6.0 h1:G8fkbMSAFqgEFgh4b1wmtzDnioxFCUgTZhlbj5P9QYs= +sigs.k8s.io/yaml v1.6.0/go.mod h1:796bPqUfzR/0jLAl6XjHl3Ck7MiyVv8dbTdyT3/pMf4= +sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= diff --git a/ignite/cmd/account.go b/ignite/cmd/account.go new file mode 100644 index 0000000..c412881 --- /dev/null +++ b/ignite/cmd/account.go @@ -0,0 +1,151 @@ +package ignitecmd + +import ( + "os" + + "github.com/spf13/cobra" + flag "github.com/spf13/pflag" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/bubbleconfirm" + "github.com/ignite/cli/v29/ignite/pkg/cliui/entrywriter" + "github.com/ignite/cli/v29/ignite/pkg/cosmosaccount" +) + +const ( + flagAddressPrefix = "address-prefix" + flagCoinType = "coin-type" + flagPassphrase = "passphrase" + flagNonInteractive = "non-interactive" + flagKeyringBackend = "keyring-backend" + flagKeyringDir = "keyring-dir" +) + +func NewAccount() *cobra.Command { + c := &cobra.Command{ + Use: "account [command]", + Short: "Create, delete, and show Ignite accounts", + Long: `Commands for managing Ignite accounts. 
An Ignite account is a private/public +keypair stored in a keyring. Currently Ignite accounts are used when interacting +with Ignite Apps (namely ignite relayer, ignite network and ignite connect). + +Note: Ignite account commands are not for managing your chain's keys and accounts. Use +you chain's binary to manage accounts from "config.yml". For example, if your +blockchain is called "mychain", use "mychaind keys" to manage keys for the +chain. +`, + Aliases: []string{"a"}, + Args: cobra.ExactArgs(1), + } + + c.PersistentFlags().AddFlagSet(flagSetKeyringBackend()) + c.PersistentFlags().AddFlagSet(flagSetKeyringDir()) + + c.AddCommand( + NewAccountCreate(), + NewAccountDelete(), + NewAccountShow(), + NewAccountList(), + NewAccountImport(), + NewAccountExport(), + ) + + return c +} + +func printAccounts(cmd *cobra.Command, accounts ...cosmosaccount.Account) error { + var accEntries [][]string + for _, acc := range accounts { + addr, err := acc.Address(getAddressPrefix(cmd)) + if err != nil { + return err + } + + pubKey, err := acc.PubKey() + if err != nil { + return err + } + + accEntries = append(accEntries, []string{acc.Name, addr, pubKey}) + } + return entrywriter.MustWrite(os.Stdout, []string{"name", "address", "public key"}, accEntries...) 
+} + +func flagSetKeyringBackend() *flag.FlagSet { + fs := flag.NewFlagSet("", flag.ContinueOnError) + fs.String(flagKeyringBackend, string(cosmosaccount.KeyringTest), "keyring backend to store your account keys") + return fs +} + +func getKeyringBackend(cmd *cobra.Command) cosmosaccount.KeyringBackend { + backend, _ := cmd.Flags().GetString(flagKeyringBackend) + return cosmosaccount.KeyringBackend(backend) +} + +func flagSetKeyringDir() *flag.FlagSet { + fs := flag.NewFlagSet("", flag.ContinueOnError) + fs.String(flagKeyringDir, cosmosaccount.KeyringHome, "accounts keyring directory") + return fs +} + +func getKeyringDir(cmd *cobra.Command) string { + keyringDir, _ := cmd.Flags().GetString(flagKeyringDir) + return keyringDir +} + +func flagSetAccountPrefixes() *flag.FlagSet { + fs := flag.NewFlagSet("", flag.ContinueOnError) + fs.String(flagAddressPrefix, cosmosaccount.AccountPrefixCosmos, "account address prefix") + return fs +} + +func getAddressPrefix(cmd *cobra.Command) string { + prefix, _ := cmd.Flags().GetString(flagAddressPrefix) + return prefix +} + +func flagSetCoinType() *flag.FlagSet { + fs := flag.NewFlagSet("", flag.ContinueOnError) + fs.Uint32(flagCoinType, cosmosaccount.CoinTypeCosmos, "coin type to use for the account") + return fs +} + +func getCoinType(cmd *cobra.Command) uint32 { + coinType, _ := cmd.Flags().GetUint32(flagCoinType) + return coinType +} + +func flagSetAccountImport() *flag.FlagSet { + fs := flag.NewFlagSet("", flag.ContinueOnError) + fs.Bool(flagNonInteractive, false, "do not enter into interactive mode") + fs.String(flagPassphrase, "", "passphrase to decrypt the imported key (ignored when secret is a mnemonic)") + return fs +} + +func flagSetAccountExport() *flag.FlagSet { + fs := flag.NewFlagSet("", flag.ContinueOnError) + fs.Bool(flagNonInteractive, false, "do not enter into interactive mode") + fs.String(flagPassphrase, "", "passphrase to encrypt the exported key") + return fs +} + +func getIsNonInteractive(cmd 
*cobra.Command) bool { + is, _ := cmd.Flags().GetBool(flagNonInteractive) + return is +} + +func getPassphrase(cmd *cobra.Command) (string, error) { + pass, _ := cmd.Flags().GetString(flagPassphrase) + + if pass == "" && !getIsNonInteractive(cmd) { + if err := bubbleconfirm.Ask( + bubbleconfirm.NewQuestion("Passphrase", + &pass, + bubbleconfirm.HideAnswer(), + bubbleconfirm.GetConfirmation(), + )); err != nil { + return "", err + } + } + + return pass, nil +} diff --git a/ignite/cmd/account_create.go b/ignite/cmd/account_create.go new file mode 100644 index 0000000..a6c7e09 --- /dev/null +++ b/ignite/cmd/account_create.go @@ -0,0 +1,46 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cosmosaccount" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func NewAccountCreate() *cobra.Command { + c := &cobra.Command{ + Use: "create [name]", + Short: "Create a new account", + Args: cobra.ExactArgs(1), + RunE: accountCreateHandler, + } + + c.Flags().AddFlagSet(flagSetCoinType()) + + return c +} + +func accountCreateHandler(cmd *cobra.Command, args []string) error { + var ( + name = args[0] + session = cliui.New(cliui.StartSpinnerWithText(statusCreating)) + ) + defer session.End() + + ca, err := cosmosaccount.New( + cosmosaccount.WithKeyringBackend(getKeyringBackend(cmd)), + cosmosaccount.WithHome(getKeyringDir(cmd)), + cosmosaccount.WithCoinType(getCoinType(cmd)), + ) + if err != nil { + return errors.Errorf("unable to create registry: %w", err) + } + + _, mnemonic, err := ca.Create(name) + if err != nil { + return errors.Errorf("unable to create account: %w", err) + } + + return session.Printf("Account %q created, keep your mnemonic in a secret place:\n\n%s\n", name, mnemonic) +} diff --git a/ignite/cmd/account_delete.go b/ignite/cmd/account_delete.go new file mode 100644 index 0000000..6a2c876 --- /dev/null +++ b/ignite/cmd/account_delete.go @@ -0,0 +1,41 @@ +package 
ignitecmd + +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cosmosaccount" +) + +func NewAccountDelete() *cobra.Command { + c := &cobra.Command{ + Use: "delete [name]", + Short: "Delete an account by name", + Args: cobra.ExactArgs(1), + RunE: accountDeleteHandler, + } + + return c +} + +func accountDeleteHandler(cmd *cobra.Command, args []string) error { + var ( + name = args[0] + session = cliui.New(cliui.StartSpinnerWithText(statusDeleting)) + ) + defer session.End() + + ca, err := cosmosaccount.New( + cosmosaccount.WithKeyringBackend(getKeyringBackend(cmd)), + cosmosaccount.WithHome(getKeyringDir(cmd)), + ) + if err != nil { + return err + } + + if err := ca.DeleteByName(name); err != nil { + return err + } + + return session.Printf("Account %s deleted.\n", name) +} diff --git a/ignite/cmd/account_export.go b/ignite/cmd/account_export.go new file mode 100644 index 0000000..6664c3e --- /dev/null +++ b/ignite/cmd/account_export.go @@ -0,0 +1,72 @@ +package ignitecmd + +import ( + "fmt" + "os" + "path/filepath" + + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cosmosaccount" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func NewAccountExport() *cobra.Command { + c := &cobra.Command{ + Use: "export [name]", + Short: "Export an account as a private key", + Args: cobra.ExactArgs(1), + RunE: accountExportHandler, + } + + c.Flags().AddFlagSet(flagSetAccountExport()) + c.Flags().String(flagPath, "", "path to export private key. 
default: ./key_[name]") + + return c +} + +func accountExportHandler(cmd *cobra.Command, args []string) error { + var ( + name = args[0] + path = flagGetPath(cmd) + session = cliui.New(cliui.StartSpinnerWithText(statusExporting)) + ) + defer session.End() + + passphrase, err := getPassphrase(cmd) + if err != nil { + return err + } + const minPassLength = 8 + if len(passphrase) < minPassLength { + return errors.Errorf("passphrase must be at least %d characters", minPassLength) + } + + ca, err := cosmosaccount.New( + cosmosaccount.WithKeyringBackend(getKeyringBackend(cmd)), + cosmosaccount.WithHome(getKeyringDir(cmd)), + ) + if err != nil { + return err + } + + armored, err := ca.Export(name, passphrase) + if err != nil { + return err + } + + if path == "" { + path = fmt.Sprintf("./key_%s", name) + } + path, err = filepath.Abs(path) + if err != nil { + return err + } + + if err := os.WriteFile(path, []byte(armored), 0o600); err != nil { + return err + } + + return session.Printf("Account %q exported to file: %s\n", name, path) +} diff --git a/ignite/cmd/account_import.go b/ignite/cmd/account_import.go new file mode 100644 index 0000000..23f3d92 --- /dev/null +++ b/ignite/cmd/account_import.go @@ -0,0 +1,81 @@ +package ignitecmd + +import ( + "os" + + "github.com/cosmos/go-bip39" + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cliui/bubbleconfirm" + "github.com/ignite/cli/v29/ignite/pkg/cosmosaccount" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +const flagSecret = "secret" + +func NewAccountImport() *cobra.Command { + c := &cobra.Command{ + Use: "import [name]", + Short: "Import an account by using a mnemonic or a private key", + Args: cobra.ExactArgs(1), + RunE: accountImportHandler, + } + + c.Flags().String(flagSecret, "", "Your mnemonic or path to your private key (use interactive mode instead to securely pass your mnemonic)") + c.Flags().AddFlagSet(flagSetAccountImport()) + 
c.Flags().AddFlagSet(flagSetCoinType()) + + return c +} + +func accountImportHandler(cmd *cobra.Command, args []string) error { + var ( + name = args[0] + secret, _ = cmd.Flags().GetString(flagSecret) + session = cliui.New(cliui.StartSpinnerWithText(statusImporting)) + ) + defer session.End() + + if secret == "" { + session.StopSpinner() + + if err := bubbleconfirm.Ask( + bubbleconfirm.NewQuestion("Your mnemonic or path to your private key", &secret, bubbleconfirm.Required())); err != nil { + return err + } + } + + var passphrase string + if !bip39.IsMnemonicValid(secret) { + var err error + passphrase, err = getPassphrase(cmd) + if err != nil { + return err + } + + privKey, err := os.ReadFile(secret) + if os.IsNotExist(err) { + return errors.New("mnemonic is not valid or private key not found at path") + } + if err != nil { + return err + } + secret = string(privKey) + } + + ca, err := cosmosaccount.New( + cosmosaccount.WithKeyringBackend(getKeyringBackend(cmd)), + cosmosaccount.WithHome(getKeyringDir(cmd)), + cosmosaccount.WithCoinType(getCoinType(cmd)), + ) + if err != nil { + return err + } + + if _, err := ca.Import(name, secret, passphrase); err != nil { + return err + } + + return session.Printf("Account %q imported.\n", name) +} diff --git a/ignite/cmd/account_list.go b/ignite/cmd/account_list.go new file mode 100644 index 0000000..e451788 --- /dev/null +++ b/ignite/cmd/account_list.go @@ -0,0 +1,37 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosaccount" +) + +func NewAccountList() *cobra.Command { + c := &cobra.Command{ + Use: "list", + Short: "Show a list of all accounts", + RunE: accountListHandler, + } + + c.Flags().AddFlagSet(flagSetAccountPrefixes()) + + return c +} + +func accountListHandler(cmd *cobra.Command, _ []string) error { + ca, err := cosmosaccount.New( + cosmosaccount.WithKeyringBackend(getKeyringBackend(cmd)), + cosmosaccount.WithHome(getKeyringDir(cmd)), + 
cosmosaccount.WithBech32Prefix(getAddressPrefix(cmd)), + ) + if err != nil { + return err + } + + accounts, err := ca.List() + if err != nil { + return err + } + + return printAccounts(cmd, accounts...) +} diff --git a/ignite/cmd/account_show.go b/ignite/cmd/account_show.go new file mode 100644 index 0000000..01bdaa8 --- /dev/null +++ b/ignite/cmd/account_show.go @@ -0,0 +1,40 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosaccount" +) + +func NewAccountShow() *cobra.Command { + c := &cobra.Command{ + Use: "show [name]", + Short: "Show detailed information about a particular account", + Args: cobra.ExactArgs(1), + RunE: accountShowHandler, + } + + c.Flags().AddFlagSet(flagSetAccountPrefixes()) + + return c +} + +func accountShowHandler(cmd *cobra.Command, args []string) error { + name := args[0] + + ca, err := cosmosaccount.New( + cosmosaccount.WithKeyringBackend(getKeyringBackend(cmd)), + cosmosaccount.WithHome(getKeyringDir(cmd)), + cosmosaccount.WithBech32Prefix(getAddressPrefix(cmd)), + ) + if err != nil { + return err + } + + acc, err := ca.GetByName(name) + if err != nil { + return err + } + + return printAccounts(cmd, acc) +} diff --git a/ignite/cmd/bubblemodel/chain_debug.go b/ignite/cmd/bubblemodel/chain_debug.go new file mode 100644 index 0000000..d6c8d23 --- /dev/null +++ b/ignite/cmd/bubblemodel/chain_debug.go @@ -0,0 +1,143 @@ +package cmdmodel + +import ( + "context" + "fmt" + "strings" + + tea "github.com/charmbracelet/bubbletea" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + cliuimodel "github.com/ignite/cli/v29/ignite/pkg/cliui/model" + "github.com/ignite/cli/v29/ignite/pkg/events" + "github.com/ignite/cli/v29/ignite/pkg/xstrings" +) + +const ( + stateChainDebugStarting uint = iota + stateChainDebugRunning +) + +var msgStopDebug = colors.Faint("Press the 'q' key to stop debug server") + +// NewChainDebug returns a new UI 
model for the chain debug command. +func NewChainDebug(mCtx Context, bus events.Provider, cmd tea.Cmd) ChainDebug { + // Initialize a context and cancel function to stop execution + ctx, quit := context.WithCancel(mCtx.Context()) + + // Update the context to allow stopping by using the 'q' key + mCtx.SetContext(ctx) + + return ChainDebug{ + cmd: cmd, + quit: quit, + model: cliuimodel.NewEvents(bus), + } +} + +// ChainDebug defines a UI model for the chain debug command. +type ChainDebug struct { + cmd tea.Cmd + quit context.CancelFunc + state uint + error error + model cliuimodel.Events +} + +// Init is the first function that will be called. +// It returns a batch command that listen events and also runs the debug server. +func (m ChainDebug) Init() tea.Cmd { + return tea.Batch(m.model.WaitEvent, m.cmd) +} + +// Update is called when a message is received. +// It handles messages and executes the logic that updates the model. +func (m ChainDebug) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + switch msg := msg.(type) { + case cliuimodel.QuitMsg: + return m.processQuitMsg(msg) + case cliuimodel.ErrorMsg: + return m.processErrorMsg(msg) + case tea.KeyMsg: + return m.processKeyMsg(msg) + case cliuimodel.EventMsg: + return m.processEventMsg(msg) + } + + return m.updateModel(msg) +} + +// View renders the UI after every update. 
+func (m ChainDebug) View() string { + if m.error != nil { + s := xstrings.ToUpperFirst(m.error.Error()) + return fmt.Sprintf("%s %s\n", icons.NotOK, colors.Error(s)) + } + + var view strings.Builder + + switch m.state { + case stateChainServeStarting: + view.WriteString(m.renderStartView()) + case stateChainServeRunning: + view.WriteString(m.renderRunView()) + view.WriteString(m.renderActions()) + } + + return cliuimodel.FormatView(view.String()) +} + +func (m ChainDebug) updateModel(msg tea.Msg) (tea.Model, tea.Cmd) { + var cmd tea.Cmd + m.model, cmd = m.model.Update(msg) + return m, cmd +} + +func (m ChainDebug) processQuitMsg(cliuimodel.QuitMsg) (tea.Model, tea.Cmd) { + return m, tea.Quit +} + +func (m ChainDebug) processErrorMsg(msg cliuimodel.ErrorMsg) (tea.Model, tea.Cmd) { + m.error = msg.Error + return m, tea.Quit +} + +func (m ChainDebug) processKeyMsg(msg tea.KeyMsg) (tea.Model, tea.Cmd) { + if checkQuitKeyMsg(msg) { + m.quit() + } + + return m, nil +} + +func (m ChainDebug) processEventMsg(msg cliuimodel.EventMsg) (tea.Model, tea.Cmd) { + if m.state == stateChainDebugStarting { + // Start view displays status events until the debug server is running. + // When the status finish event is not an error it means that the debug + // server started successfully and the run view is displayed. 
+ if msg.ProgressIndication == events.IndicationFinish { + m.model.ClearEvents() + m.state = stateChainDebugRunning + } + } + + return m.updateModel(msg) +} + +func (m ChainDebug) renderActions() string { + return fmt.Sprintf("\n%s\n", msgStopDebug) +} + +func (m ChainDebug) renderStartView() string { + return m.model.View() +} + +func (m ChainDebug) renderRunView() string { + var view strings.Builder + + view.WriteString("Blockchain is running\n\n") + view.WriteString(m.model.View()) + + return view.String() +} diff --git a/ignite/cmd/bubblemodel/chain_debug_test.go b/ignite/cmd/bubblemodel/chain_debug_test.go new file mode 100644 index 0000000..062034d --- /dev/null +++ b/ignite/cmd/bubblemodel/chain_debug_test.go @@ -0,0 +1,83 @@ +package cmdmodel_test + +import ( + "fmt" + "testing" + + tea "github.com/charmbracelet/bubbletea" + "github.com/stretchr/testify/require" + + cmdmodel "github.com/ignite/cli/v29/ignite/cmd/bubblemodel" + "github.com/ignite/cli/v29/ignite/cmd/bubblemodel/testdata" + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + cliuimodel "github.com/ignite/cli/v29/ignite/pkg/cliui/model" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/events" +) + +func TestChainDebugErrorView(t *testing.T) { + // Arrange + var model tea.Model + + err := errors.New("Test error") + model = cmdmodel.NewChainDebug(testdata.ModelContext{}, testdata.DummyEventsProvider{}, testdata.FooCmd) + want := fmt.Sprintf("%s %s\n", icons.NotOK, colors.Error(err.Error())) + + // Arrange: Update model with an error message + model, _ = model.Update(cliuimodel.ErrorMsg{Error: err}) + + // Act + view := model.View() + + // Assert + require.Equal(t, want, view) +} + +func TestChainDebugStartView(t *testing.T) { + // Arrange + var model tea.Model + + spinner := cliuimodel.NewSpinner() + queue := []string{"Event 1...", "Event 2..."} + model = cmdmodel.NewChainDebug(testdata.ModelContext{}, 
testdata.DummyEventsProvider{}, testdata.FooCmd) + + want := fmt.Sprintf("\n%s%s\n", spinner.View(), queue[1]) + want = cliuimodel.FormatView(want) + + // Arrange: Update model with status events + for _, s := range queue { + model, _ = model.Update(cliuimodel.EventMsg{ + Event: events.New(s, events.ProgressStart()), + }) + } + + // Act + view := model.View() + + // Assert + require.Equal(t, want, view) +} + +func TestChainDebugRunView(t *testing.T) { + // Arrange + var model tea.Model + + evt := "Debug server: tcp://127.0.0.1:30500" + actions := colors.Faint("Press the 'q' key to stop debug server") + model = cmdmodel.NewChainDebug(testdata.ModelContext{}, testdata.DummyEventsProvider{}, testdata.FooCmd) + + want := fmt.Sprintf("Blockchain is running\n\n%s\n\n%s\n", evt, actions) + want = cliuimodel.FormatView(want) + + // Arrange: Update model with a server running event + model, _ = model.Update(cliuimodel.EventMsg{ + Event: events.New(evt, events.ProgressFinish()), + }) + + // Act + view := model.View() + + // Assert + require.Equal(t, want, view) +} diff --git a/ignite/cmd/bubblemodel/chain_serve.go b/ignite/cmd/bubblemodel/chain_serve.go new file mode 100644 index 0000000..90a5e63 --- /dev/null +++ b/ignite/cmd/bubblemodel/chain_serve.go @@ -0,0 +1,278 @@ +package cmdmodel + +import ( + "context" + "fmt" + "strings" + + tea "github.com/charmbracelet/bubbletea" + + "github.com/ignite/cli/v29/ignite/internal/announcements" + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + cliuimodel "github.com/ignite/cli/v29/ignite/pkg/cliui/model" + "github.com/ignite/cli/v29/ignite/pkg/events" +) + +const ( + maxStatusEvents = 7 +) + +const ( + stateChainServeStarting uint = iota + stateChainServeRunning + stateChainServeRebuilding + stateChainServeQuitting +) + +var ( + msgStopServe = colors.Faint("Press the 'q' key to stop serve") + msgWaitingFix = colors.Info("Waiting for a fix before retrying...") +) + +type 
Context interface { + // Context returns the current context. + Context() context.Context + + // SetContext updates the context with a new one. + SetContext(context.Context) +} + +// NewChainServe returns a new UI model for the chain serve command. +func NewChainServe(mCtx Context, bus events.Provider, cmd tea.Cmd) ChainServe { + // Initialize a context and cancel function to stop execution + ctx, quit := context.WithCancel(mCtx.Context()) + + // Update the context to allow stopping by using the 'q' key + mCtx.SetContext(ctx) + + return ChainServe{ + cmd: cmd, + quit: quit, + startModel: cliuimodel.NewStatusEvents(bus, maxStatusEvents), + runModel: cliuimodel.NewEvents(bus), + rebuildModel: cliuimodel.NewStatusEvents(bus, maxStatusEvents), + quitModel: cliuimodel.NewEvents(bus), + } +} + +// ChainServe defines a UI model for the chain serve command. +type ChainServe struct { + cmd tea.Cmd + quit context.CancelFunc + + state uint // Keeps track of the model/view being displayed + broken bool // True when blockchain app's source code has issues + error error // Critical error returned during command execution + + // Model definitions for the chain serve views + startModel cliuimodel.StatusEvents + runModel cliuimodel.Events + rebuildModel cliuimodel.StatusEvents + quitModel cliuimodel.Events +} + +// Init is the first function that will be called. +// It returns a batch command that listen events and also runs the blockchain app. +func (m ChainServe) Init() tea.Cmd { + // On initialization wait for status events and start serving the blockchain + return tea.Batch(m.startModel.WaitEvent, m.cmd) +} + +// Update is called when a message is received. +// It handles messages and executes the logic that updates the model. 
+func (m ChainServe) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + if checkQuitKeyMsg(msg) { + m.state = stateChainServeQuitting + } + + switch msg := msg.(type) { + case cliuimodel.QuitMsg: + return m.processQuitMsg(msg) + case cliuimodel.ErrorMsg: + return m.processErrorMsg(msg) + case tea.KeyMsg: + return m.processKeyMsg(msg) + case cliuimodel.EventMsg: + return m.processEventMsg(msg) + default: + return m.updateCurrentModel(msg) + } +} + +// View renders the UI after every update. +func (m ChainServe) View() string { + if m.error != nil { + return fmt.Sprintf("%s %s\n", icons.NotOK, colors.Error(m.error.Error())) + } + + var view strings.Builder + + switch m.state { + case stateChainServeStarting: + view.WriteString(m.renderStartView()) + case stateChainServeRunning: + view.WriteString(m.renderRunView()) + case stateChainServeRebuilding: + view.WriteString(m.renderRebuildView()) + case stateChainServeQuitting: + view.WriteString(m.renderQuitView()) + } + + if m.state != stateChainServeQuitting { + view.WriteString(m.renderActions()) + } + + return cliuimodel.FormatView(view.String()) +} + +func (m ChainServe) updateCurrentModel(msg tea.Msg) (tea.Model, tea.Cmd) { + var cmd tea.Cmd + + switch m.state { + case stateChainServeStarting: + m.startModel, cmd = m.startModel.Update(msg) + case stateChainServeRunning: + m.runModel, cmd = m.runModel.Update(msg) + case stateChainServeRebuilding: + m.rebuildModel, cmd = m.rebuildModel.Update(msg) + case stateChainServeQuitting: + m.quitModel, cmd = m.quitModel.Update(msg) + } + + return m, cmd +} + +func (m ChainServe) processQuitMsg(cliuimodel.QuitMsg) (tea.Model, tea.Cmd) { + return m, tea.Quit +} + +func (m ChainServe) processErrorMsg(msg cliuimodel.ErrorMsg) (tea.Model, tea.Cmd) { + m.error = msg.Error + return m, tea.Quit +} + +func (m ChainServe) processKeyMsg(msg tea.KeyMsg) (tea.Model, tea.Cmd) { + if checkQuitKeyMsg(msg) { + // Cancel the context to signal stop + m.quit() + } + + return m, nil +} + +func (m 
ChainServe) processEventMsg(msg cliuimodel.EventMsg) (tea.Model, tea.Cmd) { + // When an error event is received it means there is an issue with + // the blockchain app's source code that the user must fix. + m.broken = msg.Group == events.GroupError + + // UI responds to key press or mouse events by default but we use + // events and the events bus to interact with the UI during execution. + // Check if the state must be changed to switch to a different view. + switch m.state { + case stateChainServeStarting: + // Start view displays status events until the blockchain is running or an + // error event is received in which case it displays the run view with an + // error traceback and waits until the issue is fixed. + // When the status finish event is not an error it means that the blockchain + // started successfully and the run view is displayed. + if msg.ProgressIndication == events.IndicationFinish { + m.state = stateChainServeRunning + } + case stateChainServeRunning: + // Run view shows account addresses, API URLs and the paths required to + // have a context on the running blockchain app and waits for errors or + // changes in the blockchain app source code. + // If an error event is received during run it means that there is an error + // in the app source code in which case the error message and traceback are + // displayed until the code is fixed, or otherwise when an status event is + // received it means that the code changed so the app must be rebuilt. + if m.broken { + // Clear events to only display the error received with the last event message + m.runModel.ClearEvents() + } else if msg.InProgress() { + // When a status event is received during run it means something + // changed in the source code which triggers the blockchain rebuild. 
+ m.runModel.ClearEvents() + m.state = stateChainServeRebuilding + } + case stateChainServeRebuilding: + // Rebuild view is similar to run view but only displayed when the source + // code changes and the blockchain is rebuilt. + // When the status finish event is not an error it means that the blockchain + // was rebuilt successfully and the run view is displayed. + if msg.ProgressIndication == events.IndicationFinish { + m.rebuildModel.ClearEvents() + m.state = stateChainServeRunning + } + } + + // Update the model that is being displayed + return m.updateCurrentModel(msg) +} + +func (m ChainServe) renderActions() string { + return fmt.Sprintf("\n%s\n", msgStopServe) +} + +func (m ChainServe) renderStartView() string { + return m.startModel.View() +} + +func (m ChainServe) renderRunView() string { + var view strings.Builder + + if !m.broken { + view.WriteString("Blockchain is running\n\n") + } + + view.WriteString(m.runModel.View()) + + if m.broken { + fmt.Fprintf(&view, "\n%s\n", msgWaitingFix) + } + + return view.String() +} + +func (m ChainServe) renderRebuildView() string { + var view strings.Builder + + if !m.broken { + view.WriteString("Changes detected, restarting...\n\n") + } + + view.WriteString(m.rebuildModel.View()) + + if m.broken { + fmt.Fprintf(&view, "\n%s\n", msgWaitingFix) + } + + return view.String() +} + +func (m ChainServe) renderQuitView() string { + var view strings.Builder + + // Display the events received during quit + if s := m.quitModel.View(); s != "" { + view.WriteString(s) + view.WriteRune('\n') + } + + fmt.Fprintf(&view, "%s %s\n\n", icons.Info, colors.Info("Stopped")) + view.WriteString(announcements.Fetch()) + + return view.String() +} + +func checkQuitKeyMsg(m tea.Msg) bool { + msg, ok := m.(tea.KeyMsg) + if !ok { + return false + } + + key := msg.String() + + return key == "q" || key == "ctrl+c" +} diff --git a/ignite/cmd/bubblemodel/chain_serve_test.go b/ignite/cmd/bubblemodel/chain_serve_test.go new file mode 100644 index 
0000000..94ab6fd --- /dev/null +++ b/ignite/cmd/bubblemodel/chain_serve_test.go @@ -0,0 +1,173 @@ +package cmdmodel_test + +import ( + "fmt" + "strings" + "testing" + "time" + + tea "github.com/charmbracelet/bubbletea" + "github.com/stretchr/testify/require" + + cmdmodel "github.com/ignite/cli/v29/ignite/cmd/bubblemodel" + "github.com/ignite/cli/v29/ignite/cmd/bubblemodel/testdata" + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + cliuimodel "github.com/ignite/cli/v29/ignite/pkg/cliui/model" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/events" +) + +var chainServeActions = colors.Faint("Press the 'q' key to stop serve") + +func TestChainServeErrorView(t *testing.T) { + // Arrange + var model tea.Model + + err := errors.New("Test error") + model = cmdmodel.NewChainServe(testdata.ModelContext{}, testdata.DummyEventsProvider{}, testdata.FooCmd) + want := fmt.Sprintf("%s %s\n", icons.NotOK, colors.Error(err.Error())) + + // Arrange: Update model with an error message + model, _ = model.Update(cliuimodel.ErrorMsg{Error: err}) + + // Act + view := model.View() + + // Assert + require.Equal(t, want, view) +} + +func TestChainServeStartView(t *testing.T) { + // Arrange + var model tea.Model + + spinner := cliuimodel.NewSpinner() + queue := []string{"Event 1...", "Event 2..."} + model = cmdmodel.NewChainServe(testdata.ModelContext{}, testdata.DummyEventsProvider{}, testdata.FooCmd) + + want := fmt.Sprintf( + "%s%s\n\n%s %s %s\n\n%s\n", + spinner.View(), + queue[1], + icons.OK, + strings.TrimSuffix(queue[0], "..."), + colors.Faint("0s"), + chainServeActions, + ) + want = cliuimodel.FormatView(want) + + // Arrange: Update model with status events + for _, s := range queue { + model, _ = model.Update(cliuimodel.EventMsg{ + Event: events.New(s, events.ProgressStart()), + Start: time.Now(), + }) + } + + // Act + view := model.View() + + // Assert + require.Equal(t, want, view) 
+} + +func TestChainServeRunView(t *testing.T) { + // Arrange + var model tea.Model + + queue := []string{"Event 1", "Event 2"} + model = cmdmodel.NewChainServe(testdata.ModelContext{}, testdata.DummyEventsProvider{}, testdata.FooCmd) + + want := fmt.Sprintf("Blockchain is running\n\n%s\n%s\n\n%s\n", queue[0], queue[1], chainServeActions) + want = cliuimodel.FormatView(want) + + // Arrange: Update model with events + for _, s := range queue { + model, _ = model.Update(cliuimodel.EventMsg{ + Event: events.New(s, events.ProgressFinish()), + }) + } + + // Act + view := model.View() + + // Assert + require.Equal(t, want, view) +} + +func TestChainServeRunBrokenView(t *testing.T) { + // Arrange + var model tea.Model + + model = cmdmodel.NewChainServe(testdata.ModelContext{}, testdata.DummyEventsProvider{}, testdata.FooCmd) + traceback := "Error traceback\nFoo" + waitingFix := colors.Info("Waiting for a fix before retrying...") + + want := fmt.Sprintf("%s\n\n%s\n\n%s\n", traceback, waitingFix, chainServeActions) + want = cliuimodel.FormatView(want) + + // Arrange: Update model to display the run view + model, _ = model.Update(cliuimodel.EventMsg{ + Event: events.New("Run", events.ProgressFinish()), + }) + + // Arrange: Update model to display traceback within the run view + model, _ = model.Update(cliuimodel.EventMsg{ + Event: events.New(traceback, events.Group(events.GroupError)), + }) + + // Act + view := model.View() + + // Assert + require.Equal(t, want, view) +} + +func TestChainServeRebuildView(t *testing.T) { + // Arrange + var model tea.Model + + spinner := cliuimodel.NewSpinner() + duration := colors.Faint("0s") + queue := []string{"Event 1", "Event 2"} + model = cmdmodel.NewChainServe(testdata.ModelContext{}, testdata.DummyEventsProvider{}, testdata.FooCmd) + + want := fmt.Sprintf( + "Changes detected, restarting...\n\n%s%s\n\n%s %s %s\n%s Rebuild %s\n\n%s\n", + spinner.View(), + queue[1], + icons.OK, + queue[0], + duration, + icons.OK, + duration, + 
chainServeActions, + ) + want = cliuimodel.FormatView(want) + + // Arrange: Update model to display the run view + model, _ = model.Update(cliuimodel.EventMsg{ + Event: events.New("Run", events.ProgressFinish()), + }) + + // Arrange: Update model to display the rebuild view + model, _ = model.Update(cliuimodel.EventMsg{ + Event: events.New("Rebuild", events.ProgressStart()), + Start: time.Now(), + }) + + // Arrange: Update model with a status events + for _, s := range queue { + model, _ = model.Update(cliuimodel.EventMsg{ + Event: events.New(s, events.ProgressUpdate()), + Start: time.Now(), + }) + } + + // Act + view := model.View() + + // Assert + require.Equal(t, want, view) +} diff --git a/ignite/cmd/bubblemodel/testdata/testdata.go b/ignite/cmd/bubblemodel/testdata/testdata.go new file mode 100644 index 0000000..c3d52ab --- /dev/null +++ b/ignite/cmd/bubblemodel/testdata/testdata.go @@ -0,0 +1,24 @@ +package testdata + +import ( + "context" + + tea "github.com/charmbracelet/bubbletea" + + "github.com/ignite/cli/v29/ignite/pkg/events" +) + +func FooCmd() tea.Msg { return nil } + +type ModelContext struct{} + +func (ModelContext) Context() context.Context { return context.TODO() } +func (ModelContext) SetContext(context.Context) {} + +type DummyEventsProvider struct{} + +func (DummyEventsProvider) Events() <-chan events.Event { + c := make(chan events.Event) + close(c) + return c +} diff --git a/ignite/cmd/bubblemodel/testnet_multi_node.go b/ignite/cmd/bubblemodel/testnet_multi_node.go new file mode 100644 index 0000000..ca0d3c2 --- /dev/null +++ b/ignite/cmd/bubblemodel/testnet_multi_node.go @@ -0,0 +1,495 @@ +package cmdmodel + +import ( + "bufio" + "context" + "fmt" + "os/exec" + "path/filepath" + "strconv" + "strings" + "syscall" + + "github.com/charmbracelet/bubbles/help" + tea "github.com/charmbracelet/bubbletea" + "github.com/charmbracelet/lipgloss" + "golang.org/x/sync/errgroup" + + "github.com/ignite/cli/v29/ignite/services/chain" +) + +// NodeStatus is 
an integer data type that represents the status of a node. +type NodeStatus int + +const ( + // Stopped indicates that the node is currently stopped. + Stopped NodeStatus = iota + + // Running indicates that the node is currently running. + Running +) + +// ui styling constants. +var ( + // base colors. + activeColor = lipgloss.Color("#1B7FCA") // bright blue + subtleColor = lipgloss.Color("#5C6A72") // dark gray + textColor = lipgloss.Color("#232326") // nearly black + highlightColor = lipgloss.Color("#10B981") // green + warningColor = lipgloss.Color("#FF5436") // red + focusedColor = lipgloss.Color("#A27DF8") // purple + + // tabs styling. + activeTabBorder = lipgloss.Border{ + Top: "─", + Bottom: " ", + Left: "│", + Right: "│", + TopLeft: "╭", + TopRight: "╮", + BottomLeft: "┘", + BottomRight: "└", + } + + tabBorder = lipgloss.Border{ + Top: "─", + Bottom: "─", + Left: "│", + Right: "│", + TopLeft: "╭", + TopRight: "╮", + BottomLeft: "╰", + BottomRight: "╯", + } + + tabStyle = lipgloss.NewStyle(). + Border(tabBorder). + BorderForeground(subtleColor). + Padding(0, 1) + + activeTabStyle = lipgloss.NewStyle(). + Border(activeTabBorder). + BorderForeground(activeColor). + Foreground(activeColor). + Bold(true). + Padding(0, 1) + + // active/stopped tab styles. + runningTabStyle = lipgloss.NewStyle(). + Border(tabBorder). + BorderForeground(highlightColor). + Foreground(subtleColor). + Padding(0, 1) + + activeRunningTabStyle = lipgloss.NewStyle(). + Border(activeTabBorder). + BorderForeground(highlightColor). + Foreground(highlightColor). + Bold(true). + Padding(0, 1) + + // node status styles. + nodeActiveStyle = lipgloss.NewStyle().Foreground(highlightColor).Bold(true) + nodeStoppedStyle = lipgloss.NewStyle().Foreground(warningColor) + tcpStyle = lipgloss.NewStyle().Foreground(activeColor) + infoStyle = lipgloss.NewStyle().Foreground(subtleColor) + + // header styling. + headerStyle = lipgloss.NewStyle(). + Foreground(focusedColor). + Bold(true). 
+ Padding(0, 0, 1, 0) + + // log styles. + logEntryStyle = lipgloss.NewStyle(). + Foreground(textColor). + PaddingLeft(2) + + logBoxStyle = lipgloss.NewStyle(). + Border(lipgloss.RoundedBorder()). + BorderForeground(subtleColor). + Padding(1, 2). + Width(80) +) + +// Make sure MultiNode implements tea.Model interface. +var _ tea.Model = MultiNode{} + +// MultiNode represents a set of nodes, managing state and information related to them. +type MultiNode struct { + ctx context.Context + appd string + args chain.MultiNodeArgs + + nodeStatuses []NodeStatus + pids []int // Store the PIDs of the running processes + numNodes int // Number of nodes + logs [][]string // Store logs for each node + + // UI state + selectedNode int // Currently selected node index + help help.Model // Help menu model + showHelp bool // Whether to show the help menu +} + +// ToggleNodeMsg is a structure used to pass messages +// to enable or disable a node based on the node index. +type ToggleNodeMsg struct { + nodeIdx int +} + +// UpdateStatusMsg defines a message that updates the status of a node by index. +type UpdateStatusMsg struct { + nodeIdx int + status NodeStatus +} + +// UpdateLogsMsg is for continuously updating the chain logs in the View. +type UpdateLogsMsg struct{} + +// SwitchFocusMsg indicates a switch in focus to another node. +type SwitchFocusMsg struct { + nodeIdx int +} + +// UpdateDeemon returns a command that sends an UpdateLogsMsg. +// This command is intended to continuously refresh the logs displayed in the user interface. +func UpdateDeemon() tea.Cmd { + return func() tea.Msg { + return UpdateLogsMsg{} + } +} + +// NewModel initializes the model. 
+func NewModel(ctx context.Context, chainname string, args chain.MultiNodeArgs) (MultiNode, error) { + numNodes, err := strconv.Atoi(args.NumValidator) + if err != nil { + return MultiNode{}, err + } + + h := help.New() + h.ShowAll = true + + return MultiNode{ + ctx: ctx, + appd: chainname + "d", + args: args, + nodeStatuses: make([]NodeStatus, numNodes), // initial states of nodes + pids: make([]int, numNodes), + numNodes: numNodes, + logs: make([][]string, numNodes), // Initialize logs for each node + selectedNode: 0, // Select the first node initially + help: h, + showHelp: false, + }, nil +} + +// Init implements the Init method of the tea.Model interface. +func (m MultiNode) Init() tea.Cmd { + // start all nodes as soon as the application launches + return m.StartAllNodes() +} + +// ToggleNode toggles the state of a node. +func ToggleNode(nodeIdx int) tea.Cmd { + return func() tea.Msg { + return ToggleNodeMsg{nodeIdx: nodeIdx} + } +} + +// SwitchFocus changes the focus to a specific node. +func SwitchFocus(nodeIdx int) tea.Cmd { + return func() tea.Msg { + return SwitchFocusMsg{nodeIdx: nodeIdx} + } +} + +// RunNode runs or stops the node based on its status. 
+func RunNode(nodeIdx int, start bool, m MultiNode) tea.Cmd { + var ( + pid = &m.pids[nodeIdx] + args = m.args + appd = m.appd + ) + + return func() tea.Msg { + if start { + nodeHome := filepath.Join(args.OutputDir, args.NodeDirPrefix+strconv.Itoa(nodeIdx)) + // Create the command to run in the background as a daemon + cmd := exec.Command(appd, "start", "--home", nodeHome) + + // Start the process as a daemon + cmd.SysProcAttr = &syscall.SysProcAttr{ + Setpgid: true, // Ensure it runs in a new process group + } + + stdout, err := cmd.StdoutPipe() // Get stdout for logging + if err != nil { + fmt.Printf("Failed to start node %d: %v\n", nodeIdx+1, err) + return UpdateStatusMsg{nodeIdx: nodeIdx, status: Stopped} + } + + err = cmd.Start() // Start the node in the background + if err != nil { + fmt.Printf("Failed to start node %d: %v\n", nodeIdx+1, err) + return UpdateStatusMsg{nodeIdx: nodeIdx, status: Stopped} + } + + *pid = cmd.Process.Pid // Store the PID + + // Create an errgroup with context + g, gCtx := errgroup.WithContext(m.ctx) + g.Go(func() error { + scanner := bufio.NewScanner(stdout) + for scanner.Scan() { + select { + case <-gCtx.Done(): + // Handle context cancellation + return gCtx.Err() + default: + line := scanner.Text() + // Add log line to the respective node's log slice + m.logs[nodeIdx] = append(m.logs[nodeIdx], line) + // Keep only the last 5 lines + if len(m.logs[nodeIdx]) > 5 { + m.logs[nodeIdx] = m.logs[nodeIdx][len(m.logs[nodeIdx])-5:] + } + } + } + if err := scanner.Err(); err != nil { + return err + } + return nil + }) + + // Goroutine to handle stopping the node if context is canceled + g.Go(func() error { + <-gCtx.Done() // Wait for context to be canceled + + // Stop the daemon process if context is canceled + if *pid != 0 { + err := syscall.Kill(-*pid, syscall.SIGTERM) // Stop the daemon process + if err != nil { + fmt.Printf("Failed to stop node %d: %v\n", nodeIdx+1, err) + } else { + *pid = 0 // Reset PID after stopping + } + } + + 
return gCtx.Err() + }) + + return UpdateStatusMsg{nodeIdx: nodeIdx, status: Running} + } + // Use kill to stop the node process by PID + if *pid != 0 { + err := syscall.Kill(-*pid, syscall.SIGTERM) // Stop the daemon process + if err != nil { + fmt.Printf("Failed to stop node %d: %v\n", nodeIdx+1, err) + } else { + *pid = 0 // Reset PID after stopping + } + } + return UpdateStatusMsg{nodeIdx: nodeIdx, status: Stopped} + } +} + +// StopAllNodes stops all nodes. +func (m *MultiNode) StopAllNodes() { + for i := range m.numNodes { + if m.nodeStatuses[i] == Running { + RunNode(i, false, *m)() // Stop node + } + } +} + +// StartAllNodes starts all nodes that are currently stopped. +func (m *MultiNode) StartAllNodes() tea.Cmd { + cmds := make([]tea.Cmd, 0, m.numNodes) + for i := range m.numNodes { + if m.nodeStatuses[i] == Stopped { + cmds = append(cmds, RunNode(i, true, *m)) + } + } + + return tea.Batch(cmds...) +} + +// Update handles messages and updates the model. +func (m MultiNode) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + switch msg := msg.(type) { + case tea.KeyMsg: + switch msg.String() { + case "q", "ctrl+c": + m.StopAllNodes() // Stop all nodes before quitting + return m, tea.Quit + case "h": + // Toggle help screen + m.showHelp = !m.showHelp + return m, nil + case "tab", "right": + // Move selection to the next node + m.selectedNode = (m.selectedNode + 1) % m.numNodes + return m, nil + case "shift+tab", "left": + // Move selection to the previous node + m.selectedNode = (m.selectedNode - 1 + m.numNodes) % m.numNodes + return m, nil + default: + // Check for numbers from 1 to numNodes + for i := 0; i < m.numNodes; i++ { + if msg.String() == fmt.Sprintf("%d", i+1) { + // First switch focus to this node + m.selectedNode = i + // Then toggle the node state + return m, ToggleNode(i) + } + } + } + + case SwitchFocusMsg: + m.selectedNode = msg.nodeIdx + return m, nil + + case ToggleNodeMsg: + if m.nodeStatuses[msg.nodeIdx] == Running { + return m, 
RunNode(msg.nodeIdx, false, m) // Stop node + } + return m, RunNode(msg.nodeIdx, true, m) // Start node + + case UpdateStatusMsg: + m.nodeStatuses[msg.nodeIdx] = msg.status + return m, UpdateDeemon() + + case UpdateLogsMsg: + return m, UpdateDeemon() + } + + return m, nil +} + +// View renders the interface. +func (m MultiNode) View() string { + if m.showHelp { + return renderHelpView() + } + + // Create tabs for nodes + tabs := []string{} + for i := 0; i < m.numNodes; i++ { + var status string + if m.nodeStatuses[i] == Running { + status = "●" + } else { + status = "○" + } + + tabText := fmt.Sprintf("Node %d %s", i+1, status) + + // apply different styling based on node status and selection + if i == m.selectedNode { + if m.nodeStatuses[i] == Running { + tabs = append(tabs, activeRunningTabStyle.Render(tabText)) + } else { + tabs = append(tabs, activeTabStyle.Render(tabText)) + } + } else { + if m.nodeStatuses[i] == Running { + tabs = append(tabs, runningTabStyle.Render(tabText)) + } else { + tabs = append(tabs, tabStyle.Render(tabText)) + } + } + } + + // Render the tab row + tabRow := lipgloss.JoinHorizontal(lipgloss.Bottom, tabs...) + + // Header row with status + header := lipgloss.JoinHorizontal( + lipgloss.Left, + headerStyle.Render("Ignite Node Dashboard"), + ) + + // Render selected node details + nodeDetails := renderNodeDetails(m, m.selectedNode) + + // Render the keyboard controls help at the bottom + controls := fmt.Sprintf("%s ←/→: Switch node • %s 1-%d: Toggle node • %s q: Quit • %s h: Help", + infoStyle.Render("•"), + infoStyle.Render("•"), + m.numNodes, + infoStyle.Render("•"), + infoStyle.Render("•"), + ) + + // Assemble the final view + return fmt.Sprintf("%s\n%s\n\n%s\n\n%s", + header, + tabRow, + nodeDetails, + controls, + ) +} + +// renderNodeDetails renders the details of a specific node. 
+func renderNodeDetails(m MultiNode, nodeIdx int) string { + status := nodeStoppedStyle.Render("[Stopped]") + statusVerb := "start" + + if m.nodeStatuses[nodeIdx] == Running { + status = nodeActiveStyle.Render("[Running]") + statusVerb = "stop" + } + + tcpAddress := tcpStyle.Render(fmt.Sprintf("tcp://127.0.0.1:%d", m.args.ListPorts[nodeIdx])) + nodeInfo := fmt.Sprintf("Node %d %s\nEndpoint: %s", + nodeIdx+1, + status, + tcpAddress, + ) + + // Action button + actionPrompt := fmt.Sprintf("Press [%d] to %s", nodeIdx+1, statusVerb) + + // Log section + var logContent string + if len(m.logs[nodeIdx]) > 0 { + logEntries := []string{} + for _, line := range m.logs[nodeIdx] { + logEntries = append(logEntries, logEntryStyle.Render(line)) + } + logContent = strings.Join(logEntries, "\n") + } else { + logContent = infoStyle.Render("No logs available") + } + + logs := fmt.Sprintf("Logs:\n%s", logBoxStyle.Render(logContent)) + + return fmt.Sprintf("%s\n%s\n\n%s", nodeInfo, actionPrompt, logs) +} + +// renderHelpView displays help information. +func renderHelpView() string { + return lipgloss.NewStyle(). + BorderStyle(lipgloss.RoundedBorder()). + BorderForeground(subtleColor). + Padding(1, 2). + Render(`Ignite Node Dashboard Help + +Navigation: + • Left/Right or Tab/Shift+Tab: Switch between nodes + • 1-4: Toggle the corresponding node on/off + • h: Toggle this help screen + • q or Ctrl+c: Quit and stop all nodes + +Node Status: + • [Running]: The node is active and processing blocks + • [Stopped]: The node is inactive + +This dashboard allows you to manage multiple validator nodes +in your local testnet environment. You can start and stop nodes +independently and monitor their logs in real-time. 
+ +Press h to return to the dashboard.`) +} diff --git a/ignite/cmd/chain.go b/ignite/cmd/chain.go new file mode 100644 index 0000000..86de446 --- /dev/null +++ b/ignite/cmd/chain.go @@ -0,0 +1,232 @@ +package ignitecmd + +import ( + "bytes" + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/spf13/cobra" + "golang.org/x/mod/modfile" + + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + "github.com/ignite/cli/v29/ignite/pkg/cosmosgen" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/goanalysis" +) + +const ( + msgMigration = "Migrating blockchain config file from v%d to v%d..." + msgMigrationPrefix = "Your blockchain config version is v%d and the latest is v%d." + msgMigrationPrompt = "Would you like to upgrade your config file to v%d" + msgMigrationAddTools = "Some required imports are missing in %s file: %s. Would you like to add them" + msgMigrationRemoveTools = "File %s contains deprecated imports: %s. Would you like to remove them" +) + +// NewChain returns a command that groups sub commands related to compiling, serving +// blockchains and so on. +func NewChain() *cobra.Command { + c := &cobra.Command{ + Use: "chain [command]", + Short: "Build, init and start a blockchain node", + Long: `Commands in this namespace let you to build, initialize, and start your +blockchain node locally for development purposes. + +To run these commands you should be inside the project's directory so that +Ignite can find the source code. To ensure that you are, run "ls", you should +see the following files in the output: "go.mod", "x", "proto", "app", etc. + +By default the "build" command will identify the "main" package of the project, +install dependencies if necessary, set build flags, compile the project into a +binary and install the binary. 
The "build" command is useful if you just want +the compiled binary, for example, to initialize and start the chain manually. It +can also be used to release your chain's binaries automatically as part of +continuous integration workflow. + +The "init" command will build the chain's binary and use it to initialize a +local validator node. By default the validator node will be initialized in your +$HOME directory in a hidden directory that matches the name of your project. +This directory is called a data directory and contains a chain's genesis file +and a validator key. This command is useful if you want to quickly build and +initialize the data directory and use the chain's binary to manually start the +blockchain. The "init" command is meant only for development purposes, not +production. + +The "serve" command builds, initializes, and starts your blockchain locally with +a single validator node for development purposes. "serve" also watches the +source code directory for file changes and intelligently +re-builds/initializes/starts the chain, essentially providing "code-reloading". +The "serve" command is meant only for development purposes, not production. + +To distinguish between production and development consider the following. + +In production, blockchains often run the same software on many validator nodes +that are run by different people and entities. To launch a blockchain in +production, the validator entities coordinate the launch process to start their +nodes simultaneously. + +During development, a blockchain can be started locally on a single validator +node. This convenient process lets you restart a chain quickly and iterate +faster. Starting a chain on a single node in development is similar to starting +a traditional web application on a local server. + +The "faucet" command lets you send tokens to an address from the "faucet" +account defined in "config.yml". 
Alternatively, you can use the chain's binary +to send token from any other account that exists on chain. + +The "simulate" command helps you start a simulation testing process for your +chain. +`, + Aliases: []string{"c"}, + Args: cobra.ExactArgs(1), + PersistentPreRunE: preRunHandler, + } + + // Add flags required for the configMigrationPreRunHandler + c.PersistentFlags().AddFlagSet(flagSetConfig()) + c.PersistentFlags().AddFlagSet(flagSetYes()) + + c.AddCommand( + NewChainServe(), + NewChainBuild(), + NewChainInit(), + NewChainFaucet(), + NewChainSimulate(), + NewChainDebug(), + NewChainLint(), + NewChainModules(), + ) + + return c +} + +func preRunHandler(cmd *cobra.Command, _ []string) error { + session := cliui.New(cliui.WithoutUserInteraction(getYes(cmd))) + defer session.End() + + appPath, err := goModulePath(cmd) + if err != nil { + return err + } + + _, cfgPath, err := getChainConfig(cmd) + if err != nil { + return err + } + + if err := configMigrationPreRunHandler(cmd, session, appPath, cfgPath); err != nil { + return err + } + + if err := toolsMigrationPreRunHandler(cmd, session, appPath); err != nil { + return err + } + + return nil +} + +func toolsMigrationPreRunHandler(cmd *cobra.Command, session *cliui.Session, appPath string) error { + session.StartSpinner("Checking missing tools...") + + goModPath := filepath.Join(appPath, "go.mod") + data, err := os.ReadFile(goModPath) + if err != nil { + return errors.Errorf("failed to read go.mod file: %w", err) + } + + f, err := modfile.Parse(goModPath, data, nil) + if err != nil { + return errors.Errorf("failed to parse go.mod file: %w", err) + } + + missing := cosmosgen.MissingTools(f) + unused := cosmosgen.UnusedTools(f) + + session.StopSpinner() + if !getYes(cmd) { + if len(missing) > 0 { + question := fmt.Sprintf( + msgMigrationAddTools, + goModPath, + strings.Join(missing, ", "), + ) + if err := session.AskConfirm(question); err != nil { + missing = []string{} + } + } + + if len(unused) > 0 { + question 
:= fmt.Sprintf( + msgMigrationRemoveTools, + goModPath, + strings.Join(unused, ", "), + ) + if err := session.AskConfirm(question); err != nil { + unused = []string{} + } + } + } + if len(missing) == 0 && len(unused) == 0 { + return nil + } + + session.StartSpinner("Migrating tools...") + var buf bytes.Buffer + if err := goanalysis.AddOrRemoveTools(f, &buf, missing, unused); err != nil { + return err + } + + return os.WriteFile(goModPath, buf.Bytes(), 0o600) +} + +func configMigrationPreRunHandler(cmd *cobra.Command, session *cliui.Session, appPath, cfgPath string) error { + rawCfg, err := os.ReadFile(cfgPath) + if err != nil { + return err + } + + version, err := chainconfig.ReadConfigVersion(bytes.NewReader(rawCfg)) + if err != nil { + return err + } + + // Config files with older versions must be migrated to the latest before executing the command + if version != chainconfig.LatestVersion { + if !getYes(cmd) { + prefix := fmt.Sprintf(msgMigrationPrefix, version, chainconfig.LatestVersion) + question := fmt.Sprintf(msgMigrationPrompt, chainconfig.LatestVersion) + + // Confirm before overwriting the config file + session.Println(prefix) + if err := session.AskConfirm(question); err != nil { + if errors.Is(err, cliui.ErrAbort) { + return errors.Errorf("stopping because config version v%d is required to run the command", chainconfig.LatestVersion) + } + + return err + } + + // Confirm before migrating the config if there are uncommitted changes + if err := confirmWhenUncommittedChanges(session, appPath); err != nil { + return err + } + } else { + session.Printf("%s %s\n", icons.Info, colors.Infof(msgMigration, version, chainconfig.LatestVersion)) + } + + // Convert the current config to the latest version and update the YAML file + var buf bytes.Buffer + if err := chainconfig.MigrateLatest(bytes.NewReader(rawCfg), &buf); err != nil { + return err + } + + if err := os.WriteFile(cfgPath, buf.Bytes(), 0o600); err != nil { + return errors.Errorf("config file migration 
failed: %w", err) + } + } + return nil +} diff --git a/ignite/cmd/chain_build.go b/ignite/cmd/chain_build.go new file mode 100644 index 0000000..6f0303c --- /dev/null +++ b/ignite/cmd/chain_build.go @@ -0,0 +1,195 @@ +package ignitecmd + +import ( + "os/exec" + "path/filepath" + + "github.com/spf13/cobra" + flag "github.com/spf13/pflag" + + "github.com/ignite/cli/v29/ignite/pkg/chaincmd" + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/goenv" + "github.com/ignite/cli/v29/ignite/services/chain" +) + +const ( + flagCheckDependencies = "check-dependencies" + flagDebug = "debug" + flagOutput = "output" + flagRelease = "release" + flagBuildTags = "build.tags" + flagReleasePrefix = "release.prefix" + flagReleaseTargets = "release.targets" +) + +// NewChainBuild returns a new build command to build a blockchain app. +func NewChainBuild() *cobra.Command { + c := &cobra.Command{ + Use: "build", + Short: "Build a node binary", + Long: ` +The build command compiles the source code of the project into a binary and +installs the binary in the $(go env GOPATH)/bin directory. + +You can customize the output directory for the binary using a flag: + + ignite chain build --output dist + +To compile the binary Ignite first compiles protocol buffer (proto) files into +Go source code. Proto files contain required type and services definitions. If +you're using another program to compile proto files, you can use a flag to tell +Ignite to skip the proto compilation step: + + ignite chain build --skip-proto + +Afterwards, Ignite install dependencies specified in the go.mod file. By default +Ignite doesn't check that dependencies of the main module stored in the module +cache have not been modified since they were downloaded. 
To enforce dependency +checking (essentially, running "go mod verify") use a flag: + + ignite chain build --check-dependencies + +Next, Ignite identifies the "main" package of the project. By default the "main" +package is located in "cmd/{app}d" directory, where "{app}" is the name of the +scaffolded project and "d" stands for daemon. If your project contains more +than one "main" package, specify the path to the one that Ignite should compile +in config.yml: + + build: + main: custom/path/to/main + +By default the binary name will match the top-level module name (specified in +go.mod) with a suffix "d". This can be customized in config.yml: + + build: + binary: mychaind + +You can also specify custom linker flags: + + build: + ldflags: + - "-X main.Version=development" + - "-X main.Date=01/05/2022T19:54" + +To build binaries for a release, use the --release flag. The binaries for one or +more specified release targets are built in a "release/" directory in the +project's source directory. Specify the release targets with GOOS:GOARCH build +tags. If the optional --release.targets is not specified, a binary is created +for your current environment. + + ignite chain build --release -t linux:amd64 -t darwin:amd64 -t darwin:arm64 +`, + Args: cobra.NoArgs, + RunE: chainBuildHandler, + } + + flagSetPath(c) + flagSetClearCache(c) + c.Flags().AddFlagSet(flagSetCheckDependencies()) + c.Flags().AddFlagSet(flagSetSkipProto()) + c.Flags().AddFlagSet(flagSetDebug()) + c.Flags().AddFlagSet(flagSetVerbose()) + c.Flags().Bool(flagRelease, false, "build for a release") + c.Flags().StringSliceP(flagReleaseTargets, "t", []string{}, "release targets. Available only with --release flag") + c.Flags().StringSlice(flagBuildTags, []string{}, "parameters to build the chain binary") + c.Flags().String(flagReleasePrefix, "", "tarball prefix for each release target. 
Available only with --release flag") + c.Flags().StringP(flagOutput, "o", "", "binary output path") + + return c +} + +func chainBuildHandler(cmd *cobra.Command, _ []string) error { + var ( + isRelease, _ = cmd.Flags().GetBool(flagRelease) + releaseTargets, _ = cmd.Flags().GetStringSlice(flagReleaseTargets) + releasePrefix, _ = cmd.Flags().GetString(flagReleasePrefix) + buildTags, _ = cmd.Flags().GetStringSlice(flagBuildTags) + output, _ = cmd.Flags().GetString(flagOutput) + session = cliui.New( + cliui.WithVerbosity(getVerbosity(cmd)), + cliui.StartSpinner(), + ) + ) + defer session.End() + + chainOption := []chain.Option{ + chain.KeyringBackend(chaincmd.KeyringBackendTest), + chain.WithOutputer(session), + chain.CollectEvents(session.EventBus()), + chain.CheckCosmosSDKVersion(), + } + + if flagGetCheckDependencies(cmd) { + chainOption = append(chainOption, chain.CheckDependencies()) + } + + // check if custom config is defined + config, _ := cmd.Flags().GetString(flagConfig) + if config != "" { + chainOption = append(chainOption, chain.ConfigFile(config)) + } + + c, err := chain.NewWithHomeFlags(cmd, chainOption...) + if err != nil { + return err + } + + cacheStorage, err := newCache(cmd) + if err != nil { + return err + } + + ctx := cmd.Context() + if isRelease { + releasePath, err := c.BuildRelease(ctx, cacheStorage, buildTags, output, releasePrefix, releaseTargets...) + if err != nil { + return err + } + + return session.Printf("🗃 Release created: %s\n", colors.Info(releasePath)) + } + + binaryName, err := c.Build(ctx, cacheStorage, buildTags, output, flagGetSkipProto(cmd), flagGetDebug(cmd)) + if err != nil { + return err + } + + if output == "" { + session.Printf("🗃 Installed. 
Use with: %s\n", colors.Info(binaryName)) + + if _, err := exec.LookPath(binaryName); err != nil { + session.Printf("⚠️ Warning: Binary not found in PATH\n") + return session.Printf(" To run from anywhere, add Go bin to your PATH: export PATH=$PATH:%s\n", colors.Info(goenv.Bin())) + } + + return nil + } + + binaryPath := filepath.Join(output, binaryName) + return session.Printf("🗃 Binary built at the path: %s\n", colors.Info(binaryPath)) +} + +func flagSetCheckDependencies() *flag.FlagSet { + usage := "verify that cached dependencies have not been modified since they were downloaded" + fs := flag.NewFlagSet("", flag.ContinueOnError) + fs.Bool(flagCheckDependencies, false, usage) + return fs +} + +func flagGetCheckDependencies(cmd *cobra.Command) (check bool) { + check, _ = cmd.Flags().GetBool(flagCheckDependencies) + return +} + +func flagSetDebug() *flag.FlagSet { + fs := flag.NewFlagSet("", flag.ContinueOnError) + fs.Bool(flagDebug, false, "build a debug binary") + return fs +} + +func flagGetDebug(cmd *cobra.Command) (debug bool) { + debug, _ = cmd.Flags().GetBool(flagDebug) + return +} diff --git a/ignite/cmd/chain_debug.go b/ignite/cmd/chain_debug.go new file mode 100644 index 0000000..9874e20 --- /dev/null +++ b/ignite/cmd/chain_debug.go @@ -0,0 +1,188 @@ +package ignitecmd + +import ( + "context" + "fmt" + + tea "github.com/charmbracelet/bubbletea" + "github.com/spf13/cobra" + + cmdmodel "github.com/ignite/cli/v29/ignite/cmd/bubblemodel" + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + "github.com/ignite/cli/v29/ignite/pkg/chaincmd" + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + cliuimodel "github.com/ignite/cli/v29/ignite/pkg/cliui/model" + "github.com/ignite/cli/v29/ignite/pkg/debugger" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/events" + "github.com/ignite/cli/v29/ignite/pkg/xurl" + "github.com/ignite/cli/v29/ignite/services/chain" +) + 
+const ( + flagServer = "server" + flagServerAddress = "server-address" +) + +// NewChainDebug returns a new debug command to debug a blockchain app. +func NewChainDebug() *cobra.Command { + c := &cobra.Command{ + Use: "debug", + Short: "Launch a debugger for a blockchain app", + Long: `The debug command starts a debug server and launches a debugger. + +Ignite uses the Delve debugger by default. Delve enables you to interact with +your program by controlling the execution of the process, evaluating variables, +and providing information of thread / goroutine state, CPU register state and +more. + +A debug server can optionally be started in cases where default terminal client +is not desirable. When the server starts it first runs the blockchain app, +attaches to it and finally waits for a client connection. It accepts both +JSON-RPC or DAP client connections. + +To start a debug server use the following flag: + + ignite chain debug --server + +To start a debug server with a custom address use the following flags: + + ignite chain debug --server --server-address 127.0.0.1:30500 + +The debug server stops automatically when the client connection is closed. +`, + Args: cobra.NoArgs, + RunE: chainDebugHandler, + } + + flagSetPath(c) + c.Flags().Bool(flagServer, false, "start a debug server") + c.Flags().String(flagServerAddress, debugger.DefaultAddress, "debug server address") + + return c +} + +func chainDebugHandler(cmd *cobra.Command, _ []string) error { + // Prepare session options. + // Events are ignored by the session when the debug server UI is used. + options := []cliui.Option{ + cliui.StartSpinnerWithText("Initializing..."), + cliui.WithoutUserInteraction(getYes(cmd)), + } + server, _ := cmd.Flags().GetBool(flagServer) + if server { + options = append(options, cliui.IgnoreEvents()) + } + + session := cliui.New(options...) 
+ defer session.End() + + // Start debug server + if server { + bus := session.EventBus() + m := cmdmodel.NewChainDebug(cmd, bus, chainDebugCmd(cmd, session)) + _, err := tea.NewProgram(m, tea.WithInput(cmd.InOrStdin())).Run() + return err + } + + return chainDebug(cmd, session) +} + +func chainDebugCmd(cmd *cobra.Command, session *cliui.Session) tea.Cmd { + return func() tea.Msg { + if err := chainDebug(cmd, session); err != nil && !errors.Is(err, context.Canceled) { + return cliuimodel.ErrorMsg{Error: err} + } + return cliuimodel.QuitMsg{} + } +} + +func chainDebug(cmd *cobra.Command, session *cliui.Session) error { + chainOptions := []chain.Option{ + chain.KeyringBackend(chaincmd.KeyringBackendTest), + } + + // check if custom config is defined + config, _ := cmd.Flags().GetString(flagConfig) + if config != "" { + chainOptions = append(chainOptions, chain.ConfigFile(config)) + } + + c, err := chain.NewWithHomeFlags(cmd, chainOptions...) + if err != nil { + return err + } + + cfg, err := c.Config() + if err != nil { + return err + } + + validator, err := chainconfig.FirstValidator(cfg) + if err != nil { + return err + } + + servers, err := validator.GetServers() + if err != nil { + return err + } + + home, err := c.Home() + if err != nil { + return err + } + + binPath, err := c.AbsBinaryPath() + if err != nil { + return err + } + + // Common debugger options + debugOptions := []debugger.Option{ + debugger.WorkingDir(flagGetPath(cmd)), + debugger.BinaryArgs( + "start", + "--pruning", "nothing", + "--grpc.address", servers.GRPC.Address, + "--home", home, + ), + } + + // Start debug server + ctx := cmd.Context() + bus := session.EventBus() + if server, _ := cmd.Flags().GetBool(flagServer); server { + addr, _ := cmd.Flags().GetString(flagServerAddress) + tcpAddr, err := xurl.TCP(addr) + if err != nil { + return err + } + + debugOptions = append(debugOptions, + debugger.Address(addr), + debugger.ServerStartHook(func() { + bus.Send( + fmt.Sprintf("Debug server: %s", 
tcpAddr), + events.Icon(icons.Earth), + events.ProgressFinish(), + ) + }), + ) + + bus.Send("Launching debug server", events.ProgressUpdate()) + return debugger.Start(ctx, binPath, debugOptions...) + } + + // Launch a debugger client + debugOptions = append(debugOptions, + debugger.ClientRunHook(func() { + // End session to allow debugger to gain control of stdout + session.End() + }), + ) + + bus.Send("Launching debugger", events.ProgressUpdate()) + return debugger.Run(ctx, binPath, debugOptions...) +} diff --git a/ignite/cmd/chain_faucet.go b/ignite/cmd/chain_faucet.go new file mode 100644 index 0000000..6308c95 --- /dev/null +++ b/ignite/cmd/chain_faucet.go @@ -0,0 +1,72 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "github.com/ignite/cli/v29/ignite/pkg/chaincmd" + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/services/chain" +) + +// NewChainFaucet creates a new faucet command to send coins to accounts. +func NewChainFaucet() *cobra.Command { + c := &cobra.Command{ + Use: "faucet [address] [coin<,...>]", + Short: "Send coins to an account", + Args: cobra.ExactArgs(2), + RunE: chainFaucetHandler, + } + + flagSetPath(c) + c.Flags().AddFlagSet(flagSetHome()) + c.Flags().BoolP("verbose", "v", false, "verbose output") + + return c +} + +func chainFaucetHandler(cmd *cobra.Command, args []string) error { + var ( + toAddress = args[0] + coins = args[1] + session = cliui.New(cliui.StartSpinner()) + ) + defer session.End() + + chainOption := []chain.Option{ + chain.KeyringBackend(chaincmd.KeyringBackendTest), + chain.WithOutputer(session), + chain.CollectEvents(session.EventBus()), + } + + config, _ := cmd.Flags().GetString(flagConfig) + if config != "" { + chainOption = append(chainOption, chain.ConfigFile(config)) + } + + c, err := chain.NewWithHomeFlags(cmd, chainOption...) 
+ if err != nil { + return err + } + + faucet, err := c.Faucet(cmd.Context()) + if err != nil { + return err + } + + // parse provided coins + parsedCoins, err := sdk.ParseCoinsNormalized(coins) + if err != nil { + return err + } + + // perform transfer from faucet + hash, err := faucet.Transfer(cmd.Context(), toAddress, parsedCoins) + if err != nil { + return err + } + + _ = session.Println("📨 Coins sent.") + return session.Printf("Transaction Hash: %s\n", hash) +} diff --git a/ignite/cmd/chain_init.go b/ignite/cmd/chain_init.go new file mode 100644 index 0000000..a0f447a --- /dev/null +++ b/ignite/cmd/chain_init.go @@ -0,0 +1,150 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/chaincmd" + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/services/chain" +) + +func NewChainInit() *cobra.Command { + c := &cobra.Command{ + Use: "init", + Short: "Initialize your chain", + Long: `The init command compiles and installs the binary (like "ignite chain build") +and uses that binary to initialize the blockchain's data directory for one +validator. To learn how the build process works, refer to "ignite chain build +--help". + +By default, the data directory will be initialized in $HOME/.mychain, where +"mychain" is the name of the project. To set a custom data directory use the +--home flag or set the value in config.yml: + + validators: + - name: alice + bonded: '100000000stake' + home: "~/.customdir" + +The data directory contains three files in the "config" directory: app.toml, +config.toml, client.toml. These files let you customize the behavior of your +blockchain node and the client executable. When a chain is re-initialized the +data directory can be reset. 
To make some values in these files persistent, set +them in config.yml: + + validators: + - name: alice + bonded: '100000000stake' + app: + minimum-gas-prices: "0.025stake" + config: + consensus: + timeout_commit: "5s" + timeout_propose: "5s" + client: + output: "json" + +The configuration above changes the minimum gas price of the validator (by +default the gas price is set to 0 to allow "free" transactions), sets the block +time to 5s, and changes the output format to JSON. To see what kind of values +this configuration accepts see the generated TOML files in the data directory. + +As part of the initialization process Ignite creates on-chain accounts with +token balances. By default, config.yml has two accounts in the top-level +"accounts" property. You can add more accounts and change their token balances. +Refer to config.yml guide to see which values you can set. + +One of these accounts is a validator account and the amount of self-delegated +tokens can be set in the top-level "validator" property. + +One of the most important components of an initialized chain is the genesis +file, the 0th block of the chain. The genesis file is stored in the data +directory "config" subdirectory and contains the initial state of the chain, +including consensus and module parameters. You can customize the values of the +genesis in config.yml: + + genesis: + app_state: + staking: + params: + bond_denom: "foo" + +The example above changes the staking token to "foo". If you change the staking +denom, make sure the validator account has the right tokens. + +The init command is meant to be used ONLY FOR DEVELOPMENT PURPOSES. Under the +hood it runs commands like "appd init", "appd add-genesis-account", "appd +gentx", and "appd collect-gentx". For production, you may want to run these +commands manually to ensure a production-level node initialization. 
+`, + Args: cobra.NoArgs, + RunE: chainInitHandler, + } + + flagSetPath(c) + flagSetClearCache(c) + c.Flags().AddFlagSet(flagSetHome()) + c.Flags().AddFlagSet(flagSetCheckDependencies()) + c.Flags().AddFlagSet(flagSetSkipProto()) + c.Flags().AddFlagSet(flagSetDebug()) + c.Flags().AddFlagSet(flagSetVerbose()) + c.Flags().StringSlice(flagBuildTags, []string{}, "parameters to build the chain binary") + + return c +} + +func chainInitHandler(cmd *cobra.Command, _ []string) error { + session := cliui.New( + cliui.WithVerbosity(getVerbosity(cmd)), + cliui.StartSpinner(), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + chainOption := []chain.Option{ + chain.KeyringBackend(chaincmd.KeyringBackendTest), + chain.WithOutputer(session), + chain.CollectEvents(session.EventBus()), + chain.CheckCosmosSDKVersion(), + } + + if flagGetCheckDependencies(cmd) { + chainOption = append(chainOption, chain.CheckDependencies()) + } + + // check if custom config is defined + config, _ := cmd.Flags().GetString(flagConfig) + if config != "" { + chainOption = append(chainOption, chain.ConfigFile(config)) + } + + c, err := chain.NewWithHomeFlags(cmd, chainOption...) + if err != nil { + return err + } + + cacheStorage, err := newCache(cmd) + if err != nil { + return err + } + + var ( + ctx = cmd.Context() + buildTags, _ = cmd.Flags().GetStringSlice(flagBuildTags) + ) + if _, err = c.Build(ctx, cacheStorage, buildTags, "", flagGetSkipProto(cmd), flagGetDebug(cmd)); err != nil { + return err + } + + if err := c.Init(ctx, chain.InitArgsAll); err != nil { + return err + } + + home, err := c.Home() + if err != nil { + return err + } + + return session.Printf("🗃 Initialized. 
Checkout your chain's home (data) directory: %s\n", colors.Info(home))
}
 diff --git a/ignite/cmd/chain_lint.go b/ignite/cmd/chain_lint.go new file mode 100644 index 0000000..93a4818 --- /dev/null +++ b/ignite/cmd/chain_lint.go @@ -0,0 +1,39 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/services/chain" +)

// NewChainLint returns a lint command to build a blockchain app.
func NewChainLint() *cobra.Command {
	// runLint is the command handler: it opens a CLI session, builds the
	// chain service for the current project and runs the linter against it.
	runLint := func(cmd *cobra.Command, _ []string) error {
		session := cliui.New(
			cliui.StartSpinnerWithText("Linting..."),
			cliui.WithoutUserInteraction(getYes(cmd)),
		)
		defer session.End()

		c, err := chain.NewWithHomeFlags(
			cmd,
			chain.WithOutputer(session),
			chain.CollectEvents(session.EventBus()),
		)
		if err != nil {
			return err
		}

		return c.Lint(cmd.Context())
	}

	return &cobra.Command{
		Use:   "lint",
		Short: "Lint codebase using golangci-lint",
		Long:  "The lint command runs the golangci-lint tool to lint the codebase.",
		Args:  cobra.NoArgs,
		RunE:  runLint,
	}
}
 diff --git a/ignite/cmd/chain_modules.go b/ignite/cmd/chain_modules.go new file mode 100644 index 0000000..fc31051 --- /dev/null +++ b/ignite/cmd/chain_modules.go @@ -0,0 +1,21 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" +) + +// NewChainModules returns the modules command. 
+func NewChainModules() *cobra.Command { + c := &cobra.Command{ + Use: "modules", + Short: "Manage modules", + Long: "The modules command allows you to manage modules in the codebase.", + Args: cobra.NoArgs, + } + + c.AddCommand( + NewChainModulesList(), + ) + + return c +} diff --git a/ignite/cmd/chain_modules_list.go b/ignite/cmd/chain_modules_list.go new file mode 100644 index 0000000..1a421d2 --- /dev/null +++ b/ignite/cmd/chain_modules_list.go @@ -0,0 +1,165 @@ +package ignitecmd + +import ( + "sort" + "strings" + + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis/app" + "github.com/ignite/cli/v29/ignite/pkg/gomodule" + "github.com/ignite/cli/v29/ignite/services/chain" +) + +func NewChainModulesList() *cobra.Command { + c := &cobra.Command{ + Use: "list", + Short: "List all Cosmos SDK modules in the app", + Long: "The list command lists all modules in the app.", + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, _ []string) error { + session := cliui.New(cliui.StartSpinner()) + defer session.End() + + chainOption := []chain.Option{ + chain.WithOutputer(session), + chain.CollectEvents(session.EventBus()), + } + + c, err := chain.NewWithHomeFlags(cmd, chainOption...) 
+ if err != nil { + return err + } + + modules, err := app.FindRegisteredModules(c.AppPath()) + if err != nil { + return err + } + + if len(modules) == 0 { + session.Println("no modules found") + return nil + } + + modFile, err := gomodule.ParseAt(c.AppPath()) + if err != nil { + return err + } + + deps, err := gomodule.ResolveDependencies(modFile, false) + if err != nil { + return err + } + + depMap := make(map[string]string) + for _, dep := range deps { + depMap[dep.Path] = dep.Version + } + + // create a map of replaced modules for easy lookup + // check the original required modules, not the resolved ones + replacedMap := make(map[string]bool) + for _, replace := range modFile.Replace { + replacedMap[replace.Old.Path] = true + } + + // get the app's module path to identify app modules + appModulePath := modFile.Module.Mod.Path + + var entries [][]string + for _, m := range modules { + ver := depMap[m] + modName := m + + switch { + case strings.HasPrefix(m, appModulePath+"/"): + ver = "main" + case strings.HasPrefix(m, cosmosSDKModulePrefix+"/"): + ver = depMap[cosmosSDKModulePrefix] + modName = strings.TrimPrefix(m, cosmosSDKModulePrefix+"/") + case strings.Contains(m, ibcModulePrefix+"/v"): + modName, ver = getIBCVersion(m, depMap) + case isModuleReplaced(m, replacedMap): + ver = "locally replaced" + } + + if ver == "" { + ver = findBestMatchingVersion(m, depMap) + if ver == "" { + ver = "-" + } + } + + entries = append(entries, []string{modName, ver}) + } + + session.StopSpinner() + + // Sort entries by module name + sort.SliceStable(entries, func(i, j int) bool { + return entries[i][0] < entries[j][0] + }) + + header := []string{"module", "version"} + return session.PrintTable(header, entries...) + }, + } + + return c +} + +const ( + cosmosSDKModulePrefix = "github.com/cosmos/cosmos-sdk" + ibcModulePrefix = "github.com/cosmos/ibc-go" +) + +// isModuleReplaced checks if a module path (or its parent paths) is in the replaced map. 
+func isModuleReplaced(modulePath string, replacedMap map[string]bool) bool { + checkPath := modulePath + for checkPath != "" && checkPath != "." { + if replacedMap[checkPath] { + return true + } + // check parent path + if idx := strings.LastIndex(checkPath, "/"); idx > 0 { + checkPath = checkPath[:idx] + } else { + break + } + } + return false +} + +// for a given module path by checking progressively shorter paths. +func findBestMatchingVersion(modulePath string, depMap map[string]string) string { + checkPath := modulePath + for checkPath != "" && checkPath != "." { + if version, exists := depMap[checkPath]; exists { + return version + } + // check parent path + if idx := strings.LastIndex(checkPath, "/"); idx > 0 { + checkPath = checkPath[:idx] + } else { + break + } + } + return "" +} + +// getIBCVersion tries to extract the ibc-go version from the module path or dependencies. +func getIBCVersion(modulePath string, depMap map[string]string) (string, string) { + // find the root ibc-go module path (with major version) + parts := strings.Split(modulePath, "/") + for i := range parts { + if parts[i] == "ibc-go" && i+1 < len(parts) && strings.HasPrefix(parts[i+1], "v") { + root := strings.Join(parts[:i+2], "/") + ver := depMap[root] + // clean module name after root + mod := strings.TrimPrefix(modulePath, root+"/") + return mod, ver + } + } + return modulePath, "" +} diff --git a/ignite/cmd/chain_serve.go b/ignite/cmd/chain_serve.go new file mode 100644 index 0000000..680610f --- /dev/null +++ b/ignite/cmd/chain_serve.go @@ -0,0 +1,276 @@ +package ignitecmd + +import ( + "context" + "io" + "os" + "os/signal" + "syscall" + + tea "github.com/charmbracelet/bubbletea" + "github.com/spf13/cobra" + "golang.org/x/term" + + cmdmodel "github.com/ignite/cli/v29/ignite/cmd/bubblemodel" + "github.com/ignite/cli/v29/ignite/pkg/cliui" + uilog "github.com/ignite/cli/v29/ignite/pkg/cliui/log" + cliuimodel "github.com/ignite/cli/v29/ignite/pkg/cliui/model" + 
"github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/events" + "github.com/ignite/cli/v29/ignite/services/chain" +) + +const ( + flagVerbose = "verbose" + flagConfig = "config" + flagForceReset = "force-reset" + flagGenerateClients = "generate-clients" + flagQuitOnFail = "quit-on-fail" + flagResetOnce = "reset-once" + flagOutputFile = "output-file" +) + +var isTerminal = term.IsTerminal + +// NewChainServe creates a new serve command to serve a blockchain. +func NewChainServe() *cobra.Command { + c := &cobra.Command{ + Use: "serve", + Short: "Start a blockchain node in development", + Long: `The serve command compiles and installs the binary (like "ignite chain build"), +uses that binary to initialize the blockchain's data directory for one validator +(like "ignite chain init"), and starts the node locally for development purposes +with automatic code reloading. + +Automatic code reloading means Ignite starts watching the project directory. +Whenever a file change is detected, Ignite automatically rebuilds, reinitializes +and restarts the node. + +Whenever possible Ignite will try to keep the current state of the chain by +exporting and importing the genesis file. + +To force Ignite to start from a clean slate even if a genesis file exists, use +the following flag: + + ignite chain serve --reset-once + +To force Ignite to reset the state every time the source code is modified, use +the following flag: + + ignite chain serve --force-reset + +With Ignite it's possible to start more than one blockchain from the same source +code using different config files. This is handy if you're building +inter-blockchain functionality and, for example, want to try sending packets +from one blockchain to another. To start a node using a specific config file: + + ignite chain serve --config mars.yml + +The serve command is meant to be used ONLY FOR DEVELOPMENT PURPOSES. 
Under the +hood, it runs "appd start", where "appd" is the name of your chain's binary. For +production, you may want to run "appd start" manually. +`, + Args: cobra.NoArgs, + RunE: chainServeHandler, + } + + flagSetPath(c) + flagSetClearCache(c) + c.Flags().AddFlagSet(flagSetHome()) + c.Flags().AddFlagSet(flagSetCheckDependencies()) + c.Flags().AddFlagSet(flagSetSkipProto()) + c.Flags().AddFlagSet(flagSetSkipBuild()) + c.Flags().AddFlagSet(flagSetVerbose()) + c.Flags().BoolP(flagForceReset, "f", false, "force reset of the app state on start and every source change") + c.Flags().BoolP(flagResetOnce, "r", false, "reset the app state once on init") + c.Flags().Bool(flagGenerateClients, false, "generate code for the configured clients on reset or source code change") + c.Flags().Bool(flagQuitOnFail, false, "quit program if the app fails to start") + c.Flags().StringSlice(flagBuildTags, []string{}, "parameters to build the chain binary") + c.Flags().StringP(flagOutputFile, "o", "", "output file logging the chain output (no UI, no stdin, listens for SIGTERM, implies --yes) (default: stdout)") + + return c +} + +func chainServeHandler(cmd *cobra.Command, _ []string) error { + if shouldRunServeInDaemonMode(cmd) { + return daemonMode(cmd) + } + + options := []cliui.Option{cliui.WithoutUserInteraction(getYes(cmd))} + + // Session must not handle events when the verbosity is the default + // to allow render of the UI and events using bubbletea. The custom + // UI is not used for other verbosity levels in which the session + // must handle the events to use custom output prefixes. + verbosity := getVerbosity(cmd) + if verbosity == uilog.VerbosityDefault { + options = append(options, cliui.IgnoreEvents()) + } else { + options = append(options, cliui.WithVerbosity(verbosity)) + } + + session := cliui.New(options...) + defer session.End() + + // Depending on the verbosity execute the serve command within + // a bubbletea context to display the custom UI. 
+ if verbosity == uilog.VerbosityDefault { + bus := session.EventBus() + bus.Send("Initializing...", events.ProgressStart()) + + // Render UI + m := cmdmodel.NewChainServe(cmd, bus, chainServeCmd(cmd, session)) + _, err := tea.NewProgram(m, tea.WithInput(cmd.InOrStdin())).Run() + return err + } + + // Otherwise run the serve command directly + return chainServe(cmd, session) +} + +func shouldRunServeInDaemonMode(cmd *cobra.Command) bool { + if cmd.Flags().Changed(flagOutputFile) { + return true + } + + return !hasTerminalInputAndOutput(cmd.InOrStdin(), cmd.OutOrStdout()) +} + +func hasTerminalInputAndOutput(stdin io.Reader, stdout io.Writer) bool { + stdinFD, ok := fileDescriptor(stdin) + if !ok { + return false + } + + stdoutFD, ok := fileDescriptor(stdout) + if !ok { + return false + } + + return isTerminal(stdinFD) && isTerminal(stdoutFD) +} + +type fileDescriptorProvider interface { + Fd() uintptr +} + +func fileDescriptor(v any) (int, bool) { + provider, ok := v.(fileDescriptorProvider) + if !ok { + return 0, false + } + + return int(provider.Fd()), true +} + +// daemonMode runs the chain serve command without user interaction, UI in verbose mode. Useful to be used as daemon. +func daemonMode(cmd *cobra.Command) error { + // always yes, no user interaction + options := []cliui.Option{cliui.WithoutUserInteraction(true)} + options = append(options, + cliui.WithVerbosity(uilog.VerbosityVerbose), + ) + + // output file logic + outputFile, _ := cmd.Flags().GetString(flagOutputFile) + var output *os.File + var err error + if outputFile != "" { + output, err = os.OpenFile(outputFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o644) + if err != nil { + return err + } + defer output.Close() + options = append(options, cliui.WithStdout(output), cliui.WithStderr(output)) + } else { + options = append(options, cliui.WithStdout(os.Stdout), cliui.WithStderr(os.Stderr)) + } + + session := cliui.New(options...) 
+ defer session.End() + + ctx, stop := signal.NotifyContext(cmd.Context(), os.Interrupt, syscall.SIGTERM) + defer stop() + cmd.SetContext(ctx) + return chainServe(cmd, session) +} + +func chainServeCmd(cmd *cobra.Command, session *cliui.Session) tea.Cmd { + return func() tea.Msg { + if err := chainServe(cmd, session); err != nil && !errors.Is(err, context.Canceled) { + return cliuimodel.ErrorMsg{Error: err} + } + return cliuimodel.QuitMsg{} + } +} + +func chainServe(cmd *cobra.Command, session *cliui.Session) error { + chainOption := []chain.Option{ + chain.WithOutputer(session), + chain.CollectEvents(session.EventBus()), + chain.CheckCosmosSDKVersion(), + } + + if flagGetCheckDependencies(cmd) { + chainOption = append(chainOption, chain.CheckDependencies()) + } + + // check if custom config is defined + config, _ := cmd.Flags().GetString(flagConfig) + if config != "" { + chainOption = append(chainOption, chain.ConfigFile(config)) + } + + // create the chain + c, err := chain.NewWithHomeFlags(cmd, chainOption...) 
+ if err != nil { + return err + } + + cacheStorage, err := newCache(cmd) + if err != nil { + return err + } + + // serve the chain + var serveOptions []chain.ServeOption + + forceUpdate, _ := cmd.Flags().GetBool(flagForceReset) + if forceUpdate { + serveOptions = append(serveOptions, chain.ServeForceReset()) + } + + resetOnce, _ := cmd.Flags().GetBool(flagResetOnce) + if resetOnce { + serveOptions = append(serveOptions, chain.ServeResetOnce()) + } + + quitOnFail, _ := cmd.Flags().GetBool(flagQuitOnFail) + if quitOnFail { + serveOptions = append(serveOptions, chain.QuitOnFail()) + } + + generateClients, _ := cmd.Flags().GetBool(flagGenerateClients) + if generateClients { + serveOptions = append(serveOptions, chain.GenerateClients()) + } + + buildTags, _ := cmd.Flags().GetStringSlice(flagBuildTags) + if len(buildTags) > 0 { + serveOptions = append(serveOptions, chain.BuildTags(buildTags...)) + } + + if flagGetSkipProto(cmd) { + serveOptions = append(serveOptions, chain.ServeSkipProto()) + } + + if flagGetSkipBuild(cmd) { + serveOptions = append(serveOptions, chain.ServeSkipBuild()) + } + + if quitOnFail { + serveOptions = append(serveOptions, chain.QuitOnFail()) + } + + return c.Serve(cmd.Context(), cacheStorage, serveOptions...) 
+} diff --git a/ignite/cmd/chain_serve_test.go b/ignite/cmd/chain_serve_test.go new file mode 100644 index 0000000..4b09913 --- /dev/null +++ b/ignite/cmd/chain_serve_test.go @@ -0,0 +1,56 @@ +package ignitecmd + +import ( + "bytes" + "os" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestShouldRunServeInDaemonMode(t *testing.T) { + cmd := NewChainServe() + + require.NoError(t, cmd.Flags().Set(flagOutputFile, "/tmp/serve.log")) + require.True(t, shouldRunServeInDaemonMode(cmd)) +} + +func TestShouldRunServeInDaemonModeNoTerminal(t *testing.T) { + cmd := NewChainServe() + cmd.SetIn(bytes.NewBufferString("")) + cmd.SetOut(bytes.NewBuffer(nil)) + + require.True(t, shouldRunServeInDaemonMode(cmd)) +} + +func TestShouldRunServeInDaemonModeInteractiveTerminal(t *testing.T) { + cmd := NewChainServe() + cmd.SetIn(os.Stdin) + cmd.SetOut(os.Stdout) + + originalIsTerminal := isTerminal + t.Cleanup(func() { + isTerminal = originalIsTerminal + }) + isTerminal = func(_ int) bool { return true } + + require.False(t, shouldRunServeInDaemonMode(cmd)) +} + +func TestShouldRunServeInDaemonModeMixedTerminal(t *testing.T) { + cmd := NewChainServe() + cmd.SetIn(os.Stdin) + cmd.SetOut(os.Stdout) + + originalIsTerminal := isTerminal + t.Cleanup(func() { + isTerminal = originalIsTerminal + }) + calls := 0 + isTerminal = func(_ int) bool { + calls++ + return calls == 1 + } + + require.True(t, shouldRunServeInDaemonMode(cmd)) +} diff --git a/ignite/cmd/chain_simulate.go b/ignite/cmd/chain_simulate.go new file mode 100644 index 0000000..33a176b --- /dev/null +++ b/ignite/cmd/chain_simulate.go @@ -0,0 +1,119 @@ +package ignitecmd + +import ( + "path/filepath" + + "github.com/spf13/cobra" + + "github.com/cosmos/cosmos-sdk/types/simulation" + + "github.com/ignite/cli/v29/ignite/services/chain" +) + +const ( + flagSimappGenesis = "genesis" + flagSimappParams = "params" + flagSimappExportParamsPath = "exportParamsPath" + flagSimappExportParamsHeight = "exportParamsHeight" + 
flagSimappExportStatePath = "exportStatePath" + flagSimappExportStatsPath = "exportStatsPath" + flagSimappSeed = "seed" + flagSimappInitialBlockHeight = "initialBlockHeight" + flagSimappNumBlocks = "numBlocks" + flagSimappBlockSize = "blockSize" + flagSimappLean = "lean" + flagSimappGenesisTime = "genesisTime" + flagSimName = "simName" +) + +// NewChainSimulate creates a new simulation command to run the blockchain simulation. +func NewChainSimulate() *cobra.Command { + c := &cobra.Command{ + Use: "simulate", + Short: "Run simulation testing for the blockchain", + Long: "Run simulation testing for the blockchain. It sends many randomized-input messages of each module to a simulated node.", + Args: cobra.NoArgs, + RunE: chainSimulationHandler, + } + simappFlags(c) + return c +} + +func chainSimulationHandler(cmd *cobra.Command, _ []string) error { + var ( + genesisTime, _ = cmd.Flags().GetInt64(flagSimappGenesisTime) + simName, _ = cmd.Flags().GetString(flagSimName) + config = newConfigFromFlags(cmd) + appPath = flagGetPath(cmd) + ) + // create the chain with path + absPath, err := filepath.Abs(appPath) + if err != nil { + return err + } + c, err := chain.New(absPath) + if err != nil { + return err + } + + config.ChainID, err = c.ID() + if err != nil { + return err + } + + return c.Simulate(cmd.Context(), + chain.SimappWithSimulationTestName(simName), + chain.SimappWithGenesisTime(genesisTime), + chain.SimappWithConfig(config), + ) +} + +// newConfigFromFlags creates a simulation from the retrieved values of the flags. 
+func newConfigFromFlags(cmd *cobra.Command) simulation.Config { + var ( + genesis, _ = cmd.Flags().GetString(flagSimappGenesis) + params, _ = cmd.Flags().GetString(flagSimappParams) + exportParamsPath, _ = cmd.Flags().GetString(flagSimappExportParamsPath) + exportParamsHeight, _ = cmd.Flags().GetInt(flagSimappExportParamsHeight) + exportStatePath, _ = cmd.Flags().GetString(flagSimappExportStatePath) + exportStatsPath, _ = cmd.Flags().GetString(flagSimappExportStatsPath) + seed, _ = cmd.Flags().GetInt64(flagSimappSeed) + initialBlockHeight, _ = cmd.Flags().GetInt(flagSimappInitialBlockHeight) + numBlocks, _ = cmd.Flags().GetInt(flagSimappNumBlocks) + blockSize, _ = cmd.Flags().GetInt(flagSimappBlockSize) + lean, _ = cmd.Flags().GetBool(flagSimappLean) + ) + return simulation.Config{ + Commit: true, + GenesisFile: genesis, + ParamsFile: params, + ExportParamsPath: exportParamsPath, + ExportParamsHeight: exportParamsHeight, + ExportStatePath: exportStatePath, + ExportStatsPath: exportStatsPath, + Seed: seed, + InitialBlockHeight: initialBlockHeight, + NumBlocks: numBlocks, + BlockSize: blockSize, + Lean: lean, + } +} + +func simappFlags(c *cobra.Command) { + // config fields + c.Flags().String(flagSimappGenesis, "", "custom simulation genesis file; cannot be used with params file") + c.Flags().String(flagSimappParams, "", "custom simulation params file which overrides any random params; cannot be used with genesis") + c.Flags().String(flagSimappExportParamsPath, "", "custom file path to save the exported params JSON") + c.Flags().Int(flagSimappExportParamsHeight, 0, "height to which export the randomly generated params") + c.Flags().String(flagSimappExportStatePath, "", "custom file path to save the exported app state JSON") + c.Flags().String(flagSimappExportStatsPath, "", "custom file path to save the exported simulation statistics JSON") + c.Flags().Int64(flagSimappSeed, 42, "simulation random seed") + c.Flags().Int(flagSimappInitialBlockHeight, 1, "initial block 
to start the simulation") + c.Flags().Int(flagSimappNumBlocks, 200, "number of new blocks to simulate from the initial block height") + c.Flags().Int(flagSimappBlockSize, 30, "operations per block") + c.Flags().Bool(flagSimappLean, false, "lean simulation log output") + + // simulation flags + c.Flags().String(flagSimName, "TestFullAppSimulation", "name of the simulation to run") + c.Flags().Int64(flagSimappGenesisTime, 0, "override genesis UNIX time instead of using a random UNIX time") +} diff --git a/ignite/cmd/cmd.go b/ignite/cmd/cmd.go new file mode 100644 index 0000000..d7c4fdd --- /dev/null +++ b/ignite/cmd/cmd.go @@ -0,0 +1,306 @@ +package ignitecmd + +import ( + "context" + "fmt" + "os" + "path/filepath" + "slices" + "strings" + "time" + + "github.com/spf13/cobra" + flag "github.com/spf13/pflag" + + "github.com/ignite/cli/v29/ignite/config" + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + "github.com/ignite/cli/v29/ignite/internal/announcements" + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/cliui" + uilog "github.com/ignite/cli/v29/ignite/pkg/cliui/log" + "github.com/ignite/cli/v29/ignite/pkg/dircache" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/goenv" + "github.com/ignite/cli/v29/ignite/pkg/gomodulepath" + "github.com/ignite/cli/v29/ignite/version" +) + +type key int + +const ( + keyChainConfig key = iota + keyChainConfigPath key = iota +) + +const ( + flagPath = "path" + flagHome = "home" + flagYes = "yes" + flagClearCache = "clear-cache" + flagSkipProto = "skip-proto" + flagSkipBuild = "skip-build" + + checkVersionTimeout = time.Millisecond * 600 + cacheFileName = "ignite_cache.db" + + statusGenerating = "Generating..." + statusImporting = "Importing..." + statusExporting = "Exporting..." + statusCreating = "Creating..." + statusDeleting = "Deleting..." +) + +// List of CLI level one commands that should not load Ignite app instances. 
+var skipAppsLoadCommands = []string{"version", "help", "docs", "completion", cobra.ShellCompRequestCmd, cobra.ShellCompNoDescRequestCmd} + +// New creates a new root command for `Ignite CLI` with its sub commands. +// Returns the cobra.Command, a cleanup function and an error. The cleanup +// function must be invoked by the caller to clean eventual Ignite App instances. +func New(ctx context.Context) (*cobra.Command, func(), error) { + cobra.EnableCommandSorting = false + + c := &cobra.Command{ + Use: "ignite", + Short: "Ignite CLI offers everything you need to scaffold, test, build, and launch your blockchain", + Long: fmt.Sprintf(`Ignite CLI is a tool for creating sovereign blockchains built with Cosmos SDK, the world's +most popular modular blockchain framework. Ignite CLI offers everything you need to scaffold, +test, build, and launch your blockchain. + +To get started, create a blockchain: + +$ ignite scaffold chain example + +%s`, announcements.Fetch()), + SilenceUsage: true, + SilenceErrors: true, + Args: cobra.MinimumNArgs(0), // note(@julienrbrt): without this, ignite __complete(noDesc) hidden commands are not working. + PersistentPreRunE: func(cmd *cobra.Command, _ []string) error { + // Check for new versions only when shell completion scripts are not being + // generated to avoid invalid output to stdout when a new version is available + if cmd.Use != "completion" || !strings.HasPrefix(cmd.Use, cobra.ShellCompRequestCmd) { + checkNewVersion(cmd) + } + + return goenv.ConfigurePath() + }, + } + + c.AddCommand( + NewScaffold(), + NewChain(), + NewGenerate(), + NewAccount(), + NewDocs(), + NewVersion(), + NewApp(), + NewDoctor(), + NewCompletionCmd(), + NewTestnet(), + ) + c.AddCommand(deprecated()...) 
+ c.SetContext(ctx) + + // Don't load Ignite apps for level one commands that doesn't allow them + if len(os.Args) >= 2 && slices.Contains(skipAppsLoadCommands, os.Args[1]) { + return c, func() {}, nil + } + + // Load plugins if any + session := cliui.New(cliui.WithStdout(os.Stdout)) + if err := LoadPlugins(ctx, c, session); err != nil { + return nil, nil, errors.Errorf("error while loading apps: %w", err) + } + return c, func() { + UnloadPlugins() + session.End() + }, nil +} + +func flagSetVerbose() *flag.FlagSet { + fs := flag.NewFlagSet("", flag.ContinueOnError) + fs.BoolP(flagVerbose, "v", false, "verbose output") + return fs +} + +func getVerbosity(cmd *cobra.Command) uilog.Verbosity { + if verbose, _ := cmd.Flags().GetBool(flagVerbose); verbose { + return uilog.VerbosityVerbose + } + + return uilog.VerbosityDefault +} + +func flagSetPath(cmd *cobra.Command) { + cmd.PersistentFlags().StringP(flagPath, "p", ".", "path of the app") +} + +func flagGetPath(cmd *cobra.Command) (path string) { + path, _ = cmd.Flags().GetString(flagPath) + return +} + +func goModulePath(cmd *cobra.Command) (string, error) { + path := flagGetPath(cmd) + path, err := filepath.Abs(path) + if err != nil { + return "", err + } + + _, appPath, err := gomodulepath.Find(path) + if err != nil { + return "", err + } + return appPath, err +} + +func flagSetHome() *flag.FlagSet { + fs := flag.NewFlagSet("", flag.ContinueOnError) + fs.String(flagHome, "", "directory where the blockchain node is initialized") + return fs +} + +func flagSetConfig() *flag.FlagSet { + fs := flag.NewFlagSet("", flag.ContinueOnError) + fs.StringP(flagConfig, "c", "", "path to Ignite config file (default: ./config.yml)") + return fs +} + +func getConfig(cmd *cobra.Command) (config string) { + config, _ = cmd.Flags().GetString(flagConfig) + return +} + +func getChainConfig(cmd *cobra.Command) (*chainconfig.Config, string, error) { + cfg, ok := cmd.Context().Value(keyChainConfig).(*chainconfig.Config) + if ok { + 
configPath := cmd.Context().Value(keyChainConfigPath).(string) + return cfg, configPath, nil + } + configPath := getConfig(cmd) + + path, err := goModulePath(cmd) + if err != nil { + return nil, "", err + } + + if configPath == "" { + if configPath, err = chainconfig.LocateDefault(path); err != nil { + return nil, "", err + } + } + + cfg, err = chainconfig.ParseFile(configPath) + if err != nil { + return nil, "", err + } + ctx := context.WithValue(cmd.Context(), keyChainConfig, cfg) + ctx = context.WithValue(ctx, keyChainConfigPath, configPath) + cmd.SetContext(ctx) + + return cfg, configPath, err +} + +func flagSetYes() *flag.FlagSet { + fs := flag.NewFlagSet("", flag.ContinueOnError) + fs.BoolP(flagYes, "y", false, "answers interactive yes/no questions with yes") + return fs +} + +func getYes(cmd *cobra.Command) (ok bool) { + ok, _ = cmd.Flags().GetBool(flagYes) + return +} + +func flagSetSkipProto() *flag.FlagSet { + fs := flag.NewFlagSet("", flag.ContinueOnError) + fs.Bool(flagSkipProto, false, "skip file generation from proto") + return fs +} + +func flagGetSkipProto(cmd *cobra.Command) bool { + skip, _ := cmd.Flags().GetBool(flagSkipProto) + return skip +} + +func flagSetSkipBuild() *flag.FlagSet { + fs := flag.NewFlagSet("", flag.ContinueOnError) + fs.Bool(flagSkipBuild, false, "skip initial build of the app (uses local binary)") + return fs +} + +func flagGetSkipBuild(cmd *cobra.Command) bool { + skip, _ := cmd.Flags().GetBool(flagSkipBuild) + return skip +} + +func flagSetClearCache(cmd *cobra.Command) { + cmd.PersistentFlags().Bool(flagClearCache, false, "clear the build cache (advanced)") +} + +func flagGetClearCache(cmd *cobra.Command) bool { + clearCache, _ := cmd.Flags().GetBool(flagClearCache) + return clearCache +} + +func deprecated() []*cobra.Command { + return []*cobra.Command{ + { + Use: "build", + Hidden: true, + Deprecated: "use `ignite chain build` instead.", + }, + { + Use: "serve", + Hidden: true, + Deprecated: "use `ignite chain serve` 
instead.", + }, + { + Use: "faucet", + Hidden: true, + Deprecated: "use `ignite chain faucet` instead.", + }, + { + Use: "node", + Hidden: true, + Deprecated: "use ignite connect app instead (ignite app install -g github.com/ignite/apps/connect).", + }, + } +} + +func checkNewVersion(cmd *cobra.Command) { + ctx, cancel := context.WithTimeout(cmd.Context(), checkVersionTimeout) + defer cancel() + + isAvailable, next, err := version.CheckNext(ctx) + if err != nil || !isAvailable { + return + } + + cmd.Printf("⬆️ Ignite CLI %s is available! To upgrade: https://docs.ignite.com/welcome/install#upgrade (or use snap or homebrew)\n\n", next) +} + +func newCache(cmd *cobra.Command) (cache.Storage, error) { + cacheRootDir, err := config.DirPath() + if err != nil { + return cache.Storage{}, err + } + + storage, err := cache.NewStorage( + filepath.Join(cacheRootDir, cacheFileName), + cache.WithVersion(version.Version), + ) + if err != nil { + return cache.Storage{}, err + } + + if flagGetClearCache(cmd) { + if err := storage.Clear(); err != nil { + return cache.Storage{}, err + } + if err := dircache.ClearCache(); err != nil { + return cache.Storage{}, err + } + } + + return storage, nil +} diff --git a/ignite/cmd/completion.go b/ignite/cmd/completion.go new file mode 100644 index 0000000..9cdbcc0 --- /dev/null +++ b/ignite/cmd/completion.go @@ -0,0 +1,42 @@ +package ignitecmd + +import ( + "fmt" + "os" + + "github.com/spf13/cobra" +) + +// NewCompletionCmd represents the completion command. 
+func NewCompletionCmd() *cobra.Command { + return &cobra.Command{ + Use: "completion [bash|zsh|fish|powershell]", + Short: "Generates shell completion script.", + Run: func(cmd *cobra.Command, args []string) { + if len(args) == 0 { + if err := cmd.Help(); err != nil { + fmt.Fprintln(os.Stderr, "Error displaying help:", err) + os.Exit(1) + } + os.Exit(0) + } + var err error + switch args[0] { + case "bash": + err = cmd.Root().GenBashCompletion(os.Stdout) + case "zsh": + err = cmd.Root().GenZshCompletion(os.Stdout) + case "fish": + err = cmd.Root().GenFishCompletion(os.Stdout, true) + case "powershell": + err = cmd.Root().GenPowerShellCompletion(os.Stdout) + default: + err = cmd.Help() + } + if err != nil { + fmt.Fprintln(os.Stderr, "Error generating completion script:", err) + os.Exit(1) + } + }, + } +} diff --git a/ignite/cmd/docs.go b/ignite/cmd/docs.go new file mode 100644 index 0000000..6814730 --- /dev/null +++ b/ignite/cmd/docs.go @@ -0,0 +1,29 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/docs" + "github.com/ignite/cli/v29/ignite/pkg/localfs" + "github.com/ignite/cli/v29/ignite/pkg/markdownviewer" +) + +func NewDocs() *cobra.Command { + c := &cobra.Command{ + Use: "docs", + Short: "Show Ignite CLI docs", + Args: cobra.NoArgs, + RunE: docsHandler, + } + return c +} + +func docsHandler(*cobra.Command, []string) error { + path, cleanup, err := localfs.SaveTemp(docs.Docs) + if err != nil { + return err + } + defer cleanup() + + return markdownviewer.View(path) +} diff --git a/ignite/cmd/doctor.go b/ignite/cmd/doctor.go new file mode 100644 index 0000000..608f2b3 --- /dev/null +++ b/ignite/cmd/doctor.go @@ -0,0 +1,57 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/services/doctor" +) + +func NewDoctor() *cobra.Command { + c := &cobra.Command{ + Use: "doctor", + 
Short: "Fix chain configuration", + Hidden: true, + RunE: func(cmd *cobra.Command, _ []string) error { + session := cliui.New( + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + appPath := flagGetPath(cmd) + + doc := doctor.New(doctor.CollectEvents(session.EventBus())) + + cacheStorage, err := newCache(cmd) + if err != nil { + return err + } + + if err := doc.MigrateToolsGo(appPath); err != nil { + return err + } + + configPath, err := chainconfig.LocateDefault(appPath) + if err != nil { + return err + } + + if err := doc.MigrateChainConfig(configPath); err != nil { + return err + } + + if err := doc.MigrateBufConfig(cmd.Context(), cacheStorage, appPath, configPath); err != nil { + return err + } + + if err := doc.MigratePluginsConfig(); err != nil { + return err + } + + return nil + }, + } + + flagSetPath(c) + return c +} diff --git a/ignite/cmd/generate.go b/ignite/cmd/generate.go new file mode 100644 index 0000000..0466a37 --- /dev/null +++ b/ignite/cmd/generate.go @@ -0,0 +1,55 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + flag "github.com/spf13/pflag" +) + +const ( + flagEnableProtoVendor = "enable-proto-vendor" +) + +// NewGenerate returns a command that groups code generation related sub commands. +func NewGenerate() *cobra.Command { + c := &cobra.Command{ + Use: "generate [command]", + Short: "Generate clients, API docs from source code", + Long: `Generate clients, API docs from source code. + +Such as compiling protocol buffer files into Go or implement particular +functionality, for example, generating an OpenAPI spec. + +Produced source code can be regenerated by running a command again and is not +meant to be edited by hand. 
+`, + Aliases: []string{"g"}, + Args: cobra.ExactArgs(1), + PersistentPreRunE: migrationPreRunHandler, + } + + c.PersistentFlags().AddFlagSet(flagSetEnableProtoVendor()) + c.PersistentFlags().AddFlagSet(flagSetVerbose()) + + flagSetPath(c) + flagSetClearCache(c) + + c.AddCommand( + NewGenerateGo(), + NewGenerateTSClient(), + NewGenerateComposables(), + NewGenerateOpenAPI(), + ) + + return c +} + +func flagSetEnableProtoVendor() *flag.FlagSet { + fs := flag.NewFlagSet("", flag.ContinueOnError) + fs.Bool(flagEnableProtoVendor, false, "enable proto package vendor for missing Buf dependencies") + return fs +} + +func flagGetEnableProtoVendor(cmd *cobra.Command) bool { + skip, _ := cmd.Flags().GetBool(flagEnableProtoVendor) + return skip +} diff --git a/ignite/cmd/generate_composables.go b/ignite/cmd/generate_composables.go new file mode 100644 index 0000000..31a85c5 --- /dev/null +++ b/ignite/cmd/generate_composables.go @@ -0,0 +1,58 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + "github.com/ignite/cli/v29/ignite/services/chain" +) + +func NewGenerateComposables() *cobra.Command { + c := &cobra.Command{ + Use: "composables", + Short: "TypeScript frontend client and Vue 3 composables", + RunE: generateComposablesHandler, + } + + c.Flags().AddFlagSet(flagSetYes()) + c.Flags().StringP(flagOutput, "o", "", "Vue 3 composables output path") + + return c +} + +func generateComposablesHandler(cmd *cobra.Command, _ []string) error { + session := cliui.New( + cliui.StartSpinnerWithText(statusGenerating), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + c, err := chain.NewWithHomeFlags( + cmd, + chain.WithOutputer(session), + chain.CollectEvents(session.EventBus()), + chain.PrintGeneratedPaths()) + if err != nil { + return err + } + + cacheStorage, err := newCache(cmd) + if err != nil { + return err + } + + output, _ := 
cmd.Flags().GetString(flagOutput) + + var opts []chain.GenerateTarget + if flagGetEnableProtoVendor(cmd) { + opts = append(opts, chain.GenerateProtoVendor()) + } + + err = c.Generate(cmd.Context(), cacheStorage, chain.GenerateComposables(output), opts...) + if err != nil { + return err + } + + return session.Println(icons.OK, "Generated Typescript Client and Vue 3 composables") +} diff --git a/ignite/cmd/generate_go.go b/ignite/cmd/generate_go.go new file mode 100644 index 0000000..5194da5 --- /dev/null +++ b/ignite/cmd/generate_go.go @@ -0,0 +1,56 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + "github.com/ignite/cli/v29/ignite/services/chain" +) + +func NewGenerateGo() *cobra.Command { + c := &cobra.Command{ + Use: "proto-go", + Short: "Compile protocol buffer files to Go source code required by Cosmos SDK", + RunE: generateGoHandler, + } + + c.Flags().AddFlagSet(flagSetYes()) + + return c +} + +func generateGoHandler(cmd *cobra.Command, _ []string) error { + session := cliui.New( + cliui.StartSpinnerWithText(statusGenerating), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + c, err := chain.NewWithHomeFlags( + cmd, + chain.WithOutputer(session), + chain.CollectEvents(session.EventBus()), + chain.CheckCosmosSDKVersion(), + ) + if err != nil { + return err + } + + cacheStorage, err := newCache(cmd) + if err != nil { + return err + } + + var opts []chain.GenerateTarget + if flagGetEnableProtoVendor(cmd) { + opts = append(opts, chain.GenerateProtoVendor()) + } + + err = c.Generate(cmd.Context(), cacheStorage, chain.GenerateGo(), opts...) 
+	if err != nil {
+		return err
+	}
+
+	return session.Println(icons.OK, "Generated Go code")
+}
diff --git a/ignite/cmd/generate_openapi.go b/ignite/cmd/generate_openapi.go new file mode 100644 index 0000000..c0a1142 --- /dev/null +++ b/ignite/cmd/generate_openapi.go
+package ignitecmd
+
+import (
+	"github.com/spf13/cobra"
+
+	"github.com/ignite/cli/v29/ignite/pkg/cliui"
+	"github.com/ignite/cli/v29/ignite/pkg/cliui/icons"
+	"github.com/ignite/cli/v29/ignite/services/chain"
+)
+
+var excludeFlag = "exclude"
+
+// NewGenerateOpenAPI returns the command that generates an OpenAPI spec for the chain.
+func NewGenerateOpenAPI() *cobra.Command {
+	c := &cobra.Command{
+		Use:   "openapi",
+		Short: "OpenAPI spec for your chain",
+		RunE:  generateOpenAPIHandler,
+	}
+
+	c.Flags().AddFlagSet(flagSetYes())
+	c.Flags().StringSlice(excludeFlag, []string{}, "List of proto files or directories to exclude from the OpenAPI spec generation")
+
+	return c
+}
+
+func generateOpenAPIHandler(cmd *cobra.Command, _ []string) error {
+	session := cliui.New(
+		cliui.StartSpinnerWithText(statusGenerating),
+		cliui.WithoutUserInteraction(getYes(cmd)),
+	)
+	defer session.End()
+
+	c, err := chain.NewWithHomeFlags(
+		cmd,
+		chain.WithOutputer(session),
+		chain.CollectEvents(session.EventBus()),
+		chain.PrintGeneratedPaths(),
+	)
+	if err != nil {
+		return err
+	}
+
+	cacheStorage, err := newCache(cmd)
+	if err != nil {
+		return err
+	}
+
+	var opts []chain.GenerateTarget
+	if flagGetEnableProtoVendor(cmd) {
+		opts = append(opts, chain.GenerateProtoVendor())
+	}
+
+	// FIX(review): the flag is registered with StringSlice above, so it must be
+	// read with GetStringSlice. GetStringArray on a stringSlice flag returns a
+	// type-mismatch error, which the discarded error silently turned into an
+	// always-empty exclude list (--exclude was a no-op).
+	excludeList, _ := cmd.Flags().GetStringSlice(excludeFlag)
+
+	err = c.Generate(cmd.Context(), cacheStorage, chain.GenerateOpenAPI(excludeList), opts...)
+ if err != nil { + return err + } + + return session.Println(icons.OK, "Generated OpenAPI spec") +} diff --git a/ignite/cmd/generate_typescript_client.go b/ignite/cmd/generate_typescript_client.go new file mode 100644 index 0000000..0fcf2ea --- /dev/null +++ b/ignite/cmd/generate_typescript_client.go @@ -0,0 +1,81 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + "github.com/ignite/cli/v29/ignite/services/chain" +) + +const flagDisableCache = "disable-cache" + +func NewGenerateTSClient() *cobra.Command { + c := &cobra.Command{ + Use: "ts-client", + Short: "TypeScript frontend client", + Long: `Generate a framework agnostic TypeScript client for your blockchain project. + +By default the TypeScript client is generated in the "ts-client/" directory. You +can customize the output directory in config.yml: + + client: + typescript: + path: new-path + +Output can also be customized by using a flag: + + ignite generate ts-client --output new-path + +TypeScript client code can be automatically regenerated on reset or source code +changes when the blockchain is started with a flag: + + ignite chain serve --generate-clients +`, + RunE: generateTSClientHandler, + } + + c.Flags().AddFlagSet(flagSetYes()) + c.Flags().StringP(flagOutput, "o", "", "TypeScript client output path") + c.Flags().Bool(flagDisableCache, false, "disable build cache") + + return c +} + +func generateTSClientHandler(cmd *cobra.Command, _ []string) error { + session := cliui.New( + cliui.StartSpinnerWithText(statusGenerating), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + c, err := chain.NewWithHomeFlags( + cmd, + chain.WithOutputer(session), + chain.CollectEvents(session.EventBus()), + chain.PrintGeneratedPaths(), + ) + if err != nil { + return err + } + + cacheStorage, err := newCache(cmd) + if err != nil { + return err + } + + output, _ := 
cmd.Flags().GetString(flagOutput) + disableCache, _ := cmd.Flags().GetBool(flagDisableCache) + + var opts []chain.GenerateTarget + if flagGetEnableProtoVendor(cmd) { + opts = append(opts, chain.GenerateProtoVendor()) + } + + err = c.Generate(cmd.Context(), cacheStorage, chain.GenerateTSClient(output, !disableCache), opts...) + if err != nil { + return err + } + + return session.Println(icons.OK, "Generated Typescript Client") +} diff --git a/ignite/cmd/ignite/main.go b/ignite/cmd/ignite/main.go new file mode 100644 index 0000000..eb9d6f3 --- /dev/null +++ b/ignite/cmd/ignite/main.go @@ -0,0 +1,140 @@ +package main + +import ( + "context" + "fmt" + "image/color" + "os" + "sync" + + "github.com/charmbracelet/fang" + "github.com/charmbracelet/lipgloss/v2" + "google.golang.org/grpc/status" + + ignitecmd "github.com/ignite/cli/v29/ignite/cmd" + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + "github.com/ignite/cli/v29/ignite/internal/analytics" + "github.com/ignite/cli/v29/ignite/pkg/clictx" + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xstrings" + "github.com/ignite/cli/v29/ignite/version" +) + +const exitCodeOK, exitCodeError = 0, 1 + +func main() { + os.Exit(run()) +} + +func run() int { + ctx := clictx.From(context.Background()) + cmd, cleanUp, err := ignitecmd.New(ctx) + if err != nil { + fmt.Printf("%v\n", err) + return exitCodeError + } + defer cleanUp() + + // find command and send to analytics + subCmd, _, err := cmd.Find(os.Args[1:]) + if err != nil { + fmt.Printf("%v\n", err) + return exitCodeError + } + var wg sync.WaitGroup + analytics.SendMetric(&wg, subCmd) + analytics.EnableSentry(ctx, &wg) + + // use charm's fang to improve CLI output + err = fang.Execute(ctx, cmd, + fang.WithColorSchemeFunc(cliColorScheme), + fang.WithVersion(version.Version), + ) + if err != nil { + err = ensureError(err) + 
} + + if errors.Is(ctx.Err(), context.Canceled) || errors.Is(err, context.Canceled) { + fmt.Println("aborted") + return exitCodeOK + } + + if err != nil { + var ( + validationErr errors.ValidationError + versionErr chainconfig.VersionError + msg string + ) + + if errors.As(err, &validationErr) { + msg = validationErr.ValidationInfo() + } else { + msg = err.Error() + } + + // Make sure the error message starts with an upper case character + msg = xstrings.ToUpperFirst(msg) + + fmt.Printf("%s %s\n", icons.NotOK, colors.Error(msg)) + + if errors.As(err, &versionErr) { + fmt.Println("Use a more recent CLI version or upgrade blockchain app's config") + } + + return exitCodeError + } + + // waits for analytics to finish + wg.Wait() + + return exitCodeOK +} + +func ensureError(err error) error { + // Extract gRPC error status. + // These errors are returned by the plugins. + s, ok := status.FromError(err) + if !ok { + // The error is not a gRPC error + return err + } + + // Get the error message + cause := s.Proto().GetMessage() + if cause == "" { + return err + } + + // Restore context canceled errors + if cause == context.Canceled.Error() { + return context.Canceled + } + + // Use the gRPC description as error to avoid printing + // extra gRPC error information like code or prefix. + return errors.New(cause) +} + +// cliColorScheme returns a ColorScheme for the CLI. 
+var cliColorScheme = func(c lipgloss.LightDarkFunc) fang.ColorScheme { + return fang.ColorScheme{ + Base: c(lipgloss.Color("#2F2E36"), lipgloss.Color(colors.White)), + Title: lipgloss.Color(colors.HiBlue), + Codeblock: c(lipgloss.Color("#F5F5F5"), lipgloss.Color("#2F2E36")), + Program: c(lipgloss.Color(colors.Blue), lipgloss.Color(colors.Cyan)), + Command: c(lipgloss.Color(colors.Magenta), lipgloss.Color(colors.HiBlue)), + DimmedArgument: c(lipgloss.Color(colors.Magenta), lipgloss.Color("#AAAAAA")), + Comment: c(lipgloss.Color("#666666"), lipgloss.Color("#CCCCCC")), + Flag: c(lipgloss.Color(colors.Green), lipgloss.Color(colors.Green)), + Argument: c(lipgloss.Color("#2F2E36"), lipgloss.Color(colors.White)), + Description: c(lipgloss.Color("#2F2E36"), lipgloss.Color(colors.White)), // flag and command descriptions + FlagDefault: c(lipgloss.Color(colors.Blue), lipgloss.Color(colors.HiBlue)), // flag default values in descriptions + QuotedString: c(lipgloss.Color(colors.Yellow), lipgloss.Color(colors.Yellow)), + ErrorHeader: [2]color.Color{ + lipgloss.Color(colors.Yellow), + lipgloss.Color(colors.Red), + }, + } +} diff --git a/ignite/cmd/plugin.go b/ignite/cmd/plugin.go new file mode 100644 index 0000000..e7554c7 --- /dev/null +++ b/ignite/cmd/plugin.go @@ -0,0 +1,763 @@ +package ignitecmd + +import ( + "context" + "fmt" + "os" + "strings" + "time" + + "github.com/spf13/cobra" + flag "github.com/spf13/pflag" + + pluginsconfig "github.com/ignite/cli/v29/ignite/config/plugins" + "github.com/ignite/cli/v29/ignite/pkg/clictx" + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gomodule" + "github.com/ignite/cli/v29/ignite/pkg/xfilepath" + "github.com/ignite/cli/v29/ignite/pkg/xgit" + "github.com/ignite/cli/v29/ignite/services/chain" + 
"github.com/ignite/cli/v29/ignite/services/plugin" +) + +const ( + flagPluginsGlobal = "global" +) + +// plugins hold the list of plugin declared in the config. +// A global variable is used so the list is accessible to the plugin commands. +var plugins []*plugin.Plugin + +// LoadPlugins tries to load all the plugins found in configurations. +// If no configurations found, it returns w/o error. +func LoadPlugins(ctx context.Context, cmd *cobra.Command, session *cliui.Session) error { + var pluginsConfigs []pluginsconfig.Plugin + localCfg, err := parseLocalPlugins() + if err != nil && !errors.As(err, &cosmosanalysis.ErrPathNotChain{}) { + return err + } else if err == nil { + pluginsConfigs = append(pluginsConfigs, localCfg.Apps...) + } + + globalCfg, err := parseGlobalPlugins() + if err == nil { + pluginsConfigs = append(pluginsConfigs, globalCfg.Apps...) + } + ensureDefaultPlugins(cmd, globalCfg) + + if len(pluginsConfigs) == 0 { + return nil + } + + uniquePlugins := pluginsconfig.RemoveDuplicates(pluginsConfigs) + plugins, err = plugin.Load(ctx, uniquePlugins, plugin.CollectEvents(session.EventBus())) + if err != nil { + return err + } + if len(plugins) == 0 { + return nil + } + + return linkPlugins(ctx, cmd.Root(), plugins) +} + +func parseLocalPlugins() (*pluginsconfig.Config, error) { + wd, err := os.Getwd() + if err != nil { + return nil, errors.Errorf("parse local apps: %w", err) + } + + if err := cosmosanalysis.IsChainPath(wd); err != nil { + return nil, err + } + + return pluginsconfig.ParseDir(wd) +} + +func parseGlobalPlugins() (cfg *pluginsconfig.Config, err error) { + globalDir, err := plugin.PluginsPath() + if err != nil { + return cfg, err + } + + cfg, err = pluginsconfig.ParseDir(globalDir) + // if there is error parsing, return empty config and continue execution to load + // local plugins if they exist. 
+ if err != nil { + return &pluginsconfig.Config{}, nil + } + + for i := range cfg.Apps { + cfg.Apps[i].Global = true + } + return +} + +func linkPlugins(ctx context.Context, rootCmd *cobra.Command, plugins []*plugin.Plugin) error { + // Link plugins to related commands + var linkErrors []*plugin.Plugin + for _, p := range plugins { + if p.Error != nil { + linkErrors = append(linkErrors, p) + continue + } + + manifest, err := p.Interface.Manifest(ctx) + if err != nil { + p.Error = err + linkErrors = append(linkErrors, p) + continue + } + + linkPluginHooks(rootCmd, p, manifest.Hooks) + if p.Error != nil { + linkErrors = append(linkErrors, p) + continue + } + + linkPluginCmds(rootCmd, p, manifest.Commands) + if p.Error != nil { + linkErrors = append(linkErrors, p) + continue + } + } + + if len(linkErrors) > 0 { + // unload any plugin that could have been loaded + defer UnloadPlugins() + + if err := printPlugins(ctx, cliui.New(cliui.WithStdout(os.Stdout))); err != nil { + // content of loadErrors is more important than a print error, so we don't + // return here, just print the error. + fmt.Printf("fail to print: %v\n", err) + } + + var s strings.Builder + for _, p := range linkErrors { + fmt.Fprintf(&s, "%s: %v", p.Path, p.Error) + } + return errors.Errorf("fail to link: %v", s.String()) + } + return nil +} + +// UnloadPlugins releases any loaded plugins, which is basically killing the +// plugin server instance. 
+func UnloadPlugins() { + for _, p := range plugins { + p.KillClient() + } +} + +func linkPluginHooks(rootCmd *cobra.Command, p *plugin.Plugin, hooks []*plugin.Hook) { + if p.Error != nil { + return + } + for _, hook := range hooks { + linkPluginHook(rootCmd, p, hook) + } +} + +func linkPluginHook(rootCmd *cobra.Command, p *plugin.Plugin, hook *plugin.Hook) { + cmdPath := hook.CommandPath() + cmd := findCommandByPath(rootCmd, cmdPath) + if cmd == nil { + p.Error = errors.Errorf("unable to find command path %q for app hook %q", cmdPath, hook.Name) + return + } + if !cmd.Runnable() { + p.Error = errors.Errorf("can't attach app hook %q to non executable command %q", hook.Name, hook.PlaceHookOn) + return + } + + newExecutedHook := func(hook *plugin.Hook, cmd *cobra.Command, args []string) *plugin.ExecutedHook { + hook.ImportFlags(cmd) + execHook := &plugin.ExecutedHook{ + Hook: hook, + ExecutedCommand: &plugin.ExecutedCommand{ + Use: cmd.Use, + Path: cmd.CommandPath(), + Args: args, + OsArgs: os.Args, + With: p.With, + Flags: hook.Flags, + }, + } + execHook.ExecutedCommand.ImportFlags(cmd) + return execHook + } + + for _, f := range hook.Flags { + var fs *flag.FlagSet + if f.Persistent { + fs = cmd.PersistentFlags() + } else { + fs = cmd.Flags() + } + + if err := f.ExportToFlagSet(fs); err != nil { + p.Error = errors.Errorf("can't attach hook flags %q to command %q", hook.Flags, hook.PlaceHookOn) + return + } + } + + preRun := cmd.PreRunE + cmd.PreRunE = func(cmd *cobra.Command, args []string) error { + if preRun != nil { + err := preRun(cmd, args) + if err != nil { + return err + } + } + + api, err := newAppClientAPI(cmd) + if err != nil { + return err + } + + ctx := cmd.Context() + execHook := newExecutedHook(hook, cmd, args) + err = p.Interface.ExecuteHookPre(ctx, execHook, api) + if err != nil { + return errors.Errorf("app %q ExecuteHookPre() error: %w", p.Path, err) + } + return nil + } + + runCmd := cmd.RunE + cmd.RunE = func(cmd *cobra.Command, args []string) 
error { + if runCmd != nil { + err := runCmd(cmd, args) + // if the command has failed the `PostRun` will not execute. here we execute the cleanup step before returning. + if err != nil { + api, err := newAppClientAPI(cmd) + if err != nil { + return err + } + + ctx := cmd.Context() + execHook := newExecutedHook(hook, cmd, args) + err = p.Interface.ExecuteHookCleanUp(ctx, execHook, api) + if err != nil { + cmd.Printf("app %q ExecuteHookCleanUp() error: %v", p.Path, err) + } + } + return err + } + + time.Sleep(100 * time.Millisecond) + return nil + } + + postCmd := cmd.PostRunE + cmd.PostRunE = func(cmd *cobra.Command, args []string) error { + api, err := newAppClientAPI(cmd) + if err != nil { + return err + } + + ctx := cmd.Context() + execHook := newExecutedHook(hook, cmd, args) + + defer func() { + err := p.Interface.ExecuteHookCleanUp(ctx, execHook, api) + if err != nil { + cmd.Printf("app %q ExecuteHookCleanUp() error: %v", p.Path, err) + } + }() + + if postCmd != nil { + err := postCmd(cmd, args) + if err != nil { + // dont return the error, log it and let execution continue to `Run` + return err + } + } + + err = p.Interface.ExecuteHookPost(ctx, execHook, api) + if err != nil { + return errors.Errorf("app %q ExecuteHookPost() error : %w", p.Path, err) + } + return nil + } +} + +// linkPluginCmds tries to add the plugin commands to the legacy ignite +// commands. 
+func linkPluginCmds(rootCmd *cobra.Command, p *plugin.Plugin, pluginCmds []*plugin.Command) { + if p.Error != nil { + return + } + for _, pluginCmd := range pluginCmds { + linkPluginCmd(rootCmd, p, pluginCmd) + if p.Error != nil { + return + } + } +} + +func linkPluginCmd(rootCmd *cobra.Command, p *plugin.Plugin, pluginCmd *plugin.Command) { + cmdPath := pluginCmd.Path() + cmd := findCommandByPath(rootCmd, cmdPath) + if cmd == nil { + p.Error = errors.Errorf("unable to find command path %q for app %q", cmdPath, p.Path) + return + } + if cmd.Runnable() { + p.Error = errors.Errorf("can't attach app command %q to runnable command %q", pluginCmd.Use, cmd.CommandPath()) + return + } + + // Check for existing commands + // pluginCmd.Use can be like `command [args]` so we need to remove those + // extra args if any. + pluginCmdName := strings.Split(pluginCmd.Use, " ")[0] + for _, cmd := range cmd.Commands() { + if cmd.Name() == pluginCmdName { + p.Error = errors.Errorf("app command %q already exists in Ignite's commands", pluginCmdName) + return + } + } + + newCmd, err := pluginCmd.ToCobraCommand() + if err != nil { + p.Error = err + return + } + cmd.AddCommand(newCmd) + + if len(pluginCmd.Commands) == 0 { + // pluginCmd has no sub commands, so it's runnable + newCmd.RunE = func(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + return clictx.Do(ctx, func() error { + api, err := newAppClientAPI(cmd) + if err != nil { + return err + } + + // Call the plugin Execute + execCmd := &plugin.ExecutedCommand{ + Use: cmd.Use, + Path: cmd.CommandPath(), + Args: args, + OsArgs: os.Args, + With: p.With, + } + execCmd.ImportFlags(cmd) + err = p.Interface.Execute(ctx, execCmd, api) + + return err + }) + } + } else { + for _, pluginCmd := range pluginCmd.Commands { + pluginCmd.PlaceCommandUnder = newCmd.CommandPath() + linkPluginCmd(newCmd, p, pluginCmd) + if p.Error != nil { + return + } + } + } +} + +func findCommandByPath(cmd *cobra.Command, cmdPath string) 
*cobra.Command { + if cmd.CommandPath() == cmdPath { + return cmd + } + for _, cmd := range cmd.Commands() { + if cmd := findCommandByPath(cmd, cmdPath); cmd != nil { + return cmd + } + } + return nil +} + +// NewApp returns a command that groups Ignite App related sub commands. +func NewApp() *cobra.Command { + c := &cobra.Command{ + Use: "app [command]", + Short: "Create and manage Ignite Apps", + } + + c.AddCommand( + NewAppList(), + NewAppUpdate(), + NewAppScaffold(), + NewAppDescribe(), + NewAppInstall(), + NewAppUninstall(), + ) + + return c +} + +func NewAppList() *cobra.Command { + lstCmd := &cobra.Command{ + Use: "list", + Short: "List installed apps", + Long: "Prints status and information of all installed Ignite Apps.", + RunE: func(cmd *cobra.Command, _ []string) error { + s := cliui.New(cliui.WithStdout(os.Stdout)) + return printPlugins(cmd.Context(), s) + }, + } + return lstCmd +} + +func NewAppUpdate() *cobra.Command { + return &cobra.Command{ + Use: "update [path]", + Short: "Update app", + Long: `Updates an Ignite App specified by path. + +If no path is specified all declared apps are updated.`, + Example: "ignite app update github.com/org/my-app/", + Args: cobra.MaximumNArgs(1), + RunE: func(_ *cobra.Command, args []string) error { + if len(args) == 0 { + // update all plugins + return plugin.Update(plugins...) + } + pluginPath, err := getAppPath(args[0]) + if err != nil { + return err + } + + // find the plugin to update + for _, p := range plugins { + if p.HasPath(pluginPath) { + return plugin.Update(p) + } + } + return errors.Errorf("App %q not found", pluginPath) + }, + } +} + +func NewAppInstall() *cobra.Command { + cmdPluginAdd := &cobra.Command{ + Use: "install [path] [key=value]...", + Short: "Install app", + Long: `Installs an Ignite App. 
+ +Respects key value pairs declared after the app path to be added to the generated configuration definition.`, + Example: "ignite app install github.com/org/my-app/ foo=bar baz=qux", + Args: cobra.MinimumNArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + session := cliui.New( + cliui.WithStdout(os.Stdout), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + var ( + conf *pluginsconfig.Config + err error + ) + + global := flagGetPluginsGlobal(cmd) + if global { + conf, err = parseGlobalPlugins() + } else { + conf, err = parseLocalPlugins() + } + if err != nil { + return err + } + + pluginPath, err := getAppPath(args[0]) + if err != nil { + return err + } + + for _, p := range conf.Apps { + if p.HasPath(pluginPath) { + return errors.Errorf("app %s is already installed", pluginPath) + } + } + + p := pluginsconfig.Plugin{ + Path: pluginPath, + With: make(map[string]string), + Global: global, + } + + pluginsOptions := []plugin.Option{ + plugin.CollectEvents(session.EventBus()), + } + + var pluginArgs []string + if len(args) > 1 { + pluginArgs = args[1:] + } + + for _, pa := range pluginArgs { + kv := strings.Split(pa, "=") + if len(kv) != 2 { + return errors.Errorf("malformed key=value arg: %s", pa) + } + p.With[kv[0]] = kv[1] + } + + plugins, err := plugin.Load(cmd.Context(), []pluginsconfig.Plugin{p}, pluginsOptions...) + if err != nil { + return err + } + defer plugins[0].KillClient() + + if err := plugins[0].Error; err != nil { + if strings.Contains(err.Error(), "go.mod file not found in current directory") { + return errors.Errorf("unable to find an App at the root of this repository (%s). Please ensure your repository URL is correct. 
If you're trying to install an App under a subfolder, include the path at the end of your repository URL, e.g., github.com/ignite/apps/appregistry", pluginPath) + } + + return errors.Errorf("error while loading app %q: %w", pluginPath, plugins[0].Error) + } + session.Println(icons.OK, "Done loading apps") + conf.Apps = append(conf.Apps, p) + + if err := conf.Save(); err != nil { + return err + } + + session.Printf("%s Installed %s\n", icons.Tada, pluginPath) + return nil + }, + } + + cmdPluginAdd.Flags().AddFlagSet(flagSetPluginsGlobal()) + + return cmdPluginAdd +} + +func NewAppUninstall() *cobra.Command { + cmdPluginRemove := &cobra.Command{ + Use: "uninstall [path]", + Aliases: []string{"rm"}, + Short: "Uninstall app", + Long: "Uninstalls an Ignite App specified by path.", + Example: "ignite app uninstall github.com/org/my-app/", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + s := cliui.New(cliui.WithStdout(os.Stdout)) + + var ( + conf *pluginsconfig.Config + err error + ) + + global := flagGetPluginsGlobal(cmd) + if global { + conf, err = parseGlobalPlugins() + } else { + conf, err = parseLocalPlugins() + } + if err != nil { + return err + } + + pluginPath, err := getAppPath(args[0]) + if err != nil { + return err + } + + removed := false + for i, cp := range conf.Apps { + if cp.HasPath(pluginPath) { + conf.Apps = append(conf.Apps[:i], conf.Apps[i+1:]...) 
+ removed = true + break + } + } + + if !removed { + // return if no matching plugin path found + return errors.Errorf("app %s not found", pluginPath) + } + + if err := conf.Save(); err != nil { + return err + } + + s.Printf("%s %s uninstalled\n", icons.OK, pluginPath) + s.Printf("\t%s updated\n", conf.Path()) + + return nil + }, + } + + cmdPluginRemove.Flags().AddFlagSet(flagSetPluginsGlobal()) + + return cmdPluginRemove +} + +func NewAppScaffold() *cobra.Command { + return &cobra.Command{ + Use: "scaffold [name]", + Short: "Scaffold a new Ignite App", + Long: `Scaffolds a new Ignite App in the current directory. + +A git repository will be created with the given module name, unless the current directory is already a git repository.`, + Example: "ignite app scaffold github.com/org/my-app/", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + session := cliui.New( + cliui.StartSpinnerWithText(statusScaffolding), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + wd, err := os.Getwd() + if err != nil { + return err + } + moduleName := args[0] + path, err := plugin.Scaffold(cmd.Context(), session, wd, moduleName, false) + if err != nil { + return err + } + if err := xgit.InitAndCommit(path); err != nil { + return err + } + + message := `⭐️ Successfully created a new Ignite App '%[1]s'. + +👉 Update app code at '%[2]s/main.go' + +👉 Test Ignite App integration by installing the app within the chain directory: + + ignite app install %[2]s + +Or globally: + + ignite app install -g %[2]s + +👉 Once the app is pushed to a repository, replace the local path by the repository path. 
+` + session.Printf(message, moduleName, path) + return nil + }, + } +} + +func NewAppDescribe() *cobra.Command { + return &cobra.Command{ + Use: "describe [path]", + Short: "Print information about installed apps", + Long: "Print information about an installed Ignite App commands and hooks.", + Example: "ignite app describe github.com/org/my-app/", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + var ( + s = cliui.New(cliui.WithStdout(os.Stdout)) + ctx = cmd.Context() + ) + + pluginPath, err := getAppPath(args[0]) + if err != nil { + return err + } + + for _, p := range plugins { + if p.HasPath(pluginPath) { + manifest, err := p.Interface.Manifest(ctx) + if err != nil { + return errors.Errorf("error while loading app manifest: %w", err) + } + + if len(manifest.Commands) > 0 { + s.Println("Commands:") + for i, c := range manifest.Commands { + cmdPath := fmt.Sprintf("%s %s", c.Path(), c.Use) + s.Printf(" %d) %s\n", i+1, cmdPath) + } + } + + if len(manifest.Hooks) > 0 { + s.Println("Hooks:") + for i, h := range manifest.Hooks { + s.Printf(" %d) '%s' on command '%s'\n", i+1, h.Name, h.CommandPath()) + } + } + + break + } + } + + return nil + }, + } +} + +func getPluginLocationName(p *plugin.Plugin) string { + if p.IsGlobal() { + return "global" + } + return "local" +} + +func getPluginStatus(ctx context.Context, p *plugin.Plugin) string { + if p.Error != nil { + return fmt.Sprintf("%s Error: %v", icons.NotOK, p.Error) + } + + _, err := p.Interface.Manifest(ctx) + if err != nil { + return fmt.Sprintf("%s Error: Manifest() returned %v", icons.NotOK, err) + } + + return fmt.Sprintf("%s Loaded", icons.OK) +} + +func printPlugins(ctx context.Context, session *cliui.Session) error { + var entries [][]string + for _, p := range plugins { + entries = append(entries, []string{p.Path, getPluginLocationName(p), getPluginStatus(ctx, p)}) + } + + if err := session.PrintTable([]string{"Path", "Config", "Status"}, entries...); err != nil { + 
return errors.Errorf("error while printing apps: %w", err) + } + return nil +} + +func newAppClientAPI(cmd *cobra.Command) (plugin.ClientAPI, error) { + // Get chain when the plugin runs inside an blockchain app + c, err := chain.NewWithHomeFlags(cmd) + if err != nil && !errors.Is(err, gomodule.ErrGoModNotFound) { + return nil, err + } + + var options []plugin.APIOption + if c != nil { + options = append(options, plugin.WithChain(c)) + } + + return plugin.NewClientAPI(options...), nil +} + +func flagSetPluginsGlobal() *flag.FlagSet { + fs := flag.NewFlagSet("", flag.ContinueOnError) + fs.BoolP(flagPluginsGlobal, "g", false, "use global plugins configuration ($HOME/.ignite/apps/igniteapps.yml)") + return fs +} + +func flagGetPluginsGlobal(cmd *cobra.Command) bool { + global, _ := cmd.Flags().GetBool(flagPluginsGlobal) + return global +} + +func getAppPath(path string) (string, error) { + if xfilepath.IsDir(path) { + // if directory is relative, make it absolute + pluginPathAbs, err := xfilepath.MustAbs(path) + if err != nil { + return "", errors.Wrapf(err, "failed to get absolute path of %s", path) + } + path = pluginPathAbs + } + return path, nil +} diff --git a/ignite/cmd/plugin_default.go b/ignite/cmd/plugin_default.go new file mode 100644 index 0000000..6956eb3 --- /dev/null +++ b/ignite/cmd/plugin_default.go @@ -0,0 +1,108 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + pluginsconfig "github.com/ignite/cli/v29/ignite/config/plugins" + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/services/plugin" +) + +type defaultPlugin struct { + use string + short string + aliases []string + path string +} + +const ( + PluginRelayerVersion = "hermes/v0.3.0" + PluginRelayerPath = "github.com/ignite/apps/hermes@" + PluginRelayerVersion + PluginAppRegistryVersion = "appregistry/v0.1.3" + PluginAppRegistryPath = "github.com/ignite/apps/appregistry@" + PluginAppRegistryVersion +) + +// defaultPlugins holds the plugin that are 
considered trustable and for which +// a command will added if the plugin is not already installed. +// When the user executes that command, the plugin is automatically installed. +var defaultPlugins = []defaultPlugin{ + { + use: "relayer", + short: "Connect blockchains with an IBC relayer", + aliases: []string{"r"}, + path: PluginRelayerPath, + }, + { + use: "appregistry", + short: "Browse the Ignite App Registry App", + aliases: []string{"mp"}, + path: PluginAppRegistryPath, + }, +} + +// ensureDefaultPlugins ensures that all defaultPlugins are whether registered +// in cfg OR have an install command added to rootCmd. +func ensureDefaultPlugins(rootCmd *cobra.Command, cfg *pluginsconfig.Config) { + for _, dp := range defaultPlugins { + // Check if plugin is declared in global config + if cfg.HasPlugin(dp.path) { + // plugin found nothing to do + continue + } + // plugin not found in config, add a proxy install command + rootCmd.AddCommand(newPluginInstallCmd(dp)) + } +} + +// newPluginInstallCmd mimics the plugin command but acts as proxy to first: +// - register the config in the global config +// - load the plugin +// - execute the command thanks to the loaded plugin. 
+func newPluginInstallCmd(dp defaultPlugin) *cobra.Command { + return &cobra.Command{ + Use: dp.use, + Short: dp.short, + Aliases: dp.aliases, + DisableFlagParsing: true, // Avoid -h to skip command run + RunE: func(cmd *cobra.Command, _ []string) error { + cfg, err := parseGlobalPlugins() + if err != nil { + return err + } + + // add plugin to config + pluginCfg := pluginsconfig.Plugin{ + Path: dp.path, + } + cfg.Apps = append(cfg.Apps, pluginCfg) + if err := cfg.Save(); err != nil { + return err + } + + session := cliui.New(cliui.WithoutUserInteraction(getYes(cmd))) + defer session.End() + + // load and link the plugin + plugins, err := plugin.Load( + cmd.Context(), + []pluginsconfig.Plugin{pluginCfg}, + plugin.CollectEvents(session.EventBus()), + ) + if err != nil { + return err + } + defer plugins[0].KillClient() + + // Keep reference of the root command before removal + rootCmd := cmd.Root() + // Remove this command before call to linkPlugins because a plugin is + // usually not allowed to override an existing command. 
+ rootCmd.RemoveCommand(cmd) + if err := linkPlugins(cmd.Context(), rootCmd, plugins); err != nil { + return err + } + // Execute the command + return rootCmd.Execute() + }, + } +} diff --git a/ignite/cmd/plugin_default_test.go b/ignite/cmd/plugin_default_test.go new file mode 100644 index 0000000..49f50be --- /dev/null +++ b/ignite/cmd/plugin_default_test.go @@ -0,0 +1,47 @@ +package ignitecmd + +import ( + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + + pluginsconfig "github.com/ignite/cli/v29/ignite/config/plugins" +) + +func TestEnsureDefaultPlugins(t *testing.T) { + tests := []struct { + name string + cfg *pluginsconfig.Config + expectAddedInCommand bool + }{ + { + name: "should add because absent from config", + cfg: &pluginsconfig.Config{}, + expectAddedInCommand: true, + }, + { + name: "should not add because already present in config", + cfg: &pluginsconfig.Config{ + Apps: []pluginsconfig.Plugin{{ + Path: PluginRelayerPath, + }}, + }, + expectAddedInCommand: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + cmd := &cobra.Command{Use: "ignite"} + + ensureDefaultPlugins(cmd, tt.cfg) + + expectedCmd := findCommandByPath(cmd, "ignite relayer") + if tt.expectAddedInCommand { + assert.NotNil(t, expectedCmd) + } else { + assert.Nil(t, expectedCmd) + } + }) + } +} diff --git a/ignite/cmd/plugin_test.go b/ignite/cmd/plugin_test.go new file mode 100644 index 0000000..b3f9105 --- /dev/null +++ b/ignite/cmd/plugin_test.go @@ -0,0 +1,656 @@ +package ignitecmd + +import ( + "context" + "fmt" + "io" + "os" + "strings" + "testing" + + "github.com/spf13/cobra" + "github.com/spf13/pflag" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + + pluginsconfig "github.com/ignite/cli/v29/ignite/config/plugins" + "github.com/ignite/cli/v29/ignite/services/plugin" + "github.com/ignite/cli/v29/ignite/services/plugin/mocks" +) + +func buildRootCmd(ctx 
context.Context) *cobra.Command { + var ( + rootCmd = &cobra.Command{ + Use: "ignite", + } + scaffoldCmd = &cobra.Command{ + Use: "scaffold", + } + scaffoldChainCmd = &cobra.Command{ + Use: "chain", + Run: func(*cobra.Command, []string) {}, + } + scaffoldModuleCmd = &cobra.Command{ + Use: "module", + Run: func(*cobra.Command, []string) {}, + } + ) + scaffoldChainCmd.Flags().String("path", "", "the path") + scaffoldCmd.AddCommand(scaffoldChainCmd) + scaffoldCmd.AddCommand(scaffoldModuleCmd) + rootCmd.AddCommand(scaffoldCmd) + rootCmd.SetContext(ctx) + return rootCmd +} + +func assertFlags(t *testing.T, expectedFlags plugin.Flags, execCmd *plugin.ExecutedCommand) { + t.Helper() + var ( + have []string + expected []string + ) + + t.Helper() + + flags, err := execCmd.NewFlags() + assert.NoError(t, err) + + flags.VisitAll(func(f *pflag.Flag) { + if f.Name == "help" { + // ignore help flag + return + } + + have = append(have, f.Name) + }) + + for _, f := range expectedFlags { + expected = append(expected, f.Name) + } + + assert.Equal(t, expected, have) +} + +func TestLinkPluginCmds(t *testing.T) { + t.Skip("passes locally and with act, but fails in CI") + + var ( + args = []string{"arg1", "arg2"} + pluginParams = map[string]string{"key": "val"} + // define a plugin with command flags + pluginWithFlags = &plugin.Command{ + Use: "flaggy", + Flags: plugin.Flags{ + {Name: "flag1", Type: plugin.FlagTypeString}, + {Name: "flag2", Type: plugin.FlagTypeInt, DefaultValue: "0", Value: "0"}, + }, + } + ) + + // helper to assert pluginInterface.Execute() calls + expectExecute := func(t *testing.T, _ context.Context, p *mocks.PluginInterface, cmd *plugin.Command) { + t.Helper() + p.EXPECT(). + Execute( + mock.Anything, + mock.MatchedBy(func(execCmd *plugin.ExecutedCommand) bool { + fmt.Println(cmd.Use == execCmd.Use, cmd.Use, execCmd.Use) + return cmd.Use == execCmd.Use + }), + mock.Anything, + ). 
+ Run(func(_ context.Context, execCmd *plugin.ExecutedCommand, _ plugin.ClientAPI) { + // Assert execCmd is populated correctly + assert.True(t, strings.HasSuffix(execCmd.Path, cmd.Use), "wrong path %s", execCmd.Path) + assert.Equal(t, args, execCmd.Args) + assertFlags(t, cmd.Flags, execCmd) + assert.Equal(t, pluginParams, execCmd.With) + }). + Return(nil) + } + + tests := []struct { + name string + setup func(*testing.T, context.Context, *mocks.PluginInterface) + expectedDumpCmd string + expectedError string + }{ + { + name: "ok: link foo at root", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + cmd := &plugin.Command{ + Use: "foo", + } + p.EXPECT(). + Manifest(ctx). + Return(&plugin.Manifest{Commands: []*plugin.Command{cmd}}, nil) + expectExecute(t, ctx, p, cmd) + }, + expectedDumpCmd: ` +ignite + foo* + scaffold + chain* --path=string + module* +`, + }, + { + name: "ok: link foo at subcommand", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + cmd := &plugin.Command{ + Use: "foo", + PlaceCommandUnder: "ignite scaffold", + } + p.EXPECT(). + Manifest(ctx). + Return(&plugin.Manifest{Commands: []*plugin.Command{cmd}}, nil) + expectExecute(t, ctx, p, cmd) + }, + expectedDumpCmd: ` +ignite + scaffold + chain* --path=string + foo* + module* +`, + }, + { + name: "ok: link foo at subcommand with incomplete PlaceCommandUnder", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + cmd := &plugin.Command{ + Use: "foo", + PlaceCommandUnder: "scaffold", + } + p.EXPECT(). + Manifest(ctx). + Return(&plugin.Manifest{Commands: []*plugin.Command{cmd}}, nil) + expectExecute(t, ctx, p, cmd) + }, + expectedDumpCmd: ` +ignite + scaffold + chain* --path=string + foo* + module* +`, + }, + { + name: "fail: link to runnable command", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + p.EXPECT(). + Manifest(ctx). 
+ Return(&plugin.Manifest{ + Commands: []*plugin.Command{ + { + Use: "foo", + PlaceCommandUnder: "ignite scaffold chain", + }, + }, + }, + nil, + ) + }, + expectedError: `can't attach app command "foo" to runnable command "ignite scaffold chain"`, + }, + { + name: "fail: link to unknown command", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + p.EXPECT(). + Manifest(ctx). + Return(&plugin.Manifest{ + Commands: []*plugin.Command{ + { + Use: "foo", + PlaceCommandUnder: "ignite unknown", + }, + }, + }, + nil, + ) + }, + expectedError: `unable to find command path "ignite unknown" for app "foo"`, + }, + { + name: "fail: plugin name exists in legacy commands", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + p.EXPECT(). + Manifest(ctx). + Return(&plugin.Manifest{ + Commands: []*plugin.Command{ + { + Use: "scaffold", + }, + }, + }, + nil, + ) + }, + expectedError: `app command "scaffold" already exists in Ignite's commands`, + }, + { + name: "fail: plugin name with args exists in legacy commands", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + p.EXPECT(). + Manifest(ctx). + Return(&plugin.Manifest{ + Commands: []*plugin.Command{ + { + Use: "scaffold [args]", + }, + }, + }, + nil, + ) + }, + expectedError: `app command "scaffold" already exists in Ignite's commands`, + }, + { + name: "fail: plugin name exists in legacy sub commands", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + p.EXPECT(). + Manifest(ctx). 
+ Return(&plugin.Manifest{ + Commands: []*plugin.Command{ + { + Use: "chain", + PlaceCommandUnder: "scaffold", + }, + }, + }, + nil, + ) + }, + expectedError: `app command "chain" already exists in Ignite's commands`, + }, + { + name: "ok: link multiple at root", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + fooCmd := &plugin.Command{ + Use: "foo", + } + barCmd := &plugin.Command{ + Use: "bar", + } + p.EXPECT(). + Manifest(ctx). + Return(&plugin.Manifest{ + Commands: []*plugin.Command{ + fooCmd, barCmd, pluginWithFlags, + }, + }, nil) + expectExecute(t, ctx, p, fooCmd) + expectExecute(t, ctx, p, barCmd) + expectExecute(t, ctx, p, pluginWithFlags) + }, + expectedDumpCmd: ` +ignite + bar* + flaggy* --flag1=string --flag2=int + foo* + scaffold + chain* --path=string + module* +`, + }, + { + name: "ok: link with subcommands", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + cmd := &plugin.Command{ + Use: "foo", + Commands: []*plugin.Command{ + {Use: "bar"}, + {Use: "baz"}, + pluginWithFlags, + }, + } + p.EXPECT(). + Manifest(ctx). + Return(&plugin.Manifest{Commands: []*plugin.Command{cmd}}, nil) + // cmd is not executed because it's not runnable, only sub-commands + // are executed. + expectExecute(t, ctx, p, cmd.Commands[0]) + expectExecute(t, ctx, p, cmd.Commands[1]) + expectExecute(t, ctx, p, cmd.Commands[2]) + }, + expectedDumpCmd: ` +ignite + foo + bar* + baz* + flaggy* --flag1=string --flag2=int + scaffold + chain* --path=string + module* +`, + }, + { + name: "ok: link with multiple subcommands", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + cmd := &plugin.Command{ + Use: "foo", + Commands: []*plugin.Command{ + {Use: "bar", Commands: []*plugin.Command{{Use: "baz"}}}, + {Use: "qux", Commands: []*plugin.Command{{Use: "quux"}, {Use: "corge"}}}, + }, + } + p.EXPECT(). + Manifest(ctx). 
+ Return(&plugin.Manifest{Commands: []*plugin.Command{cmd}}, nil) + expectExecute(t, ctx, p, cmd.Commands[0].Commands[0]) + expectExecute(t, ctx, p, cmd.Commands[1].Commands[0]) + expectExecute(t, ctx, p, cmd.Commands[1].Commands[1]) + }, + expectedDumpCmd: ` +ignite + foo + bar + baz* + qux + corge* + quux* + scaffold + chain* --path=string + module* +`, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + require := require.New(t) + assert := assert.New(t) + pi := mocks.NewPluginInterface(t) + p := &plugin.Plugin{ + Plugin: pluginsconfig.Plugin{ + Path: "foo", + With: pluginParams, + }, + Interface: pi, + } + rootCmd := buildRootCmd(ctx) + tt.setup(t, ctx, pi) + + _ = linkPlugins(ctx, rootCmd, []*plugin.Plugin{p}) + + if tt.expectedError != "" { + require.Error(p.Error) + require.EqualError(p.Error, tt.expectedError) + return + } + require.NoError(p.Error) + var s strings.Builder + s.WriteString("\n") + dumpCmd(rootCmd, &s, 0) + assert.Equal(tt.expectedDumpCmd, s.String()) + execCmd(t, rootCmd, args) + }) + } +} + +// dumpCmd helps in comparing cobra.Command by writing their Use and Commands. +// Runnable commands are marked with a *. 
+func dumpCmd(c *cobra.Command, w io.Writer, ntabs int) { + fmt.Fprintf(w, "%s%s", strings.Repeat(" ", ntabs), c.Use) + ntabs++ + if c.Runnable() { + fmt.Fprintf(w, "*") + } + c.Flags().VisitAll(func(f *pflag.Flag) { + fmt.Fprintf(w, " --%s=%s", f.Name, f.Value.Type()) + }) + fmt.Fprintf(w, "\n") + for _, cc := range c.Commands() { + dumpCmd(cc, w, ntabs) + } +} + +func TestLinkPluginHooks(t *testing.T) { + t.Skip("passes locally and with act, but fails in CI") + + var ( + args = []string{"arg1", "arg2"} + pluginParams = map[string]string{"key": "val"} + ctx = context.Background() + + // helper to assert pluginInterface.ExecuteHook*() calls in expected order + // (pre, then post, then cleanup) + expectExecuteHook = func(t *testing.T, p *mocks.PluginInterface, expectedFlags plugin.Flags, hooks ...*plugin.Hook) { + t.Helper() + matcher := func(hook *plugin.Hook) any { + return mock.MatchedBy(func(execHook *plugin.ExecutedHook) bool { + return hook.Name == execHook.Hook.Name && + hook.PlaceHookOn == execHook.Hook.PlaceHookOn + }) + } + asserter := func(hook *plugin.Hook) func(_ context.Context, hook *plugin.ExecutedHook, _ plugin.ClientAPI) { + return func(_ context.Context, execHook *plugin.ExecutedHook, _ plugin.ClientAPI) { + assert.True(t, strings.HasSuffix(execHook.ExecutedCommand.Path, hook.PlaceHookOn), "wrong path %q want %q", execHook.ExecutedCommand.Path, hook.PlaceHookOn) + assert.Equal(t, args, execHook.ExecutedCommand.Args) + assertFlags(t, expectedFlags, execHook.ExecutedCommand) + assert.Equal(t, pluginParams, execHook.ExecutedCommand.With) + } + } + var lastPre *mock.Call + for _, hook := range hooks { + pre := p.EXPECT(). + ExecuteHookPre(ctx, matcher(hook), mock.Anything). + Run(asserter(hook)). + Return(nil). + Call + if lastPre != nil { + pre.NotBefore(lastPre) + } + lastPre = pre + } + for _, hook := range hooks { + post := p.EXPECT(). + ExecuteHookPost(ctx, matcher(hook), mock.Anything). + Run(asserter(hook)). + Return(nil). 
+ Call + cleanup := p.EXPECT(). + ExecuteHookCleanUp(ctx, matcher(hook), mock.Anything). + Run(asserter(hook)). + Return(nil). + Call + post.NotBefore(lastPre) + cleanup.NotBefore(post) + } + } + ) + tests := []struct { + name string + expectedError string + setup func(*testing.T, context.Context, *mocks.PluginInterface) + }{ + { + name: "fail: command not runnable", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + p.EXPECT(). + Manifest(ctx). + Return(&plugin.Manifest{ + Hooks: []*plugin.Hook{ + { + Name: "test-hook", + PlaceHookOn: "ignite scaffold", + }, + }, + }, + nil, + ) + }, + expectedError: `can't attach app hook "test-hook" to non executable command "ignite scaffold"`, + }, + { + name: "fail: command doesn't exists", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + p.EXPECT(). + Manifest(ctx). + Return(&plugin.Manifest{ + Hooks: []*plugin.Hook{ + { + Name: "test-hook", + PlaceHookOn: "ignite chain", + }, + }, + }, + nil, + ) + }, + expectedError: `unable to find command path "ignite chain" for app hook "test-hook"`, + }, + { + name: "ok: single hook", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + hook := &plugin.Hook{ + Name: "test-hook", + PlaceHookOn: "scaffold chain", + } + p.EXPECT(). + Manifest(ctx). + Return(&plugin.Manifest{Hooks: []*plugin.Hook{hook}}, nil) + expectExecuteHook(t, p, plugin.Flags{{Name: "path"}}, hook) + }, + }, + { + name: "ok: multiple hooks on same command", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + hook1 := &plugin.Hook{ + Name: "test-hook-1", + PlaceHookOn: "scaffold chain", + } + hook2 := &plugin.Hook{ + Name: "test-hook-2", + PlaceHookOn: "scaffold chain", + } + p.EXPECT(). + Manifest(ctx). 
+ Return(&plugin.Manifest{Hooks: []*plugin.Hook{hook1, hook2}}, nil) + expectExecuteHook(t, p, plugin.Flags{{Name: "path"}}, hook1, hook2) + }, + }, + { + name: "ok: multiple hooks on different commands", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + hookChain1 := &plugin.Hook{ + Name: "test-hook-1", + PlaceHookOn: "scaffold chain", + } + hookChain2 := &plugin.Hook{ + Name: "test-hook-2", + PlaceHookOn: "scaffold chain", + } + hookModule := &plugin.Hook{ + Name: "test-hook-3", + PlaceHookOn: "scaffold module", + } + p.EXPECT(). + Manifest(ctx). + Return(&plugin.Manifest{Hooks: []*plugin.Hook{hookChain1, hookChain2, hookModule}}, nil) + expectExecuteHook(t, p, plugin.Flags{{Name: "path"}}, hookChain1, hookChain2) + expectExecuteHook(t, p, nil, hookModule) + }, + }, + { + name: "ok: duplicate hook names on same command", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + hooks := []*plugin.Hook{ + { + Name: "test-hook", + PlaceHookOn: "ignite scaffold chain", + }, + { + Name: "test-hook", + PlaceHookOn: "ignite scaffold chain", + }, + } + p.EXPECT(). + Manifest(ctx). + Return(&plugin.Manifest{Hooks: hooks}, nil) + expectExecuteHook(t, p, plugin.Flags{{Name: "path"}}, hooks...) + }, + }, + { + name: "ok: duplicate hook names on different commands", + setup: func(t *testing.T, ctx context.Context, p *mocks.PluginInterface) { + t.Helper() + hookChain := &plugin.Hook{ + Name: "test-hook", + PlaceHookOn: "ignite scaffold chain", + } + hookModule := &plugin.Hook{ + Name: "test-hook", + PlaceHookOn: "ignite scaffold module", + } + p.EXPECT(). + Manifest(ctx). 
+ Return(&plugin.Manifest{Hooks: []*plugin.Hook{hookChain, hookModule}}, nil) + expectExecuteHook(t, p, plugin.Flags{{Name: "path"}}, hookChain) + expectExecuteHook(t, p, nil, hookModule) + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + require := require.New(t) + pi := mocks.NewPluginInterface(t) + p := &plugin.Plugin{ + Plugin: pluginsconfig.Plugin{ + Path: "foo", + With: pluginParams, + }, + Interface: pi, + } + rootCmd := buildRootCmd(ctx) + tt.setup(t, ctx, pi) + + _ = linkPlugins(ctx, rootCmd, []*plugin.Plugin{p}) + + if tt.expectedError != "" { + require.EqualError(p.Error, tt.expectedError) + return + } + require.NoError(p.Error) + execCmd(t, rootCmd, args) + }) + } +} + +// execCmd executes all the runnable commands contained in c. +func execCmd(t *testing.T, c *cobra.Command, args []string) { + t.Helper() + if c.Runnable() { + os.Args = strings.Fields(c.CommandPath()) + os.Args = append(os.Args, args...) 
+ err := c.Execute() + require.NoError(t, err) + return + } + for _, c := range c.Commands() { + execCmd(t, c, args) + } +} diff --git a/ignite/cmd/scaffold.go b/ignite/cmd/scaffold.go new file mode 100644 index 0000000..ee24f76 --- /dev/null +++ b/ignite/cmd/scaffold.go @@ -0,0 +1,328 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + flag "github.com/spf13/pflag" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/cosmosver" + "github.com/ignite/cli/v29/ignite/pkg/env" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gocmd" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/pkg/xgit" + "github.com/ignite/cli/v29/ignite/services/scaffolder" + "github.com/ignite/cli/v29/ignite/templates/field" + "github.com/ignite/cli/v29/ignite/version" +) + +// flags related to component scaffolding. +const ( + flagModule = "module" + flagNoMessage = "no-message" + flagNoSimulation = "no-simulation" + flagResponse = "response" + flagDescription = "desc" + flagProtoDir = "proto-dir" + + msgCommitPrefix = "Your project changes have not been committed.\nTo enable reverting to your current state, commit your saved changes." + msgCommitPrompt = "Do you want to proceed without committing your saved changes" + + statusScaffolding = "Scaffolding..." + multipleCoinDisclaimer = `**Disclaimer** +The 'coins' and 'dec.coins' argument types require special attention when used in CLI commands. +Due to current limitations in the AutoCLI, only one variadic (slice) argument is supported per command. +If a message contains more than one field of type 'coins' or 'dec.coins', only the last one will accept multiple values via the CLI. +For the best user experience, manual command handling or scaffolding is recommended when working with messages containing multiple 'coins' or 'dec.coins' fields. 
+` +) + +// NewScaffold returns a command that groups scaffolding related sub commands. +func NewScaffold() *cobra.Command { + c := &cobra.Command{ + Use: "scaffold [command]", + Short: "Create a new blockchain, module, message, query, and more", + Long: `Scaffolding is a quick way to generate code for major pieces of your +application. + +For details on each scaffolding target (chain, module, message, etc.) run the +corresponding command with a "--help" flag, for example, "ignite scaffold chain +--help". + +The Ignite team strongly recommends committing the code to a version control +system before running scaffolding commands. This will make it easier to see the +changes to the source code as well as undo the command if you've decided to roll +back the changes. + +This blockchain you create with the chain scaffolding command uses the modular +Cosmos SDK framework and imports many standard modules for functionality like +proof of stake, token transfer, inter-blockchain connectivity, governance, and +more. Custom functionality is implemented in modules located by convention in +the "x/" directory. By default, your blockchain comes with an empty custom +module. Use the module scaffolding command to create an additional module. + +An empty custom module doesn't do much, it's basically a container for logic +that is responsible for processing transactions and changing the application +state. Cosmos SDK blockchains work by processing user-submitted signed +transactions, which contain one or more messages. A message contains data that +describes a state transition. A module can be responsible for handling any +number of messages. + +A message scaffolding command will generate the code for handling a new type of +Cosmos SDK message. Message fields describe the state transition that the +message is intended to produce if processed without errors. + +Scaffolding messages is useful to create individual "actions" that your module +can perform. 
Sometimes, however, you want your blockchain to have the +functionality to create, read, update and delete (CRUD) instances of a +particular type. Depending on how you want to store the data there are three +commands that scaffold CRUD functionality for a type: list, map, and single. +These commands create four messages (one for each CRUD action), and the logic to +add, delete, and fetch the data from the store. If you want to scaffold only the +logic, for example, you've decided to scaffold messages separately, you can do +that as well with the "--no-message" flag. + +Reading data from a blockchain happens with a help of queries. Similar to how +you can scaffold messages to write data, you can scaffold queries to read the +data back from your blockchain application. + +You can also scaffold a type, which just produces a new protocol buffer file +with a proto message description. Note that proto messages produce (and +correspond with) Go types whereas Cosmos SDK messages correspond to proto "rpc" +in the "Msg" service. + +If you're building an application with custom IBC logic, you might need to +scaffold IBC packets. An IBC packet represents the data sent from one blockchain +to another. You can only scaffold IBC packets in IBC-enabled modules scaffolded +with an "--ibc" flag. Note that the default module is not IBC-enabled. 
+`, + Aliases: []string{"s"}, + Args: cobra.ExactArgs(1), + } + + c.AddCommand( + NewScaffoldTypeList(), + NewScaffoldChain(), + NewScaffoldModule(), + NewScaffoldMigration(), + NewScaffoldList(), + NewScaffoldMap(), + NewScaffoldSingle(), + NewScaffoldType(), + NewScaffoldParams(), + NewScaffoldConfigs(), + NewScaffoldMessage(), + NewScaffoldQuery(), + NewScaffoldPacket(), + NewScaffoldVue(), + NewScaffoldReact(), + NewScaffoldChainRegistry(), + ) + + // same flag as for chain serve but different behavior + // the verbose flag on scaffold sets the IGNT_DEBUG env var + // while on serve it bypass the session logger for the app default + c.PersistentFlags().AddFlagSet(flagSetVerbose()) + + return c +} + +func migrationPreRunHandler(cmd *cobra.Command, args []string) error { + if verbose := flagGetVerbose(cmd); verbose { + // sets the IGNT_DEBUG env var to enable verbose logging + env.SetDebug() + } + + if err := gitChangesConfirmPreRunHandler(cmd, args); err != nil { + return err + } + + session := cliui.New(cliui.WithoutUserInteraction(getYes(cmd))) + defer session.End() + + appPath, err := goModulePath(cmd) + if err != nil { + return err + } + + ver, err := cosmosver.Detect(appPath) + if err != nil { + return err + } + + if err := version.AssertSupportedCosmosSDKVersion(ver); err != nil { + return err + } + + if err := toolsMigrationPreRunHandler(cmd, session, appPath); err != nil { + return err + } + + // we go mod tidy in case new dependencies were added or removed + if err := gocmd.ModTidy(cmd.Context(), appPath); err != nil { + return err + } + + return nil +} + +func scaffoldType( + cmd *cobra.Command, + args []string, + kind scaffolder.AddTypeKind, +) error { + var ( + typeName = args[0] + fields = args[1:] + moduleName = flagGetModule(cmd) + withoutMessage = flagGetNoMessage(cmd) + withoutSimulation = flagGetNoSimulation(cmd) + signer = flagGetSigner(cmd) + appPath = flagGetPath(cmd) + ) + + var options []scaffolder.AddTypeOption + + if len(fields) > 0 { + 
options = append(options, scaffolder.TypeWithFields(fields...)) + } + if moduleName != "" { + options = append(options, scaffolder.TypeWithModule(moduleName)) + } + if withoutMessage { + options = append(options, scaffolder.TypeWithoutMessage()) + } else { + if signer != "" { + options = append(options, scaffolder.TypeWithSigner(signer)) + } + if withoutSimulation { + options = append(options, scaffolder.TypeWithoutSimulation()) + } + } + + session := cliui.New( + cliui.StartSpinnerWithText(statusScaffolding), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + if !withoutMessage { + hasMultipleCoinSlice, err := field.MultipleCoins(fields) + if err != nil { + return err + } + if hasMultipleCoinSlice { + session.PauseSpinner() + _ = session.Print(colors.Info(multipleCoinDisclaimer)) + session.StartSpinner(statusScaffolding) + } + } + + cfg, _, err := getChainConfig(cmd) + if err != nil { + return err + } + + sc, err := scaffolder.New(cmd.Context(), appPath, cfg.Build.Proto.Path) + if err != nil { + return err + } + + cacheStorage, err := newCache(cmd) + if err != nil { + return err + } + + err = sc.AddType(cmd.Context(), typeName, kind, options...) + if err != nil { + return err + } + + sm, err := sc.ApplyModifications(xgenny.ApplyPreRun(scaffolder.AskOverwriteFiles(session))) + if err != nil { + return err + } + + if err := sc.PostScaffold(cmd.Context(), cacheStorage, false); err != nil { + return err + } + + modificationsStr, err := sm.String() + if err != nil { + return err + } + + session.Println(modificationsStr) + session.Printf("\n🎉 %s added. 
\n\n", typeName) + + return nil +} + +func gitChangesConfirmPreRunHandler(cmd *cobra.Command, _ []string) error { + // Don't confirm when the "--yes" flag is present + if getYes(cmd) { + return nil + } + + appPath := flagGetPath(cmd) + session := cliui.New(cliui.WithoutUserInteraction(getYes(cmd))) + + defer session.End() + + return confirmWhenUncommittedChanges(session, appPath) +} + +func confirmWhenUncommittedChanges(session *cliui.Session, appPath string) error { + cleanState, err := xgit.AreChangesCommitted(appPath) + if err != nil { + return err + } + + if !cleanState { + session.Println(msgCommitPrefix) + if err := session.AskConfirm(msgCommitPrompt); err != nil { + if errors.Is(err, cliui.ErrAbort) { + return errors.New("No") + } + + return err + } + } + + return nil +} + +func flagSetScaffoldType() *flag.FlagSet { + f := flag.NewFlagSet("", flag.ContinueOnError) + f.String(flagModule, "", "specify which module to generate code in") + f.Bool(flagNoMessage, false, "skip generating message handling logic") + f.Bool(flagNoSimulation, false, "skip simulation logic") + f.String(flagSigner, "", "label for the message signer (default: creator)") + return f +} + +func flagGetModule(cmd *cobra.Command) string { + module, _ := cmd.Flags().GetString(flagModule) + return module +} + +func flagGetNoSimulation(cmd *cobra.Command) bool { + noMessage, _ := cmd.Flags().GetBool(flagNoSimulation) + return noMessage +} + +func flagGetNoMessage(cmd *cobra.Command) bool { + noMessage, _ := cmd.Flags().GetBool(flagNoMessage) + return noMessage +} + +func flagGetSigner(cmd *cobra.Command) string { + signer, _ := cmd.Flags().GetString(flagSigner) + return signer +} + +func flagGetVerbose(cmd *cobra.Command) bool { + verbose, _ := cmd.Flags().GetBool(flagVerbose) + return verbose +} diff --git a/ignite/cmd/scaffold_chain.go b/ignite/cmd/scaffold_chain.go new file mode 100644 index 0000000..3b35dfa --- /dev/null +++ b/ignite/cmd/scaffold_chain.go @@ -0,0 +1,183 @@ +package ignitecmd 
+ +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/config/chain/defaults" + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/env" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xfilepath" + "github.com/ignite/cli/v29/ignite/pkg/xgit" + "github.com/ignite/cli/v29/ignite/services/scaffolder" +) + +const defaultIgniteDenom = "stake" + +const ( + flagMinimal = "minimal" + flagNoDefaultModule = "no-module" + flagSkipGit = "skip-git" + flagDefaultDenom = "default-denom" + + tplScaffoldChainSuccess = ` +⭐️ Successfully created a new blockchain '%[1]v'. +👉 Get started with the following commands: + + %% cd %[1]v + %% ignite chain serve + +Documentation: https://docs.ignite.com +` +) + +// NewScaffoldChain creates new command to scaffold a Comos-SDK based blockchain. +func NewScaffoldChain() *cobra.Command { + c := &cobra.Command{ + Use: "chain [name]", + Short: "New Cosmos SDK blockchain", + Long: `Create a new application-specific Cosmos SDK blockchain. + +For example, the following command will create a blockchain called "hello" in +the "hello/" directory: + + ignite scaffold chain hello + +A project name can be a simple name or a URL. The name will be used as the Go +module path for the project. Examples of project names: + + ignite scaffold chain foo + ignite scaffold chain foo/bar + ignite scaffold chain example.org/foo + ignite scaffold chain github.com/username/foo + +A new directory with source code files will be created in the current directory. +To use a different path use the "--path" flag. + +Most of the logic of your blockchain is written in custom modules. Each module +effectively encapsulates an independent piece of functionality. Following the +Cosmos SDK convention, custom modules are stored inside the "x/" directory. By +default, Ignite creates a module with a name that matches the name of the +project. 
To create a blockchain without a default module use the "--no-module" +flag. Additional modules can be added after a project is created with "ignite +scaffold module" command. + +Account addresses on Cosmos SDK-based blockchains have string prefixes. For +example, the Cosmos Hub blockchain uses the default "cosmos" prefix, so that +addresses look like this: "cosmos12fjzdtqfrrve7zyg9sv8j25azw2ua6tvu07ypf". To +use a custom address prefix use the "--address-prefix" flag. For example: + + ignite scaffold chain foo --address-prefix bar + +By default when compiling a blockchain's source code Ignite creates a cache to +speed up the build process. To clear the cache when building a blockchain use +the "--clear-cache" flag. It is very unlikely you will ever need to use this +flag. + +The blockchain is using the Cosmos SDK modular blockchain framework. Learn more +about Cosmos SDK on https://docs.cosmos.network +`, + Args: cobra.ExactArgs(1), + PersistentPreRun: func(cmd *cobra.Command, _ []string) { + if verbose := flagGetVerbose(cmd); verbose { + env.SetDebug() + } + }, + RunE: scaffoldChainHandler, + } + + flagSetClearCache(c) + c.Flags().AddFlagSet(flagSetAccountPrefixes()) + c.Flags().AddFlagSet(flagSetCoinType()) + c.Flags().String(flagDefaultDenom, defaultIgniteDenom, "default staking denom") + c.Flags().StringP(flagPath, "p", "", "create a project in a specific path") + c.Flags().Bool(flagNoDefaultModule, false, "create a project without a default module") + c.Flags().StringSlice(flagParams, []string{}, "add default module parameters") + c.Flags().StringSlice(flagModuleConfigs, []string{}, "add module configs") + c.Flags().Bool(flagSkipGit, false, "skip Git repository initialization") + c.Flags().Bool(flagSkipProto, false, "skip proto generation") + c.Flags().Bool(flagMinimal, false, "create a minimal blockchain (with the minimum required Cosmos SDK modules)") + c.Flags().String(flagProtoDir, defaults.ProtoDir, "chain proto directory") + + // consumer scaffolding 
have been migrated to an ignite app + _ = c.Flags().MarkDeprecated("consumer", "use 'ignite consumer' app instead") + + return c +} + +func scaffoldChainHandler(cmd *cobra.Command, args []string) error { + session := cliui.New( + cliui.StartSpinnerWithText(statusScaffolding), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + var ( + name = args[0] + addressPrefix = getAddressPrefix(cmd) + coinType = getCoinType(cmd) + appPath = flagGetPath(cmd) + + noDefaultModule, _ = cmd.Flags().GetBool(flagNoDefaultModule) + skipGit, _ = cmd.Flags().GetBool(flagSkipGit) + minimal, _ = cmd.Flags().GetBool(flagMinimal) + params, _ = cmd.Flags().GetStringSlice(flagParams) + moduleConfigs, _ = cmd.Flags().GetStringSlice(flagModuleConfigs) + skipProto, _ = cmd.Flags().GetBool(flagSkipProto) + protoDir, _ = cmd.Flags().GetString(flagProtoDir) + defaultDenom, _ = cmd.Flags().GetString(flagDefaultDenom) + ) + + if cmd.Flags().Changed(flagDefaultDenom) && len(defaultDenom) <= 2 { + return errors.New("default denom must be at least 3 characters and maximum 128 characters") + } + + if noDefaultModule { + if len(params) > 0 { + return errors.New("params flag is only supported if the default module is enabled") + } else if len(moduleConfigs) > 0 { + return errors.New("module configs flag is only supported if the default module is enabled") + } + skipProto = true + } + + cacheStorage, err := newCache(cmd) + if err != nil { + return err + } + + appDir, goModule, err := scaffolder.Init( + cmd.Context(), + appPath, + name, + addressPrefix, + coinType, + defaultDenom, + protoDir, + noDefaultModule, + minimal, + params, + moduleConfigs, + ) + if err != nil { + return err + } + + path, err := xfilepath.RelativePath(appDir) + if err != nil { + return err + } + + if err := scaffolder.PostScaffold(cmd.Context(), cacheStorage, appDir, protoDir, goModule, skipProto); err != nil { + return err + } + + if !skipGit { + // Initialize git repository and perform the first commit + if 
err := xgit.InitAndCommit(path); err != nil { + return err + } + } + + return session.Printf(tplScaffoldChainSuccess, path) +} diff --git a/ignite/cmd/scaffold_chain_registry.go b/ignite/cmd/scaffold_chain_registry.go new file mode 100644 index 0000000..4eba830 --- /dev/null +++ b/ignite/cmd/scaffold_chain_registry.go @@ -0,0 +1,71 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/services/chain" + "github.com/ignite/cli/v29/ignite/services/scaffolder" +) + +// NewScaffoldChainRegistry returns the command to scaffold the chain registry chain.json and assets.json files. +func NewScaffoldChainRegistry() *cobra.Command { + c := &cobra.Command{ + Use: "chain-registry", + Short: "Configs for the chain registry", + Long: `Scaffold the chain registry chain.json and assets.json files. + +The chain registry is a GitHub repo, hosted at https://github.com/cosmos/chain-registry, that +contains the chain.json and assets.json files of most of chains in the Cosmos ecosystem. +It is good practices, when creating a new chain, and about to launch a testnet or mainnet, to +publish the chain's metadata in the chain registry. 
+ +Read more about the chain.json at https://github.com/cosmos/chain-registry?tab=readme-ov-file#chainjson +Read more about the assets.json at https://github.com/cosmos/chain-registry?tab=readme-ov-file#assetlists`, + Args: cobra.NoArgs, + PreRunE: migrationPreRunHandler, + RunE: scaffoldChainRegistryFiles, + } + + flagSetPath(c) + flagSetClearCache(c) + + c.Flags().AddFlagSet(flagSetYes()) + + return c +} + +func scaffoldChainRegistryFiles(cmd *cobra.Command, _ []string) error { + session := cliui.New( + cliui.StartSpinnerWithText(statusScaffolding), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + cfg, _, err := getChainConfig(cmd) + if err != nil { + return err + } + + c, err := chain.NewWithHomeFlags(cmd) + if err != nil { + return err + } + + appPath := flagGetPath(cmd) + sc, err := scaffolder.New(cmd.Context(), appPath, cfg.Build.Proto.Path) + if err != nil { + return err + } + + if err = sc.CreateChainRegistryFiles(c, cfg); err != nil { + return err + } + + // no need for post scaffolding, as we are just creating two files + // that are not part of the build process + + session.Printf("🎉 chain-registry files successfully scaffolded\n") + + return nil +} diff --git a/ignite/cmd/scaffold_configs.go b/ignite/cmd/scaffold_configs.go new file mode 100644 index 0000000..598aa89 --- /dev/null +++ b/ignite/cmd/scaffold_configs.go @@ -0,0 +1,96 @@ +package ignitecmd + +import ( + "strings" + + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/services/scaffolder" +) + +// NewScaffoldConfigs returns the command to scaffold a Cosmos SDK configs into a module. +func NewScaffoldConfigs() *cobra.Command { + c := &cobra.Command{ + Use: "configs [configs]...", + Short: "Configs for a custom Cosmos SDK module", + Long: `Scaffold a new config for a Cosmos SDK module. + +A Cosmos SDK module can have configurations. 
An example of a config is "address prefix" of the +"auth" module. A config can be scaffolded into a module using the "--module-configs" into +the scaffold module command or using the "scaffold configs" command. By default +configs are of type "string", but you can specify a type for each config. For example: + + ignite scaffold configs foo baz:uint bar:bool + +Refer to Cosmos SDK documentation to learn more about modules, dependencies and +configs. +`, + Args: cobra.MinimumNArgs(1), + PreRunE: migrationPreRunHandler, + RunE: scaffoldConfigsHandler, + } + + flagSetPath(c) + flagSetClearCache(c) + + c.Flags().AddFlagSet(flagSetYes()) + + c.Flags().String(flagModule, "", "module to add the query into (default: app's main module)") + + return c +} + +func scaffoldConfigsHandler(cmd *cobra.Command, args []string) error { + var ( + configs = args[0:] + appPath = flagGetPath(cmd) + moduleName = flagGetModule(cmd) + ) + + session := cliui.New( + cliui.StartSpinnerWithText(statusScaffolding), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + cfg, _, err := getChainConfig(cmd) + if err != nil { + return err + } + + cacheStorage, err := newCache(cmd) + if err != nil { + return err + } + + sc, err := scaffolder.New(cmd.Context(), appPath, cfg.Build.Proto.Path) + if err != nil { + return err + } + + err = sc.CreateConfigs(moduleName, configs...) 
+ if err != nil { + return err + } + + sm, err := sc.ApplyModifications(xgenny.ApplyPreRun(scaffolder.AskOverwriteFiles(session))) + if err != nil { + return err + } + + if err := sc.PostScaffold(cmd.Context(), cacheStorage, false); err != nil { + return err + } + + modificationsStr, err := sm.String() + if err != nil { + return err + } + + session.Println(modificationsStr) + session.Printf("\n🎉 New configs added to the module:\n\n- %s\n\n", strings.Join(configs, "\n- ")) + + return nil +} diff --git a/ignite/cmd/scaffold_list.go b/ignite/cmd/scaffold_list.go new file mode 100644 index 0000000..03e8c51 --- /dev/null +++ b/ignite/cmd/scaffold_list.go @@ -0,0 +1,112 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/services/scaffolder" +) + +// NewScaffoldList returns a new command to scaffold a list. +func NewScaffoldList() *cobra.Command { + c := &cobra.Command{ + Use: "list NAME [field]...", + Short: "CRUD for data stored as an array", + Long: `The "list" scaffolding command is used to generate files that implement the +logic for storing and interacting with data stored as a list in the blockchain +state. + +The command accepts a NAME argument that will be used as the name of a new type +of data. It also accepts a list of FIELDs that describe the type. + +The interaction with the data follows the create, read, updated, and delete +(CRUD) pattern. For each type three Cosmos SDK messages are defined for writing +data to the blockchain: MsgCreate{Name}, MsgUpdate{Name}, MsgDelete{Name}. For +reading data two queries are defined: {Name} and {Name}All. The type, messages, +and queries are defined in the "proto/" directory as protocol buffer messages. +Messages and queries are mounted in the "Msg" and "Query" services respectively. + +When messages are handled, the appropriate keeper methods are called. By +convention, the methods are defined in +"x/{moduleName}/keeper/msg_server_{name}.go". 
Helpful methods for getting, +setting, removing, and appending are defined in the same "keeper" package in +"{name}.go". + +The "list" command essentially allows you to define a new type of data and +provides the logic to create, read, update, and delete instances of the type. +For example, let's review a command that generates the code to handle a list of +posts and each post has "title" and "body" fields: + + ignite scaffold list post title body + +This provides you with a "Post" type, MsgCreatePost, MsgUpdatePost, +MsgDeletePost and two queries: Post and PostAll. The compiled CLI, let's say the +binary is "blogd" and the module is "blog", has commands to query the chain (see +"blogd q blog") and broadcast transactions with the messages above (see "blogd +tx blog"). + +The code generated with the list command is meant to be edited and tailored to +your application needs. Consider the code to be a "skeleton" for the actual +business logic you will implement next. + +By default, all fields are assumed to be strings. If you want a field of a +different type, you can specify it after a colon ":". The following types are +supported: string, bool, int, uint, coin, array.string, array.int, array.uint, +array.coin. An example of using field types: + + ignite scaffold list pool amount:coin tags:array.string height:int + +For detailed type information use ignite scaffold type --help + +"Index" indicates whether the type can be used as an index in +"ignite scaffold map". + +Ignite also supports custom types: + + ignite scaffold list product-details name desc + ignite scaffold list product price:coin details:ProductDetails + +In the example above the "ProductDetails" type was defined first, and then used +as a custom type for the "details" field. 
+ +Your chain will accept custom types in JSON-notation: + + exampled tx example create-product 100coin '{"name": "x", "desc": "y"}' --from alice + +By default the code will be scaffolded in the module that matches your project's +name. If you have several modules in your project, you might want to specify a +different module: + + ignite scaffold list post title body --module blog + +By default, each message comes with a "creator" field that represents the +address of the transaction signer. You can customize the name of this field with +a flag: + + ignite scaffold list post title body --signer author + +It's possible to scaffold just the getter/setter logic without the CRUD +messages. This is useful when you want the methods to handle a type, but would +like to scaffold messages manually. Use a flag to skip message scaffolding: + + ignite scaffold list post title body --no-message + +The "creator" field is not generated if a list is scaffolded with the +"--no-message" flag. +`, + Args: cobra.MinimumNArgs(1), + PreRunE: migrationPreRunHandler, + RunE: scaffoldListHandler, + } + + flagSetPath(c) + flagSetClearCache(c) + + c.Flags().AddFlagSet(flagSetYes()) + c.Flags().AddFlagSet(flagSetScaffoldType()) + + return c +} + +func scaffoldListHandler(cmd *cobra.Command, args []string) error { + return scaffoldType(cmd, args, scaffolder.ListType()) +} diff --git a/ignite/cmd/scaffold_map.go b/ignite/cmd/scaffold_map.go new file mode 100644 index 0000000..9a0feda --- /dev/null +++ b/ignite/cmd/scaffold_map.go @@ -0,0 +1,69 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/services/scaffolder" +) + +const FlagIndexName = "index" + +// NewScaffoldMap returns a new command to scaffold a map. 
+func NewScaffoldMap() *cobra.Command {
+	c := &cobra.Command{
+		Use:   "map NAME [field]...",
+		Short: "CRUD for data stored as key-value pairs",
+		Long: `The "map" scaffolding command is used to generate files that implement the logic
+for storing and interacting with data stored as key-value pairs (or a
+dictionary) in the blockchain state.
+
+The "map" command is very similar to "ignite scaffold list" with the main
+difference in how values are indexed. With "list" values are indexed by an
+incrementing integer, whereas "map" values are indexed by a user-provided value
+(or multiple values).
+
+Let's use the same blog post example:
+
+  ignite scaffold map post title body:string
+
+This command scaffolds a "Post" type and CRUD functionality to create, read,
+update, and delete posts. However, when creating a new post with your chain's
+binary (or by submitting a transaction through the chain's API) you will be
+required to provide an "index":
+
+  blogd tx blog create-post [index] [title] [body]
+  blogd tx blog create-post hello "My first post" "This is the body"
+
+This command will create a post and store it in the blockchain's state under the
+"hello" index. You will be able to fetch back the value of the post by querying
+for the "hello" key.
+
+  blogd q blog show-post hello
+
+By default, the index is called "index". To customize it, use the "--index" flag.
+
+Since the behavior of "list" and "map" scaffolding is very similar, you can use
+the "--no-message", "--module", "--signer" flags as well as the colon syntax for
+custom types.
+ +For detailed type information use ignite scaffold type --help +`, + Args: cobra.MinimumNArgs(1), + PreRunE: migrationPreRunHandler, + RunE: scaffoldMapHandler, + } + + flagSetPath(c) + flagSetClearCache(c) + + c.Flags().AddFlagSet(flagSetYes()) + c.Flags().AddFlagSet(flagSetScaffoldType()) + c.Flags().String(FlagIndexName, "index", "field that index the value") + + return c +} + +func scaffoldMapHandler(cmd *cobra.Command, args []string) error { + index, _ := cmd.Flags().GetString(FlagIndexName) + return scaffoldType(cmd, args, scaffolder.MapType(index)) +} diff --git a/ignite/cmd/scaffold_message.go b/ignite/cmd/scaffold_message.go new file mode 100644 index 0000000..71b98cb --- /dev/null +++ b/ignite/cmd/scaffold_message.go @@ -0,0 +1,165 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/services/scaffolder" + "github.com/ignite/cli/v29/ignite/templates/field" +) + +const flagSigner = "signer" + +// NewScaffoldMessage returns the command to scaffold messages. +func NewScaffoldMessage() *cobra.Command { + c := &cobra.Command{ + Use: "message [name] [field1:type1] [field2:type2] ...", + Short: "Message to perform state transition on the blockchain", + Long: `Message scaffolding is useful for quickly adding functionality to your +blockchain to handle specific Cosmos SDK messages. + +Messages are objects whose end goal is to trigger state transitions on the +blockchain. A message is a container for fields of data that affect how the +blockchain's state will change. You can think of messages as "actions" that a +user can perform. + +For example, the bank module has a "Send" message for token transfers between +accounts. The send message has three fields: from address (sender), to address +(recipient), and a token amount. 
When this message is successfully processed,
+the token amount will be deducted from the sender's account and added to the
+recipient's account.
+
+Ignite's message scaffolding lets you create new types of messages and add them
+to your chain. For example:
+
+  ignite scaffold message add-pool amount:coins denom active:bool --module dex
+
+The command above will create a new message MsgAddPool with three fields: amount
+(in tokens), denom (a string), and active (a boolean). The message will be added
+to the "dex" module.
+
+For detailed type information use ignite scaffold type --help
+
+By default, the message is defined as a proto message in the
+"proto/{app}/{module}/tx.proto" and registered in the "Msg" service. A CLI command to
+create and broadcast a transaction with MsgAddPool is created in the module's
+"cli" package. Additionally, Ignite scaffolds a message constructor and the code
+to satisfy the sdk.Msg interface and register the message in the module.
+
+Most importantly in the "keeper" package Ignite scaffolds an "AddPool" function.
+Inside this function, you can implement message handling logic.
+
+When successfully processed, a message can return data. Use the --response flag to
+specify response fields and their types. For example:
+
+  ignite scaffold message create-post title body --response id:int,title
+
+The command above will scaffold MsgCreatePost which returns both an ID (an
+integer) and a title (a string).
+
+Message scaffolding follows the same rules as "ignite scaffold list/map/single" and
+supports fields with standard and custom types. See "ignite scaffold list --help"
+for details.
+`,
+		Args:    cobra.MinimumNArgs(1),
+		PreRunE: migrationPreRunHandler,
+		RunE:    messageHandler,
+	}
+
+	flagSetPath(c)
+	flagSetClearCache(c)
+
+	c.Flags().AddFlagSet(flagSetYes())
+	c.Flags().String(flagModule, "", "module to add the message into. 
Default: app's main module") + c.Flags().StringSliceP(flagResponse, "r", []string{}, "response fields") + c.Flags().Bool(flagNoSimulation, false, "disable CRUD simulation scaffolding") + c.Flags().StringP(flagDescription, "d", "", "description of the command") + c.Flags().String(flagSigner, "", "label for the message signer (default: creator)") + + return c +} + +func messageHandler(cmd *cobra.Command, args []string) error { + var ( + module, _ = cmd.Flags().GetString(flagModule) + resFields, _ = cmd.Flags().GetStringSlice(flagResponse) + desc, _ = cmd.Flags().GetString(flagDescription) + signer = flagGetSigner(cmd) + appPath = flagGetPath(cmd) + withoutSimulation = flagGetNoSimulation(cmd) + ) + + session := cliui.New( + cliui.StartSpinnerWithText(statusScaffolding), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + hasMultipleCoinSlice, err := field.MultipleCoins(resFields) + if err != nil { + return err + } + if hasMultipleCoinSlice { + session.PauseSpinner() + _ = session.Print(colors.Info(multipleCoinDisclaimer)) + session.StartSpinner(statusScaffolding) + } + + cfg, _, err := getChainConfig(cmd) + if err != nil { + return err + } + + cacheStorage, err := newCache(cmd) + if err != nil { + return err + } + + var options []scaffolder.MessageOption + + // Get description + if desc != "" { + options = append(options, scaffolder.WithDescription(desc)) + } + + // Get signer + if signer != "" { + options = append(options, scaffolder.WithSigner(signer)) + } + + // Skip scaffold simulation + if withoutSimulation { + options = append(options, scaffolder.WithoutSimulation()) + } + + sc, err := scaffolder.New(cmd.Context(), appPath, cfg.Build.Proto.Path) + if err != nil { + return err + } + + err = sc.AddMessage(cmd.Context(), module, args[0], args[1:], resFields, options...) 
+ if err != nil { + return err + } + + sm, err := sc.ApplyModifications(xgenny.ApplyPreRun(scaffolder.AskOverwriteFiles(session))) + if err != nil { + return err + } + + if err := sc.PostScaffold(cmd.Context(), cacheStorage, false); err != nil { + return err + } + + modificationsStr, err := sm.String() + if err != nil { + return err + } + + session.Println(modificationsStr) + session.Printf("\n🎉 Created a message `%[1]v`.\n\n", args[0]) + + return nil +} diff --git a/ignite/cmd/scaffold_migration.go b/ignite/cmd/scaffold_migration.go new file mode 100644 index 0000000..ec46392 --- /dev/null +++ b/ignite/cmd/scaffold_migration.go @@ -0,0 +1,77 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/services/scaffolder" +) + +// NewScaffoldMigration returns the command to scaffold a module migration. +func NewScaffoldMigration() *cobra.Command { + c := &cobra.Command{ + Use: "migration [module]", + Short: "Module migration boilerplate", + Long: `Scaffold no-op migration boilerplate for an existing Cosmos SDK module. 
+ +This command creates a new migration file in "x/<module>/migrations/vN/", +increments the module consensus version, and registers the new migration handler +inside "x/<module>/module/module.go".`, + Args: cobra.ExactArgs(1), + PreRunE: migrationPreRunHandler, + RunE: scaffoldMigrationHandler, + } + + flagSetPath(c) + c.Flags().AddFlagSet(flagSetYes()) + + return c +} + +func scaffoldMigrationHandler(cmd *cobra.Command, args []string) error { + var ( + moduleName = args[0] + appPath = flagGetPath(cmd) + ) + + session := cliui.New( + cliui.StartSpinnerWithText(statusScaffolding), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + cfg, _, err := getChainConfig(cmd) + if err != nil { + return err + } + + sc, err := scaffolder.New(cmd.Context(), appPath, cfg.Build.Proto.Path) + if err != nil { + return err + } + + if err := sc.CreateModuleMigration(moduleName); err != nil { + return err + } + + sm, err := sc.ApplyModifications(xgenny.ApplyPreRun(scaffolder.AskOverwriteFiles(session))) + if err != nil { + return err + } + + if err := sc.PostScaffold(cmd.Context(), cache.Storage{}, true); err != nil { + return err + } + + modificationsStr, err := sm.String() + if err != nil { + return err + } + + session.Println(modificationsStr) + session.Printf("\n🎉 Migration added to module %s.\n\n", moduleName) + + return nil +} diff --git a/ignite/cmd/scaffold_module.go b/ignite/cmd/scaffold_module.go new file mode 100644 index 0000000..50629a4 --- /dev/null +++ b/ignite/cmd/scaffold_module.go @@ -0,0 +1,210 @@ +package ignitecmd + +import ( + "bytes" + "fmt" + "regexp" + "strings" + + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/services/scaffolder" + modulecreate "github.com/ignite/cli/v29/ignite/templates/module/create" +) + +// moduleNameKeeperAlias is a map of well known module names that have 
a different keeper name than the usual <module-name>Keeper. +var moduleNameKeeperAlias = map[string]string{} + +const ( + flagDep = "dep" + flagIBC = "ibc" + flagParams = "params" + flagModuleConfigs = "module-configs" + flagIBCOrdering = "ordering" + flagRequireRegistration = "require-registration" +) + +// NewScaffoldModule returns the command to scaffold a Cosmos SDK module. +func NewScaffoldModule() *cobra.Command { + c := &cobra.Command{ + Use: "module [name]", + Short: "Custom Cosmos SDK module", + Long: `Scaffold a new Cosmos SDK module. + +Cosmos SDK is a modular framework and each independent piece of functionality is +implemented in a separate module. By default your blockchain imports a set of +standard Cosmos SDK modules. To implement custom functionality of your +blockchain, scaffold a module and implement the logic of your application. + +This command does the following: + +* Creates a directory with module's protocol buffer files in "proto/" +* Creates a directory with module's boilerplate Go code in "x/" +* Imports the newly created module by modifying "app/app.go" + +This command will proceed with module scaffolding even if "app/app.go" doesn't +have the required default placeholders. If the placeholders are missing, you +will need to modify "app/app.go" manually to import the module. If you want the +command to fail if it can't import the module, use the "--require-registration" +flag. + +To scaffold an IBC-enabled module use the "--ibc" flag. An IBC-enabled module is +like a regular module with the addition of IBC-specific logic and placeholders +to scaffold IBC packets with "ignite scaffold packet". + +A module can depend on one or more other modules and import their keeper +methods. To scaffold a module with a dependency use the "--dep" flag + +For example, your new custom module "foo" might have functionality that requires +sending tokens between accounts. The method for sending tokens is a defined in +the "bank"'s module keeper. 
You can scaffold a "foo" module with the dependency +on "bank" with the following command: + + ignite scaffold module foo --dep bank + +You can then define which methods you want to import from the "bank" keeper in +"expected_keepers.go". + +You can also scaffold a module with a list of dependencies that can include both +standard and custom modules (provided they exist): + + ignite scaffold module bar --dep foo,mint,account,FeeGrant + +Note: the "--dep" flag doesn't install third-party modules into your +application, it just generates extra code that specifies which existing modules +your new custom module depends on. + +A Cosmos SDK module can have parameters (or "params"). Params are values that +can be set at the genesis of the blockchain and can be modified while the +blockchain is running. An example of a param is "Inflation rate change" of the +"mint" module. A module can be scaffolded with params using the "--params" flag +that accepts a list of param names. By default params are of type "string", but +you can specify a type for each param. For example: + + ignite scaffold module foo --params baz:uint,bar:bool + +Refer to Cosmos SDK documentation to learn more about modules, dependencies and +params. 
+`, + Args: cobra.ExactArgs(1), + PreRunE: migrationPreRunHandler, + RunE: scaffoldModuleHandler, + } + + flagSetPath(c) + flagSetClearCache(c) + + c.Flags().AddFlagSet(flagSetYes()) + c.Flags().StringSlice(flagDep, []string{}, "add a dependency on another module") + c.Flags().Bool(flagIBC, false, "add IBC functionality") + c.Flags().String(flagIBCOrdering, "none", "channel ordering of the IBC module [none|ordered|unordered]") + c.Flags().Bool(flagRequireRegistration, false, "fail if module can't be registered") + c.Flags().StringSlice(flagParams, []string{}, "add module parameters") + c.Flags().StringSlice(flagModuleConfigs, []string{}, "add module configs") + + return c +} + +func scaffoldModuleHandler(cmd *cobra.Command, args []string) error { + var ( + name = args[0] + appPath = flagGetPath(cmd) + ) + + session := cliui.New( + cliui.StartSpinnerWithText(statusScaffolding), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + cfg, _, err := getChainConfig(cmd) + if err != nil { + return err + } + + ibcModule, _ := cmd.Flags().GetBool(flagIBC) + ibcOrdering, _ := cmd.Flags().GetString(flagIBCOrdering) + requireRegistration, _ := cmd.Flags().GetBool(flagRequireRegistration) + params, _ := cmd.Flags().GetStringSlice(flagParams) + + moduleConfigs, err := cmd.Flags().GetStringSlice(flagModuleConfigs) + if err != nil { + return err + } + + cacheStorage, err := newCache(cmd) + if err != nil { + return err + } + + options := []scaffolder.ModuleCreationOption{ + scaffolder.WithParams(params), + scaffolder.WithModuleConfigs(moduleConfigs), + } + + // Check if the module must be an IBC module + if ibcModule { + options = append(options, scaffolder.WithIBCChannelOrdering(ibcOrdering), scaffolder.WithIBC()) + } + + // Get module dependencies + dependencies, _ := cmd.Flags().GetStringSlice(flagDep) + if len(dependencies) > 0 { + var deps []modulecreate.Dependency + + isValid := regexp.MustCompile(`^[a-zA-Z]+$`).MatchString + + for _, name := range 
dependencies { + if !isValid(name) { + return errors.Errorf("invalid module dependency name format '%s'", name) + } + + if alias, ok := moduleNameKeeperAlias[strings.ToLower(name)]; ok { + name = alias + } + + deps = append(deps, modulecreate.NewDependency(name)) + } + + options = append(options, scaffolder.WithDependencies(deps)) + } + + var msg bytes.Buffer + fmt.Fprintf(&msg, "\n🎉 Module created %s.\n\n", name) + + sc, err := scaffolder.New(cmd.Context(), appPath, cfg.Build.Proto.Path) + if err != nil { + return err + } + + if err := sc.CreateModule(name, options...); err != nil { + var validationErr errors.ValidationError + if !requireRegistration && errors.As(err, &validationErr) { + fmt.Fprintf(&msg, "Can't register module '%s'.\n", name) + fmt.Fprintln(&msg, validationErr.ValidationInfo()) + } else { + return err + } + } + + sm, err := sc.ApplyModifications(xgenny.ApplyPreRun(scaffolder.AskOverwriteFiles(session))) + if err != nil { + return err + } + + if err := sc.PostScaffold(cmd.Context(), cacheStorage, false); err != nil { + return err + } + + modificationsStr, err := sm.String() + if err != nil { + return err + } + + session.Println(modificationsStr) + + return session.Print(msg.String()) +} diff --git a/ignite/cmd/scaffold_packet.go b/ignite/cmd/scaffold_packet.go new file mode 100644 index 0000000..8067e62 --- /dev/null +++ b/ignite/cmd/scaffold_packet.go @@ -0,0 +1,106 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/services/scaffolder" +) + +const ( + flagAck = "ack" +) + +// NewScaffoldPacket creates a new packet in the module. +func NewScaffoldPacket() *cobra.Command { + c := &cobra.Command{ + Use: "packet [packetName] [field1] [field2] ... 
--module [moduleName]", + Short: "Message for sending an IBC packet", + Long: "Scaffold an IBC packet in a specific IBC-enabled Cosmos SDK module", + Args: cobra.MinimumNArgs(1), + PreRunE: migrationPreRunHandler, + RunE: createPacketHandler, + } + + flagSetPath(c) + flagSetClearCache(c) + + c.Flags().AddFlagSet(flagSetYes()) + c.Flags().StringSlice(flagAck, []string{}, "custom acknowledgment type (field1,field2,...)") + c.Flags().String(flagModule, "", "IBC Module to add the packet into") + c.Flags().String(flagSigner, "", "label for the message signer (default: creator)") + c.Flags().Bool(flagNoMessage, false, "disable send message scaffolding") + + return c +} + +func createPacketHandler(cmd *cobra.Command, args []string) error { + var ( + packet = args[0] + packetFields = args[1:] + signer = flagGetSigner(cmd) + appPath = flagGetPath(cmd) + ) + + session := cliui.New( + cliui.StartSpinnerWithText(statusScaffolding), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + cfg, _, err := getChainConfig(cmd) + if err != nil { + return err + } + + module, _ := cmd.Flags().GetString(flagModule) + if module == "" { + return errors.New("please specify a module to create the packet into: --module <module_name>") + } + + ackFields, _ := cmd.Flags().GetStringSlice(flagAck) + noMessage, _ := cmd.Flags().GetBool(flagNoMessage) + + cacheStorage, err := newCache(cmd) + if err != nil { + return err + } + + var options []scaffolder.PacketOption + if noMessage { + options = append(options, scaffolder.PacketWithoutMessage()) + } else if signer != "" { + options = append(options, scaffolder.PacketWithSigner(signer)) + } + + sc, err := scaffolder.New(cmd.Context(), appPath, cfg.Build.Proto.Path) + if err != nil { + return err + } + + err = sc.AddPacket(cmd.Context(), module, packet, packetFields, ackFields, options...) 
+ if err != nil { + return err + } + + sm, err := sc.ApplyModifications(xgenny.ApplyPreRun(scaffolder.AskOverwriteFiles(session))) + if err != nil { + return err + } + + if err := sc.PostScaffold(cmd.Context(), cacheStorage, false); err != nil { + return err + } + + modificationsStr, err := sm.String() + if err != nil { + return err + } + + session.Println(modificationsStr) + session.Printf("\n🎉 Created a packet `%[1]v`.\n\n", args[0]) + + return nil +} diff --git a/ignite/cmd/scaffold_params.go b/ignite/cmd/scaffold_params.go new file mode 100644 index 0000000..06fe56b --- /dev/null +++ b/ignite/cmd/scaffold_params.go @@ -0,0 +1,98 @@ +package ignitecmd + +import ( + "strings" + + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/services/scaffolder" +) + +// NewScaffoldParams returns the command to scaffold a Cosmos SDK parameters into a module. +func NewScaffoldParams() *cobra.Command { + c := &cobra.Command{ + Use: "params [param]...", + Short: "Parameters for a custom Cosmos SDK module", + Long: `Scaffold a new parameter for a Cosmos SDK module. + +A Cosmos SDK module can have parameters (or "params"). Params are values that +can be set at the genesis of the blockchain and can be modified while the +blockchain is running. An example of a param is "Inflation rate change" of the +"mint" module. A params can be scaffolded into a module using the "--params" into +the scaffold module command or using the "scaffold params" command. By default +params are of type "string", but you can specify a type for each param. For example: + + ignite scaffold params foo baz:uint bar:bool + +Refer to Cosmos SDK documentation to learn more about modules, dependencies and +params. 
+`, + Args: cobra.MinimumNArgs(1), + PreRunE: migrationPreRunHandler, + RunE: scaffoldParamsHandler, + } + + flagSetPath(c) + flagSetClearCache(c) + + c.Flags().AddFlagSet(flagSetYes()) + + c.Flags().String(flagModule, "", "module to add the query into. Default: app's main module") + + return c +} + +func scaffoldParamsHandler(cmd *cobra.Command, args []string) error { + var ( + params = args[0:] + appPath = flagGetPath(cmd) + moduleName = flagGetModule(cmd) + ) + + session := cliui.New( + cliui.StartSpinnerWithText(statusScaffolding), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + cfg, _, err := getChainConfig(cmd) + if err != nil { + return err + } + + cacheStorage, err := newCache(cmd) + if err != nil { + return err + } + + sc, err := scaffolder.New(cmd.Context(), appPath, cfg.Build.Proto.Path) + if err != nil { + return err + } + + err = sc.CreateParams(moduleName, params...) + if err != nil { + return err + } + + sm, err := sc.ApplyModifications(xgenny.ApplyPreRun(scaffolder.AskOverwriteFiles(session))) + if err != nil { + return err + } + + if err := sc.PostScaffold(cmd.Context(), cacheStorage, false); err != nil { + return err + } + + modificationsStr, err := sm.String() + if err != nil { + return err + } + + session.Println(modificationsStr) + session.Printf("\n🎉 New parameters added to the module:\n\n- %s\n\n", strings.Join(params, "\n- ")) + + return nil +} diff --git a/ignite/cmd/scaffold_query.go b/ignite/cmd/scaffold_query.go new file mode 100644 index 0000000..ff56ed7 --- /dev/null +++ b/ignite/cmd/scaffold_query.go @@ -0,0 +1,105 @@ +package ignitecmd + +import ( + "fmt" + + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/services/scaffolder" +) + +const ( + flagPaginated = "paginated" +) + +// NewScaffoldQuery command creates a new type command to scaffold queries. 
+func NewScaffoldQuery() *cobra.Command {
+	c := &cobra.Command{
+		Use:   "query [name] [field1:type1] [field2:type2] ...",
+		Short: "Query for fetching data from a blockchain",
+		Long: `Query for fetching data from a blockchain.
+
+For detailed type information use ignite scaffold type --help.`,
+		Args:    cobra.MinimumNArgs(1),
+		PreRunE: migrationPreRunHandler,
+		RunE:    queryHandler,
+	}
+
+	flagSetPath(c)
+	flagSetClearCache(c)
+
+	c.Flags().AddFlagSet(flagSetYes())
+	c.Flags().String(flagModule, "", "module to add the query into. Default: app's main module")
+	c.Flags().StringSliceP(flagResponse, "r", []string{}, "response fields")
+	c.Flags().StringP(flagDescription, "d", "", "description of the CLI to broadcast a tx with the message")
+	c.Flags().Bool(flagPaginated, false, "define if the request can be paginated")
+
+	return c
+}
+
+// queryHandler runs "scaffold query": it reads the module/response/description/
+// paginated flags, scaffolds the query via scaffolder.AddQuery, applies the
+// file modifications (asking before overwriting), runs post-scaffold steps and
+// prints the resulting changes.
+func queryHandler(cmd *cobra.Command, args []string) error {
+	session := cliui.New(
+		cliui.StartSpinnerWithText(statusScaffolding),
+		cliui.WithoutUserInteraction(getYes(cmd)),
+	)
+	defer session.End()
+
+	cfg, _, err := getChainConfig(cmd)
+	if err != nil {
+		return err
+	}
+
+	// Get the module to add the type into
+	module, _ := cmd.Flags().GetString(flagModule)
+
+	// Get request fields
+	resFields, _ := cmd.Flags().GetStringSlice(flagResponse)
+
+	// Get description
+	desc, _ := cmd.Flags().GetString(flagDescription)
+	if desc == "" {
+		// Use a default description
+		desc = fmt.Sprintf("Query %s", args[0])
+	}
+
+	var (
+		paginated, _ = cmd.Flags().GetBool(flagPaginated)
+		appPath      = flagGetPath(cmd)
+	)
+
+	cacheStorage, err := newCache(cmd)
+	if err != nil {
+		return err
+	}
+
+	sc, err := scaffolder.New(cmd.Context(), appPath, cfg.Build.Proto.Path)
+	if err != nil {
+		return err
+	}
+
+	err = sc.AddQuery(cmd.Context(), module, args[0], desc, args[1:], resFields, paginated)
+	if err != nil {
+		return err
+	}
+
+	sm, err := sc.ApplyModifications(xgenny.ApplyPreRun(scaffolder.AskOverwriteFiles(session)))
+	if err != nil {
+		return err
+	}
+
+	if err := sc.PostScaffold(cmd.Context(), cacheStorage, false); err != nil {
+		return err
+	}
+
+	modificationsStr, err := sm.String()
+	if err != nil {
+		return err
+	}
+
+	session.Println(modificationsStr)
+	session.Printf("\n🎉 Created a query `%[1]v`.\n\n", args[0])
+
+	return nil
+}
diff --git a/ignite/cmd/scaffold_react.go b/ignite/cmd/scaffold_react.go
new file mode 100644
index 0000000..f51f386
--- /dev/null
+++ b/ignite/cmd/scaffold_react.go
@@ -0,0 +1,15 @@
+package ignitecmd
+
+import (
+	"github.com/spf13/cobra"
+)
+
+// NewScaffoldReact scaffolds a React app for a chain.
+func NewScaffoldReact() *cobra.Command {
+	c := &cobra.Command{
+		Use:        "react",
+		Deprecated: "the React scaffolding feature is removed from Ignite CLI.\nPlease use the Ignite CCA app to create a React app.\nFor more information, visit: https://ignite.com/marketplace/CCA",
+	}
+
+	return c
+}
diff --git a/ignite/cmd/scaffold_single.go b/ignite/cmd/scaffold_single.go
new file mode 100644
index 0000000..6913ed5
--- /dev/null
+++ b/ignite/cmd/scaffold_single.go
@@ -0,0 +1,34 @@
+package ignitecmd
+
+import (
+	"github.com/spf13/cobra"
+
+	"github.com/ignite/cli/v29/ignite/services/scaffolder"
+)
+
+// NewScaffoldSingle returns a new command to scaffold a singleton.
+func NewScaffoldSingle() *cobra.Command {
+	c := &cobra.Command{
+		Use:   "single NAME [field:type]...",
+		Short: "CRUD for data stored in a single location",
+		Long: `CRUD for data stored in a single location.
+
+For detailed type information use ignite scaffold type --help.`,
+		Example: " ignite scaffold single todo-single title:string done:bool",
+		Args:    cobra.MinimumNArgs(1),
+		PreRunE: migrationPreRunHandler,
+		RunE:    scaffoldSingleHandler,
+	}
+
+	flagSetPath(c)
+	flagSetClearCache(c)
+
+	c.Flags().AddFlagSet(flagSetYes())
+	c.Flags().AddFlagSet(flagSetScaffoldType())
+
+	return c
+}
+
+// scaffoldSingleHandler delegates to the shared scaffoldType helper using the
+// singleton storage kind.
+func scaffoldSingleHandler(cmd *cobra.Command, args []string) error {
+	return scaffoldType(cmd, args, scaffolder.SingletonType())
+}
diff --git a/ignite/cmd/scaffold_type.go b/ignite/cmd/scaffold_type.go
new file mode 100644
index 0000000..f37bf32
--- /dev/null
+++ b/ignite/cmd/scaffold_type.go
@@ -0,0 +1,39 @@
+package ignitecmd
+
+import (
+	"fmt"
+	"strings"
+
+	"github.com/spf13/cobra"
+
+	"github.com/ignite/cli/v29/ignite/services/scaffolder"
+	"github.com/ignite/cli/v29/ignite/templates/field/datatype"
+)
+
+// NewScaffoldType returns a new command to scaffold a type.
+func NewScaffoldType() *cobra.Command {
+	// Render the supported scaffold types into the command's long help text.
+	b := strings.Builder{}
+	_ = datatype.PrintScaffoldTypeList(&b)
+
+	c := &cobra.Command{
+		Use:     "type NAME [field:type] ...",
+		Short:   "Type definition",
+		Long:    fmt.Sprintf("Type information\n\n%s\n", b.String()),
+		Example: " ignite scaffold type todo-item priority:int desc:string tags:array.string done:bool",
+		Args:    cobra.MinimumNArgs(1),
+		PreRunE: migrationPreRunHandler,
+		RunE:    scaffoldTypeHandler,
+	}
+
+	flagSetPath(c)
+	flagSetClearCache(c)
+
+	c.Flags().AddFlagSet(flagSetYes())
+	c.Flags().AddFlagSet(flagSetScaffoldType())
+
+	return c
+}
+
+// scaffoldTypeHandler delegates to the shared scaffoldType helper using
+// scaffolder.DryType().
+func scaffoldTypeHandler(cmd *cobra.Command, args []string) error {
+	return scaffoldType(cmd, args, scaffolder.DryType())
+}
diff --git a/ignite/cmd/scaffold_type_list.go b/ignite/cmd/scaffold_type_list.go
new file mode 100644
index 0000000..b8526c3
--- /dev/null
+++ b/ignite/cmd/scaffold_type_list.go
@@ -0,0 +1,29 @@
+package ignitecmd
+
+import (
+	"github.com/spf13/cobra"
+
+	"github.com/ignite/cli/v29/ignite/pkg/cliui"
+	"github.com/ignite/cli/v29/ignite/templates/field/datatype"
+)
+
+// NewScaffoldTypeList returns a new command to list all scaffold types.
+func NewScaffoldTypeList() *cobra.Command {
+	c := &cobra.Command{
+		Use:   "type-list",
+		Short: "List scaffold types",
+		Long:  "List all available scaffold types",
+		Args:  cobra.NoArgs,
+		RunE: func(cmd *cobra.Command, _ []string) error {
+			session := cliui.New(
+				cliui.StartSpinnerWithText("printing..."),
+				cliui.WithoutUserInteraction(getYes(cmd)),
+			)
+			defer session.End()
+			session.StopSpinner()
+			return datatype.PrintScaffoldTypeList(cmd.OutOrStdout())
+		},
+	}
+
+	return c
+}
diff --git a/ignite/cmd/scaffold_vue.go b/ignite/cmd/scaffold_vue.go
new file mode 100644
index 0000000..f0e4319
--- /dev/null
+++ b/ignite/cmd/scaffold_vue.go
@@ -0,0 +1,41 @@
+package ignitecmd
+
+import (
+	"path/filepath"
+
+	"github.com/spf13/cobra"
+
+	chainconfig "github.com/ignite/cli/v29/ignite/config/chain"
+	"github.com/ignite/cli/v29/ignite/pkg/cliui"
+	"github.com/ignite/cli/v29/ignite/pkg/cosmosgen"
+)
+
+// NewScaffoldVue scaffolds a Vue.js app for a chain.
+func NewScaffoldVue() *cobra.Command { + c := &cobra.Command{ + Use: "vue", + Short: "Vue 3 web app template", + Args: cobra.NoArgs, + PreRunE: migrationPreRunHandler, + RunE: scaffoldVueHandler, + } + + c.Flags().AddFlagSet(flagSetYes()) + + return c +} + +func scaffoldVueHandler(cmd *cobra.Command, _ []string) error { + session := cliui.New( + cliui.StartSpinnerWithText(statusScaffolding), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + path := filepath.Join(".", chainconfig.DefaultVuePath) + if err := cosmosgen.Vue(path); err != nil { + return err + } + + return session.Printf("\n🎉 Scaffolded a Vue.js app in %s.\n\n", path) +} diff --git a/ignite/cmd/testnet.go b/ignite/cmd/testnet.go new file mode 100644 index 0000000..1fb4272 --- /dev/null +++ b/ignite/cmd/testnet.go @@ -0,0 +1,24 @@ +package ignitecmd + +import ( + "github.com/spf13/cobra" +) + +// NewTestnet returns a command that groups scaffolding related sub commands. +func NewTestnet() *cobra.Command { + c := &cobra.Command{ + Use: "testnet [command]", + Short: "Simulate and manage test networks", + Long: `Comprehensive toolset for managing and simulating blockchain test networks. It allows users to either run a test network in place using mainnet data or set up a multi-node environment for more complex testing scenarios. Additionally, it includes a subcommand for simulating the chain, which is useful for fuzz testing and other testing-related tasks.`, + Aliases: []string{"t"}, + Args: cobra.ExactArgs(1), + } + + c.AddCommand( + NewTestnetInPlace(), + NewTestnetMultiNode(), + NewChainSimulate(), // While this is not per se a testnet command, it is related to testing. 
+ ) + + return c +} diff --git a/ignite/cmd/testnet_inplace.go b/ignite/cmd/testnet_inplace.go new file mode 100644 index 0000000..bc8177d --- /dev/null +++ b/ignite/cmd/testnet_inplace.go @@ -0,0 +1,152 @@ +package ignitecmd + +import ( + "fmt" + + "github.com/spf13/cobra" + + "github.com/cosmos/cosmos-sdk/codec/address" + sdk "github.com/cosmos/cosmos-sdk/types" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cosmosaccount" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/services/chain" +) + +func NewTestnetInPlace() *cobra.Command { + c := &cobra.Command{ + Use: "in-place", + Short: "Create and start a testnet from current local net state", + Long: `Testnet in-place command is used to create and start a testnet from current local net state(including mainnet). +After using this command in the repo containing the config.yml file, the network will start. +We can create a testnet from the local network state and mint additional coins for the desired accounts from the config.yml file.`, + Args: cobra.NoArgs, + RunE: testnetInPlaceHandler, + } + flagSetPath(c) + flagSetClearCache(c) + c.Flags().AddFlagSet(flagSetHome()) + c.Flags().AddFlagSet(flagSetCheckDependencies()) + c.Flags().AddFlagSet(flagSetSkipProto()) + c.Flags().AddFlagSet(flagSetVerbose()) + c.Flags().AddFlagSet(flagSetAccountPrefixes()) + c.Flags().AddFlagSet(flagSetCoinType()) + + return c +} + +func testnetInPlaceHandler(cmd *cobra.Command, _ []string) error { + session := cliui.New( + cliui.WithVerbosity(getVerbosity(cmd)), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + // Otherwise run the serve command directly + return testnetInplace(cmd, session) +} + +func testnetInplace(cmd *cobra.Command, session *cliui.Session) error { + chainOption := []chain.Option{ + chain.WithOutputer(session), + chain.CollectEvents(session.EventBus()), + chain.CheckCosmosSDKVersion(), + } + + if 
flagGetCheckDependencies(cmd) { + chainOption = append(chainOption, chain.CheckDependencies()) + } + + // check if custom config is defined + config, _ := cmd.Flags().GetString(flagConfig) + if config != "" { + chainOption = append(chainOption, chain.ConfigFile(config)) + } + + c, err := chain.NewWithHomeFlags(cmd, chainOption...) + if err != nil { + return err + } + + cfg, err := c.Config() + if err != nil { + return err + } + + home, err := c.Home() + if err != nil { + return err + } + + keyringBackend, err := c.KeyringBackend() + if err != nil { + return err + } + + prefix := getAddressPrefix(cmd) + addressCodec := address.NewBech32Codec(prefix) + valAddressCodec := address.NewBech32Codec(prefix + "valoper") + coinType := getCoinType(cmd) + + ca, err := cosmosaccount.New( + cosmosaccount.WithKeyringBackend(cosmosaccount.KeyringBackend(keyringBackend)), + cosmosaccount.WithHome(home), + cosmosaccount.WithBech32Prefix(prefix), + cosmosaccount.WithCoinType(coinType), + ) + if err != nil { + return err + } + + var ( + operatorAddress sdk.ValAddress + accounts string + accErr *cosmosaccount.AccountDoesNotExistError + ) + + for _, acc := range cfg.Accounts { + sdkAcc, err := ca.GetByName(acc.Name) + if errors.As(err, &accErr) { + sdkAcc, _, err = ca.Create(acc.Name) + } + if err != nil { + return err + } + + sdkAddr, err := sdkAcc.Address(prefix) + if err != nil { + return err + } + if len(cfg.Validators) == 0 { + return errors.Errorf("no validators found for account %s", sdkAcc.Name) + } + + if cfg.Validators[0].Name == acc.Name { + accAddr, err := addressCodec.StringToBytes(sdkAddr) + if err != nil { + return err + } + operatorAddress = accAddr + } + accounts = accounts + "," + sdkAddr + } + + chainID, err := c.ID() + if err != nil { + return err + } + + operatorAddressStr, err := valAddressCodec.BytesToString(operatorAddress) + if err != nil { + return err + } + + args := chain.InPlaceArgs{ + NewChainID: fmt.Sprintf("local%s", chainID), + NewOperatorAddress: 
operatorAddressStr, + AccountsToFund: accounts, + } + + return c.TestnetInPlace(cmd.Context(), args) +} diff --git a/ignite/cmd/testnet_multi_node.go b/ignite/cmd/testnet_multi_node.go new file mode 100644 index 0000000..73eed0e --- /dev/null +++ b/ignite/cmd/testnet_multi_node.go @@ -0,0 +1,166 @@ +package ignitecmd + +import ( + "os" + "path" + "strconv" + + tea "github.com/charmbracelet/bubbletea" + "github.com/spf13/cobra" + + sdk "github.com/cosmos/cosmos-sdk/types" + + cmdmodel "github.com/ignite/cli/v29/ignite/cmd/bubblemodel" + igcfg "github.com/ignite/cli/v29/ignite/config" + v1 "github.com/ignite/cli/v29/ignite/config/chain/v1" + "github.com/ignite/cli/v29/ignite/pkg/availableport" + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/xfilepath" + "github.com/ignite/cli/v29/ignite/services/chain" +) + +const ( + flagNodeDirPrefix = "node-dir-prefix" +) + +func NewTestnetMultiNode() *cobra.Command { + c := &cobra.Command{ + Use: "multi-node", + Short: "Initialize and provide multi-node on/off functionality", + Long: `Initialize the test network with the number of nodes and bonded from the config.yml file:: + ... + validators: + - name: alice + bonded: 100000000stake + - name: validator1 + bonded: 100000000stake + - name: validator2 + bonded: 200000000stake + - name: validator3 + bonded: 300000000stake + + + The "multi-node" command allows developers to easily set up, initialize, and manage multiple nodes for a + testnet environment. This command provides full flexibility in enabling or disabling each node as desired, + making it a powerful tool for simulating a multi-node blockchain network during development. 
+ + Usage: + ignite testnet multi-node [flags] + + `, + Args: cobra.NoArgs, + RunE: testnetMultiNodeHandler, + } + flagSetPath(c) + flagSetClearCache(c) + c.Flags().AddFlagSet(flagSetHome()) + c.Flags().AddFlagSet(flagSetCheckDependencies()) + c.Flags().AddFlagSet(flagSetSkipProto()) + c.Flags().AddFlagSet(flagSetVerbose()) + c.Flags().BoolP(flagResetOnce, "r", false, "reset the app state once on init") + c.Flags().String(flagNodeDirPrefix, "validator", "prefix of dir node") + + return c +} + +func testnetMultiNodeHandler(cmd *cobra.Command, _ []string) error { + session := cliui.New( + cliui.WithVerbosity(getVerbosity(cmd)), + cliui.WithoutUserInteraction(getYes(cmd)), + ) + defer session.End() + + return testnetMultiNode(cmd, session) +} + +func testnetMultiNode(cmd *cobra.Command, session *cliui.Session) error { + chainOption := []chain.Option{ + chain.WithOutputer(session), + chain.CollectEvents(session.EventBus()), + chain.CheckCosmosSDKVersion(), + } + + if flagGetCheckDependencies(cmd) { + chainOption = append(chainOption, chain.CheckDependencies()) + } + + // check if custom config is defined + config, _ := cmd.Flags().GetString(flagConfig) + if config != "" { + chainOption = append(chainOption, chain.ConfigFile(config)) + } + + c, err := chain.NewWithHomeFlags(cmd, chainOption...) 
+ if err != nil { + return err + } + + cfg, err := c.Config() + if err != nil { + return err + } + + numVal, amountDetails, err := getValidatorAmountStake(cfg.Validators) + if err != nil { + return err + } + nodeDirPrefix, _ := cmd.Flags().GetString(flagNodeDirPrefix) + + outputDir, err := xfilepath.Join(igcfg.DirPath, xfilepath.Path(path.Join("local-chains", c.Name(), "testnet")))() + if err != nil { + return err + } + + ports, err := availableport.Find(uint(numVal)) //nolint:gosec,nolintlint // conversion is fine + if err != nil { + return err + } + + args := chain.MultiNodeArgs{ + OutputDir: outputDir, + NumValidator: strconv.Itoa(numVal), + ValidatorsStakeAmount: amountDetails, + NodeDirPrefix: nodeDirPrefix, + ListPorts: ports, + } + + resetOnce, _ := cmd.Flags().GetBool(flagResetOnce) + if resetOnce { + // If resetOnce is true, the app state will be reset by deleting the output directory. + if err := os.RemoveAll(outputDir); err != nil { + return err + } + } + + if err = c.TestnetMultiNode(cmd.Context(), args); err != nil { + return err + } + + model, err := cmdmodel.NewModel(cmd.Context(), c.Name(), args) + if err != nil { + return err + } + + _, err = tea.NewProgram(model, tea.WithInput(cmd.InOrStdin())).Run() + return err +} + +// getValidatorAmountStake returns the number of validators and the amountStakes arg from config.MultiNode. 
+func getValidatorAmountStake(validators []v1.Validator) (int, string, error) {
+	numVal := len(validators)
+	var amounts string
+
+	for _, v := range validators {
+		stakeAmount, err := sdk.ParseCoinNormalized(v.Bonded)
+		if err != nil {
+			return numVal, amounts, err
+		}
+		// Join the bonded stake amounts with commas, in validator declaration
+		// order, taking care not to emit a leading separator.
+		if amounts == "" {
+			amounts = stakeAmount.Amount.String()
+		} else {
+			amounts = amounts + "," + stakeAmount.Amount.String()
+		}
+	}
+
+	return numVal, amounts, nil
+}
diff --git a/ignite/cmd/version.go b/ignite/cmd/version.go
new file mode 100644
index 0000000..5114892
--- /dev/null
+++ b/ignite/cmd/version.go
+package ignitecmd
+
+import (
+	"github.com/spf13/cobra"
+
+	"github.com/ignite/cli/v29/ignite/version"
+)
+
+// NewVersion creates a new version command to show the Ignite CLI version.
+func NewVersion() *cobra.Command {
+	c := &cobra.Command{
+		Use:   "version",
+		Short: "Print the current build information",
+		RunE: func(cmd *cobra.Command, _ []string) error {
+			v, err := version.Long(cmd.Context())
+			if err != nil {
+				return err
+			}
+			cmd.Println(v)
+			return nil
+		},
+	}
+	return c
+}
diff --git a/ignite/config/chain/base/config.go b/ignite/config/chain/base/config.go
new file mode 100644
index 0000000..1db9bb0
--- /dev/null
+++ b/ignite/config/chain/base/config.go
+package base
+
+import (
+	"dario.cat/mergo"
+
+	"github.com/ignite/cli/v29/ignite/config/chain/defaults"
+	"github.com/ignite/cli/v29/ignite/config/chain/version"
+	"github.com/ignite/cli/v29/ignite/pkg/xyaml"
+)
+
+// Account holds the options related to setting up Cosmos wallets.
+type Account struct { + Name string `yaml:"name" doc:"Local name associated with the Account's key pair."` + Coins []string `yaml:"coins,omitempty" doc:"List of token balances for the account."` + Mnemonic string `yaml:"mnemonic,omitempty" doc:"Mnemonic phrase for the account."` + Address string `yaml:"address,omitempty" doc:"Address of the account."` + CoinType string `yaml:"cointype,omitempty" doc:"Coin type number for HD derivation (default is 118)."` + AccountNumber string `yaml:"account_number,omitempty" doc:"Account number for HD derivation (must be ≤ 2147483647)."` + AddressIndex string `yaml:"address_index,omitempty" doc:"Address index number for HD derivation (must be ≤ 2147483647)."` +} + +// Build holds build configs. +type Build struct { + Main string `yaml:"main,omitempty" doc:"Path to the main build file."` + Binary string `yaml:"binary,omitempty" doc:"Path to the binary file."` + LDFlags []string `yaml:"ldflags,omitempty" doc:"List of custom linker flags for building the binary."` + Proto Proto `yaml:"proto" doc:"Contains proto build configuration options."` +} + +// Proto holds proto build configs. +type Proto struct { + // Path is the relative path of where app's proto files are located at. + Path string `yaml:"path" doc:"Relative path where the application's proto files are located."` +} + +// Client configures code generation for clients. +type Client struct { + // TSClient configures code generation for Typescript Client. + Typescript Typescript `yaml:"typescript,omitempty" doc:"Relative path where the application's Typescript files are located."` + + // Composables configures code generation for Vue 3 composables. + Composables Composables `yaml:"composables,omitempty" doc:"Configures Vue 3 composables code generation."` + + // OpenAPI configures OpenAPI spec generation for API. 
+ OpenAPI OpenAPI `yaml:"openapi,omitempty" doc:"Configures OpenAPI spec generation for the API."` +} + +// Typescript configures code generation for Typescript Client. +type Typescript struct { + // Path configures out location for generated Typescript Client code. + Path string `yaml:"path" doc:"Relative path where the application's Typescript files are located."` +} + +// Composables configures code generation for vue-query hooks. +type Composables struct { + // Path configures out location for generated vue-query hooks. + Path string `yaml:"path" doc:"Relative path where the application's composable files are located."` +} + +// OpenAPI configures OpenAPI spec generation for API. +type OpenAPI struct { + Path string `yaml:"path" doc:"Relative path where the application's OpenAPI files are located."` + ExcludeList []string `yaml:"exclude_list" doc:"List of proto paths to exclude OpenAPI from generation (supports wildcards)."` +} + +// Faucet configuration. +type Faucet struct { + // Name is faucet account's name. + Name *string `yaml:"name" doc:"Name of the faucet account."` + + // Coins holds type of coin denoms and amounts to distribute. + Coins []string `yaml:"coins" doc:"Types and amounts of coins the faucet distributes."` + + // CoinsMax holds of chain denoms and their max amounts that can be transferred to single user. + CoinsMax []string `yaml:"coins_max,omitempty" doc:"Maximum amounts of coins that can be transferred to a single user."` + + // LimitRefreshTime sets the timeframe at the end of which the limit will be refreshed. + RateLimitWindow string `yaml:"rate_limit_window,omitempty" doc:"Timeframe after which the limit will be refreshed."` + + // Host is the host of the faucet server. + Host string `yaml:"host,omitempty" doc:"Host address of the faucet server."` + + // Port number for faucet server to listen at. 
+ Port uint `yaml:"port,omitempty" doc:"Port number for the faucet server."` + + // TxFee is the tx fee the faucet needs to pay for each transaction. + TxFee string `yaml:"tx_fee,omitempty" doc:"Tx fee the faucet needs to pay for each transaction."` +} + +// Init overwrites sdk configurations with given values. +// Deprecated: Used in config v0 only. +type Init struct { + // App overwrites appd's config/app.toml configs. + App xyaml.Map `yaml:"app" doc:"Overwrites the appd's config/app.toml configurations."` + + // Client overwrites appd's config/client.toml configs. + Client xyaml.Map `yaml:"client" doc:"Overwrites the appd's config/client.toml configurations."` + + // Config overwrites appd's config/config.toml configs. + Config xyaml.Map `yaml:"config" doc:"Overwrites the appd's config/config.toml configurations."` + + // Home overwrites default home directory used for the app. + Home string `yaml:"home" doc:"Overwrites the default home directory used for the application."` +} + +// Host keeps configuration related to started servers. +// Deprecated: Used in config v0 only. +type Host struct { + RPC string `yaml:"rpc" doc:"RPC server address."` + P2P string `yaml:"p2p" doc:"P2P server address."` + Prof string `yaml:"prof" doc:"Profiling server address."` + GRPC string `yaml:"grpc" doc:"GRPC server address."` + GRPCWeb string `yaml:"grpc-web" doc:"GRPC Web server address."` + API string `yaml:"api" doc:"API server address."` +} + +// Validation describes the kind of validation the chain has. +type Validation string + +const ( + // ValidationSovereign is when the chain has his own validator set. + // Note that an empty string is also considered as a sovereign validation, + // because this is the default value. + ValidationSovereign = "sovereign" + // ValidationConsumer is when the chain is validated by a provider chain. + // Such chain is called a consumer chain. 
+ // This is a special case for ICS chains, used by the consumer ignite app (https://github.com/ignite/apps/issues/101). + ValidationConsumer = "consumer" +) + +// Config defines a struct with the fields that are common to all config versions. +type Config struct { + Include []string `yaml:"include,omitempty" doc:"Include incorporate a separate config.yml file directly in your current config file."` + Validation Validation `yaml:"validation,omitempty" doc:"Specifies the type of validation the blockchain uses (e.g., sovereign)."` + Version version.Version `yaml:"version" doc:"Defines the configuration version number."` + Build Build `yaml:"build,omitempty" doc:"Contains build configuration options."` + Accounts []Account `yaml:"accounts" doc:"Lists the options for setting up Cosmos Accounts."` + Faucet Faucet `yaml:"faucet,omitempty" doc:"Configuration for the faucet."` + Client Client `yaml:"client,omitempty" doc:"Configures client code generation."` + Genesis xyaml.Map `yaml:"genesis,omitempty" doc:"Custom genesis block modifications. Follow the nesting of the genesis file here to access all the parameters."` + DefaultDenom string `yaml:"default_denom,omitempty" doc:"Default staking denom (default is stake)."` +} + +// GetVersion returns the config version. +func (c Config) GetVersion() version.Version { + return c.Version +} + +func (c Config) IsSovereignChain() bool { + return c.Validation == "" || c.Validation == ValidationSovereign +} + +func (c Config) IsConsumerChain() bool { + return c.Validation == ValidationConsumer +} + +// SetDefaults assigns default values to empty config fields. +func (c *Config) SetDefaults() error { + return mergo.Merge(c, DefaultConfig()) +} + +// DefaultConfig returns a base config with default values. 
+func DefaultConfig() Config { + return Config{ + Build: Build{ + Proto: Proto{ + Path: defaults.ProtoDir, + }, + }, + Faucet: Faucet{ + Host: defaults.FaucetHost, + }, + } +} diff --git a/ignite/config/chain/config.go b/ignite/config/chain/config.go new file mode 100644 index 0000000..663d077 --- /dev/null +++ b/ignite/config/chain/config.go @@ -0,0 +1,145 @@ +package chain + +import ( + "fmt" + "io" + "os" + "path/filepath" + "strings" + + "gopkg.in/yaml.v3" + + v0 "github.com/ignite/cli/v29/ignite/config/chain/v0" + v1 "github.com/ignite/cli/v29/ignite/config/chain/v1" + "github.com/ignite/cli/v29/ignite/config/chain/version" +) + +var ( + // ConfigFilenames is a list of recognized names as Ignite's chain config file. + ConfigFilenames = []string{"config.yml", "config.yaml"} + + // DefaultTSClientPath defines the default relative path to use when generating the TS client. + // The path is relative to the app's directory. + DefaultTSClientPath = "ts-client" + + // DefaultVuePath defines the default relative path to use when scaffolding a Vue app. + // The path is relative to the app's directory. + DefaultVuePath = "vue" + + // DefaultComposablesPath defines the default relative path to use when generating useQuery composables for a Vue app. + // The path is relative to the app's directory. + DefaultComposablesPath = "vue/src/composables" + + // DefaultVueTypesPath defines the default vue types path. + DefaultVueTypesPath = "vue/src/views/Types.vue" + + // DefaultOpenAPIPath defines the default relative path to use when generating an OpenAPI schema. + // The path is relative to the app's directory. + DefaultOpenAPIPath = "docs/static/openapi.json" + + // LatestVersion defines the latest version of the config. + LatestVersion version.Version = 1 + + // Versions holds config types for the supported versions. + Versions = map[version.Version]version.Converter{ + 0: &v0.Config{}, + 1: &v1.Config{}, + } +) + +type ( + // Config defines the latest chain config. 
+ Config = v1.Config + + // Validator defines the latest validator settings. + Validator = v1.Validator +) + +// DefaultChainConfig returns a config for the latest version initialized with default values. +func DefaultChainConfig() *Config { + return v1.DefaultConfig() +} + +// FaucetHost returns the faucet host to use. +func FaucetHost(cfg *Config) string { + // We keep supporting Port option for backward compatibility + // TODO: drop this option in the future + host := cfg.Faucet.Host + if cfg.Faucet.Port != uint(0) { + host = fmt.Sprintf(":%d", cfg.Faucet.Port) + } + + return host +} + +// TSClientPath returns the relative path to the Typescript client directory. +// Path is relative to the app's directory. +func TSClientPath(conf Config) string { + if path := strings.TrimSpace(conf.Client.Typescript.Path); path != "" { + return filepath.Clean(path) + } + + return DefaultTSClientPath +} + +// ComposablesPath returns the relative path to the Vue useQuery composables directory. +// Path is relative to the app's directory. +func ComposablesPath(conf *Config) string { + if path := strings.TrimSpace(conf.Client.Composables.Path); path != "" { + return filepath.Clean(path) + } + + return DefaultComposablesPath +} + +// LocateDefault locates the default path for the config file. +// Returns ErrConfigNotFound when no config file found. +func LocateDefault(root string) (path string, err error) { + for _, name := range ConfigFilenames { + path = filepath.Join(root, name) + if _, err := os.Stat(path); err == nil { + return path, nil + } else if !os.IsNotExist(err) { + return "", err + } + } + + return "", ErrConfigNotFound +} + +// CheckVersion checks that the config version is the latest +// and if not a VersionError is returned. 
+func CheckVersion(configFile io.Reader) error { + version, err := ReadConfigVersion(configFile) + if err != nil { + return err + } + + if version != LatestVersion { + return VersionError{version} + } + + return nil +} + +// Save saves a config to a YAML file. +func Save(c Config, path string) error { + file, err := os.OpenFile(path, os.O_WRONLY|os.O_TRUNC, 0o755) + if err != nil { + return err + } + + defer file.Close() + + return yaml.NewEncoder(file).Encode(c) +} + +// FirstValidator returns the first validator from the validators list. +// An error is returned when there are no validators defined in the config. +func FirstValidator(conf *Config) (Validator, error) { + if len(conf.Validators) == 0 { + return Validator{}, &ValidationError{"at least one validator is required"} + } + + return conf.Validators[0], nil +} diff --git a/ignite/config/chain/config_test.go b/ignite/config/chain/config_test.go new file mode 100644 index 0000000..01fcda1 --- /dev/null +++ b/ignite/config/chain/config_test.go @@ -0,0 +1,38 @@ +package chain_test + +import ( + "bytes" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/config/chain" + "github.com/ignite/cli/v29/ignite/config/chain/version" +) + +func TestCheckVersion(t *testing.T) { + // Arrange + cfg := bytes.NewBufferString( + fmt.Sprintf("version: %d", chain.LatestVersion), + ) + + // Act + err := chain.CheckVersion(cfg) + + // Assert + require.NoError(t, err) +} + +func TestCheckVersionWithOutdatedVersion(t *testing.T) { + // Arrange + cfg := bytes.NewBufferString("version: 0") + wantError := chain.VersionError{} + + // Act + err := chain.CheckVersion(cfg) + + // Assert + require.ErrorAs(t, err, &wantError) + require.Equal(t, wantError.Version, version.Version(0)) +} diff --git a/ignite/config/chain/convert.go b/ignite/config/chain/convert.go new file mode 100644 index 0000000..d2b4c4e --- /dev/null +++ b/ignite/config/chain/convert.go @@ -0,0 +1,42 @@ +package chain + 
+import ( + "io" + + "gopkg.in/yaml.v3" + + "github.com/ignite/cli/v29/ignite/config/chain/version" +) + +// Build time check for the latest config version type. +// This is required to be sure that conversion to latest +// doesn't break when a new config version is added without +// updating the references to the previous version. +var _ = Versions[LatestVersion].(*Config) + +// ConvertLatest converts a config to the latest version. +func ConvertLatest(c version.Converter) (_ *Config, err error) { + for c.GetVersion() < LatestVersion { + c, err = c.ConvertNext() + if err != nil { + return nil, err + } + } + + // Cast to the latest version type. + // This is safe because there is a build time check that makes sure + // the type for the latest config version is the right one here. + return c.(*Config), nil +} + +// MigrateLatest migrates a config file to the latest version. +func MigrateLatest(current io.Reader, latest io.Writer) error { + cfg, err := Parse(current) + if err != nil { + return err + } + + encoder := yaml.NewEncoder(latest) + encoder.SetIndent(2) + return encoder.Encode(cfg) +} diff --git a/ignite/config/chain/convert_test.go b/ignite/config/chain/convert_test.go new file mode 100644 index 0000000..3026913 --- /dev/null +++ b/ignite/config/chain/convert_test.go @@ -0,0 +1,40 @@ +package chain_test + +import ( + "bytes" + "testing" + + "github.com/stretchr/testify/require" + + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + v0testdata "github.com/ignite/cli/v29/ignite/config/chain/v0/testdata" + "github.com/ignite/cli/v29/ignite/config/testdata" +) + +func TestConvertLatest(t *testing.T) { + // Arrange + cfgV0 := v0testdata.GetConfig(t) + + // Act + cfgLatest, err := chainconfig.ConvertLatest(cfgV0) + + // Assert + require.NoError(t, err) + require.Equal(t, chainconfig.LatestVersion, cfgLatest.GetVersion()) + require.Equal(t, testdata.GetLatestConfig(t), cfgLatest) +} + +func TestMigrateLatest(t *testing.T) { + // Arrange + current := 
bytes.NewReader(testdata.Versions[chainconfig.LatestVersion-1]) + latest := bytes.Buffer{} + want := string(testdata.Versions[chainconfig.LatestVersion]) + + // Act + err := chainconfig.MigrateLatest(current, &latest) + + // Assert + require.NotEmpty(t, want, "testdata is missing the latest config version") + require.NoError(t, err) + require.Equal(t, want, latest.String()) +} diff --git a/ignite/config/chain/defaults/default.go b/ignite/config/chain/defaults/default.go new file mode 100644 index 0000000..904c079 --- /dev/null +++ b/ignite/config/chain/defaults/default.go @@ -0,0 +1,27 @@ +package defaults + +const ( + // GRPCAddress is the default GRPC address. + GRPCAddress = "0.0.0.0:9090" + + // GRPCWebAddress is the default GRPC-Web address. + GRPCWebAddress = "0.0.0.0:9091" + + // APIAddress is the default API address. + APIAddress = "0.0.0.0:1317" + + // RPCAddress is the default RPC address. + RPCAddress = "0.0.0.0:26657" + + // P2PAddress is the default P2P address. + P2PAddress = "0.0.0.0:26656" + + // PProfAddress is the default Prof address. + PProfAddress = "0.0.0.0:6060" + + // ProtoDir is the default proto directory path. + ProtoDir = "proto" + + // FaucetHost is the default faucet host. + FaucetHost = "0.0.0.0:4500" +) diff --git a/ignite/config/chain/errors.go b/ignite/config/chain/errors.go new file mode 100644 index 0000000..94ffb70 --- /dev/null +++ b/ignite/config/chain/errors.go @@ -0,0 +1,50 @@ +package chain + +import ( + "fmt" + + "github.com/ignite/cli/v29/ignite/config/chain/version" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// ErrConfigNotFound indicates that the config.yml can't be found. +var ErrConfigNotFound = errors.New("could not locate a config.yml in your chain") + +// ValidationError is returned when a configuration is invalid. 
+type ValidationError struct { + Message string +} + +func (e ValidationError) Error() string { + return fmt.Sprintf("config is not valid: %s", e.Message) +} + +// UnsupportedVersionError is returned when the version of the config is not supported. +type UnsupportedVersionError struct { + Version version.Version +} + +func (e UnsupportedVersionError) Error() string { + return fmt.Sprintf("config version %s is not supported", e.Version) +} + +// VersionError is returned when config version doesn't match with the version CLI supports. +type VersionError struct { + Version version.Version +} + +func (e VersionError) Error() string { + if LatestVersion > e.Version { + return fmt.Sprintf( + "blockchain app uses a previous config version %s and CLI expects %s", + e.Version, + LatestVersion, + ) + } + + return fmt.Sprintf( + "blockchain app uses a newer config version %s and CLI expects %s", + e.Version, + LatestVersion, + ) +} diff --git a/ignite/config/chain/network/testdata/config.yaml b/ignite/config/chain/network/testdata/config.yaml new file mode 100644 index 0000000..5bec23e --- /dev/null +++ b/ignite/config/chain/network/testdata/config.yaml @@ -0,0 +1,31 @@ +version: 1 +accounts: + - name: alice + coins: + - 100000000uatom + - 100000000000000000000aevmos + address: cosmos1adn9gxjmrc3hrsdx5zpc9sj2ra7kgqkmphf8yw + - name: bob + coins: + - 5000000000000aevmos + address: cosmos1adn9gxjmrc3hrsdx5zpc9sj2ra7kgqkmphf8yw +genesis: + app_state: + crisis: + constant_fee: + denom: aevmos + evm: + params: + evm_denom: aevmos + gov: + deposit_params: + min_deposit: + - amount: "10000000" + denom: aevmos + mint: + params: + mint_denom: aevmos + staking: + params: + bond_denom: aevmos + chain_id: evmosd_9000-1 diff --git a/ignite/config/chain/network/testdata/testdata.go b/ignite/config/chain/network/testdata/testdata.go new file mode 100644 index 0000000..b096595 --- /dev/null +++ b/ignite/config/chain/network/testdata/testdata.go @@ -0,0 +1,27 @@ +package testdata + +import ( 
+ "bytes" + _ "embed" + "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" + + v1 "github.com/ignite/cli/v29/ignite/config/chain/v1" +) + +//go:embed config.yaml +var ConfigYAML []byte + +func GetConfig(t *testing.T) *v1.Config { + c := &v1.Config{} + + err := yaml.NewDecoder(bytes.NewReader(ConfigYAML)).Decode(c) + require.NoError(t, err) + + err = c.SetDefaults() + require.NoError(t, err) + + return c +} diff --git a/ignite/config/chain/parse.go b/ignite/config/chain/parse.go new file mode 100644 index 0000000..d471c4b --- /dev/null +++ b/ignite/config/chain/parse.go @@ -0,0 +1,276 @@ +package chain + +import ( + "bytes" + "io" + "net/http" + "net/url" + "os" + "path/filepath" + "time" + + "dario.cat/mergo" + "gopkg.in/yaml.v3" + + "github.com/cosmos/cosmos-sdk/types/bech32" + + "github.com/ignite/cli/v29/ignite/config/chain/defaults" + "github.com/ignite/cli/v29/ignite/config/chain/version" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// Parse reads a config file. +// When the version of the file being read is not the latest +// it is automatically migrated to the latest version. +func Parse(configFile io.Reader) (*Config, error) { + cfg, err := parse(configFile) + if err != nil { + return cfg, errors.Errorf("error parsing config file: %w", err) + } + + return cfg, validateConfig(cfg) +} + +// ParseNetwork reads a config file for Ignite Network genesis. +// When the version of the file being read is not the latest +// it is automatically migrated to the latest version. 
+func ParseNetwork(configFile io.Reader) (*Config, error) { + cfg, err := parse(configFile) + if err != nil { + return cfg, err + } + + return cfg, validateNetworkConfig(cfg) +} + +func parse(configFile io.Reader) (*Config, error) { + var buf bytes.Buffer + + // Read the config file version first to know how to decode it + v, err := ReadConfigVersion(io.TeeReader(configFile, &buf)) + if err != nil { + return DefaultChainConfig(), err + } + + // Decode the current config file version and assign default + // values for the fields that are empty + c, err := decodeConfig(&buf, v) + if err != nil { + return DefaultChainConfig(), err + } + + // Make sure that the empty fields contain default values + // after reading the config from the YAML file + if err = c.SetDefaults(); err != nil { + return DefaultChainConfig(), err + } + + // Finally make sure the config is the latest one before validating it + cfg, err := ConvertLatest(c) + if err != nil { + return DefaultChainConfig(), err + } + + // Handle includes + if err := handleIncludes(cfg); err != nil { + return DefaultChainConfig(), err + } + + return cfg, nil +} + +// ParseFile parses a config from a file path. +func ParseFile(path string) (*Config, error) { + file, err := os.Open(path) + if err != nil { + return DefaultChainConfig(), err + } + + defer file.Close() + + return Parse(file) +} + +// ParseNetworkFile parses a config for Ignite Network genesis from a file path. +func ParseNetworkFile(path string) (*Config, error) { + file, err := os.Open(path) + if err != nil { + return DefaultChainConfig(), err + } + + defer file.Close() + + return ParseNetwork(file) +} + +// ReadConfigVersion reads the config version. +func ReadConfigVersion(configFile io.Reader) (version.Version, error) { + c := struct { + Version version.Version `yaml:"version"` + }{} + + err := yaml.NewDecoder(configFile).Decode(&c) + + return c.Version, err +} + +// ReadProtoPath reads the proto path. 
+func ReadProtoPath(configFile io.Reader) (string, error) { + c := struct { + Build struct { + Proto struct { + Path string `yaml:"path"` + } `yaml:"proto"` + } `yaml:"build"` + }{} + + c.Build.Proto.Path = defaults.ProtoDir + err := yaml.NewDecoder(configFile).Decode(&c) + + return c.Build.Proto.Path, err +} + +// decodeConfig decodes a config from an io.Reader using the specified version. +// It returns a version.Converter interface or an error if version is not supported +// or if decoding fails. +func decodeConfig(r io.Reader, version version.Version) (version.Converter, error) { + c, ok := Versions[version] + if !ok { + return nil, &UnsupportedVersionError{version} + } + + cfg, err := c.Clone() + if err != nil { + return nil, err + } + + if err = cfg.Decode(r); err != nil { + return nil, err + } + + return cfg, nil +} + +// validateConfig validates a chain configuration by checking that at least one +// account exists and that all validators have required name and bonded fields. +func validateConfig(c *Config) error { + if len(c.Accounts) == 0 { + return &ValidationError{"at least one account is required"} + } + + for _, validator := range c.Validators { + if validator.Name == "" { + return &ValidationError{"validator 'name' is required"} + } + + if validator.Bonded == "" { + return &ValidationError{"validator 'bonded' is required"} + } + } + + return nil +} + +// validateNetworkConfig validates a network genesis configuration by ensuring +// no validators exist and that all accounts have valid addresses, coins and no mnemonics. +func validateNetworkConfig(c *Config) error { + if len(c.Validators) != 0 { + return &ValidationError{"no validators can be used in config for network genesis"} + } + + for _, account := range c.Accounts { + // must have valid bech32 addr. 
+ if _, _, err := bech32.DecodeAndConvert(account.Address); err != nil { + return errors.Errorf("invalid address %s: %w", account.Address, err) + } + + if account.Coins == nil { + return &ValidationError{"account coins is required"} + } + + if account.Mnemonic != "" { + return &ValidationError{"cannot include mnemonic in network config genesis"} + } + } + + return nil +} + +// handleIncludes processes included configuration files referenced in the main config. +// It supports both local files and remote URLs, merging their contents with the main config. +func handleIncludes(cfg *Config) error { + if len(cfg.Include) == 0 { + return nil + } + + for _, includePath := range cfg.Include { + if u, err := url.ParseRequestURI(includePath); err == nil && u.Scheme != "" { + includePath, err = fetchConfigFile(includePath) + if err != nil { + return errors.Wrapf(err, "failed to fetch included config file '%s'", includePath) + } + defer os.Remove(includePath) + } + + // Resolve path - if relative, use the base directory. + absPath, err := filepath.Abs(includePath) + if err != nil { + return errors.Wrapf(err, "failed to resolve included path '%s'", includePath) + } + + includeFile, err := os.Open(absPath) + if err != nil { + return errors.Errorf("failed to open included file '%s'", includePath) + } + defer includeFile.Close() + + // Parse the included config. + includeCfg, err := parse(includeFile) + if err != nil { + return errors.Wrapf(err, "failed to parse included config file '%s'", includePath) + } + + if cfg.Version != includeCfg.Version { + return errors.Errorf("included config version '%d' does not match with chain config version '%d'", includeCfg.Version, cfg.Version) + } + + // Merge the included config with the primary config. 
+ if err = mergo.Merge(cfg, includeCfg, mergo.WithAppendSlice, mergo.WithOverride); err != nil { + return errors.Wrapf(err, "failed to merge included file '%s'", includePath) + } + } + + return nil +} + +// fetchConfigFile downloads a configuration file from a URL and saves it to a temporary file. +// Returns the path to the temporary file or an error if the download fails. +func fetchConfigFile(url string) (string, error) { + // Download the file from URL to a temporary file. + tmpFile, err := os.CreateTemp("", "config-*.yml") + if err != nil { + return "", errors.Wrapf(err, "failed to create temp file for URL") + } + defer tmpFile.Close() + + client := &http.Client{Timeout: 30 * time.Second} + resp, err := client.Get(url) + if err != nil { + return "", errors.Wrapf(err, "failed to download from URL '%s'", url) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return "", errors.Errorf("failed to download file, status code: %d", resp.StatusCode) + } + + if _, err = io.Copy(tmpFile, resp.Body); err != nil { + return "", errors.Wrapf(err, "failed to save downloaded file from '%s'", url) + } + + if _, err = tmpFile.Seek(0, io.SeekStart); err != nil { + return "", errors.Wrapf(err, "failed to rewind temp file from '%s'", url) + } + + return tmpFile.Name(), nil +} diff --git a/ignite/config/chain/parse_test.go b/ignite/config/chain/parse_test.go new file mode 100644 index 0000000..690339d --- /dev/null +++ b/ignite/config/chain/parse_test.go @@ -0,0 +1,508 @@ +package chain_test + +import ( + "bytes" + "fmt" + "net" + "net/http" + "os" + "strings" + "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" + + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + "github.com/ignite/cli/v29/ignite/config/chain/version" + "github.com/ignite/cli/v29/ignite/config/testdata" + "github.com/ignite/cli/v29/ignite/pkg/availableport" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func TestReadConfigVersion(t *testing.T) { + // 
Arrange + r := strings.NewReader("version: 42") + want := version.Version(42) + + // Act + v, err := chainconfig.ReadConfigVersion(r) + + // Assert + require.NoError(t, err) + require.Equal(t, want, v) +} + +func TestParse(t *testing.T) { + // Arrange: Initialize a reader with the previous version + ver := chainconfig.LatestVersion - 1 + r := bytes.NewReader(testdata.Versions[ver]) + + // Act + cfg, err := chainconfig.Parse(r) + + // Assert + require.NoError(t, err) + + // Assert: Parse must return the latest version + require.Equal(t, chainconfig.LatestVersion, cfg.Version) + require.Equal(t, testdata.GetLatestConfig(t), cfg) +} + +func TestParseWithCurrentVersion(t *testing.T) { + // Arrange + r := bytes.NewReader(testdata.Versions[chainconfig.LatestVersion]) + + // Act + cfg, err := chainconfig.Parse(r) + + // Assert + require.NoError(t, err) + require.Equal(t, chainconfig.LatestVersion, cfg.Version) + require.Equal(t, testdata.GetLatestConfig(t), cfg) +} + +func TestParseWithUnknownVersion(t *testing.T) { + // Arrange + version := version.Version(9999) + r := strings.NewReader(fmt.Sprintf("version: %d", version)) + + var want *chainconfig.UnsupportedVersionError + + // Act + _, err := chainconfig.Parse(r) + + // Assert + require.ErrorAs(t, err, &want) + require.NotNil(t, want) + require.Equal(t, want.Version, version) +} + +func TestParseNetworkWithCurrentVersion(t *testing.T) { + // Arrange + r := bytes.NewReader(testdata.NetworkConfig) + + // Act + cfg, err := chainconfig.ParseNetwork(r) + + // Assert + require.NoError(t, err) + + // Assert: Parse must return the latest version + require.Equal(t, chainconfig.LatestVersion, cfg.Version) + require.Equal(t, testdata.GetLatestNetworkConfig(t).Accounts, cfg.Accounts) + require.Equal(t, testdata.GetLatestNetworkConfig(t).Genesis, cfg.Genesis) +} + +func TestParseNetworkWithInvalidData(t *testing.T) { + // Arrange + r := bytes.NewReader(testdata.Versions[chainconfig.LatestVersion]) + + // Act + _, err := 
chainconfig.ParseNetwork(r) + + // Assert error + require.True( + t, + strings.Contains( + err.Error(), + "config is not valid: no validators can be used in config for network genesis", + ), + ) +} + +func TestHandleIncludes(t *testing.T) { + server := startTestServer(t) + + tests := []struct { + name string + baseConfig string + expected string + err error + }{ + { + name: "Single valid include", + baseConfig: ` +version: 1 +client: + typescript: + path: original +include: + - "./testdata/include1.yml" +`, + + expected: ` +include: + - ./testdata/include1.yml +validation: sovereign +version: 1 +build: + proto: + path: proto +accounts: + - name: bob + coins: + - 10000token + - 100000000stake +faucet: + name: danilo + coins: + - 5token + - 100000stake + host: 0.0.0.0:4500 +client: + typescript: + path: override-1 + openapi: + path: docs/static/include1.yml + exclude_list: [] +validators: + - name: alice + bonded: 100000000stake`, + }, + { + name: "Multiple valid includes", + baseConfig: ` +version: 1 +client: + typescript: + path: original +include: + - "./testdata/include1.yml" + - "./testdata/include2.yml" +`, + expected: ` +include: + - ./testdata/include1.yml + - ./testdata/include2.yml +validation: sovereign +version: 1 +build: + proto: + path: proto +accounts: + - name: bob + coins: + - 10000token + - 100000000stake + - name: alice + coins: + - 20000token + - 200000000stake +faucet: + name: alice + coins: + - 5token + - 100000stake + - 5token + - 100000stake + host: 0.0.0.0:4500 +client: + typescript: + path: override-2 + openapi: + path: docs/static/include2.yml + exclude_list: [] +validators: + - name: alice + bonded: 100000000stake + - name: validator1 + bonded: 100000000stake`, + }, + { + name: "Invalid include file path", + baseConfig: ` +version: 1 +client: + typescript: + path: original +include: + - "./testdata/nonexistent.yml"`, + err: errors.New("error parsing config file: failed to open included file './testdata/nonexistent.yml'"), + }, + { + name: 
"Empty include list", + baseConfig: ` +version: 1 +validation: sovereign +accounts: + - name: alice + coins: + - 10000token +include: [] +faucet: + name: alice + coins: + - 5token +client: + typescript: + path: override-1 + openapi: + path: docs/static/include1.yml +validators: + - name: alice + bonded: 100stake`, + expected: ` +validation: sovereign +version: 1 +build: + proto: + path: proto +accounts: + - name: alice + coins: + - 10000token +faucet: + name: alice + coins: + - 5token + host: 0.0.0.0:4500 +client: + typescript: + path: override-1 + openapi: + path: docs/static/include1.yml + exclude_list: [] +validators: + - name: alice + bonded: 100stake`, + }, + { + name: "Empty values include", + baseConfig: ` +version: 1 +include: + - "./testdata/include1.yml" + - "./testdata/include2.yml" +`, + expected: ` +include: + - ./testdata/include1.yml + - ./testdata/include2.yml +validation: sovereign +version: 1 +build: + proto: + path: proto +accounts: + - name: bob + coins: + - 10000token + - 100000000stake + - name: alice + coins: + - 20000token + - 200000000stake +faucet: + name: alice + coins: + - 5token + - 100000stake + - 5token + - 100000stake + host: 0.0.0.0:4500 +client: + typescript: + path: override-2 + openapi: + path: docs/static/include2.yml + exclude_list: [] +validators: + - name: alice + bonded: 100000000stake + - name: validator1 + bonded: 100000000stake`, + }, + { + name: "HTTP include", + baseConfig: fmt.Sprintf(` +version: 1 +validation: sovereign +accounts: + - name: alice + coins: + - 10000token +include: + - %[1]v/include1.yml + - %[1]v/include2.yml +faucet: + name: alice + coins: + - 5token +client: + typescript: + path: original + openapi: + path: docs/static/include1.yml +validators: + - name: alice + bonded: 100stake +`, server), + expected: fmt.Sprintf(` +include: + - %[1]v/include1.yml + - %[1]v/include2.yml +validation: sovereign +version: 1 +build: + proto: + path: proto +accounts: + - name: alice + coins: + - 10000token + - name: bob 
+ coins: + - 10000token + - 100000000stake + - name: alice + coins: + - 20000token + - 200000000stake +faucet: + name: alice + coins: + - 5token + - 5token + - 100000stake + - 5token + - 100000stake + host: 0.0.0.0:4500 +client: + typescript: + path: override-2 + openapi: + path: docs/static/include2.yml + exclude_list: [] +validators: + - name: alice + bonded: 100stake + - name: alice + bonded: 100000000stake + - name: validator1 + bonded: 100000000stake`, server), + }, + { + name: "HTTP and local include", + baseConfig: fmt.Sprintf(` +version: 1 +validation: sovereign +accounts: + - name: alice + coins: + - 10000token +include: + - %[1]v/include1.yml + - testdata/include2.yml +faucet: + name: alice + coins: + - 5token +client: + typescript: + path: original + openapi: + path: docs/static/include1.yml +validators: + - name: alice + bonded: 100stake +`, server), + expected: fmt.Sprintf(` +include: + - %[1]v/include1.yml + - testdata/include2.yml +validation: sovereign +version: 1 +build: + proto: + path: proto +accounts: + - name: alice + coins: + - 10000token + - name: bob + coins: + - 10000token + - 100000000stake + - name: alice + coins: + - 20000token + - 200000000stake +faucet: + name: alice + coins: + - 5token + - 5token + - 100000stake + - 5token + - 100000stake + host: 0.0.0.0:4500 +client: + typescript: + path: override-2 + openapi: + path: docs/static/include2.yml + exclude_list: [] +validators: + - name: alice + bonded: 100stake + - name: alice + bonded: 100000000stake + - name: validator1 + bonded: 100000000stake`, server), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + baseReader := bytes.NewReader([]byte(tt.baseConfig)) + baseConfig, err := chainconfig.Parse(baseReader) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + + finalConfigYaml, err := yaml.Marshal(baseConfig) + require.NoError(t, err) + require.Equal(t, 
strings.TrimSpace(tt.expected), strings.TrimSpace(string(finalConfigYaml))) + }) + } +} + +func startTestServer(t *testing.T) string { + t.Helper() + + mux := http.NewServeMux() + mux.HandleFunc("/include1.yml", func(w http.ResponseWriter, r *http.Request) { + content, err := os.ReadFile("testdata/include1.yml") + require.NoError(t, err) + _, err = w.Write(content) + require.NoError(t, err) + }) + mux.HandleFunc("/include2.yml", func(w http.ResponseWriter, r *http.Request) { + content, err := os.ReadFile("testdata/include2.yml") + require.NoError(t, err) + _, err = w.Write(content) + require.NoError(t, err) + }) + + ports, err := availableport.Find(1) + require.NoError(t, err) + + server := &http.Server{ + Addr: fmt.Sprintf(":%d", ports[0]), + Handler: mux, + } + + listener, err := net.Listen("tcp", server.Addr) + require.NoError(t, err) + + go server.Serve(listener) + + t.Cleanup(func() { + server.Close() + }) + + return fmt.Sprintf("http://localhost:%d", ports[0]) +} diff --git a/ignite/config/chain/testdata/include1.yml b/ignite/config/chain/testdata/include1.yml new file mode 100644 index 0000000..3c7cb17 --- /dev/null +++ b/ignite/config/chain/testdata/include1.yml @@ -0,0 +1,20 @@ +validation: sovereign +version: 1 +accounts: + - name: bob + coins: + - 10000token + - 100000000stake +faucet: + name: danilo + coins: + - 5token + - 100000stake +client: + typescript: + path: override-1 + openapi: + path: docs/static/include1.yml +validators: + - name: alice + bonded: 100000000stake diff --git a/ignite/config/chain/testdata/include2.yml b/ignite/config/chain/testdata/include2.yml new file mode 100644 index 0000000..16805e5 --- /dev/null +++ b/ignite/config/chain/testdata/include2.yml @@ -0,0 +1,19 @@ +version: 1 +accounts: + - name: alice + coins: + - 20000token + - 200000000stake +faucet: + name: alice + coins: + - 5token + - 100000stake +client: + typescript: + path: override-2 + openapi: + path: docs/static/include2.yml +validators: + - name: validator1 + 
bonded: 100000000stake \ No newline at end of file diff --git a/ignite/config/chain/v0/config.go b/ignite/config/chain/v0/config.go new file mode 100644 index 0000000..88fa612 --- /dev/null +++ b/ignite/config/chain/v0/config.go @@ -0,0 +1,37 @@ +package v0 + +import ( + "io" + + "dario.cat/mergo" + "gopkg.in/yaml.v3" + + "github.com/ignite/cli/v29/ignite/config/chain/base" + "github.com/ignite/cli/v29/ignite/config/chain/version" +) + +// Config is the user given configuration to do additional setup during serve. +type Config struct { + base.Config `yaml:",inline"` + + Validator Validator `yaml:"validator" doc:"Contains information related to the validator and settings."` + Init base.Init `yaml:"init" doc:"Overwrites the appd's config/config.toml configurations."` + Host base.Host `yaml:"host" doc:"Keeps configuration related to started servers."` +} + +// Clone returns an identical copy of the instance. +func (c *Config) Clone() (version.Converter, error) { + cfgCopy := Config{} + return &cfgCopy, mergo.Merge(&cfgCopy, c, mergo.WithAppendSlice) +} + +// Decode decodes the config file values from YAML. +func (c *Config) Decode(r io.Reader) error { + return yaml.NewDecoder(r).Decode(c) +} + +// Validator holds info related to validator settings. +type Validator struct { + Name string `yaml:"name" doc:"Name of the validator."` + Staked string `yaml:"staked" doc:"Amount staked by the validator."` +} diff --git a/ignite/config/chain/v0/config_convert.go b/ignite/config/chain/v0/config_convert.go new file mode 100644 index 0000000..40785f6 --- /dev/null +++ b/ignite/config/chain/v0/config_convert.go @@ -0,0 +1,65 @@ +package v0 + +import ( + v1 "github.com/ignite/cli/v29/ignite/config/chain/v1" + "github.com/ignite/cli/v29/ignite/config/chain/version" +) + +// ConvertNext converts the current config version to the next one. 
+func (c *Config) ConvertNext() (version.Converter, error) { + targetCfg := v1.DefaultConfig() + + // All the fields in the base config remain the same + targetCfg.Config = c.Config + targetCfg.Version = 1 + + // There is always only one validator in version 0 + validator := v1.Validator{} + validator.Name = c.Validator.Name + validator.Bonded = c.Validator.Staked + validator.Home = c.Init.Home + validator.Client = c.Init.Client + + if c.Init.App != nil { + validator.App = c.Init.App + } + + if c.Init.Config != nil { + validator.Config = c.Init.Config + } + + // The host configuration must be defined in the validators for version 1 + servers := v1.Servers{} + + if c.Host.P2P != "" { + servers.P2P.Address = c.Host.P2P + } + + if c.Host.RPC != "" { + servers.RPC.Address = c.Host.RPC + } + + if c.Host.Prof != "" { + servers.RPC.PProfAddress = c.Host.Prof + } + + if c.Host.GRPCWeb != "" { + servers.GRPCWeb.Address = c.Host.GRPCWeb + } + + if c.Host.GRPC != "" { + servers.GRPC.Address = c.Host.GRPC + } + + if c.Host.API != "" { + servers.API.Address = c.Host.API + } + + if err := validator.SetServers(servers); err != nil { + return nil, err + } + + targetCfg.Validators = append(targetCfg.Validators, validator) + + return targetCfg, nil +} diff --git a/ignite/config/chain/v0/config_convert_test.go b/ignite/config/chain/v0/config_convert_test.go new file mode 100644 index 0000000..830e99d --- /dev/null +++ b/ignite/config/chain/v0/config_convert_test.go @@ -0,0 +1,60 @@ +package v0_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + v0testdata "github.com/ignite/cli/v29/ignite/config/chain/v0/testdata" + v1 "github.com/ignite/cli/v29/ignite/config/chain/v1" + "github.com/ignite/cli/v29/ignite/config/chain/version" +) + +func TestV0ToV1(t *testing.T) { + // Arrange + cfgV0 := v0testdata.GetConfig(t) + + // Act + c, err := cfgV0.ConvertNext() + cfgV1, _ := c.(*v1.Config) + + // Assert + require.NoError(t, err) + require.NotNilf(t, cfgV1, "expected 
*v1.Config, got %T", c) + require.Equal(t, version.Version(1), cfgV1.GetVersion()) + require.Equal(t, cfgV0.Build, cfgV1.Build) + require.Equal(t, cfgV0.Accounts, cfgV1.Accounts) + require.Equal(t, cfgV0.Faucet, cfgV1.Faucet) + require.Equal(t, cfgV0.Client, cfgV1.Client) + require.Equal(t, cfgV0.Genesis, cfgV1.Genesis) + require.Len(t, cfgV1.Validators, 1) +} + +func TestV0ToV1Validator(t *testing.T) { + // Arrange + cfgV0 := v0testdata.GetConfig(t) + cfgV0.Host.RPC = "127.0.0.0:1" + cfgV0.Host.P2P = "127.0.0.0:2" + cfgV0.Host.GRPC = "127.0.0.0:3" + cfgV0.Host.GRPCWeb = "127.0.0.0:4" + cfgV0.Host.Prof = "127.0.0.0:5" + cfgV0.Host.API = "127.0.0.0:6" + + // Act + c, _ := cfgV0.ConvertNext() + cfgV1, _ := c.(*v1.Config) + validator := cfgV1.Validators[0] + servers, _ := validator.GetServers() + + // Assert + require.Equal(t, cfgV0.Validator.Name, validator.Name) + require.Equal(t, cfgV0.Validator.Staked, validator.Bonded) + require.Equal(t, cfgV0.Init.Home, validator.Home) + require.Equal(t, cfgV0.Init.Client, validator.Client) + require.Equal(t, cfgV0.Host.RPC, servers.RPC.Address) + require.Equal(t, cfgV0.Host.P2P, servers.P2P.Address) + require.Equal(t, cfgV0.Host.GRPC, servers.GRPC.Address) + require.Equal(t, cfgV0.Host.GRPCWeb, servers.GRPCWeb.Address) + require.Equal(t, cfgV0.Host.Prof, servers.RPC.PProfAddress) + require.Equal(t, cfgV0.Host.API, servers.API.Address) +} diff --git a/ignite/config/chain/v0/config_test.go b/ignite/config/chain/v0/config_test.go new file mode 100644 index 0000000..6801937 --- /dev/null +++ b/ignite/config/chain/v0/config_test.go @@ -0,0 +1,26 @@ +package v0_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + v0 "github.com/ignite/cli/v29/ignite/config/chain/v0" +) + +func TestClone(t *testing.T) { + // Arrange + c := &v0.Config{ + Validator: v0.Validator{ + Name: "alice", + Staked: "100000000stake", + }, + } + + // Act + c2, err := c.Clone() + + // Assert + require.NoError(t, err) + require.Equal(t, c, c2) 
+} diff --git a/ignite/config/chain/v0/testdata/config.yaml b/ignite/config/chain/v0/testdata/config.yaml new file mode 100644 index 0000000..de02eb7 --- /dev/null +++ b/ignite/config/chain/v0/testdata/config.yaml @@ -0,0 +1,45 @@ +accounts: + - name: alice + coins: [ "100000000uatom", "100000000000000000000aevmos" ] + mnemonic: "ozone unfold device pave lemon potato omit insect column wise cover hint narrow large provide kidney episode clay notable milk mention dizzy muffin crazy" + - name: bob + coins: [ "5000000000000aevmos" ] + address: "cosmos1adn9gxjmrc3hrsdx5zpc9sj2ra7kgqkmphf8yw" +validator: + name: alice + staked: "100000000000000000000aevmos" +faucet: + name: bob + coins: [ "10aevmos" ] + host: 0.0.0.0:4600 + port: 4600 +build: + binary: "evmosd" +init: + home: "$HOME/.evmosd" + client: + keyring-backend: "os" + app: + evm-rpc: + address: "0.0.0.0:8545" + ws-address: "0.0.0.0:8546" +genesis: + chain_id: "evmosd_9000-1" + app_state: + staking: + params: + bond_denom: "aevmos" + mint: + params: + mint_denom: "aevmos" + crisis: + constant_fee: + denom: "aevmos" + gov: + deposit_params: + min_deposit: + - amount: "10000000" + denom: "aevmos" + evm: + params: + evm_denom: "aevmos" diff --git a/ignite/config/chain/v0/testdata/testdata.go b/ignite/config/chain/v0/testdata/testdata.go new file mode 100644 index 0000000..61b5cb2 --- /dev/null +++ b/ignite/config/chain/v0/testdata/testdata.go @@ -0,0 +1,27 @@ +package testdata + +import ( + "bytes" + _ "embed" + "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" + + v0 "github.com/ignite/cli/v29/ignite/config/chain/v0" +) + +//go:embed config.yaml +var ConfigYAML []byte + +func GetConfig(t *testing.T) *v0.Config { + c := &v0.Config{} + + err := yaml.NewDecoder(bytes.NewReader(ConfigYAML)).Decode(c) + require.NoError(t, err) + + err = c.SetDefaults() + require.NoError(t, err) + + return c +} diff --git a/ignite/config/chain/v1/config.go b/ignite/config/chain/v1/config.go new file mode 100644 
index 0000000..0137aa7 --- /dev/null +++ b/ignite/config/chain/v1/config.go @@ -0,0 +1,129 @@ +package v1 + +import ( + "fmt" + "io" + + "dario.cat/mergo" + "gopkg.in/yaml.v3" + + "github.com/ignite/cli/v29/ignite/config/chain/base" + "github.com/ignite/cli/v29/ignite/config/chain/defaults" + "github.com/ignite/cli/v29/ignite/config/chain/version" + "github.com/ignite/cli/v29/ignite/pkg/xnet" +) + +// DefaultConfig returns a config with default values. +func DefaultConfig() *Config { + c := Config{Config: base.DefaultConfig()} + c.Version = 1 + return &c +} + +// Config is the user given configuration to do additional setup during serve. +type Config struct { + base.Config `yaml:",inline"` + + Validators []Validator `yaml:"validators" doc:"Contains information related to the list of validators and settings."` +} + +func (c *Config) SetDefaults() error { + if err := c.Config.SetDefaults(); err != nil { + return err + } + return c.updateValidatorAddresses() +} + +// Clone returns an identical copy of the instance. +func (c *Config) Clone() (version.Converter, error) { + cfgCopy := Config{} + return &cfgCopy, mergo.Merge(&cfgCopy, c, mergo.WithAppendSlice) +} + +// Decode decodes the config file values from YAML. 
+func (c *Config) Decode(r io.Reader) error { + return yaml.NewDecoder(r).Decode(c) +} + +func (c *Config) updateValidatorAddresses() (err error) { + // Margin to increase port numbers of the default addresses + margin := 10 + + for i := range c.Validators { + // Use default addresses for the first validator + if i == 0 { + continue + } + + validator := &c.Validators[i] + servers, err := validator.GetServers() + if err != nil { + return err + } + portIncrement := margin * i + if portIncrement < 0 { + return fmt.Errorf("calculated port increment is negative: %d", portIncrement) //nolint: forbidigo + } + + servers, err = incrementDefaultServerPortsBy(servers, uint64(portIncrement)) + if err != nil { + return err + } + + if err := validator.SetServers(servers); err != nil { + return err + } + } + + return nil +} + +// Returns a new server where the default addresses have their ports +// incremented by a margin to avoid port clashing. +func incrementDefaultServerPortsBy(s Servers, inc uint64) (Servers, error) { + var err error + + if s.GRPC.Address == defaults.GRPCAddress { + s.GRPC.Address, err = xnet.IncreasePortBy(defaults.GRPCAddress, inc) + if err != nil { + return Servers{}, err + } + } + + if s.GRPCWeb.Address == defaults.GRPCWebAddress { + s.GRPCWeb.Address, err = xnet.IncreasePortBy(defaults.GRPCWebAddress, inc) + if err != nil { + return Servers{}, err + } + } + + if s.API.Address == defaults.APIAddress { + s.API.Address, err = xnet.IncreasePortBy(defaults.APIAddress, inc) + if err != nil { + return Servers{}, err + } + } + + if s.P2P.Address == defaults.P2PAddress { + s.P2P.Address, err = xnet.IncreasePortBy(defaults.P2PAddress, inc) + if err != nil { + return Servers{}, err + } + } + + if s.RPC.Address == defaults.RPCAddress { + s.RPC.Address, err = xnet.IncreasePortBy(defaults.RPCAddress, inc) + if err != nil { + return Servers{}, err + } + } + + if s.RPC.PProfAddress == defaults.PProfAddress { + s.RPC.PProfAddress, err = 
xnet.IncreasePortBy(defaults.PProfAddress, inc) + if err != nil { + return Servers{}, err + } + } + + return s, nil +} diff --git a/ignite/config/chain/v1/config_convert.go b/ignite/config/chain/v1/config_convert.go new file mode 100644 index 0000000..e28bd4f --- /dev/null +++ b/ignite/config/chain/v1/config_convert.go @@ -0,0 +1,11 @@ +package v1 + +import ( + "github.com/ignite/cli/v29/ignite/config/chain/version" +) + +// ConvertNext implements the conversion of the current config to the next version. +func (c *Config) ConvertNext() (version.Converter, error) { + // v1 is the latest version, there is no need to convert. + return c, nil +} diff --git a/ignite/config/chain/v1/config_test.go b/ignite/config/chain/v1/config_test.go new file mode 100644 index 0000000..d68747f --- /dev/null +++ b/ignite/config/chain/v1/config_test.go @@ -0,0 +1,249 @@ +package v1_test + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/config/chain/base" + "github.com/ignite/cli/v29/ignite/config/chain/defaults" + v1 "github.com/ignite/cli/v29/ignite/config/chain/v1" + "github.com/ignite/cli/v29/ignite/pkg/xnet" +) + +func TestConfigDecode(t *testing.T) { + assert := assert.New(t) + require := require.New(t) + f, err := os.Open("testdata/config2.yaml") + require.NoError(err) + defer f.Close() + var cfg v1.Config + + err = cfg.Decode(f) + + require.NoError(err) + expected := v1.Config{ + Config: base.Config{ + Version: 1, + Build: base.Build{ + Binary: "evmosd", + Proto: base.Proto{ + Path: "proto", + }, + }, + Accounts: []base.Account{ + { + Name: "alice", + Coins: []string{"100000000uatom", "100000000000000000000aevmos"}, + Mnemonic: "ozone unfold device pave lemon potato omit insect column wise cover hint narrow large provide kidney episode clay notable milk mention dizzy muffin crazy", + }, + { + Name: "bob", + Coins: []string{"5000000000000aevmos"}, + Address: 
"cosmos1adn9gxjmrc3hrsdx5zpc9sj2ra7kgqkmphf8yw", + }, + }, + Faucet: base.Faucet{ + Name: &[]string{"bob"}[0], + Coins: []string{"10aevmos"}, + Host: "0.0.0.0:4600", + Port: 4600, + }, + Genesis: map[string]any{ + "app_state": map[string]any{ + "crisis": map[string]any{ + "constant_fee": map[string]any{ + "denom": "aevmos", + }, + }, + }, + "chain_id": "evmosd_9000-1", + }, + }, + Validators: []v1.Validator{{ + Name: "alice", + Bonded: "100000000000000000000aevmos", + Home: "$HOME/.evmosd", + App: map[string]any{ + "evm-rpc": map[string]any{ + "address": "0.0.0.0:8545", + "ws-address": "0.0.0.0:8546", + }, + }, + }}, + } + assert.Equal(expected, cfg) +} + +func TestConfigValidatorDefaultServers(t *testing.T) { + // Arrange + c := v1.Config{ + Validators: []v1.Validator{ + { + Name: "name-1", + Bonded: "100ATOM", + }, + }, + } + servers := v1.Servers{} + + // Act + err := c.SetDefaults() + if err == nil { + servers, err = c.Validators[0].GetServers() + } + + // Assert + require.NoError(t, err) + + // Assert + require.Equal(t, defaults.GRPCAddress, servers.GRPC.Address) + require.Equal(t, defaults.GRPCWebAddress, servers.GRPCWeb.Address) + require.Equal(t, defaults.APIAddress, servers.API.Address) + require.Equal(t, defaults.RPCAddress, servers.RPC.Address) + require.Equal(t, defaults.P2PAddress, servers.P2P.Address) + require.Equal(t, defaults.PProfAddress, servers.RPC.PProfAddress) +} + +func TestConfigValidatorWithExistingServers(t *testing.T) { + // Arrange + rpcAddr := "127.0.0.1:1234" + apiAddr := "127.0.0.1:4321" + c := v1.Config{ + Validators: []v1.Validator{ + { + Name: "name-1", + Bonded: "100ATOM", + App: map[string]interface{}{ + // This value should not be overwritten with the default address + "api": map[string]interface{}{"address": apiAddr}, + }, + Config: map[string]interface{}{ + // This value should not be overwritten with the default address + "rpc": map[string]interface{}{"laddr": rpcAddr}, + }, + }, + }, + } + servers := v1.Servers{} + + // Act 
+ err := c.SetDefaults() + if err == nil { + servers, err = c.Validators[0].GetServers() + } + + // Assert + require.NoError(t, err) + + // Assert + require.Equal(t, rpcAddr, servers.RPC.Address) + require.Equal(t, apiAddr, servers.API.Address) + require.Equal(t, defaults.GRPCAddress, servers.GRPC.Address) + require.Equal(t, defaults.GRPCWebAddress, servers.GRPCWeb.Address) + require.Equal(t, defaults.P2PAddress, servers.P2P.Address) + require.Equal(t, defaults.PProfAddress, servers.RPC.PProfAddress) +} + +func TestConfigValidatorsWithExistingServers(t *testing.T) { + // Arrange + inc := uint64(10) + rpcAddr := "127.0.0.1:1234" + apiAddr := "127.0.0.1:4321" + c := v1.Config{ + Validators: []v1.Validator{ + { + Name: "name-1", + Bonded: "100ATOM", + }, + { + Name: "name-2", + Bonded: "200ATOM", + App: map[string]interface{}{ + // This value should not be overwritten with the default address + "api": map[string]interface{}{"address": apiAddr}, + }, + Config: map[string]interface{}{ + // This value should not be overwritten with the default address + "rpc": map[string]interface{}{"laddr": rpcAddr}, + }, + }, + }, + } + servers := v1.Servers{} + + // Act + err := c.SetDefaults() + if err == nil { + servers, err = c.Validators[1].GetServers() + } + + // Assert + require.NoError(t, err) + + // Assert: The existing addresses should not be changed + require.Equal(t, rpcAddr, servers.RPC.Address) + require.Equal(t, apiAddr, servers.API.Address) + + // Assert: The second validator should have the ports incremented by 10 + require.Equal(t, xnet.MustIncreasePortBy(defaults.GRPCAddress, inc), servers.GRPC.Address) + require.Equal(t, xnet.MustIncreasePortBy(defaults.GRPCWebAddress, inc), servers.GRPCWeb.Address) + require.Equal(t, xnet.MustIncreasePortBy(defaults.P2PAddress, inc), servers.P2P.Address) + require.Equal(t, xnet.MustIncreasePortBy(defaults.PProfAddress, inc), servers.RPC.PProfAddress) +} + +func TestConfigValidatorsDefaultServers(t *testing.T) { + // Arrange + inc 
:= uint64(10) + c := v1.Config{ + Validators: []v1.Validator{ + { + Name: "name-1", + Bonded: "100ATOM", + }, + { + Name: "name-2", + Bonded: "200ATOM", + }, + }, + } + servers := v1.Servers{} + + // Act + err := c.SetDefaults() + if err == nil { + servers, err = c.Validators[1].GetServers() + } + + // Assert + require.NoError(t, err) + + // Assert: The second validator should have the ports incremented by 10 + require.Equal(t, xnet.MustIncreasePortBy(defaults.GRPCAddress, inc), servers.GRPC.Address) + require.Equal(t, xnet.MustIncreasePortBy(defaults.GRPCWebAddress, inc), servers.GRPCWeb.Address) + require.Equal(t, xnet.MustIncreasePortBy(defaults.APIAddress, inc), servers.API.Address) + require.Equal(t, xnet.MustIncreasePortBy(defaults.RPCAddress, inc), servers.RPC.Address) + require.Equal(t, xnet.MustIncreasePortBy(defaults.P2PAddress, inc), servers.P2P.Address) + require.Equal(t, xnet.MustIncreasePortBy(defaults.PProfAddress, inc), servers.RPC.PProfAddress) +} + +func TestClone(t *testing.T) { + // Arrange + c := &v1.Config{ + Validators: []v1.Validator{ + { + Name: "alice", + Bonded: "100000000stake", + }, + }, + } + + // Act + c2, err := c.Clone() + + // Assert + require.NoError(t, err) + require.Equal(t, c, c2) +} diff --git a/ignite/config/chain/v1/testdata/config.yaml b/ignite/config/chain/v1/testdata/config.yaml new file mode 100644 index 0000000..3baf8f5 --- /dev/null +++ b/ignite/config/chain/v1/testdata/config.yaml @@ -0,0 +1,51 @@ +version: 1 +build: + binary: evmosd + proto: + path: proto +accounts: + - name: alice + coins: + - 100000000uatom + - 100000000000000000000aevmos + mnemonic: ozone unfold device pave lemon potato omit insect column wise cover hint narrow large provide kidney episode clay notable milk mention dizzy muffin crazy + - name: bob + coins: + - 5000000000000aevmos + address: cosmos1adn9gxjmrc3hrsdx5zpc9sj2ra7kgqkmphf8yw +faucet: + name: bob + coins: + - 10aevmos + host: 0.0.0.0:4600 + port: 4600 +genesis: + app_state: + crisis: + 
constant_fee: + denom: aevmos + evm: + params: + evm_denom: aevmos + gov: + deposit_params: + min_deposit: + - amount: "10000000" + denom: aevmos + mint: + params: + mint_denom: aevmos + staking: + params: + bond_denom: aevmos + chain_id: evmosd_9000-1 +validators: + - name: alice + bonded: 100000000000000000000aevmos + app: + evm-rpc: + address: 0.0.0.0:8545 + ws-address: 0.0.0.0:8546 + client: + keyring-backend: os + home: $HOME/.evmosd diff --git a/ignite/config/chain/v1/testdata/config2.yaml b/ignite/config/chain/v1/testdata/config2.yaml new file mode 100644 index 0000000..c3dabc2 --- /dev/null +++ b/ignite/config/chain/v1/testdata/config2.yaml @@ -0,0 +1,43 @@ +version: 1 +build: + binary: evmosd + proto: + path: proto +accounts: + - name: alice + coins: + - 100000000uatom + - 100000000000000000000aevmos + mnemonic: ozone unfold device pave lemon potato omit insect column wise cover hint narrow large provide kidney episode clay notable milk mention dizzy muffin crazy + - name: bob + coins: + - 5000000000000aevmos + address: cosmos1adn9gxjmrc3hrsdx5zpc9sj2ra7kgqkmphf8yw +faucet: + name: bob + coins: + - 10aevmos + host: 0.0.0.0:4600 + port: 4600 +genesis: + app_state: + crisis: + constant_fee: + denom: aevmos + chain_id: evmosd_9000-1 +validators: + - name: alice + bonded: 100000000000000000000aevmos + app: + evm-rpc: + address: 0.0.0.0:8545 + ws-address: 0.0.0.0:8546 + home: $HOME/.evmosd +apps: + - name: plugin1 + path: /path/to/plugin1 + - name: plugin2 + path: /path/to/plugin2 + with: + foo: bar + bar: baz diff --git a/ignite/config/chain/v1/testdata/testdata.go b/ignite/config/chain/v1/testdata/testdata.go new file mode 100644 index 0000000..b096595 --- /dev/null +++ b/ignite/config/chain/v1/testdata/testdata.go @@ -0,0 +1,27 @@ +package testdata + +import ( + "bytes" + _ "embed" + "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" + + v1 "github.com/ignite/cli/v29/ignite/config/chain/v1" +) + +//go:embed config.yaml +var ConfigYAML 
[]byte + +func GetConfig(t *testing.T) *v1.Config { + c := &v1.Config{} + + err := yaml.NewDecoder(bytes.NewReader(ConfigYAML)).Decode(c) + require.NoError(t, err) + + err = c.SetDefaults() + require.NoError(t, err) + + return c +} diff --git a/ignite/config/chain/v1/validator.go b/ignite/config/chain/v1/validator.go new file mode 100644 index 0000000..3c39f72 --- /dev/null +++ b/ignite/config/chain/v1/validator.go @@ -0,0 +1,137 @@ +package v1 + +import ( + "github.com/ignite/cli/v29/ignite/pkg/xyaml" +) + +// Validator holds info related to validator settings. +type Validator struct { + // Name is the name of the validator. + Name string `yaml:"name" doc:"Name of the validator."` + + // Bonded is how much the validator has staked. + Bonded string `yaml:"bonded" doc:"Amount staked by the validator."` + + // App overwrites appd's config/app.toml configs. + App xyaml.Map `yaml:"app,omitempty" doc:"Overwrites the appd's config/app.toml configurations."` + + // Config overwrites appd's config/config.toml configs. + Config xyaml.Map `yaml:"config,omitempty" doc:"Overwrites the appd's config/config.toml configurations."` + + // Client overwrites appd's config/client.toml configs. + Client xyaml.Map `yaml:"client,omitempty" doc:"Overwrites the appd's config/client.toml configurations."` + + // Home overwrites default home directory used for the app. + Home string `yaml:"home,omitempty" doc:"Overwrites the default home directory used for the application."` + + // Gentx overwrites appd's config/gentx.toml configs. + Gentx *Gentx `yaml:"gentx,omitempty" doc:"Overwrites the appd's config/gentx.toml configurations."` +} + +// Gentx holds info related to Gentx settings. +type Gentx struct { + // Amount is the amount for the current Gentx. + Amount string `yaml:"amount" doc:"Amount for the current Gentx."` + + // Moniker is the validator's (optional) moniker. + Moniker string `yaml:"moniker" doc:"Optional moniker for the validator."` + + // KeyringBackend is keyring's backend. 
+ KeyringBackend string `yaml:"keyring-backend" doc:"Backend for the keyring."` + + // ChainID is the network chain ID. + ChainID string `yaml:"chain-id" doc:"Network chain ID."` + + // CommissionMaxChangeRate is the maximum commission change rate percentage (per day). + CommissionMaxChangeRate string `yaml:"commission-max-change-rate" doc:"Maximum commission change rate percentage per day."` + + // CommissionMaxRate is the maximum commission rate percentage. + CommissionMaxRate string `yaml:"commission-max-rate" doc:"Maximum commission rate percentage (e.g., 0.01 = 1%)."` + + // CommissionRate is the initial commission rate percentage. + CommissionRate string `yaml:"commission-rate" doc:"Initial commission rate percentage (e.g., 0.01 = 1%)."` + + // Details is the validator's (optional) details. + Details string `yaml:"details" doc:"Optional details about the validator."` + + // SecurityContact is the validator's (optional) security contact email. + SecurityContact string `yaml:"security-contact" doc:"Optional security contact email for the validator."` + + // Website is the validator's (optional) website. + Website string `yaml:"website" doc:"Optional website for the validator."` + + // AccountNumber is the account number of the signing account (offline mode only). + AccountNumber int `yaml:"account-number" doc:"Account number of the signing account (offline mode only)."` + + // BroadcastMode is the transaction broadcasting mode (sync|async|block) (default "sync"). + BroadcastMode string `yaml:"broadcast-mode" doc:"Transaction broadcasting mode (sync|async|block) (default is 'sync')."` + + // DryRun is a boolean determining whether to ignore the --gas flag and perform a simulation of a transaction. + DryRun bool `yaml:"dry-run" doc:"Simulates the transaction without actually performing it, ignoring the --gas flag."` + + // FeeAccount is the fee account pays fees for the transaction instead of deducting from the signer. 
+ FeeAccount string `yaml:"fee-account" doc:"Account that pays the transaction fees instead of the signer."` + + // Fee is the fee to pay along with transaction; eg: 10uatom. + Fee string `yaml:"fee" doc:"Fee to pay with the transaction (e.g.: 10uatom)."` + + // From is the name or address of private key with which to sign. + From string `yaml:"from" doc:"Name or address of the private key used to sign the transaction."` + + // From is the gas limit to set per-transaction; set to "auto" to calculate sufficient gas automatically (default 200000). + Gas string `yaml:"gas" doc:"Gas limit per transaction; set to 'auto' to calculate sufficient gas automatically (default is 200000)."` + + // GasAdjustment is the adjustment factor to be multiplied against the estimate returned by the tx simulation; if the gas limit is set manually this flag is ignored (default 1). + GasAdjustment string `yaml:"gas-adjustment" doc:"Factor to multiply against the estimated gas (default is 1)."` + + // GasPrices is the gas prices in decimal format to determine the transaction fee (e.g. 0.1uatom). + GasPrices string `yaml:"gas-prices" doc:"Gas prices in decimal format to determine the transaction fee (e.g., 0.1uatom)."` + + // GenerateOnly is a boolean determining whether to build an unsigned transaction and write it to STDOUT. + GenerateOnly bool `yaml:"generate-only" doc:"Creates an unsigned transaction and writes it to STDOUT."` + + // Identity is the (optional) identity signature (ex. UPort or Keybase). + Identity string `yaml:"identity" doc:"Identity signature (e.g., UPort or Keybase)."` + + // IP is the node's public IP (default "192.168.1.64"). + IP string `yaml:"ip" doc:"Node's public IP address (default is '192.168.1.64')."` + + // KeyringDir is the client Keyring directory; if omitted, the default 'home' directory will be used. 
+ KeyringDir string `yaml:"keyring-dir" doc:"Directory for the client keyring; defaults to the 'home' directory if omitted."` + + // Ledger is a boolean determining whether to use a connected Ledger device. + Ledger bool `yaml:"ledger" doc:"Uses a connected Ledger device if true."` + + // KeyringDir is the minimum self delegation required on the validator. + MinSelfDelegation string `yaml:"min-self-delegation" doc:"Minimum self-delegation required for the validator."` + + // Node is <host>:<port> to tendermint rpc interface for this chain (default "tcp://localhost:26657"). + Node string `yaml:"node" doc:"<host>:<port> for the Tendermint RPC interface (default 'tcp://localhost:26657')"` + + // NodeID is the node's NodeID. + NodeID string `yaml:"node-id" doc:"Node's NodeID"` + + // Note is the note to add a description to the transaction (previously --memo). + Note string `yaml:"note" doc:"Adds a description to the transaction (formerly --memo)."` + + // Offline is a boolean determining the offline mode (does not allow any online functionality). + Offline bool `yaml:"offline" doc:"Operates in offline mode, disallowing any online functionality."` + + // Output is the output format (text|json) (default "json"). + Output string `yaml:"output" doc:"Output format (text|json) (default 'json')."` + + // OutputDocument writes the genesis transaction JSON document to the given file instead of the default location. + OutputDocument string `yaml:"output-document" doc:"Writes the genesis transaction JSON document to the specified file instead of the default location."` + + // PubKey is the validator's Protobuf JSON encoded public key. + PubKey string `yaml:"pubkey" doc:"Protobuf JSON encoded public key of the validator."` + + // Sequence is the sequence number of the signing account (offline mode only). 
+ Sequence uint `yaml:"sequence" doc:"Sequence number of the signing account (offline mode only)."` + + // SignMode is the choose sign mode (direct|amino-json), this is an advanced feature. + SignMode string `yaml:"sign-mode" doc:"Chooses sign mode (direct|amino-json), an advanced feature."` + + // TimeoutHeight sets a block timeout height to prevent the tx from being committed past a certain height. + TimeoutHeight uint `yaml:"timeout-height" doc:"Sets a block timeout height to prevent the transaction from being committed past a certain height."` +} diff --git a/ignite/config/chain/v1/validator_servers.go b/ignite/config/chain/v1/validator_servers.go new file mode 100644 index 0000000..2a78d03 --- /dev/null +++ b/ignite/config/chain/v1/validator_servers.go @@ -0,0 +1,150 @@ +package v1 + +import ( + "github.com/mitchellh/mapstructure" + + baseconfig "github.com/ignite/cli/v29/ignite/config/chain/defaults" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func DefaultServers() Servers { + s := Servers{} + s.GRPC.Address = baseconfig.GRPCAddress + s.GRPCWeb.Address = baseconfig.GRPCWebAddress + s.API.Address = baseconfig.APIAddress + s.P2P.Address = baseconfig.P2PAddress + s.RPC.Address = baseconfig.RPCAddress + s.RPC.PProfAddress = baseconfig.PProfAddress + + return s +} + +type Servers struct { + cosmosServers `mapstructure:",squash"` + tendermintServers `mapstructure:",squash"` +} + +type cosmosServers struct { + GRPC CosmosHost `mapstructure:"grpc"` + GRPCWeb CosmosHost `mapstructure:"grpc-web"` + API CosmosHost `mapstructure:"api"` +} + +type tendermintServers struct { + P2P TendermintHost `mapstructure:"p2p"` + RPC TendermintRPCHost `mapstructure:"rpc"` +} + +type CosmosHost struct { + Address string `mapstructure:"address,omitempty"` +} + +type TendermintHost struct { + Address string `mapstructure:"laddr,omitempty"` +} + +type TendermintRPCHost struct { + TendermintHost `mapstructure:",squash"` + + PProfAddress string 
`mapstructure:"pprof_laddr,omitempty"` +} + +func (v Validator) GetServers() (Servers, error) { + // Initialize servers with default addresses + s := DefaultServers() + + // Overwrite the default Cosmos SDK addresses with the configured ones + if err := mapstructure.Decode(v.App, &s); err != nil { + return Servers{}, errors.Errorf("error reading validator app servers: %w", err) + } + + // Overwrite the default Tendermint addresses with the configured ones + if err := mapstructure.Decode(v.Config, &s); err != nil { + return Servers{}, errors.Errorf("error reading tendermint validator config servers: %w", err) + } + + return s, nil +} + +func (v *Validator) SetServers(s Servers) error { + if err := v.setAppServers(s); err != nil { + return errors.Errorf("error updating validator app servers: %w", err) + } + + if err := v.setConfigServers(s); err != nil { + return errors.Errorf("error updating validator config servers: %w", err) + } + + return nil +} + +func (v *Validator) setAppServers(s Servers) error { + c, err := decodeServers(s.cosmosServers) + if err != nil { + return err + } + + v.App = mergeMaps(c, v.App) + + return nil +} + +func (v *Validator) setConfigServers(s Servers) error { + m, err := decodeServers(s.tendermintServers) + if err != nil { + return errors.Errorf("error updating validator config servers: %w", err) + } + + v.Config = mergeMaps(m, v.Config) + + return nil +} + +func decodeServers(input interface{}) (output map[string]interface{}, err error) { + // Decode the input structure into a map + if err := mapstructure.Decode(input, &output); err != nil { + return nil, err + } + + // Remove keys with empty server values from the map + for k := range output { + if v, _ := output[k].(map[string]interface{}); len(v) == 0 { + delete(output, k) + } + } + + // Don't return an empty map to avoid the generation of empty + // fields when the validator is saved to a YAML config file. 
+ if len(output) == 0 { + return nil, nil + } + + return +} + +func mergeMaps(src, dst map[string]interface{}) map[string]interface{} { + if len(src) == 0 { + return dst + } + + // Allow dst to be nil by initializing it here + if dst == nil { + dst = make(map[string]interface{}) + } + + for k, v := range src { + // When the current value is a map in both merge their values + if srcValue, ok := v.(map[string]interface{}); ok { + if dstValue, ok := dst[k].(map[string]interface{}); ok { + mergeMaps(srcValue, dstValue) + + continue + } + } + + // By default overwrite the destination map with the source value + dst[k] = v + } + + return dst +} diff --git a/ignite/config/chain/v1/validator_servers_test.go b/ignite/config/chain/v1/validator_servers_test.go new file mode 100644 index 0000000..5f581b5 --- /dev/null +++ b/ignite/config/chain/v1/validator_servers_test.go @@ -0,0 +1,69 @@ +package v1_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + v1 "github.com/ignite/cli/v29/ignite/config/chain/v1" + "github.com/ignite/cli/v29/ignite/pkg/xyaml" +) + +func TestValidatorGetServers(t *testing.T) { + // Arrange + want := v1.DefaultServers() + want.RPC.Address = "127.0.0.0:1" + want.P2P.Address = "127.0.0.0:2" + want.GRPC.Address = "127.0.0.0:3" + want.GRPCWeb.Address = "127.0.0.0:4" + want.RPC.PProfAddress = "127.0.0.0:5" + want.API.Address = "127.0.0.0:6" + + v := v1.Validator{ + App: map[string]interface{}{ + "grpc": map[string]interface{}{"address": want.GRPC.Address}, + "grpc-web": map[string]interface{}{"address": want.GRPCWeb.Address}, + "api": map[string]interface{}{"address": want.API.Address}, + }, + Config: map[string]interface{}{ + "p2p": map[string]interface{}{"laddr": want.P2P.Address}, + "rpc": map[string]interface{}{ + "laddr": want.RPC.Address, + "pprof_laddr": want.RPC.PProfAddress, + }, + }, + } + + // Act + s, err := v.GetServers() + + // Assert + require.NoError(t, err) + require.Equal(t, want, s) +} + +func 
TestValidatorSetServers(t *testing.T) { + // Arrange + v := v1.Validator{} + s := v1.DefaultServers() + wantApp := xyaml.Map{ + "grpc": map[string]interface{}{"address": s.GRPC.Address}, + "grpc-web": map[string]interface{}{"address": s.GRPCWeb.Address}, + "api": map[string]interface{}{"address": s.API.Address}, + } + wantConfig := xyaml.Map{ + "p2p": map[string]interface{}{"laddr": s.P2P.Address}, + "rpc": map[string]interface{}{ + "laddr": s.RPC.Address, + "pprof_laddr": s.RPC.PProfAddress, + }, + } + + // Act + err := v.SetServers(s) + + // Assert + require.NoError(t, err) + require.Equal(t, wantApp, v.App, "cosmos app config is not equal") + require.Equal(t, wantConfig, v.Config, "tendermint config is not equal") +} diff --git a/ignite/config/chain/version/version.go b/ignite/config/chain/version/version.go new file mode 100644 index 0000000..9f7688b --- /dev/null +++ b/ignite/config/chain/version/version.go @@ -0,0 +1,31 @@ +package version + +import ( + "fmt" + "io" +) + +// Version defines the type for the config version number. +type Version uint + +func (v Version) String() string { + return fmt.Sprintf("v%d", v) +} + +// Converter defines the interface required to migrate configurations to newer versions. +type Converter interface { + // Clone clones the config by returning a new copy of the current one. + Clone() (Converter, error) + + // SetDefaults assigns default values to empty config fields. + SetDefaults() error + + // GetVersion returns the config version. + GetVersion() Version + + // ConvertNext converts the config to the next version. + ConvertNext() (Converter, error) + + // Decode decodes the config file from YAML and updates its values. 
+ Decode(io.Reader) error +} diff --git a/ignite/config/config.go b/ignite/config/config.go new file mode 100644 index 0000000..7cd24c2 --- /dev/null +++ b/ignite/config/config.go @@ -0,0 +1,9 @@ +package config + +import ( + "github.com/ignite/cli/v29/ignite/pkg/env" + "github.com/ignite/cli/v29/ignite/pkg/xfilepath" +) + +// DirPath returns the path of configuration directory of Ignite. +var DirPath = xfilepath.Mkdir(env.ConfigDir()) diff --git a/ignite/config/plugins/config.go b/ignite/config/plugins/config.go new file mode 100644 index 0000000..2ac910e --- /dev/null +++ b/ignite/config/plugins/config.go @@ -0,0 +1,154 @@ +package plugins + +import ( + "os" + "slices" + "strings" + + "gopkg.in/yaml.v3" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gomodule" + "github.com/ignite/cli/v29/ignite/pkg/xfilepath" +) + +type Config struct { + path string + + // Apps holds the list of installed Ignite Apps. + // Ignite Apps are implemented as plugins. + Apps []Plugin `yaml:"apps"` +} + +// Plugin keeps plugin name and location. +type Plugin struct { + // Path holds the location of the plugin. + // A path can be local, in that case it must start with a `/`. + // A remote path on the other hand, is an URL to a public remote git + // repository. For example: + // + // path: github.com/foo/bar + // + // It can contain a path inside that repository, if for instance the repo + // contains multiple plugins, for example: + // + // path: github.com/foo/bar/plugin1 + // + // It can also specify a tag or a branch, by adding a `@` and the branch/tag + // name at the end of the path. For example: + // + // path: github.com/foo/bar/plugin1@v42 + Path string `yaml:"path"` + + // With holds arguments passed to the plugin interface + With map[string]string `yaml:"with,omitempty"` + + // Global holds whether the plugin is installed globally + // (default: $HOME/.ignite/apps/igniteapps.yml) or locally for a chain. 
+ Global bool `yaml:"-"` +} + +// RemoveDuplicates takes a list of Plugins and returns a new list with only unique values. +// Local plugins take precedence over global plugins if duplicate paths exist. +// Duplicates are compared regardless of version. +func RemoveDuplicates(plugins []Plugin) (unique []Plugin) { + // struct to track plugin configs + type check struct { + hasPath bool + global bool + prevIndex int + } + + keys := make(map[string]check) + for i, plugin := range plugins { + c := keys[plugin.CanonicalPath()] + if !c.hasPath { + keys[plugin.CanonicalPath()] = check{ + hasPath: true, + global: plugin.Global, + prevIndex: i, + } + unique = append(unique, plugin) + } else if c.hasPath && !plugin.Global && c.global { // overwrite global plugin if local duplicate exists + unique[c.prevIndex] = plugin + } + } + + return unique +} + +// IsGlobal returns whether the plugin is installed globally or locally for a chain. +func (p Plugin) IsGlobal() bool { + return p.Global +} + +// IsLocalPath returns true if the plugin path is a local directory. +func (p Plugin) IsLocalPath() bool { + return xfilepath.IsDir(p.Path) +} + +// HasPath verifies if a plugin has the given path regardless of version. +// For example github.com/foo/bar@v1 and github.com/foo/bar@v2 have the +// same path so "true" will be returned. +func (p Plugin) HasPath(path string) bool { + if path == "" { + return false + } + if p.Path == path { + return true + } + pluginPath := p.CanonicalPath() + path = strings.Split(path, "@")[0] + return pluginPath == path +} + +// CanonicalPath returns the canonical path of a plugin (excludes version ref). +func (p Plugin) CanonicalPath() string { + return strings.Split(p.Path, "@")[0] +} + +// Path return the path of the config file. +func (c Config) Path() string { + return c.path +} + +// Save persists a config yaml to a specified path on disk. +// Must be writable. 
+func (c *Config) Save() error { + errf := func(err error) error { + return errors.Errorf("plugin config save: %w", err) + } + if c.path == "" { + return errf(errors.New("empty path")) + } + file, err := os.Create(c.path) + if err != nil { + return errf(err) + } + defer file.Close() + if err := yaml.NewEncoder(file).Encode(c); err != nil { + return errf(err) + } + return nil +} + +// HasPlugin returns true if c contains a plugin with given path. +// Returns also true if there's a local plugin with the module name equal to +// that path. +func (c Config) HasPlugin(path string) bool { + return slices.ContainsFunc(c.Apps, func(cp Plugin) bool { + if cp.HasPath(path) { + return true + } + if cp.IsLocalPath() { + // check local plugin go.mod to see if module name match plugin path + gm, err := gomodule.ParseAt(cp.Path) + if err != nil { + // Skip if we can't parse gomod + return false + } + return Plugin{Path: gm.Module.Mod.Path}.HasPath(path) + } + return false + }) +} diff --git a/ignite/config/plugins/config_test.go b/ignite/config/plugins/config_test.go new file mode 100644 index 0000000..ce74b28 --- /dev/null +++ b/ignite/config/plugins/config_test.go @@ -0,0 +1,389 @@ +package plugins_test + +import ( + "os" + "path" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + pluginsconfig "github.com/ignite/cli/v29/ignite/config/plugins" +) + +func TestPluginIsGlobal(t *testing.T) { + assert.False(t, pluginsconfig.Plugin{}.IsGlobal()) + assert.True(t, pluginsconfig.Plugin{Global: true}.IsGlobal()) +} + +func TestPluginIsLocalPath(t *testing.T) { + pwd, err := os.Getwd() + require.NoError(t, err) + + assert.False(t, pluginsconfig.Plugin{}.IsLocalPath()) + assert.False(t, pluginsconfig.Plugin{Path: "github.com/ignite/example"}.IsLocalPath()) + assert.False(t, pluginsconfig.Plugin{Path: "invalid_path"}.IsLocalPath()) + assert.False(t, pluginsconfig.Plugin{Path: "/testdata"}.IsLocalPath()) + assert.True(t, 
pluginsconfig.Plugin{Path: "testdata"}.IsLocalPath()) + assert.True(t, pluginsconfig.Plugin{Path: "/"}.IsLocalPath()) + assert.True(t, pluginsconfig.Plugin{Path: "."}.IsLocalPath()) + assert.True(t, pluginsconfig.Plugin{Path: ".."}.IsLocalPath()) + assert.True(t, pluginsconfig.Plugin{Path: pwd}.IsLocalPath()) + assert.True(t, pluginsconfig.Plugin{Path: filepath.Join(pwd, "testdata")}.IsLocalPath()) +} + +func TestPluginHasPath(t *testing.T) { + tests := []struct { + name string + plugin pluginsconfig.Plugin + path string + expectedRes bool + }{ + { + name: "empty both path", + plugin: pluginsconfig.Plugin{}, + expectedRes: false, + }, + { + name: "simple path", + plugin: pluginsconfig.Plugin{ + Path: "github.com/ignite/example", + }, + path: "github.com/ignite/example", + expectedRes: true, + }, + { + name: "plugin path with ref", + plugin: pluginsconfig.Plugin{ + Path: "github.com/ignite/example@v1", + }, + path: "github.com/ignite/example", + expectedRes: true, + }, + { + name: "plugin path with empty ref", + plugin: pluginsconfig.Plugin{ + Path: "github.com/ignite/example@", + }, + path: "github.com/ignite/example", + expectedRes: true, + }, + { + name: "both path with different ref", + plugin: pluginsconfig.Plugin{ + Path: "github.com/ignite/example@v1", + }, + path: "github.com/ignite/example@v2", + expectedRes: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + res := tt.plugin.HasPath(tt.path) + + require.Equal(t, tt.expectedRes, res) + }) + } +} + +func TestPluginCanonicalPath(t *testing.T) { + tests := []struct { + name string + plugin pluginsconfig.Plugin + expectedPath string + }{ + { + name: "empty both path", + plugin: pluginsconfig.Plugin{}, + expectedPath: "", + }, + { + name: "simple path", + plugin: pluginsconfig.Plugin{ + Path: "github.com/ignite/example", + }, + expectedPath: "github.com/ignite/example", + }, + { + name: "plugin path with ref", + plugin: pluginsconfig.Plugin{ + Path: 
"github.com/ignite/example@v1", + }, + expectedPath: "github.com/ignite/example", + }, + { + name: "plugin path with empty ref", + plugin: pluginsconfig.Plugin{ + Path: "github.com/ignite/example@", + }, + expectedPath: "github.com/ignite/example", + }, + { + name: "plugin local directory path", + plugin: pluginsconfig.Plugin{ + Path: "/home/user/go/foo/bar", + }, + expectedPath: "/home/user/go/foo/bar", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + res := tt.plugin.CanonicalPath() + require.Equal(t, tt.expectedPath, res) + }) + } +} + +func TestRemoveDuplicates(t *testing.T) { + tests := []struct { + name string + configs []pluginsconfig.Plugin + expected []pluginsconfig.Plugin + }{ + { + name: "do nothing for empty list", + configs: []pluginsconfig.Plugin(nil), + expected: []pluginsconfig.Plugin(nil), + }, + { + name: "remove duplicates", + configs: []pluginsconfig.Plugin{ + { + Path: "foo/bar", + }, + { + Path: "foo/bar", + }, + { + Path: "bar/foo", + }, + }, + expected: []pluginsconfig.Plugin{ + { + Path: "foo/bar", + }, + { + Path: "bar/foo", + }, + }, + }, + { + name: "do nothing for no duplicates", + configs: []pluginsconfig.Plugin{ + { + Path: "foo/bar", + }, + { + Path: "bar/foo", + }, + }, + expected: []pluginsconfig.Plugin{ + { + Path: "foo/bar", + }, + { + Path: "bar/foo", + }, + }, + }, + { + name: "prioritize local plugins", + configs: []pluginsconfig.Plugin{ + { + Path: "foo/bar", + Global: true, + }, + { + Path: "bar/foo", + Global: true, + }, + { + Path: "foo/bar", + Global: false, + }, + { + Path: "bar/foo", + Global: false, + }, + }, + expected: []pluginsconfig.Plugin{ + { + Path: "foo/bar", + Global: false, + }, + { + Path: "bar/foo", + Global: false, + }, + }, + }, + { + name: "prioritize local plugins different versions", + configs: []pluginsconfig.Plugin{ + { + Path: "foo/bar@v1", + Global: true, + }, + { + Path: "bar/foo", + Global: true, + }, + { + Path: "foo/bar@v2", + Global: false, + }, + { + Path: 
"bar/foo", + Global: false, + }, + }, + expected: []pluginsconfig.Plugin{ + { + Path: "foo/bar@v2", + Global: false, + }, + { + Path: "bar/foo", + Global: false, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + unique := pluginsconfig.RemoveDuplicates(tt.configs) + require.EqualValues(t, tt.expected, unique) + }) + } +} + +func TestConfigSave(t *testing.T) { + tests := []struct { + name string + buildConfig func(*testing.T) *pluginsconfig.Config + expectedError string + expectedContent string + }{ + { + name: "fail: config path is empty", + buildConfig: func(t *testing.T) *pluginsconfig.Config { + t.Helper() + return &pluginsconfig.Config{} + }, + expectedError: "plugin config save: empty path", + }, + { + name: "ok: config path is a file that doesn't exist", + buildConfig: func(t *testing.T) *pluginsconfig.Config { + t.Helper() + cfg, err := pluginsconfig.ParseDir(t.TempDir()) + require.NoError(t, err) + return cfg + }, + expectedContent: "apps: []\n", + }, + { + name: "ok: config path is an existing file", + buildConfig: func(t *testing.T) *pluginsconfig.Config { + t.Helper() + // copy testdata/igniteapps.yml to tmp because it will be modified + dir := t.TempDir() + bz, err := os.ReadFile("testdata/igniteapps.yml") + require.NoError(t, err) + err = os.WriteFile(path.Join(dir, "igniteapps.yml"), bz, 0o666) + require.NoError(t, err) + // load from tmp + cfg, _ := pluginsconfig.ParseDir(dir) + // add a new plugin + cfg.Apps = append(cfg.Apps, pluginsconfig.Plugin{ + Path: "/path/to/plugin3", + With: map[string]string{"key": "val"}, + }) + // update a plugin + cfg.Apps[1].Path = "/path/to/plugin22" + cfg.Apps[1].With["key"] = "val" + return cfg + }, + expectedContent: `apps: + - path: /path/to/plugin1 + - path: /path/to/plugin22 + with: + bar: baz + foo: bar + key: val + - path: /path/to/plugin3 + with: + key: val +`, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + require := require.New(t) + cfg := 
tt.buildConfig(t) + + err := cfg.Save() + + if tt.expectedError != "" { + require.EqualError(err, tt.expectedError) + return + } + require.NoError(err) + bz, err := os.ReadFile(cfg.Path()) + require.NoError(err) + require.Equal(string(bz), tt.expectedContent) + }) + } +} + +func TestConfigHasPlugin(t *testing.T) { + wd, err := os.Getwd() + require.NoError(t, err) + tests := []struct { + name string + cfg pluginsconfig.Config + expectedFound bool + }{ + { + name: "empty config", + expectedFound: false, + }, + { + name: "not found in config", + cfg: pluginsconfig.Config{ + Apps: []pluginsconfig.Plugin{ + {Path: "github.com/ignite/example2"}, + }, + }, + expectedFound: false, + }, + { + name: "found in config", + cfg: pluginsconfig.Config{ + Apps: []pluginsconfig.Plugin{ + {Path: "github.com/ignite/example2"}, + {Path: "github.com/ignite/example@master"}, + }, + }, + expectedFound: true, + }, + { + name: "found in config but from a local plugin", + cfg: pluginsconfig.Config{ + Apps: []pluginsconfig.Plugin{ + {Path: "github.com/ignite/example2"}, + {Path: path.Join(wd, "testdata", "localplugin", "example")}, + }, + }, + expectedFound: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + found := tt.cfg.HasPlugin("github.com/ignite/example@v42") + + assert.Equal(t, tt.expectedFound, found) + }) + } +} diff --git a/ignite/config/plugins/parse.go b/ignite/config/plugins/parse.go new file mode 100644 index 0000000..2b63bc2 --- /dev/null +++ b/ignite/config/plugins/parse.go @@ -0,0 +1,74 @@ +package plugins + +import ( + "io" + "os" + "path/filepath" + + "gopkg.in/yaml.v3" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// ParseDir expects to find a plugin config file in dir. If dir is not a folder, +// an error is returned. +// The plugin config file format can be `igniteapps.yml` or `igniteapps.yaml`. If +// found, the file is parsed into a Config and returned. 
If no file from the +// given names above are found, then an empty config is returned, w/o errors. +func ParseDir(dir string) (*Config, error) { + // handy function that wraps and prefix err with a common label + errf := func(err error) error { + return errors.Errorf("plugin config parse: %w", err) + } + fi, err := os.Stat(dir) + if err != nil { + return nil, errf(err) + } + if !fi.IsDir() { + return nil, errf(errors.Errorf("path %s is not a dir", dir)) + } + + filename, err := locateFile(dir) + if err != nil { + return nil, errf(err) + } + c := Config{ + path: filename, + } + + f, err := os.Open(filename) + if err != nil { + if os.IsNotExist(err) { + return &c, nil + } + return nil, errf(err) + } + defer f.Close() + + // if the error is end of file meaning an empty file on read return nil + if err := yaml.NewDecoder(f).Decode(&c); err != nil && !errors.Is(err, io.EOF) { + return nil, errf(err) + } + return &c, nil +} + +var ( + filenames = []string{"igniteapps.yml", "igniteapps.yaml"} + defaultFilename = filenames[0] +) + +func locateFile(root string) (string, error) { + for _, name := range filenames { + path := filepath.Join(root, name) + _, err := os.Stat(path) + if err == nil { + // file found + return path, nil + } + if !os.IsNotExist(err) { + return "", err + } + } + // no file found, return the default config name + return filepath.Join(root, defaultFilename), nil +} diff --git a/ignite/config/plugins/parse_test.go b/ignite/config/plugins/parse_test.go new file mode 100644 index 0000000..b924a13 --- /dev/null +++ b/ignite/config/plugins/parse_test.go @@ -0,0 +1,90 @@ +package plugins_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + pluginsconfig "github.com/ignite/cli/v29/ignite/config/plugins" +) + +func TestParseDir(t *testing.T) { + tests := []struct { + name string + path string + expectedError string + expectedPlugins []pluginsconfig.Plugin + expectedPath string + }{ + { + name: "fail: path is not a dir", + path: 
"testdata/igniteapps.yml", + expectedError: "plugin config parse: path testdata/igniteapps.yml is not a dir", + }, + { + name: "fail: path doesn't exists", + path: "testdata/xxx/yyy", + expectedError: "plugin config parse: stat testdata/xxx/yyy: no such file or directory", + }, + { + name: "ok: path doesn't contain any config", + path: "testdata/noconfig", + expectedPlugins: nil, + expectedPath: "testdata/noconfig/igniteapps.yml", + }, + { + name: "fail: path contains an invalid yml file", + path: "testdata/invalid", + expectedError: "plugin config parse: yaml: unmarshal errors:\n line 1: cannot unmarshal !!str `not yaml !` into plugins.Config", + }, + { + name: "ok: path contains a plugin.yml file", + path: "testdata", + expectedPlugins: []pluginsconfig.Plugin{ + { + Path: "/path/to/plugin1", + }, + { + Path: "/path/to/plugin2", + With: map[string]string{ + "bar": "baz", + "foo": "bar", + }, + }, + }, + expectedPath: "testdata/igniteapps.yml", + }, + { + name: "ok: path contains a plugin.yaml file", + path: "testdata/other", + expectedPlugins: []pluginsconfig.Plugin{ + { + Path: "/path/to/plugin1", + }, + { + Path: "/path/to/plugin2", + With: map[string]string{ + "bar": "baz", + "foo": "bar", + }, + }, + }, + expectedPath: "testdata/other/igniteapps.yaml", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + require := require.New(t) + + cfg, err := pluginsconfig.ParseDir(tt.path) + + if tt.expectedError != "" { + require.EqualError(err, tt.expectedError) + return + } + require.NoError(err) + require.Equal(tt.expectedPlugins, cfg.Apps) + require.Equal(tt.expectedPath, cfg.Path()) + }) + } +} diff --git a/ignite/config/plugins/testdata/igniteapps.yml b/ignite/config/plugins/testdata/igniteapps.yml new file mode 100644 index 0000000..d428163 --- /dev/null +++ b/ignite/config/plugins/testdata/igniteapps.yml @@ -0,0 +1,8 @@ +apps: + - name: plugin1 + path: /path/to/plugin1 + - name: plugin2 + path: /path/to/plugin2 + with: + foo: bar + bar: 
baz diff --git a/ignite/config/plugins/testdata/invalid/igniteapps.yml b/ignite/config/plugins/testdata/invalid/igniteapps.yml new file mode 100644 index 0000000..81eb8c2 --- /dev/null +++ b/ignite/config/plugins/testdata/invalid/igniteapps.yml @@ -0,0 +1 @@ +not yaml ! diff --git a/ignite/config/plugins/testdata/localplugin/example/go.mod b/ignite/config/plugins/testdata/localplugin/example/go.mod new file mode 100644 index 0000000..8aa29e2 --- /dev/null +++ b/ignite/config/plugins/testdata/localplugin/example/go.mod @@ -0,0 +1,3 @@ +module github.com/ignite/example + +go 1.20 diff --git a/ignite/config/plugins/testdata/noconfig/somefile b/ignite/config/plugins/testdata/noconfig/somefile new file mode 100644 index 0000000..473a0f4 diff --git a/ignite/config/plugins/testdata/other/igniteapps.yaml b/ignite/config/plugins/testdata/other/igniteapps.yaml new file mode 100644 index 0000000..d428163 --- /dev/null +++ b/ignite/config/plugins/testdata/other/igniteapps.yaml @@ -0,0 +1,8 @@ +apps: + - name: plugin1 + path: /path/to/plugin1 + - name: plugin2 + path: /path/to/plugin2 + with: + foo: bar + bar: baz diff --git a/ignite/config/testdata/testdata.go b/ignite/config/testdata/testdata.go new file mode 100644 index 0000000..a40d18e --- /dev/null +++ b/ignite/config/testdata/testdata.go @@ -0,0 +1,26 @@ +package testdata + +import ( + "testing" + + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + networkconfigTestdata "github.com/ignite/cli/v29/ignite/config/chain/network/testdata" + v0testdata "github.com/ignite/cli/v29/ignite/config/chain/v0/testdata" + v1testdata "github.com/ignite/cli/v29/ignite/config/chain/v1/testdata" + "github.com/ignite/cli/v29/ignite/config/chain/version" +) + +var Versions = map[version.Version][]byte{ + 0: v0testdata.ConfigYAML, + 1: v1testdata.ConfigYAML, +} + +var NetworkConfig = networkconfigTestdata.ConfigYAML + +func GetLatestConfig(t *testing.T) *chainconfig.Config { + return v1testdata.GetConfig(t) +} + +func 
GetLatestNetworkConfig(t *testing.T) *chainconfig.Config { + return networkconfigTestdata.GetConfig(t) +} diff --git a/ignite/internal/analytics/analytics.go b/ignite/internal/analytics/analytics.go new file mode 100644 index 0000000..5b92a39 --- /dev/null +++ b/ignite/internal/analytics/analytics.go @@ -0,0 +1,190 @@ +package analytics + +import ( + "context" + "encoding/json" + "os" + "path/filepath" + "strconv" + "strings" + "sync" + + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/config" + "github.com/ignite/cli/v29/ignite/internal/sentry" + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/randstr" + "github.com/ignite/cli/v29/ignite/version" +) + +const ( + telemetryEndpoint = "https://api.ignite.com/v1/telemetry" + envDoNotTrack = "DO_NOT_TRACK" + envCI = "CI" + envGitHubActions = "GITHUB_ACTIONS" + igniteAnonIdentity = "anon_identity.json" +) + +var matomoClient MatomoClient + +// anonIdentity represents an analytics identity file. +type anonIdentity struct { + // Name represents the username. + Name string `json:"name" yaml:"name"` + // DoNotTrack represents the user track choice. + DoNotTrack bool `json:"doNotTrack" yaml:"doNotTrack"` +} + +func init() { + matomoClient = NewMatomoClient( + telemetryEndpoint, + WithIDSite(4), + WithSource("https://cli.ignite.com"), + ) +} + +// SendMetric send command metrics to analytics. 
+func SendMetric(wg *sync.WaitGroup, cmd *cobra.Command) { + if cmd.Name() == "version" { + return + } + + dntInfo, err := checkDNT() + if err != nil || dntInfo.DoNotTrack { + return + } + + versionInfo, err := version.GetInfo(context.Background()) + if err != nil { + return + } + + var ( + path = cmd.CommandPath() + scaffoldType = "" + ) + if strings.Contains(path, "ignite scaffold") { + splitCMD := strings.Split(path, " ") + if len(splitCMD) > 2 { + scaffoldType = splitCMD[2] + } + } + + met := Metric{ + Name: cmd.Name(), + Cmd: path, + ScaffoldType: scaffoldType, + OS: versionInfo.OS, + Arch: versionInfo.Arch, + Version: versionInfo.CLIVersion, + CLIVersion: versionInfo.CLIVersion, + GoVersion: versionInfo.GoVersion, + SDKVersion: versionInfo.SDKVersion, + BuildDate: versionInfo.BuildDate, + SourceHash: versionInfo.SourceHash, + ConfigVersion: versionInfo.ConfigVersion, + Uname: versionInfo.Uname, + CWD: versionInfo.CWD, + BuildFromSource: versionInfo.BuildFromSource, + IsCI: getIsCI(), + } + + wg.Add(1) + go func() { + defer wg.Done() + _ = matomoClient.SendMetric(dntInfo.Name, met) + }() +} + +// EnableSentry enable errors reporting to Sentry. 
+func EnableSentry(ctx context.Context, wg *sync.WaitGroup) { + dntInfo, err := checkDNT() + if err != nil || dntInfo.DoNotTrack { + return + } + + closeSentry, err := sentry.InitSentry(ctx) + wg.Add(1) + go func() { + defer wg.Done() + if err == nil { + defer closeSentry() + } + }() +} + +// checkDNT check if the user allow to track data or if the DO_NOT_TRACK +// env var is set https://consoledonottrack.com/ +func checkDNT() (anonIdentity, error) { + if dnt := os.Getenv(envDoNotTrack); dnt != "" { + if dnt, err := strconv.ParseBool(dnt); err != nil || dnt { + return anonIdentity{DoNotTrack: true}, nil + } + } + + globalPath, err := config.DirPath() + if err != nil { + return anonIdentity{}, err + } + if err := os.Mkdir(globalPath, 0o700); err != nil && !os.IsExist(err) { + return anonIdentity{}, err + } + + identityPath := filepath.Join(globalPath, igniteAnonIdentity) + data, err := os.ReadFile(identityPath) + if err != nil && !os.IsNotExist(err) { + return anonIdentity{}, err + } + + var i anonIdentity + if err := json.Unmarshal(data, &i); err == nil { + return i, nil + } + + i.Name = randstr.Runes(16) + i.DoNotTrack = false + + message := "Ignite uses anonymized metrics to enhance the application, " + + "focusing on features such as command usage. We do not collect " + + "identifiable personal information. Your privacy is important to us. " + + "For more details, please visit our Privacy Policy at https://ignite.com/privacy " + + "and our Terms of Use at https://ignite.com/terms-of-use. " + + "Do you consent to the collection of these usage metrics for analytics purposes?" 
+ + session := cliui.New() + err = session.AskConfirm(message) + if err != nil && !errors.Is(err, cliui.ErrAbort) { + return anonIdentity{}, err + } + + if errors.Is(err, cliui.ErrAbort) { + i.DoNotTrack = true + } + + data, err = json.Marshal(&i) + if err != nil { + return i, err + } + + return i, os.WriteFile(identityPath, data, 0o600) +} + +func getIsCI() bool { + ci, err := strconv.ParseBool(os.Getenv(envCI)) + if err != nil { + return false + } + + if ci { + return true + } + + ci, err = strconv.ParseBool(os.Getenv(envGitHubActions)) + if err != nil { + return false + } + + return ci +} diff --git a/ignite/internal/analytics/matomo.go b/ignite/internal/analytics/matomo.go new file mode 100644 index 0000000..bd2ab96 --- /dev/null +++ b/ignite/internal/analytics/matomo.go @@ -0,0 +1,265 @@ +package analytics + +import ( + "crypto/rand" + "fmt" + "math" + "math/big" + "net/http" + "net/url" + "strings" + "time" + + "github.com/google/go-querystring/query" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +type ( + // MatomoClient is a matomo client. + MatomoClient struct { + endpoint string + idSite uint // Matomo ID Site. + tokenAuth string // Matomo Token Auth. + source string + httpClient http.Client + } + + // MatomoParams analytics metrics body. + MatomoParams struct { + IDSite uint `url:"idsite"` + Rec uint `url:"rec"` + ActionName string `url:"action_name"` + APIVersion uint `url:"apiv"` + TokenAuth string `url:"token_auth,omitempty"` + Rand uint64 `url:"rand,omitempty"` + URL string `url:"url,omitempty"` + UTMSource string `url:"utm_source,omitempty"` + UTMMedium string `url:"utm_medium,omitempty"` + UTMCampaign string `url:"utm_campaign,omitempty"` + UTMContent string `url:"utm_content,omitempty"` + UserID string `url:"uid,omitempty"` + UserAgent string `url:"ua,omitempty"` + Hour int `url:"h,omitempty"` + Minute int `url:"m,omitempty"` + Second int `url:"s,omitempty"` + + // Dimension1 development mode boolean. 
+ // 1 = devMode ON | 0 = devMode OFF. + Dimension1 uint `url:"dimension1"` + + // Dimension2 internal boolean. + // 1 = internal ON not supported at present | 0 = internal OFF. + Dimension2 uint `url:"dimension2"` + + // Dimension3 is deprecated. + // Should always be 0. + Dimension3 uint `url:"dimension3"` + + // Dimension4 ignite version + Dimension4 string `url:"dimension4,omitempty"` + + // Dimension6 ignite config version + Dimension6 string `url:"dimension6,omitempty"` + + // Dimension7 full cli command + Dimension7 string `url:"dimension7,omitempty"` + + // Dimension11 scaffold customization type + Dimension11 string `url:"dimension11,omitempty"` + + // Dimension13 command level 1. + Dimension13 string `url:"dimension13,omitempty"` + + // Dimension14 command level 2. + Dimension14 string `url:"dimension14,omitempty"` + + // Dimension15 command level 3. + Dimension15 string `url:"dimension15,omitempty"` + + // Dimension16 command level 4. + Dimension16 string `url:"dimension16,omitempty"` + + // Dimension17 cosmos-sdk version. + Dimension17 string `url:"dimension17,omitempty"` + + // Dimension18 operational system. + Dimension18 string `url:"dimension18,omitempty"` + + // Dimension19 system architecture. + Dimension19 string `url:"dimension19,omitempty"` + + // Dimension20 golang version. + Dimension20 string `url:"dimension20,omitempty"` + + // Dimension21 command level 5. + Dimension21 string `url:"dimension21,omitempty"` + + // Dimension22 command level 6. + Dimension22 string `url:"dimension22,omitempty"` + } + // Metric represents a custom data. + Metric struct { + Name string + Cmd string + OS string + Arch string + Version string + CLIVersion string + GoVersion string + SDKVersion string + BuildDate string + SourceHash string + ConfigVersion string + Uname string + CWD string + ScaffoldType string + BuildFromSource bool + IsCI bool + } +) + +// Option configures code generation. +type Option func(*MatomoClient) + +// WithIDSite adds an id site. 
+func WithIDSite(idSite uint) Option { + return func(c *MatomoClient) { + c.idSite = idSite + } +} + +// WithTokenAuth adds a matomo token authentication. +func WithTokenAuth(tokenAuth string) Option { + return func(c *MatomoClient) { + c.tokenAuth = tokenAuth + } +} + +// WithSource adds a matomo URL source. +func WithSource(source string) Option { + return func(c *MatomoClient) { + c.source = source + } +} + +// NewMatomoClient creates a new Matomo client. +func NewMatomoClient(endpoint string, opts ...Option) MatomoClient { + c := MatomoClient{ + endpoint: endpoint, + source: endpoint, + httpClient: http.Client{ + Timeout: 1500 * time.Millisecond, + }, + } + // apply analytics options. + for _, o := range opts { + o(&c) + } + return c +} + +// Send sends metric event to analytics. +func (c MatomoClient) Send(params MatomoParams) error { + requestURL, err := url.Parse(c.endpoint) + if err != nil { + return err + } + + // encode request parameters. + queryParams, err := query.Values(params) + if err != nil { + return err + } + requestURL.RawQuery = queryParams.Encode() + + // Create an HTTP request with the payload. + resp, err := c.httpClient.Get(requestURL.String()) + if err != nil { + return errors.Wrapf(err, "error creating HTTP request: %s", requestURL.String()) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return errors.Errorf("error to add matomo analytics metric. Status code: %d", resp.StatusCode) + } + + return nil +} + +// SendMetric build the metrics and send to analytics. 
+func (c MatomoClient) SendMetric(sessionID string, metric Metric) error { + var ( + now = time.Now() + r, _ = rand.Int(rand.Reader, big.NewInt(math.MaxInt64)) + utmMedium = "dev" + ) + if !metric.BuildFromSource { + utmMedium = "binary" + } + + cmd := splitCommand(metric.Cmd) + + return c.Send(MatomoParams{ + IDSite: c.idSite, + Rec: 1, + APIVersion: 1, + TokenAuth: c.tokenAuth, + Rand: r.Uint64(), + URL: c.metricURL(metric.Cmd), + UTMSource: "source-code-github", + UTMMedium: utmMedium, + UTMCampaign: metric.CLIVersion, + UTMContent: fmt.Sprintf("commit-%s", metric.SourceHash), + UserID: sessionID, + UserAgent: "Go-http-client", + ActionName: metric.Cmd, + Hour: now.Hour(), + Minute: now.Minute(), + Second: now.Second(), + Dimension1: 0, + Dimension2: formatBool(metric.IsCI), + Dimension4: metric.Version, + Dimension6: metric.ConfigVersion, + Dimension7: metric.Cmd, + Dimension11: metric.ScaffoldType, + Dimension13: cmd[0], + Dimension14: cmd[1], + Dimension15: cmd[2], + Dimension16: cmd[3], + Dimension17: metric.SDKVersion, + Dimension18: metric.OS, + Dimension19: metric.Arch, + Dimension20: metric.GoVersion, + Dimension21: cmd[4], + Dimension22: cmd[5], + }) +} + +// formatBool returns "1" or "0" according to the value of b. +func formatBool(b bool) uint { + if b { + return 1 + } + return 0 +} + +// splitCommand splice the command into a slice with length 6. +func splitCommand(cmd string) []string { + var ( + splitCmd = strings.Split(cmd, " ") + cmdLevels = make([]string, 6) + ) + for i := 0; i < len(cmdLevels); i++ { + if i >= len(splitCmd) { + break + } + cmdLevels[i] = splitCmd[i] + } + return cmdLevels +} + +// metricURL build the metric URL. 
+func (c MatomoClient) metricURL(cmd string) string { + return fmt.Sprintf("%s/%s", c.source, strings.ReplaceAll(cmd, " ", "_")) +} diff --git a/ignite/internal/announcements/announcement.go b/ignite/internal/announcements/announcement.go new file mode 100644 index 0000000..091e587 --- /dev/null +++ b/ignite/internal/announcements/announcement.go @@ -0,0 +1,57 @@ +package announcements + +import ( + "encoding/json" + "fmt" + "net/http" + "strings" + "time" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" +) + +var ( + SurveyLink = "https://bit.ly/3WZS2uS" + AnnouncementURL = "https://api.ignite.com/v1/announcements" +) + +type announcement struct { + ID string `json:"id"` + Text string `json:"text"` + Timestamp time.Time `json:"timestamp"` + User string `json:"user"` +} + +// Fetch fetches the latest announcements from the API. +func Fetch() string { + resp, err := http.Get(AnnouncementURL) //nolint:gosec + if err != nil || resp.StatusCode != 200 { + return fallbackData() + } + defer resp.Body.Close() + + type response struct { + Announcements []announcement `json:"announcements"` + } + var data response + if err := json.NewDecoder(resp.Body).Decode(&data); err != nil { + return fallbackData() + } + + if len(data.Announcements) == 0 { + return fallbackData() + } + + var out strings.Builder + fmt.Fprintf(&out, "%s\n\n", "Announcements:") + + for _, msg := range data.Announcements { + fmt.Fprintf(&out, "%s %s\n", icons.Bullet, msg.Text) + } + + return out.String() +} + +func fallbackData() string { + return fmt.Sprintf("\n%s Survey: %s\n", icons.Survey, SurveyLink) +} diff --git a/ignite/internal/announcements/announcement_test.go b/ignite/internal/announcements/announcement_test.go new file mode 100644 index 0000000..0605c84 --- /dev/null +++ b/ignite/internal/announcements/announcement_test.go @@ -0,0 +1,66 @@ +package announcements_test + +import ( + "fmt" + "net/http" + "net/http/httptest" + "testing" + + 
"github.com/ignite/cli/v29/ignite/internal/announcements" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" +) + +func TestFetchAnnouncements(t *testing.T) { + fallbackData := fmt.Sprintf("\n💬 Survey: %s\n", announcements.SurveyLink) + + tests := []struct { + name string + mockResponse string + statusCode int + expected string + }{ + { + name: "successful retrieval", + mockResponse: `{"version":1,"announcements":[{"id":"1744230503810","text":"New Ignite announcement: v1.0.0 released!","timestamp":"2025-04-09T20:28:23.810Z","user":"announcement-bot"}]}`, + statusCode: http.StatusOK, + expected: fmt.Sprintf("Announcements:\n\n%s New Ignite announcement: v1.0.0 released!\n", icons.Bullet), + }, + { + name: "empty announcements", + mockResponse: `{"announcements":[]}`, + statusCode: http.StatusOK, + expected: fallbackData, + }, + { + name: "invalid JSON response", + mockResponse: `invalid json`, + statusCode: http.StatusOK, + expected: fallbackData, + }, + { + name: "non-200 HTTP response", + mockResponse: ``, + statusCode: http.StatusInternalServerError, + expected: fallbackData, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(tt.statusCode) + w.Write([]byte(tt.mockResponse)) + })) + defer server.Close() + + originalAPI := announcements.AnnouncementURL + announcements.AnnouncementURL = server.URL + defer func() { announcements.AnnouncementURL = originalAPI }() + + result := announcements.Fetch() + if result != tt.expected { + t.Errorf("expected %q, got %q", tt.expected, result) + } + }) + } +} diff --git a/ignite/internal/buf/buf.go b/ignite/internal/buf/buf.go new file mode 100644 index 0000000..ffab251 --- /dev/null +++ b/ignite/internal/buf/buf.go @@ -0,0 +1,39 @@ +package buf + +import ( + "encoding/json" + "net/http" + "time" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +var BufTokenURL = 
"https://api.ignite.com/v1/buf" //nolint:gosec // URL is hardcoded and not user-provided + +// FetchToken fetches the buf token from the Ignite API. +func FetchToken() (string, error) { + client := &http.Client{ + Timeout: 5 * time.Second, + } + + resp, err := client.Get(BufTokenURL) + if err != nil { + return "", err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return "", errors.Errorf("HTTP request failed with status code: %d", resp.StatusCode) + } + + type tokenResponse struct { + Token string `json:"token"` + } + var tokenResp tokenResponse + + if err := json.NewDecoder(resp.Body).Decode(&tokenResp); err != nil { + return "", err + } + + return tokenResp.Token, nil +} diff --git a/ignite/internal/buf/buf_test.go b/ignite/internal/buf/buf_test.go new file mode 100644 index 0000000..29eb8ce --- /dev/null +++ b/ignite/internal/buf/buf_test.go @@ -0,0 +1,70 @@ +package buf_test + +import ( + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/internal/buf" +) + +func TestFetchToken(t *testing.T) { + tests := []struct { + name string + serverResponse string + statusCode int + expectedToken string + expectError bool + }{ + { + name: "successful token fetch", + serverResponse: `{"token":"test_token_123"}`, + statusCode: http.StatusOK, + expectedToken: "test_token_123", + expectError: false, + }, + { + name: "server error", + serverResponse: `{"error":"internal server error"}`, + statusCode: http.StatusInternalServerError, + expectedToken: "", + expectError: true, + }, + { + name: "invalid json response", + serverResponse: `invalid json`, + statusCode: http.StatusOK, + expectedToken: "", + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Create mock server + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(tt.statusCode) + w.Write([]byte(tt.serverResponse)) 
+ })) + defer server.Close() + + // Temporarily override the endpoint + originalEndpoint := buf.BufTokenURL + buf.BufTokenURL = server.URL + defer func() { + buf.BufTokenURL = originalEndpoint + }() + + token, err := buf.FetchToken() + if tt.expectError { + require.Error(t, err) + require.Empty(t, token) + } else { + require.NoError(t, err) + require.Equal(t, tt.expectedToken, token) + } + }) + } +} diff --git a/ignite/internal/plugin/execute.go b/ignite/internal/plugin/execute.go new file mode 100644 index 0000000..8856208 --- /dev/null +++ b/ignite/internal/plugin/execute.go @@ -0,0 +1,62 @@ +package plugininternal + +import ( + "bytes" + "context" + "sync" + + "google.golang.org/grpc/status" + + pluginsconfig "github.com/ignite/cli/v29/ignite/config/plugins" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/services/plugin" +) + +type synchronizedBuffer struct { + mu sync.Mutex + buf bytes.Buffer +} + +func (b *synchronizedBuffer) Write(p []byte) (int, error) { + b.mu.Lock() + defer b.mu.Unlock() + + return b.buf.Write(p) +} + +func (b *synchronizedBuffer) String() string { + b.mu.Lock() + defer b.mu.Unlock() + + return b.buf.String() +} + +// Execute starts and executes a plugin, then shutdowns it. +func Execute(ctx context.Context, path string, args []string, options ...plugin.APIOption) (string, error) { + var buf synchronizedBuffer + plugins, err := plugin.Load( + ctx, + []pluginsconfig.Plugin{{Path: path}}, + plugin.RedirectStdout(&buf), + ) + if err != nil { + return "", err + } + defer plugins[0].KillClient() + if plugins[0].Error != nil { + return "", plugins[0].Error + } + err = plugins[0].Interface.Execute( + ctx, + &plugin.ExecutedCommand{Args: args}, + plugin.NewClientAPI(options...), + ) + if err != nil { + // Extract the rpc status message and create a simple error from it. + // We don't want Execute to return rpc errors. 
+ return "", errors.New(status.Convert(err).Message()) + } + + plugins[0].KillClient() + return buf.String(), err +} diff --git a/ignite/internal/plugin/execute_test.go b/ignite/internal/plugin/execute_test.go new file mode 100644 index 0000000..246dc1f --- /dev/null +++ b/ignite/internal/plugin/execute_test.go @@ -0,0 +1,72 @@ +package plugininternal + +import ( + "context" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/services/plugin" + "github.com/ignite/cli/v29/ignite/services/plugin/mocks" +) + +func TestPluginExecute(t *testing.T) { + tests := []struct { + name string + pluginPath string + expectedOutput string + expectedError string + }{ + { + name: "fail: plugin doesnt exist", + pluginPath: "/not/exists", + expectedError: "local app path \"/not/exists\" not found: stat /not/exists: no such file or directory", + }, + { + name: "ok: plugin execute ok", + pluginPath: "testdata/execute_ok", + expectedOutput: `ok args=\[arg1 arg2\] chainid=id appPath=apppath configPath=configpath home=home rpcAddress=rpcPublicAddress +ok args=\[arg1 arg2\] cliVersion=.* goVersion=.* sdkVersion=.* bufVersion=.* buildDate=.* sourceHash=.* configVersion=.* os=.* arch=.* buildFromSource=.* +`, + }, + { + name: "ok: plugin execute fail", + pluginPath: "testdata/execute_fail", + expectedError: "fail", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + pluginPath := tt.pluginPath + if !strings.HasPrefix(pluginPath, "/") { + // add working dir to relative paths + wd, err := os.Getwd() + require.NoError(t, err) + pluginPath = filepath.Join(wd, pluginPath) + } + chainer := mocks.NewChainerInterface(t) + chainer.EXPECT().ID().Return("id", nil).Maybe() + chainer.EXPECT().AppPath().Return("apppath").Maybe() + chainer.EXPECT().ConfigPath().Return("configpath").Maybe() + chainer.EXPECT().Home().Return("home", nil).Maybe() + chainer.EXPECT().RPCPublicAddress().Return("rpcPublicAddress", 
nil).Maybe() + + out, err := Execute( + context.Background(), + pluginPath, + []string{"arg1", "arg2"}, + plugin.WithChain(chainer), + ) + + if tt.expectedError != "" { + require.EqualError(t, err, tt.expectedError) + return + } + require.NoError(t, err) + require.Regexp(t, tt.expectedOutput, out) + }) + } +} diff --git a/ignite/internal/plugin/testdata/consumer/config/genesis.json b/ignite/internal/plugin/testdata/consumer/config/genesis.json new file mode 100644 index 0000000..0f180ed --- /dev/null +++ b/ignite/internal/plugin/testdata/consumer/config/genesis.json @@ -0,0 +1,9 @@ +{ + "app_name": "test", + "app_version": "", + "genesis_time": "2024-01-19T10:27:44.742750573Z", + "chain_id": "test", + "initial_height": 1, + "app_hash": null, + "app_state": {} +} diff --git a/ignite/internal/plugin/testdata/consumer/config/priv_validator_key.json b/ignite/internal/plugin/testdata/consumer/config/priv_validator_key.json new file mode 100644 index 0000000..302f489 --- /dev/null +++ b/ignite/internal/plugin/testdata/consumer/config/priv_validator_key.json @@ -0,0 +1,11 @@ +{ + "address": "2D3C15095E5EAA318CAEDE1C2D02C77581584751", + "pub_key": { + "type": "tendermint/PubKeyEd25519", + "value": "uBOT+dDuUvXjJrkfwMNrS4bRT4/O+fBnpwfYpR6n1Wk=" + }, + "priv_key": { + "type": "tendermint/PrivKeyEd25519", + "value": "HovIzTJTGMrQx5oBikjfypyMZYF9QP5MxS+S+S/3QYq4E5P50O5S9eMmuR/Aw2tLhtFPj8758GenB9ilHqfVaQ==" + } +} \ No newline at end of file diff --git a/ignite/internal/plugin/testdata/execute_fail/.gitignore b/ignite/internal/plugin/testdata/execute_fail/.gitignore new file mode 100644 index 0000000..21dc35c --- /dev/null +++ b/ignite/internal/plugin/testdata/execute_fail/.gitignore @@ -0,0 +1 @@ +execute_fail* diff --git a/ignite/internal/plugin/testdata/execute_fail/go.mod b/ignite/internal/plugin/testdata/execute_fail/go.mod new file mode 100644 index 0000000..c6c9d5b --- /dev/null +++ b/ignite/internal/plugin/testdata/execute_fail/go.mod @@ -0,0 +1,106 @@ +module 
execute_fail + +go 1.25.4 + +replace github.com/ignite/cli/v29 => ../../../../.. + +require ( + github.com/hashicorp/go-plugin v1.6.3 + github.com/ignite/cli/v29 v29.0.0 +) + +require ( + dario.cat/mergo v1.0.1 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/ProtonMail/go-crypto v1.1.6 // indirect + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect + github.com/aymerick/douceur v0.2.0 // indirect + github.com/blang/semver/v4 v4.0.0 // indirect + github.com/briandowns/spinner v1.23.2 // indirect + github.com/charmbracelet/bubbletea v1.3.5 // indirect + github.com/charmbracelet/colorprofile v0.3.1 // indirect + github.com/charmbracelet/lipgloss v1.1.0 // indirect + github.com/charmbracelet/x/ansi v0.8.0 // indirect + github.com/charmbracelet/x/cellbuf v0.0.13 // indirect + github.com/charmbracelet/x/term v0.2.1 // indirect + github.com/cloudflare/circl v1.6.3 // indirect + github.com/cockroachdb/errors v1.12.0 // indirect + github.com/cockroachdb/logtags v0.0.0-20241215232642-bb51bb14a506 // indirect + github.com/cockroachdb/redact v1.1.6 // indirect + github.com/cosmos/btcutil v1.0.5 // indirect + github.com/cosmos/cosmos-sdk v0.53.6 // indirect + github.com/cyphar/filepath-securejoin v0.4.1 // indirect + github.com/emirpasic/gods v1.18.1 // indirect + github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect + github.com/fatih/color v1.18.0 // indirect + github.com/fatih/structs v1.1.0 // indirect + github.com/getsentry/sentry-go v0.35.0 // indirect + github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect + github.com/go-git/go-billy/v5 v5.8.0 // indirect + github.com/go-git/go-git/v5 v5.17.1 // indirect + github.com/gobuffalo/flect v0.3.0 // indirect + github.com/gobuffalo/genny/v2 v2.1.0 // indirect + github.com/gobuffalo/github_flavored_markdown v1.1.4 // indirect + github.com/gobuffalo/helpers v0.6.7 // indirect + github.com/gobuffalo/logger v1.0.7 // indirect + github.com/gobuffalo/packd 
v1.0.2 // indirect + github.com/gobuffalo/plush/v4 v4.1.22 // indirect + github.com/gobuffalo/tags/v3 v3.1.4 // indirect + github.com/gobuffalo/validate/v3 v3.3.3 // indirect + github.com/gobwas/glob v0.2.3 // indirect + github.com/goccy/go-yaml v1.15.23 // indirect + github.com/gofrs/uuid v4.4.0+incompatible // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/google/go-github/v48 v48.2.0 // indirect + github.com/google/go-querystring v1.1.0 // indirect + github.com/gorilla/css v1.0.0 // indirect + github.com/hashicorp/go-hclog v1.6.3 // indirect + github.com/hashicorp/yamux v0.1.2 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect + github.com/kevinburke/ssh_config v1.2.0 // indirect + github.com/kr/pretty v0.3.1 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/lucasb-eyer/go-colorful v1.2.0 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-localereader v0.0.1 // indirect + github.com/mattn/go-runewidth v0.0.16 // indirect + github.com/microcosm-cc/bluemonday v1.0.23 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect + github.com/muesli/cancelreader v0.2.2 // indirect + github.com/muesli/reflow v0.3.0 // indirect + github.com/muesli/termenv v0.16.0 // indirect + github.com/oklog/run v1.1.0 // indirect + github.com/otiai10/copy v1.14.1 // indirect + github.com/otiai10/mint v1.6.3 // indirect + github.com/pjbgf/sha1cd v0.3.2 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/rogpeppe/go-internal v1.14.1 // indirect + github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // 
indirect + github.com/sirupsen/logrus v1.9.3 // indirect + github.com/skeema/knownhosts v1.3.1 // indirect + github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d // indirect + github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e // indirect + github.com/spf13/cobra v1.10.1 // indirect + github.com/spf13/pflag v1.0.10 // indirect + github.com/xanzy/ssh-agent v0.3.3 // indirect + github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect + go.etcd.io/bbolt v1.4.0 // indirect + golang.org/x/crypto v0.46.0 // indirect + golang.org/x/mod v0.30.0 // indirect + golang.org/x/net v0.48.0 // indirect + golang.org/x/sync v0.19.0 // indirect + golang.org/x/sys v0.39.0 // indirect + golang.org/x/term v0.38.0 // indirect + golang.org/x/text v0.32.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20251202230838-ff82c1b0f217 // indirect + google.golang.org/grpc v1.79.3 // indirect + google.golang.org/protobuf v1.36.10 // indirect + gopkg.in/warnings.v0 v0.1.2 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/ignite/internal/plugin/testdata/execute_fail/main.go b/ignite/internal/plugin/testdata/execute_fail/main.go new file mode 100644 index 0000000..49098c5 --- /dev/null +++ b/ignite/internal/plugin/testdata/execute_fail/main.go @@ -0,0 +1,44 @@ +package main + +import ( + "context" + + hplugin "github.com/hashicorp/go-plugin" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/services/plugin" +) + +type app struct{} + +func (app) Manifest(context.Context) (*plugin.Manifest, error) { + return &plugin.Manifest{ + Name: "execute_fail", + }, nil +} + +func (app) Execute(context.Context, *plugin.ExecutedCommand, plugin.ClientAPI) error { + return errors.New("fail") +} + +func (app) ExecuteHookPre(context.Context, *plugin.ExecutedHook, plugin.ClientAPI) error { + return nil +} + +func (app) ExecuteHookPost(context.Context, *plugin.ExecutedHook, plugin.ClientAPI) error { + 
return nil +} + +func (app) ExecuteHookCleanUp(context.Context, *plugin.ExecutedHook, plugin.ClientAPI) error { + return nil +} + +func main() { + hplugin.Serve(&hplugin.ServeConfig{ + HandshakeConfig: plugin.HandshakeConfig(), + Plugins: map[string]hplugin.Plugin{ + "execute_fail": plugin.NewGRPC(&app{}), + }, + GRPCServer: hplugin.DefaultGRPCServer, + }) +} diff --git a/ignite/internal/plugin/testdata/execute_ok/.gitignore b/ignite/internal/plugin/testdata/execute_ok/.gitignore new file mode 100644 index 0000000..cb56ef7 --- /dev/null +++ b/ignite/internal/plugin/testdata/execute_ok/.gitignore @@ -0,0 +1 @@ +execute_ok* diff --git a/ignite/internal/plugin/testdata/execute_ok/go.mod b/ignite/internal/plugin/testdata/execute_ok/go.mod new file mode 100644 index 0000000..d30a742 --- /dev/null +++ b/ignite/internal/plugin/testdata/execute_ok/go.mod @@ -0,0 +1,106 @@ +module execute_ok + +go 1.25.4 + +replace github.com/ignite/cli/v29 => ../../../../.. + +require ( + github.com/hashicorp/go-plugin v1.6.3 + github.com/ignite/cli/v29 v29.0.0 +) + +require ( + dario.cat/mergo v1.0.1 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/ProtonMail/go-crypto v1.1.6 // indirect + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect + github.com/aymerick/douceur v0.2.0 // indirect + github.com/blang/semver/v4 v4.0.0 // indirect + github.com/briandowns/spinner v1.23.2 // indirect + github.com/charmbracelet/bubbletea v1.3.5 // indirect + github.com/charmbracelet/colorprofile v0.3.1 // indirect + github.com/charmbracelet/lipgloss v1.1.0 // indirect + github.com/charmbracelet/x/ansi v0.8.0 // indirect + github.com/charmbracelet/x/cellbuf v0.0.13 // indirect + github.com/charmbracelet/x/term v0.2.1 // indirect + github.com/cloudflare/circl v1.6.3 // indirect + github.com/cockroachdb/errors v1.12.0 // indirect + github.com/cockroachdb/logtags v0.0.0-20241215232642-bb51bb14a506 // indirect + github.com/cockroachdb/redact v1.1.6 // indirect + 
github.com/cosmos/btcutil v1.0.5 // indirect + github.com/cosmos/cosmos-sdk v0.53.6 // indirect + github.com/cyphar/filepath-securejoin v0.4.1 // indirect + github.com/emirpasic/gods v1.18.1 // indirect + github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect + github.com/fatih/color v1.18.0 // indirect + github.com/fatih/structs v1.1.0 // indirect + github.com/getsentry/sentry-go v0.35.0 // indirect + github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect + github.com/go-git/go-billy/v5 v5.8.0 // indirect + github.com/go-git/go-git/v5 v5.17.1 // indirect + github.com/gobuffalo/flect v0.3.0 // indirect + github.com/gobuffalo/genny/v2 v2.1.0 // indirect + github.com/gobuffalo/github_flavored_markdown v1.1.4 // indirect + github.com/gobuffalo/helpers v0.6.7 // indirect + github.com/gobuffalo/logger v1.0.7 // indirect + github.com/gobuffalo/packd v1.0.2 // indirect + github.com/gobuffalo/plush/v4 v4.1.22 // indirect + github.com/gobuffalo/tags/v3 v3.1.4 // indirect + github.com/gobuffalo/validate/v3 v3.3.3 // indirect + github.com/gobwas/glob v0.2.3 // indirect + github.com/goccy/go-yaml v1.15.23 // indirect + github.com/gofrs/uuid v4.4.0+incompatible // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/google/go-github/v48 v48.2.0 // indirect + github.com/google/go-querystring v1.1.0 // indirect + github.com/gorilla/css v1.0.0 // indirect + github.com/hashicorp/go-hclog v1.6.3 // indirect + github.com/hashicorp/yamux v0.1.2 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect + github.com/kevinburke/ssh_config v1.2.0 // indirect + github.com/kr/pretty v0.3.1 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/lucasb-eyer/go-colorful v1.2.0 // indirect + 
github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-localereader v0.0.1 // indirect + github.com/mattn/go-runewidth v0.0.16 // indirect + github.com/microcosm-cc/bluemonday v1.0.23 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect + github.com/muesli/cancelreader v0.2.2 // indirect + github.com/muesli/reflow v0.3.0 // indirect + github.com/muesli/termenv v0.16.0 // indirect + github.com/oklog/run v1.1.0 // indirect + github.com/otiai10/copy v1.14.1 // indirect + github.com/otiai10/mint v1.6.3 // indirect + github.com/pjbgf/sha1cd v0.3.2 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/rogpeppe/go-internal v1.14.1 // indirect + github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect + github.com/skeema/knownhosts v1.3.1 // indirect + github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d // indirect + github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e // indirect + github.com/spf13/cobra v1.10.1 // indirect + github.com/spf13/pflag v1.0.10 // indirect + github.com/xanzy/ssh-agent v0.3.3 // indirect + github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect + go.etcd.io/bbolt v1.4.0 // indirect + golang.org/x/crypto v0.46.0 // indirect + golang.org/x/mod v0.30.0 // indirect + golang.org/x/net v0.48.0 // indirect + golang.org/x/sync v0.19.0 // indirect + golang.org/x/sys v0.39.0 // indirect + golang.org/x/term v0.38.0 // indirect + golang.org/x/text v0.32.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20251202230838-ff82c1b0f217 // indirect + google.golang.org/grpc v1.79.3 // indirect + google.golang.org/protobuf v1.36.10 // indirect + gopkg.in/warnings.v0 v0.1.2 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff 
--git a/ignite/internal/plugin/testdata/execute_ok/main.go b/ignite/internal/plugin/testdata/execute_ok/main.go new file mode 100644 index 0000000..11ba671 --- /dev/null +++ b/ignite/internal/plugin/testdata/execute_ok/main.go @@ -0,0 +1,64 @@ +package main + +import ( + "context" + "fmt" + + hplugin "github.com/hashicorp/go-plugin" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/services/plugin" +) + +type app struct{} + +func (app) Manifest(context.Context) (*plugin.Manifest, error) { + return &plugin.Manifest{ + Name: "execute_ok", + }, nil +} + +func (app) Execute(ctx context.Context, cmd *plugin.ExecutedCommand, api plugin.ClientAPI) error { + c, err := api.GetChainInfo(ctx) + fmt.Printf( + "ok args=%s chainid=%s appPath=%s configPath=%s home=%s rpcAddress=%s\n", + cmd.Args, c.ChainId, c.AppPath, c.ConfigPath, c.Home, c.RpcAddress, + ) + if err != nil { + return errors.Errorf("failed to get chain info: %w", err) + } + + i, err := api.GetIgniteInfo(ctx) + fmt.Printf( + "ok args=%s cliVersion=%s goVersion=%s sdkVersion=%s bufVersion=%s buildDate=%s "+ + "sourceHash=%s configVersion=%s os=%s arch=%s buildFromSource=%t\n", + cmd.Args, i.CliVersion, i.GoVersion, i.SdkVersion, i.BufVersion, i.BuildDate, i.SourceHash, i.ConfigVersion, + i.Os, i.Arch, i.BuildFromSource, + ) + if err != nil { + return errors.Errorf("failed to get ignite info: %w", err) + } + return nil +} + +func (app) ExecuteHookPre(context.Context, *plugin.ExecutedHook, plugin.ClientAPI) error { + return nil +} + +func (app) ExecuteHookPost(context.Context, *plugin.ExecutedHook, plugin.ClientAPI) error { + return nil +} + +func (app) ExecuteHookCleanUp(context.Context, *plugin.ExecutedHook, plugin.ClientAPI) error { + return nil +} + +func main() { + hplugin.Serve(&hplugin.ServeConfig{ + HandshakeConfig: plugin.HandshakeConfig(), + Plugins: map[string]hplugin.Plugin{ + "execute_ok": plugin.NewGRPC(&app{}), + }, + GRPCServer: hplugin.DefaultGRPCServer, + }) +} 
diff --git a/ignite/internal/sentry/sentry.go b/ignite/internal/sentry/sentry.go new file mode 100644 index 0000000..4df79c3 --- /dev/null +++ b/ignite/internal/sentry/sentry.go @@ -0,0 +1,48 @@ +package sentry + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/getsentry/sentry-go" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/version" +) + +const IgniteDNS = "https://1d862300ead01c5814d8ead3732fd41f@o1152630.ingest.us.sentry.io/4507891348930560" + +func InitSentry(ctx context.Context) (deferMe func(), err error) { + sentrySyncTransport := sentry.NewHTTPSyncTransport() + sentrySyncTransport.Timeout = time.Second * 3 + + igniteInfo, err := version.GetInfo(ctx) + if err != nil { + return nil, errors.Errorf("failed to init sentry: %w", err) + } + + if err := sentry.Init(sentry.ClientOptions{ + Dsn: IgniteDNS, + Transport: sentrySyncTransport, + Environment: getEnvironment(igniteInfo.CLIVersion), + Release: fmt.Sprintf("ignite@%s", igniteInfo.CLIVersion), + SampleRate: 1.0, // get all events + }); err != nil { + return nil, errors.Errorf("failed to init sentry: %w", err) + } + + return func() { + sentry.Recover() + sentry.Flush(time.Second * 2) + }, nil +} + +func getEnvironment(igniteVersion string) string { + if strings.Contains(igniteVersion, "dev") { + return "development" + } + + return "production" +} diff --git a/ignite/internal/tools/gen-cli-docs/main.go b/ignite/internal/tools/gen-cli-docs/main.go new file mode 100644 index 0000000..016f32b --- /dev/null +++ b/ignite/internal/tools/gen-cli-docs/main.go @@ -0,0 +1,210 @@ +// this tool generates Ignite CLI docs to be placed in the docs/cli dir and deployed +// on docs.ignite.com +package main + +import ( + "bufio" + "bytes" + "context" + "fmt" + "io" + "log" + "os" + "path/filepath" + "sort" + "strings" + + "github.com/spf13/cobra" + "github.com/spf13/cobra/doc" + + ignitecmd "github.com/ignite/cli/v29/ignite/cmd" + pluginsconfig 
"github.com/ignite/cli/v29/ignite/config/plugins" + "github.com/ignite/cli/v29/ignite/pkg/env" + "github.com/ignite/cli/v29/ignite/services/plugin" + "github.com/ignite/cli/v29/ignite/templates/field/datatype" +) + +const ( + scaffoldTypeFooter = ` + +Field Usage: + + - fieldName + - fieldName:fieldType + + +If no :fieldType, default (string) is used +` + scaffoldTypeHead = `# Scaffold Type + +Ignites provides a set of scaffold types that can be used to generate code for your application. +These types are used in the ` + "`ignite scaffold`" + ` command. + +## Available Scaffold Types + +` + head = `--- +description: Ignite CLI docs. +--- + +# CLI commands + +Documentation for Ignite CLI. +` + outFlag = "out" +) + +func main() { + if err := run(); err != nil { + log.Fatal(err) + } +} + +func run() error { + // We want to have documentation for commands that are implemented in plugins. + // To do that, we need to add the related plugins in the config. + // To avoid conflicts with user config, set an alternate config dir in tmp. + dir, err := os.MkdirTemp("", ".ignite") + if err != nil { + return err + } + defer os.RemoveAll(dir) + env.SetConfigDir(dir) + pluginDir, err := plugin.PluginsPath() + if err != nil { + return err + } + cfg, err := pluginsconfig.ParseDir(pluginDir) + if err != nil { + return err + } + if err := cfg.Save(); err != nil { + return err + } + + cmd, cleanUp, err := ignitecmd.New(context.Background()) + if err != nil { + return err + } + defer cleanUp() + + cmd.Flags().String(outFlag, "output.md", ".md file path to place Ignite CLI docs inside") + if err := cmd.Flags().MarkHidden(outFlag); err != nil { + return err + } + + // Run ExecuteC so cobra adds the completion command. 
+ cmd, err = cmd.ExecuteC() + if err != nil { + return err + } + + outPath, err := cmd.Flags().GetString(outFlag) + if err != nil { + return nil + } + + return generateUsage(cmd, outPath) +} + +func generateUsage(cmd *cobra.Command, outPath string) error { + if err := os.MkdirAll(filepath.Dir(outPath), 0o755); err != nil { + return err + } + f, err := os.OpenFile(outPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0o644) + if err != nil { + return err + } + defer f.Close() + + if _, err := fmt.Fprint(f, head); err != nil { + return err + } + + if err := generateCmd(cmd, f); err != nil { + return err + } + + return generateScaffoldTypes(f) +} + +func generateScaffoldTypes(w io.Writer) error { + if _, err := fmt.Fprint(w, scaffoldTypeHead); err != nil { + return err + } + + supported := datatype.SupportedTypes() + entries := make([][]string, 0, len(supported)) + for name, usage := range supported { + entries = append(entries, []string{name, usage}) + } + + sort.Slice(entries, func(i, j int) bool { + return entries[i][0] < entries[j][0] + }) + + // Write table header + if _, err := fmt.Fprintf(w, "| Type | Usage |\n"); err != nil { + return err + } + if _, err := fmt.Fprintf(w, "| --- | --- |\n"); err != nil { + return err + } + + // Write table rows + for _, entry := range entries { + if _, err := fmt.Fprintf(w, "| %s | %s |\n", entry[0], entry[1]); err != nil { + return err + } + } + + if _, err := fmt.Fprint(w, scaffoldTypeFooter); err != nil { + return err + } + return nil +} + +func generateCmd(cmd *cobra.Command, w io.Writer) error { + cmd.DisableAutoGenTag = true + + b := &bytes.Buffer{} + if err := doc.GenMarkdownCustom(cmd, b, linkHandler); err != nil { + return err + } + + // here we change sub titles to bold styling. Otherwise, these titles will get + // printed in the right menu of docs.ignite.com which is unpleasant because + // we only want to see a list of all available commands without the extra noise. 
+ sc := bufio.NewScanner(b) + for sc.Scan() { + t := sc.Text() + if strings.HasPrefix(t, "###") { + t = strings.TrimPrefix(t, "### ") + t = fmt.Sprintf("**%s**", t) + } + if _, err := fmt.Fprintln(w, t); err != nil { + return err + } + } + + for _, cmd := range cmd.Commands() { + if !cmd.IsAvailableCommand() || cmd.IsAdditionalHelpTopicCommand() { + continue + } + + _, _ = io.WriteString(w, "\n") + + if err := generateCmd(cmd, w); err != nil { + return err + } + } + + return nil +} + +func linkHandler(link string) string { + link = strings.ReplaceAll(link, "_", "-") + link = strings.TrimSuffix(link, ".md") + link = "#" + link + return link +} diff --git a/ignite/internal/tools/gen-config-doc/cmd/root.go b/ignite/internal/tools/gen-config-doc/cmd/root.go new file mode 100644 index 0000000..30fe15f --- /dev/null +++ b/ignite/internal/tools/gen-config-doc/cmd/root.go @@ -0,0 +1,104 @@ +package cmd + +import ( + "fmt" + "path/filepath" + "strings" + + "github.com/spf13/cobra" + + v0 "github.com/ignite/cli/v29/ignite/config/chain/v0" + v1 "github.com/ignite/cli/v29/ignite/config/chain/v1" + "github.com/ignite/cli/v29/ignite/pkg/clidoc" + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + + "github.com/ignite/cli/ignite/internal/tools/gen-config-doc/templates/doc" +) + +const ( + flagVersion = "version" + flagOutput = "output" + flagFilename = "filename" + flagYes = "yes" + + defaultFilename = "02-config_example.md" + defaultDocPath = "docs/docs/08-configuration" +) + +// NewRootCmd creates a new root command. 
+func NewRootCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "gen-config-doc", + Short: "generate configuration file documentation", + Long: "This tool is used to generate the chain configuration file documentation", + RunE: func(cmd *cobra.Command, args []string) (err error) { + var ( + version, _ = cmd.Flags().GetString(flagVersion) + output, _ = cmd.Flags().GetString(flagOutput) + fileName, _ = cmd.Flags().GetString(flagFilename) + yes, _ = cmd.Flags().GetBool(flagYes) + ) + session := cliui.New(cliui.WithoutUserInteraction(yes)) + defer session.End() + + output, err = filepath.Abs(output) + if err != nil { + return errors.Wrap(err, "failed to find the abs path") + } + + var docs clidoc.Docs + switch version { + case "v0": + docs, err = clidoc.GenDoc(v0.Config{}) + case "v1": + docs, err = clidoc.GenDoc(v1.Config{}) + default: + return errors.Errorf("unknown version: %s", version) + } + if err != nil { + return errors.Wrapf(err, "failed to generate migration doc %s", version) + } + + // Generate the docs file. + g, err := doc.NewGenerator(doc.Options{ + Path: output, + FileName: fileName, + Config: docs.String(), + }) + if err != nil { + return errors.Wrap(err, "failed to create the doc generator object") + } + + runner := xgenny.NewRunner(cmd.Context(), output) + sm, err := runner.RunAndApply(g, xgenny.ApplyPreRun(func(_, _, duplicated []string) error { + if len(duplicated) == 0 { + return nil + } + question := fmt.Sprintf("Do you want to overwrite the existing files? \n%s", strings.Join(duplicated, "\n")) + return session.AskConfirm(question) + })) + if err != nil { + return err + } + + files := append(sm.CreatedFiles(), sm.ModifiedFiles()...) 
+ if len(files) == 0 { + return errors.Errorf("config doc not created at %s", output) + } + session.EventBus().SendInfo( + fmt.Sprintf("Config doc generated successfully at %s", files[0]), + ) + + return nil + }, + } + + cmd.Flags().StringP(flagVersion, "v", "v1", "Version of Ignite config file") + cmd.Flags().StringP(flagOutput, "o", defaultDocPath, "Output directory to save the config document") + cmd.Flags().StringP(flagFilename, "f", defaultFilename, "Document file name") + cmd.Flags().BoolP(flagYes, "y", false, "answers interactive yes/no questions with yes") + + return cmd +} diff --git a/ignite/internal/tools/gen-config-doc/go.mod b/ignite/internal/tools/gen-config-doc/go.mod new file mode 100644 index 0000000..70b4377 --- /dev/null +++ b/ignite/internal/tools/gen-config-doc/go.mod @@ -0,0 +1,79 @@ +module github.com/ignite/cli/ignite/internal/tools/gen-config-doc + +go 1.25.4 + +replace github.com/ignite/cli/v29 => ../../../../ + +require ( + github.com/gobuffalo/genny/v2 v2.1.1 + github.com/gobuffalo/plush/v4 v4.1.22 + github.com/ignite/cli/v29 v29.0.0-rc.1 + github.com/spf13/cobra v1.10.1 +) + +require ( + dario.cat/mergo v1.0.2 // indirect + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect + github.com/aymerick/douceur v0.2.0 // indirect + github.com/briandowns/spinner v1.23.2 // indirect + github.com/charmbracelet/bubbletea v1.3.5 // indirect + github.com/charmbracelet/colorprofile v0.3.1 // indirect + github.com/charmbracelet/lipgloss v1.1.0 // indirect + github.com/charmbracelet/x/ansi v0.9.2 // indirect + github.com/charmbracelet/x/cellbuf v0.0.13 // indirect + github.com/charmbracelet/x/term v0.2.1 // indirect + github.com/cockroachdb/errors v1.12.0 // indirect + github.com/cockroachdb/logtags v0.0.0-20241215232642-bb51bb14a506 // indirect + github.com/cockroachdb/redact v1.1.6 // indirect + github.com/cosmos/gogoproto v1.7.2 // indirect + github.com/emicklei/proto v1.14.1 // indirect + github.com/emicklei/proto-contrib v0.18.2 // indirect + 
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect + github.com/fatih/color v1.18.0 // indirect + github.com/fatih/structs v1.1.0 // indirect + github.com/getsentry/sentry-go v0.35.0 // indirect + github.com/gobuffalo/flect v1.0.3 // indirect + github.com/gobuffalo/github_flavored_markdown v1.1.4 // indirect + github.com/gobuffalo/helpers v0.6.9 // indirect + github.com/gobuffalo/logger v1.0.7 // indirect + github.com/gobuffalo/packd v1.0.2 // indirect + github.com/gobuffalo/plush/v5 v5.0.3 // indirect + github.com/gobuffalo/tags/v3 v3.1.4 // indirect + github.com/gobuffalo/validate/v3 v3.3.3 // indirect + github.com/goccy/go-yaml v1.17.1 // indirect + github.com/gofrs/uuid v4.4.0+incompatible // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/google/go-cmp v0.7.0 // indirect + github.com/gorilla/css v1.0.1 // indirect + github.com/iancoleman/strcase v0.3.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/kr/pretty v0.3.1 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/lucasb-eyer/go-colorful v1.2.0 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-localereader v0.0.1 // indirect + github.com/mattn/go-runewidth v0.0.16 // indirect + github.com/microcosm-cc/bluemonday v1.0.27 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect + github.com/muesli/cancelreader v0.2.2 // indirect + github.com/muesli/reflow v0.3.0 // indirect + github.com/muesli/termenv v0.16.0 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/rogpeppe/go-internal v1.14.1 // indirect + github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect + github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d // 
indirect + github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e // indirect + github.com/spf13/pflag v1.0.10 // indirect + github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect + golang.org/x/net v0.47.0 // indirect + golang.org/x/sync v0.18.0 // indirect + golang.org/x/sys v0.38.0 // indirect + golang.org/x/term v0.37.0 // indirect + golang.org/x/text v0.31.0 // indirect + google.golang.org/protobuf v1.36.10 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/ignite/internal/tools/gen-config-doc/go.sum b/ignite/internal/tools/gen-config-doc/go.sum new file mode 100644 index 0000000..4504027 --- /dev/null +++ b/ignite/internal/tools/gen-config-doc/go.sum @@ -0,0 +1,248 @@ +dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= +dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= +github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= +github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= +github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= +github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= +github.com/briandowns/spinner v1.23.2 h1:Zc6ecUnI+YzLmJniCfDNaMbW0Wid1d5+qcTq4L2FW8w= +github.com/briandowns/spinner v1.23.2/go.mod h1:LaZeM4wm2Ywy6vO571mvhQNRcWfRUnXOs0RcKV0wYKM= +github.com/charmbracelet/bubbletea v1.3.5 h1:JAMNLTbqMOhSwoELIr0qyP4VidFq72/6E9j7HHmRKQc= +github.com/charmbracelet/bubbletea v1.3.5/go.mod h1:TkCnmH+aBd4LrXhXcqrKiYwRs7qyQx5rBgH5fVY3v54= +github.com/charmbracelet/colorprofile v0.3.1 h1:k8dTHMd7fgw4bnFd7jXTLZrSU/CQrKnL3m+AxCzDz40= +github.com/charmbracelet/colorprofile v0.3.1/go.mod h1:/GkGusxNs8VB/RSOh3fu0TJmQ4ICMMPApIIVn0KszZ0= +github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY= +github.com/charmbracelet/lipgloss v1.1.0/go.mod 
h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30= +github.com/charmbracelet/x/ansi v0.9.2 h1:92AGsQmNTRMzuzHEYfCdjQeUzTrgE1vfO5/7fEVoXdY= +github.com/charmbracelet/x/ansi v0.9.2/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE= +github.com/charmbracelet/x/cellbuf v0.0.13 h1:/KBBKHuVRbq1lYx5BzEHBAFBP8VcQzJejZ/IA3iR28k= +github.com/charmbracelet/x/cellbuf v0.0.13/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs= +github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= +github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= +github.com/cockroachdb/errors v1.12.0 h1:d7oCs6vuIMUQRVbi6jWWWEJZahLCfJpnJSVobd1/sUo= +github.com/cockroachdb/errors v1.12.0/go.mod h1:SvzfYNNBshAVbZ8wzNc/UPK3w1vf0dKDUP41ucAIf7g= +github.com/cockroachdb/logtags v0.0.0-20241215232642-bb51bb14a506 h1:ASDL+UJcILMqgNeV5jiqR4j+sTuvQNHdf2chuKj1M5k= +github.com/cockroachdb/logtags v0.0.0-20241215232642-bb51bb14a506/go.mod h1:Mw7HqKr2kdtu6aYGn3tPmAftiP3QPX63LdK/zcariIo= +github.com/cockroachdb/redact v1.1.6 h1:zXJBwDZ84xJNlHl1rMyCojqyIxv+7YUpQiJLQ7n4314= +github.com/cockroachdb/redact v1.1.6/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= +github.com/cosmos/gogoproto v1.7.2 h1:5G25McIraOC0mRFv9TVO139Uh3OklV2hczr13KKVHCA= +github.com/cosmos/gogoproto v1.7.2/go.mod h1:8S7w53P1Y1cHwND64o0BnArT6RmdgIvsBuco6uTllsk= +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/emicklei/proto 
v1.14.0/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A= +github.com/emicklei/proto v1.14.1 h1:fFq+Bj70XXZWXWikcVRvYZxrMS4KIIiPAqdJ8vPrenY= +github.com/emicklei/proto v1.14.1/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A= +github.com/emicklei/proto-contrib v0.18.2 h1:/3HbJnGuiT5x4TgGQjRkPYR5QP2i8sgUgeNbpmOEq+U= +github.com/emicklei/proto-contrib v0.18.2/go.mod h1:0jnPdGJOjrTDObeFfSM1XKx1Z/mjJzYMomI69tJIq58= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM= +github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= +github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= +github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= +github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= +github.com/getsentry/sentry-go v0.35.0 h1:+FJNlnjJsZMG3g0/rmmP7GiKjQoUF5EXfEtBwtPtkzY= +github.com/getsentry/sentry-go v0.35.0/go.mod h1:C55omcY9ChRQIUcVcGcs+Zdy4ZpQGvNJ7JYHIoSWOtE= +github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= +github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= +github.com/gobuffalo/flect v0.3.0/go.mod h1:5pf3aGnsvqvCj50AVni7mJJF8ICxGZ8HomberC3pXLE= +github.com/gobuffalo/flect v1.0.3 h1:xeWBM2nui+qnVvNM4S3foBhCAL2XgPU+a7FdpelbTq4= +github.com/gobuffalo/flect v1.0.3/go.mod h1:A5msMlrHtLqh9umBSnvabjsMrCcCpAyzglnDvkbYKHs= +github.com/gobuffalo/genny/v2 v2.1.1 h1:WJsJVaekfD1jkoi+EgSaqMZWH6PBf925LLDn3TK4JEw= +github.com/gobuffalo/genny/v2 v2.1.1/go.mod h1:PK9LyLU5o6cTCZN/+c0qm6PzAkaTEAkHnuK82F46H/c= +github.com/gobuffalo/github_flavored_markdown v1.1.3/go.mod h1:IzgO5xS6hqkDmUh91BW/+Qxo/qYnvfzoz3A7uLkg77I= +github.com/gobuffalo/github_flavored_markdown v1.1.4 
h1:WacrEGPXUDX+BpU1GM/Y0ADgMzESKNWls9hOTG1MHVs= +github.com/gobuffalo/github_flavored_markdown v1.1.4/go.mod h1:Vl9686qrVVQou4GrHRK/KOG3jCZOKLUqV8MMOAYtlso= +github.com/gobuffalo/helpers v0.6.7/go.mod h1:j0u1iC1VqlCaJEEVkZN8Ia3TEzfj/zoXANqyJExTMTA= +github.com/gobuffalo/helpers v0.6.9 h1:jOpLfIK8HwdLrPHfJ4IYvoCxORw0I1OyuRw8ikDy8uM= +github.com/gobuffalo/helpers v0.6.9/go.mod h1:hScSAHLvUMSATy5uxUxxYITKTv1Hfkpldkd5Ik8vf7Y= +github.com/gobuffalo/logger v1.0.7 h1:LTLwWelETXDYyqF/ASf0nxaIcdEOIJNxRokPcfI/xbU= +github.com/gobuffalo/logger v1.0.7/go.mod h1:u40u6Bq3VVvaMcy5sRBclD8SXhBYPS0Qk95ubt+1xJM= +github.com/gobuffalo/packd v1.0.2 h1:Yg523YqnOxGIWCp69W12yYBKsoChwI7mtu6ceM9Bwfw= +github.com/gobuffalo/packd v1.0.2/go.mod h1:sUc61tDqGMXON80zpKGp92lDb86Km28jfvX7IAyxFT8= +github.com/gobuffalo/plush/v4 v4.1.22 h1:bPQr5PsiTg54UGMsfvnIAvFmUfxzD/ri+wbpu7PlmTM= +github.com/gobuffalo/plush/v4 v4.1.22/go.mod h1:WiKHJx3qBvfaDVlrv8zT7NCd3dEMaVR/fVxW4wqV17M= +github.com/gobuffalo/plush/v5 v5.0.3 h1:cDt5mQbt+0vpbF+My/9uuk9yB0HFlDzPRE9nHh+vDMA= +github.com/gobuffalo/plush/v5 v5.0.3/go.mod h1:C08u/VEqzzPBXFF/yqs40P/5Cvc/zlZsMzhCxXyWJmU= +github.com/gobuffalo/tags/v3 v3.1.4 h1:X/ydLLPhgXV4h04Hp2xlbI2oc5MDaa7eub6zw8oHjsM= +github.com/gobuffalo/tags/v3 v3.1.4/go.mod h1:ArRNo3ErlHO8BtdA0REaZxijuWnWzF6PUXngmMXd2I0= +github.com/gobuffalo/validate/v3 v3.3.3 h1:o7wkIGSvZBYBd6ChQoLxkz2y1pfmhbI4jNJYh6PuNJ4= +github.com/gobuffalo/validate/v3 v3.3.3/go.mod h1:YC7FsbJ/9hW/VjQdmXPvFqvRis4vrRYFxr69WiNZw6g= +github.com/goccy/go-yaml v1.17.1 h1:LI34wktB2xEE3ONG/2Ar54+/HJVBriAGJ55PHls4YuY= +github.com/goccy/go-yaml v1.17.1/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= +github.com/gofrs/uuid v4.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gofrs/uuid v4.4.0+incompatible h1:3qXRTX8/NbyulANqlc0lchS1gqAVxRgsuW1YrTJupqA= +github.com/gofrs/uuid v4.4.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gogo/protobuf v1.3.2 
h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= +github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8= +github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0= +github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= +github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= +github.com/lucasb-eyer/go-colorful v1.2.0/go.mod 
h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4= +github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88= +github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= +github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= +github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/microcosm-cc/bluemonday v1.0.20/go.mod h1:yfBmMi8mxvaZut3Yytv+jTXRY8mxyjJ0/kQBTElld50= +github.com/microcosm-cc/bluemonday v1.0.22/go.mod h1:ytNkv4RrDrLJ2pqlsSI46O6IVXmZOBBD4SaJyDwwTkM= +github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk= +github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo= +github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA= +github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo= +github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s= +github.com/muesli/reflow v0.3.0/go.mod 
h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8= +github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc= +github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk= +github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= +github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I= +github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 
h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= +github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= +github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d h1:yKm7XZV6j9Ev6lojP2XaIshpT4ymkqhMeSghO5Ps00E= +github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d/go.mod h1:UdhH50NIW0fCiwBSr0co2m7BnFLdv4fQTgdqdJTHFeE= +github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e h1:qpG93cPwA5f7s/ZPBJnGOYQNK/vKsaDaseuKT5Asee8= +github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e/go.mod h1:HuIsMU8RRBOtsCgI77wP899iHVBQpCmg4ErYMZB+2IA= +github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s= +github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0= +github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= +github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify 
v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/exp v0.0.0-20250718183923-645b1fa84792 h1:R9PFI6EUdfVKgwKjZef7QIwGcBKu86OEFpJ9nUEP2l4= +golang.org/x/exp v0.0.0-20250718183923-645b1fa84792/go.mod h1:A+z0yzpGtvnG90cToK5n2tu8UJVP2XUATh+r+sfOOOc= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod 
h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.0.0-20221002022538-bcab6841153b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.0.0-20220722155259-a9ba230a4035/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= +golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod 
h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE= +google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0/go.mod 
h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/ignite/internal/tools/gen-config-doc/main.go b/ignite/internal/tools/gen-config-doc/main.go new file mode 100644 index 0000000..c26eef8 --- /dev/null +++ b/ignite/internal/tools/gen-config-doc/main.go @@ -0,0 +1,15 @@ +package main + +import ( + "fmt" + "os" + + "github.com/ignite/cli/ignite/internal/tools/gen-config-doc/cmd" +) + +func main() { + if err := cmd.NewRootCmd().Execute(); err != nil { + fmt.Println(err) + os.Exit(1) + } +} diff --git a/ignite/internal/tools/gen-config-doc/templates/doc/doc.go b/ignite/internal/tools/gen-config-doc/templates/doc/doc.go new file mode 100644 index 0000000..3bec4a8 --- /dev/null +++ b/ignite/internal/tools/gen-config-doc/templates/doc/doc.go @@ -0,0 +1,45 @@ +package doc + +import ( + "embed" + "io/fs" + + "github.com/gobuffalo/genny/v2" + "github.com/gobuffalo/plush/v4" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/templates/field/plushhelpers" +) + +//go:embed files/* +var files embed.FS + +// Options represents the options to scaffold a migration document. +type Options struct { + Path string + FileName string + Config string +} + +// NewGenerator returns the generator to scaffold a migration doc. 
+func NewGenerator(opts Options) (*genny.Generator, error) { + subFs, err := fs.Sub(files, "files") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + + g := genny.New() + if err := g.OnlyFS(subFs, nil, nil); err != nil { + return g, err + } + + ctx := plush.NewContext() + ctx.Set("Config", opts.Config) + + plushhelpers.ExtendPlushContext(ctx) + g.Transformer(xgenny.Transformer(ctx)) + g.Transformer(genny.Replace("{{Name}}", opts.FileName)) + + return g, nil +} diff --git a/ignite/internal/tools/gen-config-doc/templates/doc/files/{{Name}}.plush b/ignite/internal/tools/gen-config-doc/templates/doc/files/{{Name}}.plush new file mode 100644 index 0000000..328b59d --- /dev/null +++ b/ignite/internal/tools/gen-config-doc/templates/doc/files/{{Name}}.plush @@ -0,0 +1,11 @@ +--- +sidebar_position: 2 +description: Configuration File Example. +title: Configuration File Example +--- + +## Configuration File Example + +```yaml title="config.yml" +<%= Config %> +``` \ No newline at end of file diff --git a/ignite/internal/tools/gen-mig-diffs/cmd/root.go b/ignite/internal/tools/gen-mig-diffs/cmd/root.go new file mode 100644 index 0000000..1175993 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/cmd/root.go @@ -0,0 +1,205 @@ +package cmd + +import ( + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/Masterminds/semver/v3" + "github.com/spf13/cobra" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + + "github.com/ignite/cli/ignite/internal/tools/gen-mig-diffs/pkg/diff" + "github.com/ignite/cli/ignite/internal/tools/gen-mig-diffs/pkg/repo" + "github.com/ignite/cli/ignite/internal/tools/gen-mig-diffs/pkg/scaffold" + "github.com/ignite/cli/ignite/internal/tools/gen-mig-diffs/pkg/url" + "github.com/ignite/cli/ignite/internal/tools/gen-mig-diffs/templates/doc" +) + +const ( + flagFrom = "from" + flagTo = "to" + flagOutput = "output" + 
flagSource = "repo-source" + flagRepoURL = "repo-url" + flagRepoOutput = "repo-output" + flagScaffoldOutput = "scaffold-output" + flagScaffoldCache = "scaffold-cache" + flagYes = "yes" + + defaultDocPath = "docs/docs/06-migration" +) + +// NewRootCmd creates a new root command. +func NewRootCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "gen-mig-diffs", + Short: "generate migration diffs from two different version", + Long: "This tool is used to generate migration diff files for each of ignites scaffold commands", + RunE: func(cmd *cobra.Command, args []string) error { + var ( + from, _ = cmd.Flags().GetString(flagFrom) + to, _ = cmd.Flags().GetString(flagTo) + repoSource, _ = cmd.Flags().GetString(flagSource) + output, _ = cmd.Flags().GetString(flagOutput) + repoURLStr, _ = cmd.Flags().GetString(flagRepoURL) + repoOutput, _ = cmd.Flags().GetString(flagRepoOutput) + scaffoldOutput, _ = cmd.Flags().GetString(flagScaffoldOutput) + scaffoldCache, _ = cmd.Flags().GetString(flagScaffoldCache) + yes, _ = cmd.Flags().GetBool(flagYes) + ) + session := cliui.New(cliui.WithoutUserInteraction(yes)) + defer session.End() + + fromVer, err := semver.NewVersion(from) + if err != nil && from != "" { + return errors.Wrapf(err, "failed to parse from version %s", from) + } + toVer, err := semver.NewVersion(to) + if err != nil && to != "" { + return errors.Wrapf(err, "failed to parse to version %s", to) + } + + // Check or download the source and generate the binaries for each version. 
+ repoOptions := []repo.Options{repo.WithStdOutput(cmd.OutOrStdout())} + if repoSource != "" { + repoOptions = append(repoOptions, repo.WithSource(repoSource)) + } + if repoURLStr != "" { + repoURL, err := url.New(repoURLStr) + if err != nil { + return err + } + repoOptions = append(repoOptions, repo.WithRepoURL(repoURL)) + } + if repoOutput != "" { + repoOptions = append(repoOptions, repo.WithRepoOutput(repoOutput)) + } + + igniteRepo, err := repo.New(cmd.Context(), fromVer, toVer, session, repoOptions...) + if err != nil { + return err + } + defer igniteRepo.Cleanup() + + releaseDescription, err := igniteRepo.ReleaseDescription() + if err != nil { + return errors.Wrapf(err, "failed to fetch the release tag %s description", igniteRepo.To.Original()) + } + + fromBin, toBin, err := igniteRepo.GenerateBinaries(cmd.Context()) + if err != nil { + return err + } + + // Scaffold the default commands for each version. + scaffoldOptions := []scaffold.Option{ + scaffold.WithStderr(os.Stderr), + scaffold.WithStdout(os.Stdout), + scaffold.WithStdin(os.Stdin), + } + if scaffoldOutput != "" { + scaffoldOptions = append(scaffoldOptions, scaffold.WithOutput(scaffoldOutput)) + } + if scaffoldCache != "" { + scaffoldOptions = append(scaffoldOptions, scaffold.WithCachePath(scaffoldCache)) + } + + session.StartSpinner(fmt.Sprintf("Running scaffold commands for %s...", igniteRepo.From.Original())) + sFrom, err := scaffold.New(fromBin, igniteRepo.From, scaffoldOptions...) + if err != nil { + return err + } + defer sFrom.Cleanup() + + if err := sFrom.Run(cmd.Context()); err != nil { + return err + } + session.StopSpinner() + session.EventBus().SendInfo(fmt.Sprintf("Scaffolded code for %s at %s", igniteRepo.From.Original(), sFrom.Output)) + + session.StartSpinner(fmt.Sprintf("Running scaffold commands for %s...", igniteRepo.To.Original())) + sTo, err := scaffold.New(toBin, igniteRepo.To, scaffoldOptions...) 
+ if err != nil { + return err + } + defer sTo.Cleanup() + + if err := sTo.Run(cmd.Context()); err != nil { + return err + } + session.StopSpinner() + session.EventBus().SendInfo(fmt.Sprintf("Scaffolded code for %s at %s", igniteRepo.To.Original(), sTo.Output)) + + // Calculate and save the diffs from the scaffolded code. + session.StartSpinner("Calculating diff...") + diffs, err := diff.CalculateDiffs(sFrom.Output, sTo.Output) + if err != nil { + return errors.Wrap(err, "failed to calculate diff") + } + + formatedDiffs, err := diff.FormatDiffs(diffs) + if err != nil { + return errors.Wrap(err, "failed to save diff map") + } + session.StopSpinner() + session.EventBus().SendInfo("Diff calculated successfully") + + output, err = filepath.Abs(output) + if err != nil { + return errors.Wrap(err, "failed to find the abs path") + } + + // Generate the docs file. + g, err := doc.NewGenerator(doc.Options{ + Path: output, + FromVersion: igniteRepo.From, + ToVersion: igniteRepo.To, + Diffs: string(formatedDiffs), + Description: releaseDescription, + }) + if err != nil { + return errors.Wrap(err, "failed to create the doc generator object") + } + + runner := xgenny.NewRunner(cmd.Context(), output) + sm, err := runner.RunAndApply(g, xgenny.ApplyPreRun(func(_, _, duplicated []string) error { + if len(duplicated) == 0 { + return nil + } + question := fmt.Sprintf("Do you want to overwrite the existing files? \n%s", strings.Join(duplicated, "\n")) + return session.AskConfirm(question) + })) + if err != nil { + return err + } + + files := append(sm.CreatedFiles(), sm.ModifiedFiles()...) 
+ if len(files) == 0 { + return errors.Errorf("migration doc not created at %s", output) + } + session.EventBus().SendInfo( + fmt.Sprintf("Migration doc generated successfully at %s", files[0]), + ) + + return nil + }, + } + + defaultRepoURL := repo.DefaultRepoURL.String() + cmd.Flags().StringP(flagFrom, "f", "", "Version of Ignite or path to Ignite source code to generate the diff from") + cmd.Flags().StringP(flagTo, "t", "", "Version of Ignite or path to Ignite source code to generate the diff to") + cmd.Flags().StringP(flagOutput, "o", defaultDocPath, "Output directory to save the migration document") + cmd.Flags().StringP(flagSource, "s", "", "Path to Ignite source code repository. Set the source automatically set the cleanup to false") + cmd.Flags().String(flagRepoURL, defaultRepoURL, "Git URL for the Ignite repository") + cmd.Flags().String(flagRepoOutput, "", "Output path to clone the Ignite repository") + cmd.Flags().String(flagScaffoldOutput, "", "Output path to clone the Ignite repository") + cmd.Flags().String(flagScaffoldCache, "", "Path to cache directory") + cmd.Flags().BoolP(flagYes, "y", false, "answers interactive yes/no questions with yes") + + return cmd +} diff --git a/ignite/internal/tools/gen-mig-diffs/go.mod b/ignite/internal/tools/gen-mig-diffs/go.mod new file mode 100644 index 0000000..fbf125c --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/go.mod @@ -0,0 +1,103 @@ +module github.com/ignite/cli/ignite/internal/tools/gen-mig-diffs + +go 1.25.4 + +replace github.com/ignite/cli/v29 => ../../../../ + +require ( + github.com/Masterminds/semver/v3 v3.4.0 + github.com/go-git/go-git/v5 v5.17.1 + github.com/gobuffalo/genny/v2 v2.1.1 + github.com/gobuffalo/plush/v4 v4.1.22 + github.com/gobwas/glob v0.2.3 + github.com/hexops/gotextdiff v1.0.3 + github.com/ignite/cli/v29 v29.6.1 + github.com/spf13/cobra v1.10.1 + github.com/stretchr/testify v1.11.1 +) + +require ( + dario.cat/mergo v1.0.2 // indirect + github.com/Microsoft/go-winio v0.6.2 // 
indirect + github.com/ProtonMail/go-crypto v1.3.0 // indirect + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect + github.com/aymerick/douceur v0.2.0 // indirect + github.com/briandowns/spinner v1.23.2 // indirect + github.com/charmbracelet/bubbletea v1.3.10 // indirect + github.com/charmbracelet/colorprofile v0.3.3 // indirect + github.com/charmbracelet/lipgloss v1.1.0 // indirect + github.com/charmbracelet/x/ansi v0.11.0 // indirect + github.com/charmbracelet/x/cellbuf v0.0.14 // indirect + github.com/charmbracelet/x/term v0.2.2 // indirect + github.com/clipperhouse/displaywidth v0.5.0 // indirect + github.com/clipperhouse/stringish v0.1.1 // indirect + github.com/clipperhouse/uax29/v2 v2.3.0 // indirect + github.com/cloudflare/circl v1.6.3 // indirect + github.com/cockroachdb/errors v1.12.0 // indirect + github.com/cockroachdb/logtags v0.0.0-20241215232642-bb51bb14a506 // indirect + github.com/cockroachdb/redact v1.1.6 // indirect + github.com/cosmos/gogoproto v1.7.2 // indirect + github.com/cyphar/filepath-securejoin v0.6.0 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/emicklei/proto v1.14.2 // indirect + github.com/emicklei/proto-contrib v0.18.2 // indirect + github.com/emirpasic/gods v1.18.1 // indirect + github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect + github.com/fatih/color v1.18.0 // indirect + github.com/fatih/structs v1.1.0 // indirect + github.com/getsentry/sentry-go v0.36.2 // indirect + github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect + github.com/go-git/go-billy/v5 v5.8.0 // indirect + github.com/gobuffalo/flect v1.0.3 // indirect + github.com/gobuffalo/github_flavored_markdown v1.1.4 // indirect + github.com/gobuffalo/helpers v0.6.10 // indirect + github.com/gobuffalo/logger v1.0.7 // indirect + github.com/gobuffalo/packd v1.0.2 // indirect + github.com/gobuffalo/plush/v5 v5.0.10 // indirect + github.com/gobuffalo/tags/v3 v3.1.4 // 
indirect + github.com/gobuffalo/validate/v3 v3.3.3 // indirect + github.com/gofrs/uuid v4.4.0+incompatible // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect + github.com/google/go-cmp v0.7.0 // indirect + github.com/gorilla/css v1.0.1 // indirect + github.com/iancoleman/strcase v0.3.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect + github.com/kevinburke/ssh_config v1.4.0 // indirect + github.com/klauspost/cpuid/v2 v2.3.0 // indirect + github.com/kr/pretty v0.3.1 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/lucasb-eyer/go-colorful v1.3.0 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-localereader v0.0.1 // indirect + github.com/mattn/go-runewidth v0.0.19 // indirect + github.com/microcosm-cc/bluemonday v1.0.27 // indirect + github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect + github.com/muesli/cancelreader v0.2.2 // indirect + github.com/muesli/reflow v0.3.0 // indirect + github.com/muesli/termenv v0.16.0 // indirect + github.com/pjbgf/sha1cd v0.5.0 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/rogpeppe/go-internal v1.14.1 // indirect + github.com/sergi/go-diff v1.4.0 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect + github.com/skeema/knownhosts v1.3.2 // indirect + github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d // indirect + github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e // indirect + github.com/spf13/pflag v1.0.10 // indirect + github.com/xanzy/ssh-agent v0.3.3 // indirect + github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect + 
golang.org/x/crypto v0.46.0 // indirect + golang.org/x/net v0.48.0 // indirect + golang.org/x/sync v0.19.0 // indirect + golang.org/x/sys v0.39.0 // indirect + golang.org/x/term v0.38.0 // indirect + golang.org/x/text v0.32.0 // indirect + google.golang.org/protobuf v1.36.10 // indirect + gopkg.in/warnings.v0 v0.1.2 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/ignite/internal/tools/gen-mig-diffs/go.sum b/ignite/internal/tools/gen-mig-diffs/go.sum new file mode 100644 index 0000000..a0cabe9 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/go.sum @@ -0,0 +1,311 @@ +dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= +dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= +github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0= +github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= +github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw= +github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= +github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= +github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod 
h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= +github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= +github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= +github.com/briandowns/spinner v1.23.2 h1:Zc6ecUnI+YzLmJniCfDNaMbW0Wid1d5+qcTq4L2FW8w= +github.com/briandowns/spinner v1.23.2/go.mod h1:LaZeM4wm2Ywy6vO571mvhQNRcWfRUnXOs0RcKV0wYKM= +github.com/charmbracelet/bubbletea v1.3.10 h1:otUDHWMMzQSB0Pkc87rm691KZ3SWa4KUlvF9nRvCICw= +github.com/charmbracelet/bubbletea v1.3.10/go.mod h1:ORQfo0fk8U+po9VaNvnV95UPWA1BitP1E0N6xJPlHr4= +github.com/charmbracelet/colorprofile v0.3.3 h1:DjJzJtLP6/NZ8p7Cgjno0CKGr7wwRJGxWUwh2IyhfAI= +github.com/charmbracelet/colorprofile v0.3.3/go.mod h1:nB1FugsAbzq284eJcjfah2nhdSLppN2NqvfotkfRYP4= +github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY= +github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30= +github.com/charmbracelet/x/ansi v0.11.0 h1:uuIVK7GIplwX6UBIz8S2TF8nkr7xRlygSsBRjSJqIvA= +github.com/charmbracelet/x/ansi v0.11.0/go.mod h1:uQt8bOrq/xgXjlGcFMc8U2WYbnxyjrKhnvTQluvfCaE= +github.com/charmbracelet/x/cellbuf v0.0.14 h1:iUEMryGyFTelKW3THW4+FfPgi4fkmKnnaLOXuc+/Kj4= +github.com/charmbracelet/x/cellbuf v0.0.14/go.mod h1:P447lJl49ywBbil/KjCk2HexGh4tEY9LH0/1QrZZ9rA= +github.com/charmbracelet/x/term v0.2.2 h1:xVRT/S2ZcKdhhOuSP4t5cLi5o+JxklsoEObBSgfgZRk= +github.com/charmbracelet/x/term v0.2.2/go.mod h1:kF8CY5RddLWrsgVwpw4kAa6TESp6EB5y3uxGLeCqzAI= +github.com/clipperhouse/displaywidth v0.5.0 h1:AIG5vQaSL2EKqzt0M9JMnvNxOCRTKUc4vUnLWGgP89I= +github.com/clipperhouse/displaywidth v0.5.0/go.mod h1:R+kHuzaYWFkTm7xoMmK1lFydbci4X2CicfbGstSGg0o= +github.com/clipperhouse/stringish v0.1.1 h1:+NSqMOr3GR6k1FdRhhnXrLfztGzuG+VuFDfatpWHKCs= +github.com/clipperhouse/stringish v0.1.1/go.mod h1:v/WhFtE1q0ovMta2+m+UbpZ+2/HEXNWYXQgCt4hdOzA= +github.com/clipperhouse/uax29/v2 v2.3.0 
h1:SNdx9DVUqMoBuBoW3iLOj4FQv3dN5mDtuqwuhIGpJy4= +github.com/clipperhouse/uax29/v2 v2.3.0/go.mod h1:Wn1g7MK6OoeDT0vL+Q0SQLDz/KpfsVRgg6W7ihQeh4g= +github.com/cloudflare/circl v1.6.3 h1:9GPOhQGF9MCYUeXyMYlqTR6a5gTrgR/fBLXvUgtVcg8= +github.com/cloudflare/circl v1.6.3/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4= +github.com/cockroachdb/errors v1.12.0 h1:d7oCs6vuIMUQRVbi6jWWWEJZahLCfJpnJSVobd1/sUo= +github.com/cockroachdb/errors v1.12.0/go.mod h1:SvzfYNNBshAVbZ8wzNc/UPK3w1vf0dKDUP41ucAIf7g= +github.com/cockroachdb/logtags v0.0.0-20241215232642-bb51bb14a506 h1:ASDL+UJcILMqgNeV5jiqR4j+sTuvQNHdf2chuKj1M5k= +github.com/cockroachdb/logtags v0.0.0-20241215232642-bb51bb14a506/go.mod h1:Mw7HqKr2kdtu6aYGn3tPmAftiP3QPX63LdK/zcariIo= +github.com/cockroachdb/redact v1.1.6 h1:zXJBwDZ84xJNlHl1rMyCojqyIxv+7YUpQiJLQ7n4314= +github.com/cockroachdb/redact v1.1.6/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= +github.com/cosmos/gogoproto v1.7.2 h1:5G25McIraOC0mRFv9TVO139Uh3OklV2hczr13KKVHCA= +github.com/cosmos/gogoproto v1.7.2/go.mod h1:8S7w53P1Y1cHwND64o0BnArT6RmdgIvsBuco6uTllsk= +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/cyphar/filepath-securejoin v0.6.0 h1:BtGB77njd6SVO6VztOHfPxKitJvd/VPT+OFBFMOi1Is= +github.com/cyphar/filepath-securejoin v0.6.0/go.mod h1:A8hd4EnAeyujCJRrICiOWqjS1AX0a9kM5XL+NwKoYSc= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o= +github.com/elazarl/goproxy 
v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE= +github.com/emicklei/proto v1.14.0/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A= +github.com/emicklei/proto v1.14.2 h1:wJPxPy2Xifja9cEMrcA/g08art5+7CGJNFNk35iXC1I= +github.com/emicklei/proto v1.14.2/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A= +github.com/emicklei/proto-contrib v0.18.2 h1:/3HbJnGuiT5x4TgGQjRkPYR5QP2i8sgUgeNbpmOEq+U= +github.com/emicklei/proto-contrib v0.18.2/go.mod h1:0jnPdGJOjrTDObeFfSM1XKx1Z/mjJzYMomI69tJIq58= +github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= +github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM= +github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= +github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= +github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= +github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= +github.com/getsentry/sentry-go v0.36.2 h1:uhuxRPTrUy0dnSzTd0LrYXlBYygLkKY0hhlG5LXarzM= +github.com/getsentry/sentry-go v0.36.2/go.mod h1:p5Im24mJBeruET8Q4bbcMfCQ+F+Iadc4L48tB1apo2c= +github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c= +github.com/gliderlabs/ssh v0.3.8/go.mod h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU= +github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= +github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod 
h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= +github.com/go-git/go-billy/v5 v5.8.0 h1:I8hjc3LbBlXTtVuFNJuwYuMiHvQJDq1AT6u4DwDzZG0= +github.com/go-git/go-billy/v5 v5.8.0/go.mod h1:RpvI/rw4Vr5QA+Z60c6d6LXH0rYJo0uD5SqfmrrheCY= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII= +github.com/go-git/go-git/v5 v5.17.1 h1:WnljyxIzSj9BRRUlnmAU35ohDsjRK0EKmL0evDqi5Jk= +github.com/go-git/go-git/v5 v5.17.1/go.mod h1:pW/VmeqkanRFqR6AljLcs7EA7FbZaN5MQqO7oZADXpo= +github.com/gobuffalo/flect v0.3.0/go.mod h1:5pf3aGnsvqvCj50AVni7mJJF8ICxGZ8HomberC3pXLE= +github.com/gobuffalo/flect v1.0.3 h1:xeWBM2nui+qnVvNM4S3foBhCAL2XgPU+a7FdpelbTq4= +github.com/gobuffalo/flect v1.0.3/go.mod h1:A5msMlrHtLqh9umBSnvabjsMrCcCpAyzglnDvkbYKHs= +github.com/gobuffalo/genny/v2 v2.1.1 h1:WJsJVaekfD1jkoi+EgSaqMZWH6PBf925LLDn3TK4JEw= +github.com/gobuffalo/genny/v2 v2.1.1/go.mod h1:PK9LyLU5o6cTCZN/+c0qm6PzAkaTEAkHnuK82F46H/c= +github.com/gobuffalo/github_flavored_markdown v1.1.3/go.mod h1:IzgO5xS6hqkDmUh91BW/+Qxo/qYnvfzoz3A7uLkg77I= +github.com/gobuffalo/github_flavored_markdown v1.1.4 h1:WacrEGPXUDX+BpU1GM/Y0ADgMzESKNWls9hOTG1MHVs= +github.com/gobuffalo/github_flavored_markdown v1.1.4/go.mod h1:Vl9686qrVVQou4GrHRK/KOG3jCZOKLUqV8MMOAYtlso= +github.com/gobuffalo/helpers v0.6.7/go.mod h1:j0u1iC1VqlCaJEEVkZN8Ia3TEzfj/zoXANqyJExTMTA= +github.com/gobuffalo/helpers v0.6.10 h1:puKDCOrJ0EIq5ScnTRgKyvEZ05xQa+gwRGCpgoh6Ek8= +github.com/gobuffalo/helpers v0.6.10/go.mod h1:r52L6VSnByLJFOmURp1irvzgSakk7RodChi1YbGwk8I= +github.com/gobuffalo/logger v1.0.7 h1:LTLwWelETXDYyqF/ASf0nxaIcdEOIJNxRokPcfI/xbU= +github.com/gobuffalo/logger v1.0.7/go.mod h1:u40u6Bq3VVvaMcy5sRBclD8SXhBYPS0Qk95ubt+1xJM= +github.com/gobuffalo/packd v1.0.2 h1:Yg523YqnOxGIWCp69W12yYBKsoChwI7mtu6ceM9Bwfw= +github.com/gobuffalo/packd v1.0.2/go.mod 
h1:sUc61tDqGMXON80zpKGp92lDb86Km28jfvX7IAyxFT8= +github.com/gobuffalo/plush/v4 v4.1.22 h1:bPQr5PsiTg54UGMsfvnIAvFmUfxzD/ri+wbpu7PlmTM= +github.com/gobuffalo/plush/v4 v4.1.22/go.mod h1:WiKHJx3qBvfaDVlrv8zT7NCd3dEMaVR/fVxW4wqV17M= +github.com/gobuffalo/plush/v5 v5.0.10 h1:IJ6qI4zjNT9FxqDrE0f9w4z8PVe9XXeuu7vY0792iOk= +github.com/gobuffalo/plush/v5 v5.0.10/go.mod h1:C08u/VEqzzPBXFF/yqs40P/5Cvc/zlZsMzhCxXyWJmU= +github.com/gobuffalo/tags/v3 v3.1.4 h1:X/ydLLPhgXV4h04Hp2xlbI2oc5MDaa7eub6zw8oHjsM= +github.com/gobuffalo/tags/v3 v3.1.4/go.mod h1:ArRNo3ErlHO8BtdA0REaZxijuWnWzF6PUXngmMXd2I0= +github.com/gobuffalo/validate/v3 v3.3.3 h1:o7wkIGSvZBYBd6ChQoLxkz2y1pfmhbI4jNJYh6PuNJ4= +github.com/gobuffalo/validate/v3 v3.3.3/go.mod h1:YC7FsbJ/9hW/VjQdmXPvFqvRis4vrRYFxr69WiNZw6g= +github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= +github.com/gofrs/uuid v4.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gofrs/uuid v4.4.0+incompatible h1:3qXRTX8/NbyulANqlc0lchS1gqAVxRgsuW1YrTJupqA= +github.com/gofrs/uuid v4.4.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/gorilla/css 
v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= +github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8= +github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0= +github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= +github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= +github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= +github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= +github.com/kevinburke/ssh_config v1.4.0 h1:6xxtP5bZ2E4NF5tuQulISpTO2z8XbtH8cg1PWkxoFkQ= +github.com/kevinburke/ssh_config v1.4.0/go.mod h1:q2RIzfka+BXARoNexmF9gkxEX7DmvbW9P4hIVx2Kg4M= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y= +github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 
h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag= +github.com/lucasb-eyer/go-colorful v1.3.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4= +github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88= +github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= +github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw= +github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs= +github.com/microcosm-cc/bluemonday v1.0.20/go.mod h1:yfBmMi8mxvaZut3Yytv+jTXRY8mxyjJ0/kQBTElld50= +github.com/microcosm-cc/bluemonday v1.0.22/go.mod h1:ytNkv4RrDrLJ2pqlsSI46O6IVXmZOBBD4SaJyDwwTkM= +github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk= +github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo= +github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA= +github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo= +github.com/muesli/reflow v0.3.0 
h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s= +github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8= +github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc= +github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk= +github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k= +github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY= +github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= +github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= +github.com/pjbgf/sha1cd v0.5.0 h1:a+UkboSi1znleCDUNT3M5YxjOnN1fz2FhN48FlwCxs0= +github.com/pjbgf/sha1cd v0.5.0/go.mod h1:lhpGlyHLpQZoxMv8HcgXvZEhcGs0PG/vsZnEJ7H0iCM= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod 
h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I= +github.com/sergi/go-diff v1.4.0 h1:n/SP9D5ad1fORl+llWyN+D6qoUETXNZARKjyY2/KVCw= +github.com/sergi/go-diff v1.4.0/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= +github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/skeema/knownhosts v1.3.2 h1:EDL9mgf4NzwMXCTfaxSD/o/a5fxDw/xL9nkU28JjdBg= +github.com/skeema/knownhosts v1.3.2/go.mod h1:bEg3iQAuw+jyiw+484wwFJoKSLwcfd7fqRy+N0QTiow= +github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d h1:yKm7XZV6j9Ev6lojP2XaIshpT4ymkqhMeSghO5Ps00E= +github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d/go.mod h1:UdhH50NIW0fCiwBSr0co2m7BnFLdv4fQTgdqdJTHFeE= +github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e h1:qpG93cPwA5f7s/ZPBJnGOYQNK/vKsaDaseuKT5Asee8= +github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e/go.mod h1:HuIsMU8RRBOtsCgI77wP899iHVBQpCmg4ErYMZB+2IA= +github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s= +github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0= +github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= +github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stretchr/objx v0.1.0/go.mod 
h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= +github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto 
v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU= +golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0= +golang.org/x/exp v0.0.0-20250718183923-645b1fa84792 h1:R9PFI6EUdfVKgwKjZef7QIwGcBKu86OEFpJ9nUEP2l4= +golang.org/x/exp v0.0.0-20250718183923-645b1fa84792/go.mod h1:A+z0yzpGtvnG90cToK5n2tu8UJVP2XUATh+r+sfOOOc= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.0.0-20221002022538-bcab6841153b/go.mod 
h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU= +golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= +golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk= +golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.0.0-20220722155259-a9ba230a4035/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.38.0 h1:PQ5pkm/rLO6HnxFR7N2lJHOZX6Kez5Y1gDSJla6jo7Q= +golang.org/x/term v0.38.0/go.mod h1:bSEAKrOT1W+VSu9TSCMtoGEOUcKxOKgl3LE5QEF/xVg= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU= +golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE= +google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= 
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/ignite/internal/tools/gen-mig-diffs/main.go b/ignite/internal/tools/gen-mig-diffs/main.go new file mode 100644 index 0000000..fffb62f --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/main.go @@ -0,0 +1,15 @@ +package main + +import ( + "fmt" + "os" + + "github.com/ignite/cli/ignite/internal/tools/gen-mig-diffs/cmd" +) + +func main() { + if err := cmd.NewRootCmd().Execute(); err != nil { + fmt.Println(err) + os.Exit(1) + } +} diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/cache/cache.go b/ignite/internal/tools/gen-mig-diffs/pkg/cache/cache.go new file mode 100644 index 0000000..c89f8e6 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/cache/cache.go @@ -0,0 +1,77 @@ +package cache + +import ( + "os" + "path/filepath" + "sync" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xos" +) + +// Cache represents a cache for executed scaffold command. +type Cache struct { + cachePath string + cachesPath map[string]string + mu sync.RWMutex +} + +// New initializes a new Cache instance. +func New(path string) (*Cache, error) { + return &Cache{ + cachePath: path, + cachesPath: make(map[string]string), + }, os.MkdirAll(path, os.ModePerm) +} + +// Save creates a new cache. +func (c *Cache) Save(name, path string) error { + c.mu.Lock() + defer c.mu.Unlock() + + dstPath := filepath.Join(c.cachePath, name) + if err := xos.CopyFolder(path, dstPath); err != nil { + return err + } + + c.cachesPath[name] = dstPath + return nil +} + +// Has return if the cache exist. +func (c *Cache) Has(name string) bool { + c.mu.RLock() + defer c.mu.RUnlock() + + cachePath, ok := c.cachesPath[name] + if !ok { + return false + } + if _, err := os.Stat(cachePath); os.IsNotExist(err) { + return false + } + + return true +} + +// Get return the cache path and copy all files to the destination path. 
+func (c *Cache) Get(name, dstPath string) error { + c.mu.RLock() + defer c.mu.RUnlock() + + cachePath, ok := c.cachesPath[name] + if !ok { + return errors.Errorf("command %s not exist in the cache list", name) + } + if _, err := os.Stat(cachePath); os.IsNotExist(err) { + return errors.Wrapf(err, "cache %s not exist in the path", name) + } + dstPath, err := filepath.Abs(dstPath) + if err != nil { + return err + } + if err := xos.CopyFolder(cachePath, dstPath); err != nil { + return errors.Wrapf(err, "error to copy cache from %s to %s", cachePath, dstPath) + } + return nil +} diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/diff/compute.go b/ignite/internal/tools/gen-mig-diffs/pkg/diff/compute.go new file mode 100644 index 0000000..09706f2 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/diff/compute.go @@ -0,0 +1,119 @@ +package diff + +import ( + "fmt" + "io/fs" + "os" + "path/filepath" + + "github.com/gobwas/glob" + "github.com/hexops/gotextdiff" + "github.com/hexops/gotextdiff/myers" + "github.com/hexops/gotextdiff/span" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// computeFS computes the unified diffs between the origin and modified filesystems. +// but ignores files that match the given globs. 
+func computeFS(origin, modified fs.FS, ignoreGlobs ...string) ([]gotextdiff.Unified, error) { + compiledGlobs, err := compileGlobs(ignoreGlobs) + if err != nil { + return nil, err + } + + marked := make(map[string]struct{}) + unified := make([]gotextdiff.Unified, 0) + err = fs.WalkDir(origin, ".", func(path string, d fs.DirEntry, err error) error { + if err != nil { + return errors.Errorf("failed to walk origin: %w", err) + } + + if d.IsDir() { + return nil + } + + if matchGlobs(compiledGlobs, path) { + return nil + } + + marked[path] = struct{}{} + data, err := fs.ReadFile(origin, path) + if err != nil { + return errors.Errorf("failed to read file %q from origin: %w", path, err) + } + originFile := string(data) + + data, err = fs.ReadFile(modified, path) + if !os.IsNotExist(err) && err != nil { + return errors.Errorf("failed to read file %q from modified: %w", path, err) + } + modifiedFile := string(data) + + edits := myers.ComputeEdits(span.URIFromURI(fmt.Sprintf("file://%s", path)), originFile, modifiedFile) + if len(edits) > 0 { + unified = append(unified, gotextdiff.ToUnified(path, path, originFile, edits)) + } + return nil + }) + if err != nil { + return nil, err + } + + err = fs.WalkDir(modified, ".", func(path string, d fs.DirEntry, err error) error { + if err != nil { + return errors.Errorf("failed to walk modified: %w", err) + } + + if d.IsDir() { + return nil + } + + if _, ok := marked[path]; ok { + return nil + } + + if matchGlobs(compiledGlobs, path) { + return nil + } + + originFile := "" + data, err := fs.ReadFile(modified, path) + if err != nil { + return errors.Errorf("failed to read file %q from modified: %w", path, err) + } + modifiedFile := string(data) + + edits := myers.ComputeEdits(span.URIFromURI(fmt.Sprintf("file://%s", path)), originFile, modifiedFile) + if len(edits) > 0 { + unified = append(unified, gotextdiff.ToUnified(path, path, originFile, edits)) + } + return nil + }) + if err != nil { + return nil, err + } + + return unified, nil 
+} + +func compileGlobs(globs []string) ([]glob.Glob, error) { + var compiledGlobs []glob.Glob + for _, g := range globs { + compiledGlob, err := glob.Compile(g, filepath.Separator) + if err != nil { + return nil, errors.Errorf("failed to compile glob %q: %w", g, err) + } + compiledGlobs = append(compiledGlobs, compiledGlob) + } + return compiledGlobs, nil +} + +func matchGlobs(globs []glob.Glob, path string) bool { + for _, g := range globs { + if g.Match(path) { + return true + } + } + return false +} diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/diff/compute_test.go b/ignite/internal/tools/gen-mig-diffs/pkg/diff/compute_test.go new file mode 100644 index 0000000..310edf2 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/diff/compute_test.go @@ -0,0 +1,55 @@ +package diff + +import ( + "testing" + "testing/fstest" + + "github.com/stretchr/testify/require" +) + +func TestComputeFS(t *testing.T) { + require := require.New(t) + + origin := fstest.MapFS{ + "foo.txt": &fstest.MapFile{ + Data: []byte("hello"), + }, + "bar.txt": &fstest.MapFile{ + Data: []byte("unmodified"), + }, + "pkg/main.go": &fstest.MapFile{ + Data: []byte("package main"), + }, + } + modified := fstest.MapFS{ + "foo.txt": &fstest.MapFile{ + Data: []byte("world"), + }, + "bar.txt": &fstest.MapFile{ + Data: []byte("unmodified"), + }, + "new.txt": &fstest.MapFile{ + Data: []byte("new file"), + }, + "pkg/main.go": &fstest.MapFile{ + Data: []byte("package main\nfunc main() {}"), + }, + } + + unified, err := computeFS(origin, modified) + require.NoError(err) + require.Len(unified, 3) + expectedFiles := []string{"foo.txt", "new.txt", "pkg/main.go"} + for _, u := range unified { + require.Contains(expectedFiles, u.From, "unexpected file in diff: %s", u.From) + } + + // Test ignoring files + unified, err = computeFS(origin, modified, "**.go") + require.NoError(err) + require.Len(unified, 2) + expectedFiles = []string{"foo.txt", "new.txt"} + for _, u := range unified { + 
require.Contains(expectedFiles, u.From, "unexpected file in diff: %s", u.From) + } +} diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/diff/diff.go b/ignite/internal/tools/gen-mig-diffs/pkg/diff/diff.go new file mode 100644 index 0000000..d141ee6 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/diff/diff.go @@ -0,0 +1,155 @@ +package diff + +import ( + "bytes" + "fmt" + "os" + "path/filepath" + + "github.com/hexops/gotextdiff" + + "github.com/ignite/cli/v29/ignite/pkg/xstrings" +) + +type Diffs map[string][]gotextdiff.Unified + +var diffIgnoreGlobs = []string{ + ".git/**", + "**.md", + "go.sum", + "**_test.go", + "**.pb.go", + "**.pb.gw.go", + "**.pulsar.go", + "**/node_modules/**", + "**/openapi.yml", + "**/openapi.json", + ".gitignore", + ".github/**", + "**.html", + "**.css", + "**.js", + "**.ts", + "**.json", +} + +// CalculateDiffs calculate the diff from two directories. +func CalculateDiffs(fromDir, toDir string) (Diffs, error) { + paths, err := readRootFolders(fromDir) + if err != nil { + return nil, err + } + toPaths, err := readRootFolders(toDir) + if err != nil { + return nil, err + } + for key, value := range toPaths { + paths[key] = value + } + + diffs := make(Diffs) + for path := range paths { + from := filepath.Join(fromDir, path) + if err := os.MkdirAll(from, os.ModePerm); err != nil { + return nil, err + } + to := filepath.Join(toDir, path) + if err := os.MkdirAll(to, os.ModePerm); err != nil { + return nil, err + } + + computedDiff, err := computeFS( + os.DirFS(from), + os.DirFS(to), + diffIgnoreGlobs..., + ) + if err != nil { + return nil, err + } + + diffs[path] = computedDiff + } + return subtractBaseDiffs(diffs), nil +} + +// SaveDiffs save all migration diffs to the output path. 
+func SaveDiffs(diffs Diffs, outputPath string) error { + if err := os.MkdirAll(outputPath, os.ModePerm); err != nil { + return err + } + + for name, diffs := range diffs { + output, err := os.Create(filepath.Join(outputPath, name+".diff")) + if err != nil { + return err + } + for _, d := range diffs { + output.WriteString(fmt.Sprint(d)) + output.WriteString("\n") + } + if err := output.Close(); err != nil { + return err + } + } + + return nil +} + +// FormatDiffs format all diffs in a single markdown byte array. +func FormatDiffs(diffs Diffs) ([]byte, error) { + if len(diffs) == 0 { + return []byte{}, nil + } + buffer := &bytes.Buffer{} + for name, diffs := range diffs { + if len(diffs) == 0 { + continue + } + buffer.WriteString(fmt.Sprintf("#### **%s diff**\n\n", xstrings.ToUpperFirst(name))) + buffer.WriteString("```diff\n") + for _, d := range diffs { + buffer.WriteString(fmt.Sprint(d)) + } + buffer.WriteString("```\n\n") + } + return buffer.Bytes(), nil +} + +// readRootFolders return a map of all root folders from a directory. +func readRootFolders(dir string) (map[string]struct{}, error) { + paths := make(map[string]struct{}) + dirEntries, err := os.ReadDir(dir) + if err != nil { + return nil, err + } + for _, entry := range dirEntries { + if entry.IsDir() { + paths[entry.Name()] = struct{}{} + } + } + return paths, nil +} + +// subtractBaseDiffs removes chain and module diffs from other diffs. 
+func subtractBaseDiffs(diffs Diffs) Diffs { + chainDiff := diffs["chain"] + moduleDiff := diffs["module"] + for name, d := range diffs { + if name != "chain" && name != "module" { + diffs[name] = subtractUnifieds(d, moduleDiff) + } + } + diffs["module"] = subtractUnifieds(moduleDiff, chainDiff) + return diffs +} + +func subtractUnifieds(a, b []gotextdiff.Unified) []gotextdiff.Unified { + for i, ad := range a { + for _, bd := range b { + if ad.From == bd.From && ad.To == bd.To { + a[i] = subtract(ad, bd) + } + } + } + return a +} diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/diff/subtract.go b/ignite/internal/tools/gen-mig-diffs/pkg/diff/subtract.go new file mode 100644 index 0000000..728d9c9 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/diff/subtract.go @@ -0,0 +1,165 @@ +package diff + +import ( + "sort" + + "github.com/hexops/gotextdiff" +) + +// subtract two unified diffs from each other. +func subtract(a, b gotextdiff.Unified) gotextdiff.Unified { + return gotextdiff.Unified{ + From: a.From, + To: a.To, + Hunks: subtractHunks(a.Hunks, b.Hunks), + } +} + +func subtractHunks(src, base []*gotextdiff.Hunk) []*gotextdiff.Hunk { + sortHunks(src) + sortHunks(base) + + res := make([]*gotextdiff.Hunk, 0, len(src)) + offset := 0 + for i, j := 0, 0; i < len(src) || j < len(base); { + if i >= len(src) { + break + } + if j >= len(base) { + res = append(res, src[i]) + i++ + continue + } + + s := src[i] + b := base[j] + + switch { + case beforeHunk(s, b, offset): + res = append(res, s) + offset += calculateHunkOffsetChange(s.Lines) + i++ + case beforeHunk(b, s, -offset): + j++ + case hunksOverlap(s, b, offset): + if s.FromLine < b.FromLine { + res = append(res, s) + offset += calculateHunkOffsetChange(s.Lines) - calculateHunkOffsetChange(b.Lines) + i++ + } else { + offset += calculateHunkOffsetChange(s.Lines) - calculateHunkOffsetChange(b.Lines) + j++ + } + default: + h := subtractHunk(s, b) + if !isHunkEmpty(h) { + res = append(res, subtractHunk(s, b)) 
+ } + offset += calculateHunkOffsetChange(s.Lines) - calculateHunkOffsetChange(b.Lines) + i++ + j++ + } + + } + + return res +} + +func sortHunks(hunks []*gotextdiff.Hunk) { + sort.Slice(hunks, func(i, j int) bool { + return hunks[i].FromLine < hunks[j].FromLine + }) +} + +// beforeHunk returns true if a comes before b. +func beforeHunk(a, b *gotextdiff.Hunk, offset int) bool { + return a.ToLine-calculateEndEqualLines(a) < b.FromLine+calculateStartEqualLines(b)+offset +} + +func calculateStartEqualLines(h *gotextdiff.Hunk) int { + lines := 0 + for _, l := range h.Lines { + if l.Kind == gotextdiff.Equal { + lines++ + } else { + break + } + } + return lines +} + +func calculateEndEqualLines(h *gotextdiff.Hunk) int { + lines := 0 + for i := len(h.Lines) - 1; i >= 0; i-- { + if h.Lines[i].Kind == gotextdiff.Equal { + lines++ + } else { + break + } + } + return lines +} + +func calculateHunkOffsetChange(lines []gotextdiff.Line) int { + offset := 0 + for _, l := range lines { + if l.Kind == gotextdiff.Insert { + offset++ + } else if l.Kind == gotextdiff.Delete { + offset-- + } + } + return offset +} + +func hunksOverlap(a, b *gotextdiff.Hunk, offset int) bool { + if !isLineInHunk(a.FromLine, b, offset) && isLineInHunk(a.ToLine, b, offset) { + return true + } + if isLineInHunk(a.FromLine, b, offset) && !isLineInHunk(a.ToLine, b, offset) { + return true + } + return false +} + +func isLineInHunk(line int, h *gotextdiff.Hunk, offset int) bool { + return line-calculateStartEqualLines(h) > h.FromLine+offset && line+calculateEndEqualLines(h) < h.ToLine+offset +} + +func subtractHunk(a, b *gotextdiff.Hunk) *gotextdiff.Hunk { + lines := subtractLines(a.Lines, b.Lines) + return &gotextdiff.Hunk{ + FromLine: a.FromLine, + ToLine: a.ToLine + calculateHunkOffsetChange(a.Lines) - calculateHunkOffsetChange(lines), + Lines: lines, + } +} + +func subtractLines(a, b []gotextdiff.Line) []gotextdiff.Line { + res := make([]gotextdiff.Line, 0, len(a)) + for _, la := range a { + rep := false 
+ for _, lb := range b { + if la.Kind != gotextdiff.Equal && la.Kind == lb.Kind && la.Content == lb.Content { + rep = true + break + } + } + + if !rep { + res = append(res, la) + } + } + + return res +} + +func isHunkEmpty(h *gotextdiff.Hunk) bool { + effectiveLines := 0 + for _, l := range h.Lines { + if l.Kind != gotextdiff.Equal { + effectiveLines++ + } + } + return effectiveLines == 0 +} diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/diff/subtract_test.go b/ignite/internal/tools/gen-mig-diffs/pkg/diff/subtract_test.go new file mode 100644 index 0000000..3cff01d --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/diff/subtract_test.go @@ -0,0 +1,178 @@ +package diff + +import ( + "reflect" + "testing" + + "github.com/hexops/gotextdiff" +) + +func TestSubtract(t *testing.T) { + type args struct { + a gotextdiff.Unified + b gotextdiff.Unified + } + tests := []struct { + name string + args args + want gotextdiff.Unified + }{ + { + name: "Equal diffs", + args: args{ + a: gotextdiff.Unified{ + Hunks: []*gotextdiff.Hunk{ + { + FromLine: 1, + ToLine: 3, + Lines: []gotextdiff.Line{ + {Kind: gotextdiff.Equal, Content: "equal1\n"}, + {Kind: gotextdiff.Insert, Content: "insert1\n"}, + {Kind: gotextdiff.Equal, Content: "equal2\n"}, + {Kind: gotextdiff.Delete, Content: "delete1\n"}, + }, + }, + }, + }, + b: gotextdiff.Unified{ + Hunks: []*gotextdiff.Hunk{ + { + FromLine: 1, + ToLine: 3, + Lines: []gotextdiff.Line{ + {Kind: gotextdiff.Equal, Content: "equal1\n"}, + {Kind: gotextdiff.Insert, Content: "insert1\n"}, + {Kind: gotextdiff.Equal, Content: "equal2\n"}, + {Kind: gotextdiff.Delete, Content: "delete1\n"}, + }, + }, + }, + }, + }, + want: gotextdiff.Unified{ + Hunks: []*gotextdiff.Hunk{}, + }, + }, + { + name: "Add hunk at the beginning", + args: args{ + a: gotextdiff.Unified{ + Hunks: []*gotextdiff.Hunk{ + { + FromLine: 1, + ToLine: 3, + Lines: []gotextdiff.Line{ + {Kind: gotextdiff.Equal, Content: "equal1\n"}, + {Kind: gotextdiff.Insert, Content: 
"insert1\n"}, + {Kind: gotextdiff.Equal, Content: "equal2\n"}, + {Kind: gotextdiff.Delete, Content: "delete1\n"}, + }, + }, + { + FromLine: 4, + ToLine: 6, + Lines: []gotextdiff.Line{ + {Kind: gotextdiff.Equal, Content: "equal3\n"}, + {Kind: gotextdiff.Insert, Content: "insert2\n"}, + {Kind: gotextdiff.Equal, Content: "equal4\n"}, + {Kind: gotextdiff.Delete, Content: "delete2\n"}, + }, + }, + }, + }, + b: gotextdiff.Unified{ + Hunks: []*gotextdiff.Hunk{ + { + FromLine: 4, + ToLine: 6, + Lines: []gotextdiff.Line{ + {Kind: gotextdiff.Equal, Content: "equal3\n"}, + {Kind: gotextdiff.Insert, Content: "insert2\n"}, + {Kind: gotextdiff.Equal, Content: "equal4\n"}, + {Kind: gotextdiff.Delete, Content: "delete2\n"}, + }, + }, + }, + }, + }, + want: gotextdiff.Unified{ + Hunks: []*gotextdiff.Hunk{ + { + FromLine: 1, + ToLine: 3, + Lines: []gotextdiff.Line{ + {Kind: gotextdiff.Equal, Content: "equal1\n"}, + {Kind: gotextdiff.Insert, Content: "insert1\n"}, + {Kind: gotextdiff.Equal, Content: "equal2\n"}, + {Kind: gotextdiff.Delete, Content: "delete1\n"}, + }, + }, + }, + }, + }, + { + name: "Add hunk at the end", + args: args{ + a: gotextdiff.Unified{ + Hunks: []*gotextdiff.Hunk{ + { + FromLine: 1, + ToLine: 3, + Lines: []gotextdiff.Line{ + {Kind: gotextdiff.Equal, Content: "equal1"}, + {Kind: gotextdiff.Insert, Content: "insert1"}, + {Kind: gotextdiff.Equal, Content: "equal2"}, + {Kind: gotextdiff.Delete, Content: "delete1"}, + }, + }, + { + FromLine: 4, + ToLine: 6, + Lines: []gotextdiff.Line{ + {Kind: gotextdiff.Equal, Content: "equal3"}, + {Kind: gotextdiff.Insert, Content: "insert2"}, + {Kind: gotextdiff.Equal, Content: "equal4"}, + {Kind: gotextdiff.Delete, Content: "delete2"}, + }, + }, + }, + }, + b: gotextdiff.Unified{ + Hunks: []*gotextdiff.Hunk{ + { + FromLine: 1, + ToLine: 3, + Lines: []gotextdiff.Line{ + {Kind: gotextdiff.Equal, Content: "equal1"}, + {Kind: gotextdiff.Insert, Content: "insert1"}, + {Kind: gotextdiff.Equal, Content: "equal2"}, + {Kind: 
gotextdiff.Delete, Content: "delete1"}, + }, + }, + }, + }, + }, + want: gotextdiff.Unified{ + Hunks: []*gotextdiff.Hunk{ + { + FromLine: 4, + ToLine: 6, + Lines: []gotextdiff.Line{ + {Kind: gotextdiff.Equal, Content: "equal3"}, + {Kind: gotextdiff.Insert, Content: "insert2"}, + {Kind: gotextdiff.Equal, Content: "equal4"}, + {Kind: gotextdiff.Delete, Content: "delete2"}, + }, + }, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := subtract(tt.args.a, tt.args.b); !reflect.DeepEqual(got, tt.want) { + t.Errorf("subtract() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/modified/bar.txt b/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/modified/bar.txt new file mode 100644 index 0000000..095b1d1 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/modified/bar.txt @@ -0,0 +1 @@ +unmodified \ No newline at end of file diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/modified/foo.txt b/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/modified/foo.txt new file mode 100644 index 0000000..8df3dab --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/modified/foo.txt @@ -0,0 +1 @@ +world \ No newline at end of file diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/modified/new.txt b/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/modified/new.txt new file mode 100644 index 0000000..9edf80d --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/modified/new.txt @@ -0,0 +1 @@ +new file \ No newline at end of file diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/modified/pkg/main.go b/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/modified/pkg/main.go new file mode 100644 index 0000000..fa70251 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/modified/pkg/main.go @@ -0,0 +1,3 @@ +package main + +func main() 
{} diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/origin/bar.txt b/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/origin/bar.txt new file mode 100644 index 0000000..095b1d1 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/origin/bar.txt @@ -0,0 +1 @@ +unmodified \ No newline at end of file diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/origin/foo.txt b/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/origin/foo.txt new file mode 100644 index 0000000..8aec4e4 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/origin/foo.txt @@ -0,0 +1 @@ +hello \ No newline at end of file diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/origin/pkg/main.go b/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/origin/pkg/main.go new file mode 100644 index 0000000..0772a93 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/diff/testdata/origin/pkg/main.go @@ -0,0 +1 @@ +package main diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/repo/repo.go b/ignite/internal/tools/gen-mig-diffs/pkg/repo/repo.go new file mode 100644 index 0000000..32d4c17 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/repo/repo.go @@ -0,0 +1,452 @@ +package repo + +import ( + "context" + "fmt" + "io" + "os" + "path/filepath" + "sort" + "time" + + "github.com/Masterminds/semver/v3" + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/exec" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/errors" + + "github.com/ignite/cli/ignite/internal/tools/gen-mig-diffs/pkg/url" +) + +const ( + defaultBinaryPath = "dist/ignite" +) + +var DefaultRepoURL = url.URL{ + Protocol: "https", + Host: "github.com", + Path: "ignite/cli", +} + +type ( + // Generator is used to generate migration diffs. 
+ Generator struct { + From, To *semver.Version + source string + binPath string + repo *git.Repository + session *cliui.Session + cleanup bool + } + + // options represents configuration for the generator. + options struct { + source string + output string + stdOut io.Writer + repoURL url.URL + binPath string + } + // Options configures the generator. + Options func(*options) +) + +// newOptions returns a options with default options. +func newOptions() (options, error) { + var ( + tmpDir = os.TempDir() + binPath = filepath.Join(tmpDir, "bin") + output = filepath.Join(tmpDir, "migration-source") + ) + if err := os.RemoveAll(binPath); err != nil { + return options{}, errors.Wrap(err, "failed to clean the output directory") + } + if err := os.RemoveAll(output); err != nil { + return options{}, errors.Wrap(err, "failed to clean the output directory") + } + return options{ + source: "", + binPath: filepath.Join(tmpDir, "bin"), + output: filepath.Join(tmpDir, "migration-source"), + repoURL: DefaultRepoURL, + stdOut: os.Stdout, + }, nil +} + +// WithSource set the repo source Options. +func WithSource(source string) Options { + return func(o *options) { + o.source = source + } +} + +// WithRepoURL set the repo URL Options. +func WithRepoURL(repoURL url.URL) Options { + return func(o *options) { + o.repoURL = repoURL + } +} + +// WithRepoOutput set the repo output Options. +func WithRepoOutput(output string) Options { + return func(o *options) { + o.output = output + } +} + +// WithStdOutput set the std output Options. +func WithStdOutput(stdOut io.Writer) Options { + return func(o *options) { + o.stdOut = stdOut + } +} + +// WithBinPath set the binary path to build the source. +func WithBinPath(binPath string) Options { + return func(o *options) { + o.binPath = binPath + } +} + +// validate options. 
+func (o options) validate() error { + if o.source != "" && (o.repoURL != DefaultRepoURL) { + return errors.New("cannot set source and repo URL at the same time") + } + return nil +} + +// New creates a new generator for migration diffs between from and to versions of ignite cli +// If source is empty, then it clones the ignite cli repository to a temporary directory and uses it as the source. +func New(ctx context.Context, from, to *semver.Version, session *cliui.Session, options ...Options) (*Generator, error) { + opts, err := newOptions() + if err != nil { + return nil, err + } + + for _, apply := range options { + apply(&opts) + } + if err := opts.validate(); err != nil { + return nil, err + } + + var ( + source = opts.source + repo *git.Repository + ) + if source != "" { + repo, err = verifyRepoSource(source, opts.repoURL) + if err != nil { + return nil, errors.Wrap(err, "failed to open ignite repository") + } + + session.StopSpinner() + session.EventBus().SendInfo(fmt.Sprintf("Using ignite repository at: %s", source)) + } else { + session.StartSpinner("Cloning ignite repository...") + + source = opts.output + repo, err = git.PlainCloneContext(ctx, source, false, &git.CloneOptions{ + URL: opts.repoURL.String(), + Depth: 1, + Progress: opts.stdOut, + }) + if errors.Is(err, git.ErrRepositoryAlreadyExists) { + repo, err = verifyRepoSource(source, opts.repoURL) + } + if err != nil { + return nil, errors.Wrap(err, "failed to clone ignite repository") + } + + session.StopSpinner() + session.EventBus().SendInfo(fmt.Sprintf("Cloned ignite repository to: %s", source)) + } + + versions, err := getRepoVersionTags(repo) + if err != nil { + return nil, err + } + + from, to, err = validateVersionRange(from, to, versions) + if err != nil { + return nil, err + } + + binPath, err := filepath.Abs(opts.binPath) + if err != nil { + return nil, err + } + + return &Generator{ + From: from, + To: to, + source: source, + repo: repo, + session: session, + binPath: binPath, + }, nil +} 
+ +// ReleaseDescription generate the release description based in the tag data, if not exist, from the commit data. +func (g *Generator) ReleaseDescription() (string, error) { + tag, err := g.repo.Tag(g.To.Original()) + if err != nil { + return "", errors.Wrapf(err, "failed to get tag %s", g.To.Original()) + } + + var ( + author string + date time.Time + msg string + ) + tagObj, err := g.repo.TagObject(tag.Hash()) + switch { + case errors.Is(err, plumbing.ErrObjectNotFound): + commit, err := g.repo.CommitObject(tag.Hash()) + if err != nil { + return "", errors.Wrapf(err, "failed to get commit %s", g.To.Original()) + } + author = commit.Author.String() + date = commit.Author.When + msg = commit.Message + case err != nil: + return "", errors.Wrapf(err, "failed to get tag object %s", tag.Hash().String()) + default: + author = tagObj.Tagger.String() + date = tagObj.Tagger.When + msg = tagObj.Message + } + + description := fmt.Sprintf(`Tag: %[1]v +Commit: %[2]v +Author: %[3]v +Date: %[4]v + +%[5]v`, + g.To.Original(), + tag.Hash().String(), + author, + msg, + date.Format("Jan 2 15:04:05 2006"), + ) + return description, nil +} + +// Cleanup cleanup all temporary directories. +func (g *Generator) Cleanup() { + if !g.cleanup { + return + } + if err := os.RemoveAll(g.source); err != nil { + g.session.EventBus().SendError(err) + return + } + g.session.EventBus().SendInfo(fmt.Sprintf("Removed temporary directory: %s", g.source)) +} + +func (g *Generator) GenerateBinaries(ctx context.Context) (string, string, error) { + fromBinPath, err := g.buildIgniteCli(ctx, g.From) + if err != nil { + return "", "", errors.Wrapf(err, "failed to run scaffolds for 'FROM' version %s", g.From) + } + toBinPath, err := g.buildIgniteCli(ctx, g.To) + if err != nil { + return "", "", errors.Wrapf(err, "failed to run scaffolds for 'TO' version %s", g.To) + } + return fromBinPath, toBinPath, nil +} + +// buildIgniteCli build the ignite CLI from version. 
+func (g *Generator) buildIgniteCli(ctx context.Context, ver *semver.Version) (string, error) { + g.session.StartSpinner(fmt.Sprintf("Building binary for version v%s...", ver)) + + if err := g.checkoutToTag(ver.Original()); err != nil { + return "", err + } + + err := exec.Exec(ctx, []string{"make", "build"}, exec.StepOption(step.Workdir(g.source))) + if err != nil { + return "", errors.Wrap(err, "failed to build ignite cli using make build") + } + + // Copy the built binary to the binary path. + genBinaryPath := filepath.Join(g.source, defaultBinaryPath) + binPath := filepath.Join(g.binPath, ver.Original(), "ignite") + if err := copyFile(genBinaryPath, binPath); err != nil { + return "", err + } + + g.session.StopSpinner() + g.session.EventBus().SendInfo(fmt.Sprintf("Built ignite cli for %s at %s", ver.Original(), binPath)) + + return binPath, nil +} + +// checkoutToTag checkout the repository from a specific git tag. +func (g *Generator) checkoutToTag(tag string) error { + wt, err := g.repo.Worktree() + if err != nil { + return err + } + // Reset and clean the git directory before the checkout to avoid conflicts. + if err := wt.Reset(&git.ResetOptions{Mode: git.HardReset}); err != nil { + return errors.Wrapf(err, "failed to reset %s", g.source) + } + if err := wt.Clean(&git.CleanOptions{Dir: true}); err != nil { + return errors.Wrapf(err, "failed to reset %s", g.source) + } + if err = wt.Checkout(&git.CheckoutOptions{Branch: plumbing.NewTagReferenceName(tag)}); err != nil { + return errors.Wrapf(err, "failed to checkout tag %s", tag) + } + return nil +} + +// getRepoVersionTags returns a sorted collection of semver tags from the ignite cli repository. 
+func getRepoVersionTags(repo *git.Repository) (semver.Collection, error) { + tags, err := repo.Tags() + if err != nil { + return nil, errors.Wrap(err, "failed to get tags") + } + + // Iterate over all tags in the repository and pick valid semver tags + var versions semver.Collection + err = tags.ForEach(func(ref *plumbing.Reference) error { + name := ref.Name() + if name.IsTag() { + ver, err := semver.NewVersion(name.Short()) + if err != nil { + // Do nothing as it's not a semver tag + return nil + } + versions = append(versions, ver) + } + return nil + }) + if err != nil { + return nil, errors.Wrap(err, "failed to iterate over tags") + } + + sort.Sort(versions) + + return versions, nil +} + +// validateVersionRange checks if the provided fromVer and toVer exist in the versions and if any of them is nil, then it picks default values. +func validateVersionRange(fromVer, toVer *semver.Version, versions semver.Collection) (*semver.Version, *semver.Version, error) { + // Unable to generate migration document if there are less than two releases! 
+ if versions.Len() < 2 { + return nil, nil, errors.New("At least two semver tags are required") + } + + versionMap := make(map[string]*semver.Version) + for _, ver := range versions { + versionMap[ver.String()] = ver + } + + // Picking default values for fromVer and toVer such that: + // If both fromVer and toVer are not provided, then generate migration document for second last and last semver major tags + // If only fromVer is not provided, then use the tag before toVer as fromVer + // If only toVer is not provided, then use the last tag as toVer + if toVer != nil { + if _, found := versionMap[toVer.String()]; !found { + return nil, nil, errors.Errorf("tag %s not found", toVer) + } + } else { + toVer = versions[versions.Len()-1] + } + + // Replace fromVer and toVer with equivalent semver tags from versions + if fromVer != nil { + if _, found := versionMap[fromVer.String()]; !found { + return nil, nil, errors.Errorf("tag %s not found", fromVer) + } + } else { + // Find the last major release version. + sort.Sort(sort.Reverse(versions)) + for _, ver := range versions { + if ver.Major() < toVer.Major() { + fromVer = ver + break + } + } + if fromVer == nil { + return nil, nil, errors.Errorf("can't find an older major release from %s", toVer.Original()) + } + } + + // Unable to generate migration document if fromVer is greater or equal to toVer + if fromVer.GreaterThan(toVer) || fromVer.Equal(toVer) { + return nil, nil, errors.Errorf("from version %s should be less than to version %s", fromVer, toVer) + } + + return fromVer, toVer, nil +} + +// copyFile copy a file to a destination directory. Creates the directory if not exist. 
+func copyFile(srcPath, dstPath string) error { + dstDir := filepath.Dir(dstPath) + if err := os.RemoveAll(dstDir); err != nil { + return err + } + if err := os.MkdirAll(dstDir, os.ModePerm); err != nil { + return err + } + + src, err := os.Open(srcPath) + if err != nil { + return errors.Wrap(err, "failed to open source file") + } + defer src.Close() + + dst, err := os.Create(dstPath) + if err != nil { + return errors.Wrap(err, "failed to create destination file") + } + defer dst.Close() + + _, err = io.Copy(dst, src) + if err != nil { + return errors.Wrap(err, "failed to copy data: %s") + } + // Sync to ensure data is flushed to disk. + err = dst.Sync() + if err != nil { + return errors.Wrap(err, "failed to sync destination file") + } + + // Set executable permissions on the destination file. + err = os.Chmod(dstPath, 0o755) + if err != nil { + return errors.Wrap(err, "failed to set executable permissions") + } + return err +} + +// verifyRepoSource checks if the repose source path is the same from the provider URL +// and returns the *git.Repository object. 
+func verifyRepoSource(source string, repoURL url.URL) (*git.Repository, error) { + repo, err := git.PlainOpen(source) + if err != nil { + return nil, errors.Wrap(err, "failed to open ignite repository") + } + remote, err := repo.Remote("origin") + if err != nil { + return nil, errors.Wrap(err, "failed to open ignite repository") + } + + for _, u := range remote.Config().URLs { + remoteURL, err := url.New(u) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse repo url %s", u) + } + + if err := repoURL.Compare(remoteURL); err != nil { + return nil, errors.Wrapf(err, "repository folder %s does not match the repo URL %s", repoURL, remoteURL) + } + } + return repo, nil +} diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/scaffold/commands.go b/ignite/internal/tools/gen-mig-diffs/pkg/scaffold/commands.go new file mode 100644 index 0000000..ec676f2 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/scaffold/commands.go @@ -0,0 +1,119 @@ +package scaffold + +import "github.com/ignite/cli/v29/ignite/pkg/errors" + +type ( + // Command represents a set of command and prerequisites scaffold command that are required to run before them. 
+ Command struct { + // Name is the unique identifier of the command + Name string + // Prerequisite is the name of command that need to be run before this command set + Prerequisite string + // Commands is the list of scaffold command that are going to be run + // The command will be prefixed with "ignite scaffold" and executed in order + Commands []string + } + + Commands []Command +) + +func (c Commands) Get(name string) (Command, error) { + for _, cmd := range c { + if cmd.Name == name { + return cmd, nil + } + } + return Command{}, errors.Errorf("command %s not exist", name) +} + +func (c Commands) Has(name string) bool { + for _, cmd := range c { + if cmd.Name == name { + return true + } + } + return false +} + +func (c Commands) Validate() error { + cmdMap := make(map[string]bool) + for i, command := range c { + if cmdMap[command.Name] { + return errors.Errorf("duplicate command name found: %s", command.Name) + } + cmdMap[command.Name] = true + if command.Name == "" { + return errors.Errorf("empty command name at index %d: %v", i, command) + } + if len(command.Commands) == 0 { + return errors.Errorf("empty command list at index %d: %v", i, command) + } + } + for _, command := range c { + if command.Prerequisite != "" && !cmdMap[command.Prerequisite] { + return errors.Errorf("command %s pre-requisete %s not found", command.Name, command.Prerequisite) + } + } + return nil +} + +var defaultCommands = Commands{ + Command{ + Name: "chain", + Commands: []string{"chain example --no-module"}, + }, + Command{ + Name: "module", + Prerequisite: "chain", + Commands: []string{"module example --ibc"}, + }, + Command{ + Name: "list", + Prerequisite: "module", + Commands: []string{ + "list list1 f1:string f2:strings f3:bool f4:int f5:ints f6:uint f7:uints f8:coin f9:coins --module example --yes", + }, + }, + Command{ + Name: "map", + Prerequisite: "module", + Commands: []string{ + "map map1 f1:string f2:strings f3:bool f4:int f5:ints f6:uint f7:uints f8:coin f9:coins 
--index i1:string --module example --yes", + }, + }, + Command{ + Name: "single", + Prerequisite: "module", + Commands: []string{ + "single single1 f1:string f2:strings f3:bool f4:int f5:ints f6:uint f7:uints f8:coin f9:coins --module example --yes", + }, + }, + Command{ + Name: "type", + Prerequisite: "module", + Commands: []string{ + "type type1 f1:string f2:strings f3:bool f4:int f5:ints f6:uint f7:uints f8:coin f9:coins --module example --yes", + }, + }, + Command{ + Name: "message", + Prerequisite: "module", + Commands: []string{ + "message message1 f1:string f2:strings f3:bool f4:int f5:ints f6:uint f7:uints f8:coin f9:coins --module example --yes", + }, + }, + Command{ + Name: "query", + Prerequisite: "module", + Commands: []string{ + "query query1 f1:string f2:strings f3:bool f4:int f5:ints f6:uint f7:uints --module example --yes", + }, + }, + Command{ + Name: "packet", + Prerequisite: "module", + Commands: []string{ + "packet packet1 f1:string f2:strings f3:bool f4:int f5:ints f6:uint f7:uints f8:coin f9:coins --ack f1:string,f2:strings,f3:bool,f4:int,f5:ints,f6:uint,f7:uints,f8:coin,f9:coins --module example --yes", + }, + }, +} diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/scaffold/scaffold.go b/ignite/internal/tools/gen-mig-diffs/pkg/scaffold/scaffold.go new file mode 100644 index 0000000..521c693 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/scaffold/scaffold.go @@ -0,0 +1,223 @@ +package scaffold + +import ( + "context" + "io" + "os" + "path/filepath" + "strings" + + "github.com/Masterminds/semver/v3" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/exec" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/randstr" + + "github.com/ignite/cli/ignite/internal/tools/gen-mig-diffs/pkg/cache" +) + +var v027 = semver.MustParse("v0.27.0") + +type ( + // Scaffold holder the Scaffold logic. 
+ Scaffold struct { + Output string + binary string + version *semver.Version + cache *cache.Cache + cachePath string + commandList Commands + stdout io.Writer + stderr io.Writer + stdin io.Reader + } + + // option represents configuration for the generator. + option struct { + cachePath string + output string + commands Commands + stdout io.Writer + stderr io.Writer + stdin io.Reader + } + // Option configures the generator. + Option func(*option) +) + +// newOptions returns a option with default option. +func newOptions() option { + tmpDir := filepath.Join(os.TempDir(), randstr.Runes(4)) + return option{ + cachePath: filepath.Join(tmpDir, "migration-cache"), + output: filepath.Join(tmpDir, "migration"), + commands: defaultCommands, + } +} + +// WithOutput set the ignite scaffold Output. +func WithOutput(output string) Option { + return func(o *option) { + o.output = output + } +} + +// WithCachePath set the ignite scaffold cache path. +func WithCachePath(cachePath string) Option { + return func(o *option) { + o.cachePath = cachePath + } +} + +func WithStdout(w io.Writer) Option { + return func(o *option) { + o.stdout = w + } +} + +func WithStderr(w io.Writer) Option { + return func(o *option) { + o.stderr = w + } +} + +func WithStdin(r io.Reader) Option { + return func(o *option) { + o.stdin = r + } +} + +// New returns a new Scaffold. 
+func New(binary string, ver *semver.Version, options ...Option) (*Scaffold, error) { + opts := newOptions() + for _, apply := range options { + apply(&opts) + } + + output, err := filepath.Abs(opts.output) + if err != nil { + return nil, err + } + + c, err := cache.New(opts.cachePath) + if err != nil { + return nil, err + } + + if err := opts.commands.Validate(); err != nil { + return nil, err + } + + return &Scaffold{ + stdout: opts.stdout, + stderr: opts.stderr, + stdin: opts.stdin, + binary: binary, + version: ver, + cache: c, + cachePath: opts.cachePath, + Output: filepath.Join(output, ver.Original()), + commandList: opts.commands, + }, nil +} + +// Run execute the scaffold command based in the binary semantic version. +func (s *Scaffold) Run(ctx context.Context) error { + if err := os.RemoveAll(s.Output); err != nil { + return errors.Wrapf(err, "failed to remove the scaffold output directory: %s", s.Output) + } + + for _, command := range s.commandList { + if err := s.runCommand(ctx, command.Name, command); err != nil { + return err + } + if err := applyPostScaffoldExceptions(s.version, command.Name, s.Output); err != nil { + return err + } + } + return nil +} + +// Cleanup cleanup all temporary directories. 
+func (s *Scaffold) Cleanup() error { + if err := os.RemoveAll(s.cachePath); err != nil { + return err + } + return os.RemoveAll(s.Output) +} + +func (s *Scaffold) runCommand(ctx context.Context, name string, command Command) error { + path := filepath.Join(s.Output, name) + if command.Prerequisite != "" { + reqCmd, err := s.commandList.Get(command.Prerequisite) + if err != nil { + return errors.Wrapf(err, "pre-requisite command %s from %s not found", command.Prerequisite, name) + } + + if s.cache.Has(command.Prerequisite) { + if err := s.cache.Get(command.Prerequisite, path); err != nil { + return errors.Wrapf(err, "failed to get cache key %s", command.Prerequisite) + } + } else { + if err := s.runCommand(ctx, name, reqCmd); err != nil { + return err + } + } + } + + for _, cmd := range command.Commands { + if err := s.executeScaffold(ctx, cmd, path); err != nil { + return err + } + } + return s.cache.Save(command.Name, path) +} + +func (s *Scaffold) executeScaffold(ctx context.Context, cmd, path string) error { + args := append([]string{s.binary, "scaffold"}, strings.Fields(cmd)...) + args = append(args, "--path", path) + args = applyPreExecuteExceptions(s.version, args) + + if err := exec.Exec( + ctx, + args, + exec.StepOption(step.Stdout(s.stdout)), + exec.StepOption(step.Stderr(s.stderr)), + exec.StepOption(step.Stdin(s.stdin)), + ); err != nil { + return errors.Wrapf(err, "failed to execute ignite scaffold command: %s", cmd) + } + return nil +} + +// applyPreExecuteExceptions this function we can manipulate command arguments before executing it in +// order to compensate for differences in versions. +func applyPreExecuteExceptions(ver *semver.Version, args []string) []string { + // In versions <0.27.0, "scaffold chain" command always creates a new directory with the + // name of chain at the given '--path', so we need to append "example" to the path if the + // command is not "chain". 
+ if ver.LessThan(v027) && args[2] != "chain" { + args[len(args)-1] = filepath.Join(args[len(args)-1], "example") + } + return args +} + +// applyPostScaffoldExceptions this function we can manipulate the Output of scaffold command after +// they have been executed in order to compensate for differences in versions. +func applyPostScaffoldExceptions(ver *semver.Version, name string, output string) error { + // In versions <0.27.0, "scaffold chain" command always creates a new directory with the name of + // chain at the given '--path', so we need to move the directory to the parent directory. + if ver.LessThan(v027) { + if err := os.Rename(filepath.Join(output, name, "example"), filepath.Join(output, "example_tmp")); err != nil { + return errors.Wrapf(err, "failed to move %s directory to tmp directory", name) + } + if err := os.RemoveAll(filepath.Join(output, name)); err != nil { + return errors.Wrapf(err, "failed to remove %s directory", name) + } + if err := os.Rename(filepath.Join(output, "example_tmp"), filepath.Join(output, name)); err != nil { + return errors.Wrapf(err, "failed to move tmp directory to %s directory", name) + } + } + return nil +} diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/url/url.go b/ignite/internal/tools/gen-mig-diffs/pkg/url/url.go new file mode 100644 index 0000000..6504e77 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/url/url.go @@ -0,0 +1,110 @@ +package url + +import ( + "fmt" + "net/url" + "regexp" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// URL represents a Git URL in any supported protocol. +type URL struct { + // Protocol is the protocol of the endpoint (e.g. ssh, https). + Protocol string + // Host is the host. + Host string + // Path is the repository path. 
+ Path string +} + +var ( + scpLikeUrlRegExp = regexp.MustCompile(`^[^@]+@[^:]+:.+`) + scpSubMatchRegExp = regexp.MustCompile(`^(?:(?P<user>[^@]+)@)?(?P<host>[^:\s]+):(?:(?P<port>[0-9]{1,5}):)?(?P<path>[^\\].*)$`) +) + +// New creates a new URL object. +func New(endpoint string) (URL, error) { + if scpLikeUrlRegExp.MatchString(endpoint) { + return parseSCPLike(endpoint), nil + } + + u, err := url.Parse(endpoint) + if err == nil && u.Scheme == "ssh" { + return parseSCPLike(endpoint), nil + } + + return parseURL(endpoint) +} + +func (u URL) Compare(cp URL) error { + switch { + case u.Host != cp.Host: + return errors.Errorf("host mismatch for %s != %s", u.Host, cp.Host) + case u.Path != cp.Path: + return errors.Errorf("path mismatch for %s != %s", u.Path, cp.Path) + default: + return nil + } +} + +func (u URL) String() string { + if u.Protocol == "ssh" { + return fmt.Sprintf("git@%s:%s.git", u.Host, u.Path) + } + return fmt.Sprintf("%s://%s/%s.git", u.Protocol, u.Host, u.Path) +} + +// parseSCPLike returns an URL object from SCP git URL. +func parseSCPLike(endpoint string) URL { + _, host, _, path := findScpLikeComponents(endpoint) + return URL{ + Protocol: "ssh", + Host: host, + Path: strings.TrimSuffix(path, ".git"), + } +} + +// parseURL returns an URL object from an endpoint. +func parseURL(endpoint string) (URL, error) { + u, err := url.Parse(endpoint) + if err != nil { + return URL{}, errors.Errorf("failed to parse URL: %v", err) + } + + if !u.IsAbs() { + return URL{}, errors.Errorf("URL must be absolute with scheme and host: %s", endpoint) + } + + return URL{ + Protocol: u.Scheme, + Host: u.Hostname(), + Path: getPath(u), + }, nil +} + +// findScpLikeComponents returns the user, host, port and path of the given SCP-like URL. 
+func findScpLikeComponents(url string) (user, host, port, path string) { + m := scpSubMatchRegExp.FindStringSubmatch(url) + user = m[1] + host = m[2] + port = m[3] + path = m[4] + return m[1], m[2], m[3], m[4] +} + +// getPath returns the path from an *url.URL. +func getPath(u *url.URL) string { + res := u.Path + if u.RawQuery != "" { + res += "?" + u.RawQuery + } + if u.Fragment != "" { + res += "#" + u.Fragment + } + + res = strings.Trim(res, "/") + res = strings.TrimSuffix(res, ".git") + return strings.Split(res, ":")[0] +} diff --git a/ignite/internal/tools/gen-mig-diffs/pkg/url/url_test.go b/ignite/internal/tools/gen-mig-diffs/pkg/url/url_test.go new file mode 100644 index 0000000..a6df673 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/pkg/url/url_test.go @@ -0,0 +1,230 @@ +package url + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestFindScpLikeComponents(t *testing.T) { + tests := []struct { + name string + url string + want URL + wantString string + err error + }{ + { + name: "https protocol", + url: "https://github.com/james/bond", + wantString: "https://github.com/james/bond.git", + want: URL{ + Protocol: "https", + Host: "github.com", + Path: "james/bond", + }, + }, + { + name: "https protocol with .git", + url: "https://github.com/james/bond.git", + wantString: "https://github.com/james/bond.git", + want: URL{ + Protocol: "https", + Host: "github.com", + Path: "james/bond", + }, + }, + { + name: "http protocol", + url: "http://github.com/james/bond", + wantString: "http://github.com/james/bond.git", + want: URL{ + Protocol: "http", + Host: "github.com", + Path: "james/bond", + }, + }, + { + name: "http protocol with port", + url: "http://github.com/james/bond:8080", + wantString: "http://github.com/james/bond.git", + want: URL{ + Protocol: "http", + Host: "github.com", + Path: "james/bond", + }, + }, + { + name: "https with numeric path", + url: "https://github.com/007/bond", + wantString: 
"https://github.com/007/bond.git", + want: URL{ + Protocol: "https", + Host: "github.com", + Path: "007/bond", + }, + }, + { + name: "https with single repo path", + url: "https://github.com/bond", + wantString: "https://github.com/bond.git", + want: URL{ + Protocol: "https", + Host: "github.com", + Path: "bond", + }, + }, + { + name: "https repo path ending with .git and starting with _", + url: "https://github.com/_007.git", + wantString: "https://github.com/_007.git", + want: URL{ + Protocol: "https", + Host: "github.com", + Path: "_007", + }, + }, + { + name: "https repo path ending with .git and starting with _", + url: "https://github.com/_james.git", + wantString: "https://github.com/_james.git", + want: URL{ + Protocol: "https", + Host: "github.com", + Path: "_james", + }, + }, + { + name: "https repo path ending with .git and starting with _", + url: "https://github.com/_james/bond.git", + wantString: "https://github.com/_james/bond.git", + want: URL{ + Protocol: "https", + Host: "github.com", + Path: "_james/bond", + }, + }, + { + name: "most-extended case", + url: "git@github.com:james/bond", + wantString: "git@github.com:james/bond.git", + want: URL{ + Protocol: "ssh", + Host: "github.com", + Path: "james/bond", + }, + }, + { + name: "most-extended case with port", + url: "git@github.com:22:james/bond", + wantString: "git@github.com:james/bond.git", + want: URL{ + Protocol: "ssh", + Host: "github.com", + Path: "james/bond", + }, + }, + { + name: "most-extended case with numeric path", + url: "git@github.com:007/bond", + wantString: "git@github.com:007/bond.git", + want: URL{ + Protocol: "ssh", + Host: "github.com", + Path: "007/bond", + }, + }, + { + name: "most-extended case with port and numeric path", + url: "git@github.com:22:007/bond", + wantString: "git@github.com:007/bond.git", + want: URL{ + Protocol: "ssh", + Host: "github.com", + Path: "007/bond", + }, + }, + { + name: "single repo path", + url: "git@github.com:bond", + wantString: 
"git@github.com:bond.git", + want: URL{ + Protocol: "ssh", + Host: "github.com", + Path: "bond", + }, + }, + { + name: "single repo path with port", + url: "git@github.com:22:bond", + wantString: "git@github.com:bond.git", + want: URL{ + Protocol: "ssh", + Host: "github.com", + Path: "bond", + }, + }, + { + name: "single repo path with port and numeric path", + url: "git@github.com:22:007", + wantString: "git@github.com:007.git", + want: URL{ + Protocol: "ssh", + Host: "github.com", + Path: "007", + }, + }, + { + name: "repo path ending with .git and starting with _", + url: "git@github.com:22:_007.git", + wantString: "git@github.com:_007.git", + want: URL{ + Protocol: "ssh", + Host: "github.com", + Path: "_007", + }, + }, + { + name: "repo path ending with .git, number and starting with _", + url: "git@github.com:_007.git", + wantString: "git@github.com:_007.git", + want: URL{ + Protocol: "ssh", + Host: "github.com", + Path: "_007", + }, + }, + { + name: "repo path ending with .git and starting with _", + url: "git@github.com:_james.git", + wantString: "git@github.com:_james.git", + want: URL{ + Protocol: "ssh", + Host: "github.com", + Path: "_james", + }, + }, + { + name: "repo path with .git and starting with _", + url: "git@github.com:_james/bond.git", + wantString: "git@github.com:_james/bond.git", + want: URL{ + Protocol: "ssh", + Host: "github.com", + Path: "_james/bond", + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := New(tt.url) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + } + require.NoError(t, err) + require.EqualValues(t, tt.want, got) + require.EqualValues(t, tt.wantString, got.String()) + }) + } +} diff --git a/ignite/internal/tools/gen-mig-diffs/readme.md b/ignite/internal/tools/gen-mig-diffs/readme.md new file mode 100644 index 0000000..8787c45 --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/readme.md @@ -0,0 +1,72 @@ +<div align="center"> + <h1> 
Generate Ignite Migration Diffs </h1> +</div> + +This repository hosts the Chain Scaffold Migration Tool for Ignite CLI, designed to help developers migrate their +projects from older versions of Ignite to the latest release. +This tool addresses compatibility and feature alignment as a detailed +in [Issue #3699](https://github.com/ignite/cli/issues/3699) and implemented +in [PR #3718](https://github.com/ignite/cli/pull/3718). + +The migration tool aims to streamline the update process for projects built with Ignite CLI, ensuring they leverage the +latest improvements and SDK stack. + +#### Features + +- Automated migration of chain scaffold files. +- Detailed comparison and generation of migration differences. +- Support for multiple versions of chain scaffolds. + +## Installation + +It is located in the `ignite/internal/tools/gen-mig-diffs` +directory and made it a standalone project. + +To set up this tool in your development environment: + +1. Clone the Ignite CLI repository: + +```shell +git clone https://github.com/ignite/cli.git && \ +cd cli/ignite/internal/tools/gen-mig-diffs +``` + +2. Install and show usage: + +```shell +go install . && gen-mig-diffs -h +``` + +3. Run migration diff tool: + +```shell +gen-mig-diffs --output temp/migs --from v0.27.2 --to v28.3.0 +``` + +4. In case of the issue `unable to authenticate, attempted methods [none publickey], no supported methods remain`. + Make sure you have SSH keys set up for GitHub. 
If yes, try to add the SSH key to your SSH agent: + +```shell +chmod 600 ~/.ssh/id_rsa +ssh-add ~/.ssh/id_rsa +``` + +## Usage + +```bash +This tool is used to generate migration diff files for each of ignites scaffold commands + +Usage: + gen-mig-diffs [flags] + +Flags: + -f, --from string Version of Ignite or path to Ignite source code to generate the diff from + -h, --help help for gen-mig-diffs + -o, --output string Output directory to save the migration document (default "docs/docs/06-migration") + --repo-output string Output path to clone the Ignite repository + -s, --repo-source string Path to Ignite source code repository. Set the source automatically set the cleanup to false + --repo-url string Git URL for the Ignite repository (default "https://github.com/ignite/cli.git") + --scaffold-cache string Path to cache directory + --scaffold-output string Output path to clone the Ignite repository + -t, --to string Version of Ignite or path to Ignite source code to generate the diff to +``` \ No newline at end of file diff --git a/ignite/internal/tools/gen-mig-diffs/templates/doc/doc.go b/ignite/internal/tools/gen-mig-diffs/templates/doc/doc.go new file mode 100644 index 0000000..5ec105e --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/templates/doc/doc.go @@ -0,0 +1,68 @@ +package doc + +import ( + "embed" + "fmt" + "io/fs" + "time" + + "github.com/Masterminds/semver/v3" + "github.com/gobuffalo/genny/v2" + "github.com/gobuffalo/plush/v4" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/templates/field/plushhelpers" +) + +//go:embed files/* +var files embed.FS + +// Options represents the options to scaffold a migration document. 
+type Options struct { + Path string + FromVersion *semver.Version + ToVersion *semver.Version + Diffs string + Description string +} + +func (o Options) position() string { + return fmt.Sprintf("%02d%02d%02d", o.ToVersion.Major(), o.ToVersion.Minor(), o.ToVersion.Patch()) +} + +func (o Options) shortDescription() string { + return fmt.Sprintf("Release %s", o.ToVersion.Original()) +} + +func (o Options) date() string { + return time.Now().Format("Jan 2 15:04:05 2006") +} + +// NewGenerator returns the generator to scaffold a migration doc. +func NewGenerator(opts Options) (*genny.Generator, error) { + subFs, err := fs.Sub(files, "files") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + + g := genny.New() + if err := g.OnlyFS(subFs, nil, nil); err != nil { + return g, err + } + + ctx := plush.NewContext() + ctx.Set("Position", opts.position()) + ctx.Set("FromVersion", opts.FromVersion.Original()) + ctx.Set("ToVersion", opts.ToVersion.Original()) + ctx.Set("Diffs", opts.Diffs) + ctx.Set("Description", opts.Description) + ctx.Set("ShortDescription", opts.shortDescription()) + ctx.Set("Date", opts.date()) + + plushhelpers.ExtendPlushContext(ctx) + g.Transformer(xgenny.Transformer(ctx)) + g.Transformer(genny.Replace("{{Version}}", opts.ToVersion.Original())) + + return g, nil +} diff --git a/ignite/internal/tools/gen-mig-diffs/templates/doc/files/{{Version}}.md.plush b/ignite/internal/tools/gen-mig-diffs/templates/doc/files/{{Version}}.md.plush new file mode 100644 index 0000000..4bcd83c --- /dev/null +++ b/ignite/internal/tools/gen-mig-diffs/templates/doc/files/{{Version}}.md.plush @@ -0,0 +1,17 @@ +--- +sidebar_position: <%= Position %> +title: <%= ToVersion %> +description: <%= ShortDescription %> +--- + +## **<%= ToVersion %>** + +<%= Description %> +### **Chain migration diffs** + +<%= Diffs %> +### **Details** + +- **The CLI tools automatically generated this file**; +- Generated from <%= FromVersion %> to <%= ToVersion %>; +- 
Generated <%= Date %>; diff --git a/ignite/pkg/archive/tar_gz.go b/ignite/pkg/archive/tar_gz.go new file mode 100644 index 0000000..2a20fc9 --- /dev/null +++ b/ignite/pkg/archive/tar_gz.go @@ -0,0 +1,122 @@ +package archive + +import ( + "archive/tar" + "compress/gzip" + "io" + "os" + "path/filepath" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// CreateArchive creates a tar.gz archive from a list of files. +func CreateArchive(dir string, buf io.Writer) error { + // Create new Writers for gzip and tar + // These writers are chained. Writing to the tar writer will + // write to the gzip writer which in turn will write to + // the "buf" writer + gw := gzip.NewWriter(buf) + defer gw.Close() + tw := tar.NewWriter(gw) + defer tw.Close() + + return filepath.WalkDir(dir, func(path string, _ os.DirEntry, _ error) error { + return addToArchive(tw, path) + }) +} + +func addToArchive(tw *tar.Writer, filename string) error { + // Open the file which will be written into the archive + file, err := os.Open(filename) + if err != nil { + return err + } + defer file.Close() + + // Get FileInfo about our file providing file size, mode, etc. 
+ info, err := file.Stat() + if err != nil { + return err + } + + // Create a tar Header from the FileInfo data + if info.IsDir() { + hdr, err := tar.FileInfoHeader(info, info.Name()) + if err != nil { + return err + } + hdr.Name = filename + if err := tw.WriteHeader(hdr); err != nil { + return err + } + + return nil + } + + header, err := tar.FileInfoHeader(info, info.Name()) + if err != nil { + return err + } + + // Use full path as name (FileInfoHeader only takes the basename) + // If we don't do this the directory structure would + // not be preserved + // https://golang.org/src/archive/tar/common.go?#L626 + header.Name = filename + + // Write file header to the tar archive + err = tw.WriteHeader(header) + if err != nil { + return err + } + + _, err = io.Copy(tw, file) + if err != nil { + return err + } + + return nil +} + +// ExtractArchive extracts a tar.gz archive to the specified directory. +func ExtractArchive(outDir string, gzipStream io.Reader) error { + uncompressedStream, err := gzip.NewReader(gzipStream) + if err != nil { + return err + } + + tarReader := tar.NewReader(uncompressedStream) + + for { + header, err := tarReader.Next() + if err == io.EOF { + break + } else if err != nil { + return err + } + + targetPath := filepath.Join(outDir, header.Name) //nolint:gosec // We trust the tar file + + switch header.Typeflag { + case tar.TypeDir: + if err := os.MkdirAll(targetPath, 0o755); err != nil { + return err + } + case tar.TypeReg: + outFile, err := os.Create(targetPath) + if err != nil { + return err + } + if _, err := io.Copy(outFile, tarReader); err != nil { //nolint:gosec // We trust the tar file + return err + } + outFile.Close() + + default: + return errors.Errorf("unknown type: %s in %s", string(header.Typeflag), header.Name) + } + } + + return nil +} diff --git a/ignite/pkg/archive/tar_gz_test.go b/ignite/pkg/archive/tar_gz_test.go new file mode 100644 index 0000000..44f0646 --- /dev/null +++ b/ignite/pkg/archive/tar_gz_test.go @@ -0,0 +1,48 
@@ +package archive + +import ( + "bytes" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestCreateArchiveAndExtractArchive(t *testing.T) { + root := t.TempDir() + oldWD, err := os.Getwd() + require.NoError(t, err) + require.NoError(t, os.Chdir(root)) + t.Cleanup(func() { + require.NoError(t, os.Chdir(oldWD)) + }) + + src := "src" + require.NoError(t, os.MkdirAll(filepath.Join(src, "nested"), 0o755)) + require.NoError(t, os.WriteFile(filepath.Join(src, "a.txt"), []byte("alpha"), 0o600)) + require.NoError(t, os.WriteFile(filepath.Join(src, "nested", "b.txt"), []byte("beta"), 0o600)) + + var buf bytes.Buffer + require.NoError(t, CreateArchive(src, &buf)) + + dst := filepath.Join(root, "out") + require.NoError(t, os.MkdirAll(dst, 0o755)) + require.NoError(t, ExtractArchive(dst, bytes.NewReader(buf.Bytes()))) + + gotA, err := os.ReadFile(filepath.Join(dst, "src", "a.txt")) + require.NoError(t, err) + require.Equal(t, "alpha", string(gotA)) + + gotB, err := os.ReadFile(filepath.Join(dst, "src", "nested", "b.txt")) + require.NoError(t, err) + require.Equal(t, "beta", string(gotB)) +} + +func TestAddToArchiveReturnsErrorForMissingFile(t *testing.T) { + var buf bytes.Buffer + require.NoError(t, CreateArchive(t.TempDir(), &buf)) + + err := addToArchive(nil, filepath.Join(t.TempDir(), "does-not-exist")) + require.Error(t, err) +} diff --git a/ignite/pkg/availableport/availableport.go b/ignite/pkg/availableport/availableport.go new file mode 100644 index 0000000..ac94c2c --- /dev/null +++ b/ignite/pkg/availableport/availableport.go @@ -0,0 +1,85 @@ +package availableport + +import ( + "crypto/rand" + "fmt" + "math/big" + "net" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/safeconverter" +) + +type availablePortOptions struct { + minPort uint + maxPort uint +} + +type Options func(o *availablePortOptions) + +func WithMaxPort(maxPort uint) Options { + return func(o *availablePortOptions) { + 
o.maxPort = maxPort + } +} + +func WithMinPort(minPort uint) Options { + return func(o *availablePortOptions) { + o.minPort = minPort + } +} + +// Find finds n number of unused ports. +// it is not guaranteed that these ports will not be allocated to +// another program in the time of calling Find(). +func Find(n uint, options ...Options) (ports []uint, err error) { + // Defining them before so we can set a value depending on the AvailablePortOptions + opts := availablePortOptions{ + minPort: 44000, + maxPort: 55000, + } + + for _, apply := range options { + apply(&opts) + } + // If the number of ports required is bigger than the range, this stops it + if opts.maxPort < opts.minPort { + return nil, errors.Errorf("invalid ports range: max < min (%d < %d)", opts.maxPort, opts.minPort) + } + + // If the number of ports required is bigger than the range, this stops it + if n > (opts.maxPort - opts.minPort) { + return nil, errors.Errorf("invalid amount of ports requested: limit is %d", opts.maxPort-opts.minPort) + } + + // Marker to point if a port is already added in the list + registered := make(map[uint]bool) + i := safeconverter.ToInt[uint](n) + for len(registered) < i { + // Greater or equal to min and lower than max + totalPorts := opts.maxPort - opts.minPort + 1 + + randomPort, _ := rand.Int(rand.Reader, big.NewInt(safeconverter.ToInt64[uint](totalPorts))) + port := uint(randomPort.Uint64()) + opts.minPort + + conn, err := net.Dial("tcp", fmt.Sprintf(":%d", port)) + // if there is an error, this might mean that no one is listening from this port + // which is what we need. 
+ if err == nil { + conn.Close() + continue + } + if conn != nil { + defer conn.Close() + } + + // if the port is already registered we skip it to the next one + // otherwise it's added to the ports list and pointed in our map + if registered[port] { + continue + } + ports = append(ports, port) + registered[port] = true + } + return ports, nil +} diff --git a/ignite/pkg/availableport/availableport_test.go b/ignite/pkg/availableport/availableport_test.go new file mode 100644 index 0000000..1cd44ce --- /dev/null +++ b/ignite/pkg/availableport/availableport_test.go @@ -0,0 +1,77 @@ +package availableport_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/availableport" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func TestFind(t *testing.T) { + tests := []struct { + name string + n uint + options []availableport.Options + err error + }{ + { + name: "test 10 ports", + n: 10, + }, + { + name: "invalid port range", + n: 10, + options: []availableport.Options{ + availableport.WithMinPort(5), + availableport.WithMaxPort(1), + }, + err: errors.Errorf("invalid ports range: max < min (1 < 5)"), + }, + { + name: "invalid maximum port range", + n: 10, + options: []availableport.Options{ + availableport.WithMinPort(55001), + availableport.WithMaxPort(1), + }, + err: errors.Errorf("invalid ports range: max < min (1 < 55001)"), + }, + { + name: "only invalid maximum port range", + n: 10, + options: []availableport.Options{ + availableport.WithMaxPort(43999), + }, + err: errors.Errorf("invalid ports range: max < min (43999 < 44000)"), + }, + { + name: "with randomizer", + n: 100, + options: []availableport.Options{ + availableport.WithMinPort(100), + availableport.WithMaxPort(200), + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := availableport.Find(tt.n, tt.options...) 
+ if tt.err != nil { + require.Error(t, err) + require.True(t, errors.Is(tt.err, err)) + return + } + require.NoError(t, err) + require.Len(t, got, int(tt.n)) + + seen := make(map[uint]struct{}) + for _, val := range got { + _, ok := seen[val] + require.Falsef(t, ok, "duplicated port %d", val) + seen[val] = struct{}{} + } + }) + } +} diff --git a/ignite/pkg/cache/cache.go b/ignite/pkg/cache/cache.go new file mode 100644 index 0000000..0fb19c4 --- /dev/null +++ b/ignite/pkg/cache/cache.go @@ -0,0 +1,159 @@ +package cache + +import ( + "bytes" + "encoding/gob" + "fmt" + "os" + "path/filepath" + "strings" + "time" + + bolt "go.etcd.io/bbolt" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +var ErrorNotFound = errors.New("no value was found with the provided key") + +// Storage is meant to be passed around and used by the New function (which provides namespacing and type-safety). +type Storage struct { + path, version string +} + +// Cache is a namespaced and type-safe key-value store. +type Cache[T any] struct { + storage Storage + namespace string +} + +// NewStorage sets up the storage needed for later cache usage +// path is the full path (including filename) to the database file to use. +// It does not need to be closed as this happens automatically in each call to the cache. +func NewStorage(path string, options ...StorageOption) (Storage, error) { + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { + return Storage{}, err + } + + s := Storage{path: path} + for _, apply := range options { + apply(&s) + } + return s, nil +} + +// New creates a namespaced and typesafe key-value Cache. +func New[T any](storage Storage, namespace string) Cache[T] { + if storage.version != "" { + namespace = fmt.Sprint(storage.version, namespace) + } + + return Cache[T]{ + storage: storage, + namespace: namespace, + } +} + +// Key creates a single composite key from a list of keyParts. 
+func Key(keyParts ...string) string { + return strings.Join(keyParts, "") +} + +// Clear deletes all namespaces and cached values. +func (s Storage) Clear() error { + db, err := openDB(s.path) + if err != nil { + return err + } + defer db.Close() + + return db.Update(func(tx *bolt.Tx) error { + return tx.ForEach(func(name []byte, _ *bolt.Bucket) error { + return tx.DeleteBucket(name) + }) + }) +} + +// Put sets key to value within the namespace +// If the key already exists, it will be overwritten. +func (c Cache[T]) Put(key string, value T) error { + db, err := openDB(c.storage.path) + if err != nil { + return err + } + defer db.Close() + + var buf bytes.Buffer + encoder := gob.NewEncoder(&buf) + if err := encoder.Encode(value); err != nil { + return err + } + result := buf.Bytes() + + return db.Update(func(tx *bolt.Tx) error { + b, err := tx.CreateBucketIfNotExists([]byte(c.namespace)) + if err != nil { + return err + } + return b.Put([]byte(key), result) + }) +} + +// Get fetches the value of key within the namespace. +// If no value exists, it will return found == false. +func (c Cache[T]) Get(key string) (val T, err error) { + db, err := openDB(c.storage.path) + if err != nil { + return val, err + } + defer db.Close() + + err = db.View(func(tx *bolt.Tx) error { + b := tx.Bucket([]byte(c.namespace)) + if b == nil { + return ErrorNotFound + } + c := b.Cursor() + if k, v := c.Seek([]byte(key)); bytes.Equal(k, []byte(key)) { + if v == nil { + return ErrorNotFound + } + + var decodedVal T + d := gob.NewDecoder(bytes.NewReader(v)) + if err := d.Decode(&decodedVal); err != nil { + return err + } + + val = decodedVal + } else { + return ErrorNotFound + } + + return nil + }) + + return val, err +} + +// Delete removes a value for key within the namespace. 
+func (c Cache[T]) Delete(key string) error { + db, err := openDB(c.storage.path) + if err != nil { + return err + } + defer db.Close() + + return db.Update(func(tx *bolt.Tx) error { + b := tx.Bucket([]byte(c.namespace)) + if b == nil { + return nil + } + + return b.Delete([]byte(key)) + }) +} + +func openDB(path string) (*bolt.DB, error) { + return bolt.Open(path, 0o640, &bolt.Options{Timeout: 1 * time.Minute}) +} diff --git a/ignite/pkg/cache/cache_test.go b/ignite/pkg/cache/cache_test.go new file mode 100644 index 0000000..3e8cb98 --- /dev/null +++ b/ignite/pkg/cache/cache_test.go @@ -0,0 +1,205 @@ +package cache_test + +import ( + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cache" +) + +type TestStruct struct { + Num int +} + +func TestCreateStorage(t *testing.T) { + cases := []struct { + name string + options []cache.StorageOption + }{ + { + name: "simple", + }, + { + name: "versioned", + options: []cache.StorageOption{ + cache.WithVersion("v0.1.0"), + }, + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + _, err := cache.NewStorage(filepath.Join(t.TempDir(), "cache.db"), tt.options...) 
+ require.NoError(t, err) + }) + } +} + +func TestStoreWithVersion(t *testing.T) { + path := filepath.Join(t.TempDir(), "cache.db") + storage, err := cache.NewStorage(path, cache.WithVersion("v0.1.0")) + require.NoError(t, err) + + nsCache := cache.New[string](storage, "cacheNS") + err = nsCache.Put("myKey", "myValue") + require.NoError(t, err) + + v, err := nsCache.Get("myKey") + require.NoError(t, err) + require.Equal(t, "myValue", v) + + // Create a non versioned storage with the same file path + storage, err = cache.NewStorage(path) + require.NoError(t, err) + + nsCache = cache.New[string](storage, "cacheNS") + _, err = nsCache.Get("myKey") + require.ErrorIs(t, err, cache.ErrorNotFound) +} + +func TestStoreString(t *testing.T) { + tmpDir := t.TempDir() + cacheStorage, err := cache.NewStorage(filepath.Join(tmpDir, "testdbfile.db")) + require.NoError(t, err) + + strNamespace := cache.New[string](cacheStorage, "myNameSpace") + + err = strNamespace.Put("myKey", "myValue") + require.NoError(t, err) + + val, err := strNamespace.Get("myKey") + require.NoError(t, err) + require.Equal(t, "myValue", val) + + strNamespaceAgain := cache.New[string](cacheStorage, "myNameSpace") + + valAgain, err := strNamespaceAgain.Get("myKey") + require.NoError(t, err) + require.Equal(t, "myValue", valAgain) +} + +func TestStoreObjects(t *testing.T) { + tmpDir := t.TempDir() + cacheStorage, err := cache.NewStorage(filepath.Join(tmpDir, "testdbfile.db")) + require.NoError(t, err) + + structCache := cache.New[TestStruct](cacheStorage, "mySimpleNamespace") + + err = structCache.Put("myKey", TestStruct{ + Num: 42, + }) + require.NoError(t, err) + + val, err := structCache.Get("myKey") + require.NoError(t, err) + require.Equal(t, val, TestStruct{ + Num: 42, + }) + + arrayNamespace := cache.New[[]TestStruct](cacheStorage, "myArrayNamespace") + + err = arrayNamespace.Put("myKey", []TestStruct{ + { + Num: 42, + }, + { + Num: 420, + }, + }) + require.NoError(t, err) + + val2, err := 
arrayNamespace.Get("myKey") + require.NoError(t, err) + require.Equal(t, 2, len(val2)) + require.Equal(t, 42, (val2)[0].Num) + require.Equal(t, 420, (val2)[1].Num) + + empty, err := arrayNamespace.Get("doesNotExists") + require.Equal(t, cache.ErrorNotFound, err) + require.Nil(t, empty) +} + +func TestConflicts(t *testing.T) { + tmpDir := t.TempDir() + tmpDir2 := t.TempDir() + cacheStorage1, err := cache.NewStorage(filepath.Join(tmpDir, "testdbfile.db")) + require.NoError(t, err) + cacheStorage2, err := cache.NewStorage(filepath.Join(tmpDir2, "testdbfile.db")) + require.NoError(t, err) + + sameStorageDifferentNamespaceCache1 := cache.New[int](cacheStorage1, "ns1") + + sameStorageDifferentNamespaceCache2 := cache.New[int](cacheStorage1, "ns2") + + differentStorageSameNamespace := cache.New[int](cacheStorage2, "ns1") + + // Put values in caches + err = sameStorageDifferentNamespaceCache1.Put("myKey", 41) + require.NoError(t, err) + + err = sameStorageDifferentNamespaceCache2.Put("myKey", 1337) + require.NoError(t, err) + + err = differentStorageSameNamespace.Put("myKey", 9001) + require.NoError(t, err) + + // Overwrite a value + err = sameStorageDifferentNamespaceCache1.Put("myKey", 42) + require.NoError(t, err) + + // Check that everything comes back as expected + val1, err := sameStorageDifferentNamespaceCache1.Get("myKey") + require.NoError(t, err) + require.Equal(t, 42, val1) + + val2, err := sameStorageDifferentNamespaceCache2.Get("myKey") + require.NoError(t, err) + require.Equal(t, 1337, val2) + + val3, err := differentStorageSameNamespace.Get("myKey") + require.NoError(t, err) + require.Equal(t, 9001, val3) +} + +func TestDeleteKey(t *testing.T) { + tmpDir := t.TempDir() + cacheStorage, err := cache.NewStorage(filepath.Join(tmpDir, "testdbfile.db")) + require.NoError(t, err) + + strNamespace := cache.New[string](cacheStorage, "myNameSpace") + err = strNamespace.Put("myKey", "someValue") + require.NoError(t, err) + + err = strNamespace.Delete("myKey") + 
+ require.NoError(t, err) + }) + } +} + +func TestStoreWithVersion(t *testing.T) { + path := filepath.Join(t.TempDir(), "cache.db") + storage, err := cache.NewStorage(path, cache.WithVersion("v0.1.0")) + require.NoError(t, err) + + nsCache := cache.New[string](storage, "cacheNS") + err = nsCache.Put("myKey", "myValue") + require.NoError(t, err) + + v, err := nsCache.Get("myKey") + require.NoError(t, err) + require.Equal(t, "myValue", v) + + // Create a non versioned storage with the same file path + storage, err = cache.NewStorage(path) + require.NoError(t, err) + + nsCache = cache.New[string](storage, "cacheNS") + _, err = nsCache.Get("myKey") + require.ErrorIs(t, err, cache.ErrorNotFound) +} + +func TestStoreString(t *testing.T) { + tmpDir := t.TempDir() + cacheStorage, err := cache.NewStorage(filepath.Join(tmpDir, "testdbfile.db")) + require.NoError(t, err) + + strNamespace := cache.New[string](cacheStorage, "myNameSpace") + + err = strNamespace.Put("myKey", "myValue") + require.NoError(t, err) + + val, err := strNamespace.Get("myKey") + require.NoError(t, err) + require.Equal(t, "myValue", val) + + strNamespaceAgain := cache.New[string](cacheStorage, "myNameSpace") + + valAgain, err := strNamespaceAgain.Get("myKey") + require.NoError(t, err) + require.Equal(t, "myValue", valAgain) +} + +func TestStoreObjects(t *testing.T) { + tmpDir := t.TempDir() + cacheStorage, err := cache.NewStorage(filepath.Join(tmpDir, "testdbfile.db")) + require.NoError(t, err) + + structCache := cache.New[TestStruct](cacheStorage, "mySimpleNamespace") + + err = structCache.Put("myKey", TestStruct{ + Num: 42, + }) + require.NoError(t, err) + + val, err := structCache.Get("myKey") + require.NoError(t, err) + require.Equal(t, val, TestStruct{ + Num: 42, + }) + + arrayNamespace := cache.New[[]TestStruct](cacheStorage, "myArrayNamespace") + + err = arrayNamespace.Put("myKey", []TestStruct{ + { + Num: 42, + }, + { + Num: 420, + }, + }) + require.NoError(t, err) + + val2, err := 
+func WithVersion(version string) StorageOption { + return func(o *Storage) { + o.version = version + } +} diff --git a/ignite/pkg/chaincmd/chaincmd.go b/ignite/pkg/chaincmd/chaincmd.go new file mode 100644 index 0000000..506edb9 --- /dev/null +++ b/ignite/pkg/chaincmd/chaincmd.go @@ -0,0 +1,670 @@ +package chaincmd + +import ( + "fmt" + + "github.com/cosmos/cosmos-sdk/client/flags" + sdk "github.com/cosmos/cosmos-sdk/types" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/cosmosver" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +const ( + commandStart = "start" + commandInit = "init" + commandKeys = "keys" + commandGenesis = "genesis" + commandAddGenesisAccount = "add-genesis-account" + commandGentx = "gentx" + commandCollectGentxs = "collect-gentxs" + commandValidateGenesis = "validate" + commandExportGenssis = "export" + commandShowNodeID = "show-node-id" + commandStatus = "status" + commandTx = "tx" + commandQuery = "query" + commandUnsafeReset = "unsafe-reset-all" + commandTendermint = "tendermint" + commandTestnetInPlace = "in-place-testnet" + commandTestnetMultiNode = "multi-node" + + optionHome = "--home" + optionNode = "--node" + optionKeyringBackend = "--keyring-backend" + optionChainID = "--chain-id" + optionOutput = "--output" + optionRecover = "--recover" + optionAddress = "--address" + optionAmount = "--amount" + optionFees = "--fees" + optionValidatorMoniker = "--moniker" + optionValidatorCommissionRate = "--commission-rate" + optionValidatorCommissionMaxRate = "--commission-max-rate" + optionValidatorCommissionMaxChangeRate = "--commission-max-change-rate" + optionValidatorMinSelfDelegation = "--min-self-delegation" + optionValidatorGasPrices = "--gas-prices" + optionValidatorDetails = "--details" + optionValidatorIdentity = "--identity" + optionValidatorWebsite = "--website" + optionValidatorSecurityContact = "--security-contact" + optionYes = "--yes" + optionHomeClient = "--home-client" + 
optionCoinType = "--coin-type" + optionVestingAmount = "--vesting-amount" + optionVestingEndTime = "--vesting-end-time" + optionBroadcastMode = "--broadcast-mode" + optionAccount = "--account" + optionIndex = "--index" + optionValidatorPrivateKey = "--validator-privkey" + optionAccountToFund = "--accounts-to-fund" + optionSkipConfirmation = "--skip-confirmation" + optionAmountStakes = "--validators-stake-amount" + optionOutPutDir = "--output-dir" + optionNumValidator = "--v" + optionNodeDirPrefix = "--node-dir-prefix" + optionPorts = "--list-ports" + + constTendermint = "tendermint" + constJSON = "json" +) + +type KeyringBackend string + +const ( + KeyringBackendUnspecified KeyringBackend = "" + KeyringBackendOS KeyringBackend = "os" + KeyringBackendFile KeyringBackend = "file" + KeyringBackendPass KeyringBackend = "pass" + KeyringBackendTest KeyringBackend = "test" + KeyringBackendKwallet KeyringBackend = "kwallet" +) + +type ChainCmd struct { + appCmd string + chainID string + homeDir string + keyringBackend KeyringBackend + keyringPassword string + nodeAddress string + + isAutoChainIDDetectionEnabled bool + + sdkVersion cosmosver.Version +} + +// New creates a new ChainCmd to launch command with the chain app. +func New(appCmd string, options ...Option) ChainCmd { + chainCmd := ChainCmd{ + appCmd: appCmd, + sdkVersion: cosmosver.Latest, + } + + applyOptions(&chainCmd, options) + + return chainCmd +} + +// Copy makes a copy of ChainCmd by overwriting its options with given options. +func (c ChainCmd) Copy(options ...Option) ChainCmd { + applyOptions(&c, options) + + return c +} + +// Option configures ChainCmd. +type Option func(*ChainCmd) + +func applyOptions(c *ChainCmd, options []Option) { + for _, apply := range options { + apply(c) + } +} + +// WithVersion sets the version of the blockchain. +// when this is not provided, the latest version of SDK is assumed. 
+func WithVersion(v cosmosver.Version) Option { + return func(c *ChainCmd) { + c.sdkVersion = v + } +} + +// WithHome replaces the default home used by the chain. +func WithHome(home string) Option { + return func(c *ChainCmd) { + c.homeDir = home + } +} + +// WithChainID provides a specific chain ID for the commands that accept this option. +func WithChainID(chainID string) Option { + return func(c *ChainCmd) { + c.chainID = chainID + } +} + +// WithAutoChainIDDetection finds out the chain id by communicating with the node running. +func WithAutoChainIDDetection() Option { + return func(c *ChainCmd) { + c.isAutoChainIDDetectionEnabled = true + } +} + +// WithKeyringBackend provides a specific keyring backend for the commands that accept this option. +func WithKeyringBackend(keyringBackend KeyringBackend) Option { + return func(c *ChainCmd) { + c.keyringBackend = keyringBackend + } +} + +// WithKeyringPassword provides a password to unlock keyring. +func WithKeyringPassword(password string) Option { + return func(c *ChainCmd) { + c.keyringPassword = password + } +} + +// WithNodeAddress sets the node address for the commands that needs to make an +// API request to the node that has a different node address other than the default one. +func WithNodeAddress(addr string) Option { + return func(c *ChainCmd) { + c.nodeAddress = addr + } +} + +// Name returns the app name (prefix of the chain daemon). +func (c ChainCmd) Name() string { + return c.appCmd +} + +// StartCommand returns the command to start the daemon of the chain. +func (c ChainCmd) StartCommand(options ...string) step.Option { + command := append([]string{ + commandStart, + }, options...) + return c.daemonCommand(command) +} + +// InitCommand returns the command to initialize the chain. +func (c ChainCmd) InitCommand(moniker string, options ...string) step.Option { + command := append([]string{ + commandInit, + moniker, + }, options...) 
+ command = c.attachChainID(command) + return c.daemonCommand(command) +} + +// AddKeyCommand returns the command to add a new key in the chain keyring. +func (c ChainCmd) AddKeyCommand(accountName, coinType, accountNumber, addressIndex string) step.Option { + command := []string{ + commandKeys, + "add", + accountName, + optionOutput, + constJSON, + } + if coinType != "" { + command = append(command, optionCoinType, coinType) + } + if accountNumber != "" { + command = append(command, optionAccount, accountNumber) + } + if addressIndex != "" { + command = append(command, optionIndex, addressIndex) + } + command = c.attachKeyringBackend(command) + + return c.cliCommand(command) +} + +// RecoverKeyCommand returns the command to recover a key into the chain keyring from a mnemonic. +func (c ChainCmd) RecoverKeyCommand(accountName, coinType, accountNumber, addressIndex string) step.Option { + command := []string{ + commandKeys, + "add", + accountName, + optionRecover, + } + if coinType != "" { + command = append(command, optionCoinType, coinType) + } + if accountNumber != "" { + command = append(command, optionAccount, accountNumber) + } + if addressIndex != "" { + command = append(command, optionIndex, addressIndex) + } + command = c.attachKeyringBackend(command) + + return c.cliCommand(command) +} + +// ImportKeyCommand returns the command to import a key into the chain keyring from a key file. +func (c ChainCmd) ImportKeyCommand(accountName, keyFile string) step.Option { + command := []string{ + commandKeys, + "import", + accountName, + keyFile, + } + command = c.attachKeyringBackend(command) + + return c.cliCommand(command) +} + +// ShowKeyAddressCommand returns the command to print the address of a key in the chain keyring. 
+func (c ChainCmd) ShowKeyAddressCommand(accountName string) step.Option { + command := []string{ + commandKeys, + "show", + accountName, + optionAddress, + } + command = c.attachKeyringBackend(command) + + return c.cliCommand(command) +} + +// ListKeysCommand returns the command to print the list of a keys in the chain keyring. +func (c ChainCmd) ListKeysCommand() step.Option { + command := []string{ + commandKeys, + "list", + optionOutput, + constJSON, + } + command = c.attachKeyringBackend(command) + + return c.cliCommand(command) +} + +// AddGenesisAccountCommand returns the command to add a new account in the genesis file of the chain. +func (c ChainCmd) AddGenesisAccountCommand(address, coins string) step.Option { + command := []string{ + commandGenesis, + commandAddGenesisAccount, + address, + coins, + } + + return c.daemonCommand(command) +} + +// AddVestingAccountCommand returns the command to add a delayed vesting account in the genesis file of the chain. +func (c ChainCmd) AddVestingAccountCommand(address, originalCoins, vestingCoins string, vestingEndTime int64) step.Option { + command := []string{ + commandGenesis, + commandAddGenesisAccount, + address, + originalCoins, + optionVestingAmount, + vestingCoins, + optionVestingEndTime, + fmt.Sprintf("%d", vestingEndTime), + } + + return c.daemonCommand(command) +} + +// GentxOption for the GentxCommand. +type GentxOption func([]string) []string + +// GentxWithMoniker provides moniker option for the gentx command. +func GentxWithMoniker(moniker string) GentxOption { + return func(command []string) []string { + if len(moniker) > 0 { + return append(command, optionValidatorMoniker, moniker) + } + return command + } +} + +// GentxWithCommissionRate provides commission rate option for the gentx command. 
+func GentxWithCommissionRate(commissionRate string) GentxOption { + return func(command []string) []string { + if len(commissionRate) > 0 { + return append(command, optionValidatorCommissionRate, commissionRate) + } + return command + } +} + +// GentxWithCommissionMaxRate provides commission max rate option for the gentx command. +func GentxWithCommissionMaxRate(commissionMaxRate string) GentxOption { + return func(command []string) []string { + if len(commissionMaxRate) > 0 { + return append(command, optionValidatorCommissionMaxRate, commissionMaxRate) + } + return command + } +} + +// GentxWithCommissionMaxChangeRate provides commission max change rate option for the gentx command. +func GentxWithCommissionMaxChangeRate(commissionMaxChangeRate string) GentxOption { + return func(command []string) []string { + if len(commissionMaxChangeRate) > 0 { + return append(command, optionValidatorCommissionMaxChangeRate, commissionMaxChangeRate) + } + return command + } +} + +// GentxWithMinSelfDelegation provides minimum self delegation option for the gentx command. +func GentxWithMinSelfDelegation(minSelfDelegation string) GentxOption { + return func(command []string) []string { + if len(minSelfDelegation) > 0 { + return append(command, optionValidatorMinSelfDelegation, minSelfDelegation) + } + return command + } +} + +// GentxWithGasPrices provides gas price option for the gentx command. +func GentxWithGasPrices(gasPrices string) GentxOption { + return func(command []string) []string { + if len(gasPrices) > 0 { + return append(command, optionValidatorGasPrices, gasPrices) + } + return command + } +} + +// GentxWithDetails provides validator details option for the gentx command. +func GentxWithDetails(details string) GentxOption { + return func(command []string) []string { + if len(details) > 0 { + return append(command, optionValidatorDetails, details) + } + return command + } +} + +// GentxWithIdentity provides validator identity option for the gentx command. 
+func GentxWithIdentity(identity string) GentxOption { + return func(command []string) []string { + if len(identity) > 0 { + return append(command, optionValidatorIdentity, identity) + } + return command + } +} + +// GentxWithWebsite provides validator website option for the gentx command. +func GentxWithWebsite(website string) GentxOption { + return func(command []string) []string { + if len(website) > 0 { + return append(command, optionValidatorWebsite, website) + } + return command + } +} + +// GentxWithSecurityContact provides validator security contact option for the gentx command. +func GentxWithSecurityContact(securityContact string) GentxOption { + return func(command []string) []string { + if len(securityContact) > 0 { + return append(command, optionValidatorSecurityContact, securityContact) + } + return command + } +} + +func (c ChainCmd) IsAutoChainIDDetectionEnabled() bool { + return c.isAutoChainIDDetectionEnabled +} + +func (c ChainCmd) SDKVersion() cosmosver.Version { + return c.sdkVersion +} + +// GentxCommand returns the command to generate a gentx for the chain. +func (c ChainCmd) GentxCommand( + validatorName string, + selfDelegation string, + options ...GentxOption, +) step.Option { + command := []string{ + commandGenesis, + commandGentx, + } + + switch { + case c.sdkVersion.LT(cosmosver.StargateFortyVersion): + command = append(command, + validatorName, + optionAmount, + selfDelegation, + ) + case c.sdkVersion.GTE(cosmosver.StargateFortyVersion): + command = append(command, + validatorName, + selfDelegation, + ) + } + + // Apply the options provided by the user + for _, apply := range options { + command = apply(command) + } + + command = c.attachChainID(command) + command = c.attachKeyringBackend(command) + + return c.daemonCommand(command) +} + +// CollectGentxsCommand returns the command to gather the gentxs in /gentx dir into the genesis file of the chain. 
+func (c ChainCmd) CollectGentxsCommand() step.Option { + command := []string{ + commandGenesis, + commandCollectGentxs, + } + return c.daemonCommand(command) +} + +// ValidateGenesisCommand returns the command to check the validity of the chain genesis. +func (c ChainCmd) ValidateGenesisCommand() step.Option { + command := []string{ + commandGenesis, + commandValidateGenesis, + } + return c.daemonCommand(command) +} + +// ShowNodeIDCommand returns the command to print the node ID of the node for the chain. +func (c ChainCmd) ShowNodeIDCommand() step.Option { + command := []string{ + constTendermint, + commandShowNodeID, + } + return c.daemonCommand(command) +} + +// UnsafeResetCommand returns the command to reset the blockchain database. +func (c ChainCmd) UnsafeResetCommand() step.Option { + var command []string + + if c.sdkVersion.GTE(cosmosver.StargateFortyFiveThreeVersion) { + command = append(command, commandTendermint) + } + + command = append(command, commandUnsafeReset) + + return c.daemonCommand(command) +} + +// ExportCommand returns the command to export the state of the blockchain into a genesis file. +func (c ChainCmd) ExportCommand() step.Option { + command := []string{ + commandExportGenssis, + } + return c.daemonCommand(command) +} + +// BankSendOption for the BankSendCommand. +type BankSendOption func([]string) []string + +// BankSendWithFees sets fees to pay along with transaction for the bank send command. +func BankSendWithFees(fee sdk.Coin) BankSendOption { + return func(command []string) []string { + if !fee.IsNil() { + return append(command, optionFees, fee.String()) + } + return command + } +} + +// BankSendCommand returns the command for transferring tokens. 
+func (c ChainCmd) BankSendCommand(fromAddress, toAddress, amount string, options ...BankSendOption) step.Option { + command := []string{ + commandTx, + } + + command = append(command, "bank") + command = append(command, + "send", + fromAddress, + toAddress, + amount, + ) + command = append(command, + optionBroadcastMode, flags.BroadcastSync, + optionYes, + ) + + // Apply the options provided by the user + for _, apply := range options { + command = apply(command) + } + + command = c.attachChainID(command) + command = c.attachKeyringBackend(command) + command = c.attachNode(command) + + return c.cliCommand(command) +} + +// QueryTxCommand returns the command to query tx. +func (c ChainCmd) QueryTxCommand(txHash string) step.Option { + command := []string{ + commandQuery, + "tx", + txHash, + } + + command = c.attachNode(command) + return c.cliCommand(command) +} + +// QueryTxEventsCommand returns the command to query events. +func (c ChainCmd) QueryTxEventsCommand(query string) step.Option { + command := []string{ + commandQuery, + "txs", + "--query", + query, + "--page", "1", + "--limit", "1000", + "--output", "json", + } + + command = c.attachNode(command) + return c.cliCommand(command) +} + +// QueryTxQueryCommand returns the command to query tx. +func (c ChainCmd) QueryTxQueryCommand(query string) step.Option { + command := []string{ + commandQuery, + "txs", + "--query", + query, + "--page", "1", + "--limit", "1000", + "--output", "json", + } + + command = c.attachNode(command) + return c.cliCommand(command) +} + +// StatusCommand returns the command that fetches node's status. +func (c ChainCmd) StatusCommand() step.Option { + command := []string{ + commandStatus, + } + + command = c.attachNode(command) + return c.cliCommand(command) +} + +// KeyringBackend returns the underlying keyring backend. +func (c ChainCmd) KeyringBackend() KeyringBackend { + return c.keyringBackend +} + +// KeyringPassword returns the underlying keyring password. 
+func (c ChainCmd) KeyringPassword() string { + return c.keyringPassword +} + +// attachChainID appends the chain ID flag to the provided command. +func (c ChainCmd) attachChainID(command []string) []string { + if c.chainID != "" { + command = append(command, []string{optionChainID, c.chainID}...) + } + return command +} + +// attachKeyringBackend appends the keyring backend flag to the provided command. +func (c ChainCmd) attachKeyringBackend(command []string) []string { + if c.keyringBackend != "" { + command = append(command, []string{optionKeyringBackend, string(c.keyringBackend)}...) + } + return command +} + +// attachHome appends the home flag to the provided command. +func (c ChainCmd) attachHome(command []string) []string { + if c.homeDir != "" { + command = append(command, []string{optionHome, c.homeDir}...) + } + return command +} + +// attachNode appends the node flag to the provided command. +func (c ChainCmd) attachNode(command []string) []string { + if c.nodeAddress != "" { + command = append(command, []string{optionNode, c.nodeAddress}...) + } + return command +} + +// daemonCommand returns the daemon command from the provided command. +func (c ChainCmd) daemonCommand(command []string) step.Option { + return step.Exec(c.appCmd, c.attachHome(command)...) +} + +// cliCommand returns the cli command from the provided command. +func (c ChainCmd) cliCommand(command []string) step.Option { + return step.Exec(c.appCmd, c.attachHome(command)...) +} + +// KeyringBackendFromString returns the keyring backend from its string. 
+func KeyringBackendFromString(kb string) (KeyringBackend, error) { + existingKeyringBackend := map[KeyringBackend]bool{ + KeyringBackendUnspecified: true, + KeyringBackendOS: true, + KeyringBackendFile: true, + KeyringBackendPass: true, + KeyringBackendTest: true, + KeyringBackendKwallet: true, + } + + if _, ok := existingKeyringBackend[KeyringBackend(kb)]; ok { + return KeyringBackend(kb), nil + } + return KeyringBackendUnspecified, errors.Errorf("unrecognized keyring backend: %s", kb) +} diff --git a/ignite/pkg/chaincmd/chaincmd_test.go b/ignite/pkg/chaincmd/chaincmd_test.go new file mode 100644 index 0000000..6afacb1 --- /dev/null +++ b/ignite/pkg/chaincmd/chaincmd_test.go @@ -0,0 +1,166 @@ +package chaincmd + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" +) + +func TestInitCommandBuildsExpectedCommand(t *testing.T) { + cmd := New("simd", WithChainID("my-chain"), WithHome("/tmp/simd")) + + s := step.New(cmd.InitCommand("my-moniker")) + + require.Equal(t, "simd", s.Exec.Command) + require.Equal(t, []string{ + "init", + "my-moniker", + "--chain-id", + "my-chain", + "--home", + "/tmp/simd", + }, s.Exec.Args) +} + +func TestAddKeyCommandAddsOptionalFieldsAndKeyringBackend(t *testing.T) { + cmd := New( + "simd", + WithKeyringBackend(KeyringBackendTest), + WithHome("/tmp/simd"), + ) + + s := step.New(cmd.AddKeyCommand("alice", "118", "3", "1")) + + require.Equal(t, "simd", s.Exec.Command) + require.Equal(t, []string{ + "keys", + "add", + "alice", + "--output", + "json", + "--coin-type", + "118", + "--account", + "3", + "--index", + "1", + "--keyring-backend", + "test", + "--home", + "/tmp/simd", + }, s.Exec.Args) +} + +func TestAddKeyCommandSkipsEmptyOptionalFields(t *testing.T) { + cmd := New("simd") + s := step.New(cmd.AddKeyCommand("alice", "", "", "")) + + require.Equal(t, "simd", s.Exec.Command) + require.Equal(t, []string{ + "keys", + "add", + "alice", + "--output", + "json", + }, 
s.Exec.Args) +} + +func TestStatusCommandAddsNodeFlag(t *testing.T) { + cmd := New( + "simd", + WithNodeAddress("http://127.0.0.1:26657"), + WithHome("/tmp/simd"), + ) + + s := step.New(cmd.StatusCommand()) + + require.Equal(t, "simd", s.Exec.Command) + require.Equal(t, []string{ + "status", + "--node", + "http://127.0.0.1:26657", + "--home", + "/tmp/simd", + }, s.Exec.Args) +} + +func TestCopyOverridesOptionsWithoutMutatingOriginal(t *testing.T) { + original := New("simd", WithChainID("chain-A")) + copied := original.Copy(WithChainID("chain-B")) + + originalStep := step.New(original.InitCommand("alice")) + copiedStep := step.New(copied.InitCommand("alice")) + + require.Equal(t, []string{ + "init", + "alice", + "--chain-id", + "chain-A", + }, originalStep.Exec.Args) + require.Equal(t, []string{ + "init", + "alice", + "--chain-id", + "chain-B", + }, copiedStep.Exec.Args) +} + +func TestKeyringBackendFromString(t *testing.T) { + testCases := []struct { + name string + input string + expected KeyringBackend + shouldErr bool + }{ + { + name: "unspecified", + input: "", + expected: KeyringBackendUnspecified, + }, + { + name: "os", + input: "os", + expected: KeyringBackendOS, + }, + { + name: "file", + input: "file", + expected: KeyringBackendFile, + }, + { + name: "pass", + input: "pass", + expected: KeyringBackendPass, + }, + { + name: "test", + input: "test", + expected: KeyringBackendTest, + }, + { + name: "kwallet", + input: "kwallet", + expected: KeyringBackendKwallet, + }, + { + name: "invalid", + input: "invalid", + expected: KeyringBackendUnspecified, + shouldErr: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + got, err := KeyringBackendFromString(tc.input) + require.Equal(t, tc.expected, got) + if tc.shouldErr { + require.Error(t, err) + return + } + require.NoError(t, err) + }) + } +} diff --git a/ignite/pkg/chaincmd/in-place-testnet.go b/ignite/pkg/chaincmd/in-place-testnet.go new file mode 100644 index 
0000000..2654ad7 --- /dev/null +++ b/ignite/pkg/chaincmd/in-place-testnet.go @@ -0,0 +1,128 @@ +package chaincmd + +import ( + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" +) + +type InPlaceOption func([]string) []string + +func InPlaceWithPrvKey(prvKey string) InPlaceOption { + return func(s []string) []string { + if len(prvKey) > 0 { + return append(s, optionValidatorPrivateKey, prvKey) + } + return s + } +} + +func InPlaceWithAccountToFund(accounts string) InPlaceOption { + return func(s []string) []string { + if len(accounts) > 0 { + return append(s, optionAccountToFund, accounts) + } + return s + } +} + +func InPlaceWithSkipConfirmation() InPlaceOption { + return func(s []string) []string { + return append(s, optionSkipConfirmation) + } +} + +// TestnetInPlaceCommand return command to start testnet in-place. +func (c ChainCmd) TestnetInPlaceCommand(newChainID, newOperatorAddress string, options ...InPlaceOption) step.Option { + command := []string{ + commandTestnetInPlace, + newChainID, + newOperatorAddress, + } + + // Apply the options provided by the user + for _, apply := range options { + command = apply(command) + } + + return c.daemonCommand(command) +} + +// Options for testnet multi node. +type MultiNodeOption func([]string) []string + +// MultiNodeWithChainID returns a MultiNodeOption that appends the chainID option +// to the provided slice of strings. +func MultiNodeWithChainID(chainID string) MultiNodeOption { + return func(s []string) []string { + if len(chainID) > 0 { + return append(s, optionChainID, chainID) + } + return s + } +} + +// MultiNodeWithDirOutput returns a MultiNodeOption that appends the output directory option +// to the provided slice of strings. 
+func MultiNodeWithDirOutput(dirOutput string) MultiNodeOption { + return func(s []string) []string { + if len(dirOutput) > 0 { + return append(s, optionOutPutDir, dirOutput) + } + return s + } +} + +// MultiNodeWithNumValidator returns a MultiNodeOption that appends the number of validators option +// to the provided slice of strings. +func MultiNodeWithNumValidator(numVal string) MultiNodeOption { + return func(s []string) []string { + if len(numVal) > 0 { + return append(s, optionNumValidator, numVal) + } + return s + } +} + +// MultiNodeWithValidatorsStakeAmount returns a MultiNodeOption that appends the stake amounts option +// to the provided slice of strings. +func MultiNodeWithValidatorsStakeAmount(satkeAmounts string) MultiNodeOption { + return func(s []string) []string { + if len(satkeAmounts) > 0 { + return append(s, optionAmountStakes, satkeAmounts) + } + return s + } +} + +// MultiNodeDirPrefix returns a MultiNodeOption that appends the node directory prefix option +// to the provided slice of strings. +func MultiNodeDirPrefix(nodeDirPrefix string) MultiNodeOption { + return func(s []string) []string { + if len(nodeDirPrefix) > 0 { + return append(s, optionNodeDirPrefix, nodeDirPrefix) + } + return s + } +} + +func MultiNodePorts(ports string) MultiNodeOption { + return func(s []string) []string { + if len(ports) > 0 { + return append(s, optionPorts, ports) + } + return s + } +} + +// TestnetMultiNodeCommand return command to start testnet multinode. 
+func (c ChainCmd) TestnetMultiNodeCommand(options ...MultiNodeOption) step.Option { + command := []string{ + commandTestnetMultiNode, + } + + // Apply the options provided by the user + for _, apply := range options { + command = apply(command) + } + + return c.daemonCommand(command) +} diff --git a/ignite/pkg/chaincmd/runner/account.go b/ignite/pkg/chaincmd/runner/account.go new file mode 100644 index 0000000..5c63b8b --- /dev/null +++ b/ignite/pkg/chaincmd/runner/account.go @@ -0,0 +1,226 @@ +package chaincmdrunner + +import ( + "context" + "encoding/json" + "fmt" + "os" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +var ( + // ErrAccountAlreadyExists returned when an already exists account attempted to be imported. + ErrAccountAlreadyExists = errors.New("account already exists") + + // ErrAccountDoesNotExist returned when account does not exit. + ErrAccountDoesNotExist = errors.New("account does not exit") +) + +const msgEmptyKeyring = "No records were found in keyring" + +// Account represents a user account. +type Account struct { + Name string `json:"name"` + Address string `json:"address"` + Mnemonic string `json:"mnemonic,omitempty"` +} + +// AddAccount creates a new account or imports an account when mnemonic is provided. +// returns with an error if the operation went unsuccessful or an account with the provided name +// already exists. +func (r Runner) AddAccount( + ctx context.Context, + name, + mnemonic, + coinType, + accountNumber, + addressIndex string, +) (Account, error) { + if err := r.CheckAccountExist(ctx, name); err != nil { + return Account{}, err + } + b := newBuffer() + + account := Account{ + Name: name, + Mnemonic: mnemonic, + } + + // import the account when mnemonic is provided, otherwise create a new one. 
+ if mnemonic != "" { + input := newBuffer() + _, err := fmt.Fprintln(input, mnemonic) + if err != nil { + return Account{}, err + } + + if r.chainCmd.KeyringPassword() != "" { + _, err = fmt.Fprintln(input, r.chainCmd.KeyringPassword()) + if err != nil { + return Account{}, err + } + + _, err = fmt.Fprintln(input, r.chainCmd.KeyringPassword()) + if err != nil { + return Account{}, err + } + + } + + if err := r.run( + ctx, + runOptions{}, + r.chainCmd.RecoverKeyCommand(name, coinType, accountNumber, addressIndex), + step.Write(input.Bytes()), + ); err != nil { + return Account{}, err + } + } else { + if err := r.run(ctx, runOptions{ + stdout: b, + stderr: b, + stdin: os.Stdin, + }, r.chainCmd.AddKeyCommand(name, coinType, accountNumber, addressIndex)); err != nil { + return Account{}, err + } + + data, err := b.JSONEnsuredBytes() + if err != nil { + return Account{}, err + } + if err := json.Unmarshal(data, &account); err != nil { + return Account{}, err + } + } + + // get the address of the account. + retrieved, err := r.ShowAccount(ctx, name) + if err != nil { + return Account{}, err + } + account.Address = retrieved.Address + + return account, nil +} + +// ImportAccount import an account from a key file. +func (r Runner) ImportAccount(ctx context.Context, name, keyFile, passphrase string) (Account, error) { + if err := r.CheckAccountExist(ctx, name); err != nil { + return Account{}, err + } + + // write the passphrase as input + // TODO: manage keyring backend other than test + input := newBuffer() + _, err := fmt.Fprintln(input, passphrase) + if err != nil { + return Account{}, err + } + + if err := r.run( + ctx, + runOptions{}, + r.chainCmd.ImportKeyCommand(name, keyFile), + step.Write(input.Bytes()), + ); err != nil { + return Account{}, err + } + + return r.ShowAccount(ctx, name) +} + +// ListAccounts returns the list of accounts in the keyring. 
+func (r Runner) ListAccounts(ctx context.Context) ([]Account, error) { + // Get a JSON string with all accounts in the keyring + b := newBuffer() + if err := r.run(ctx, runOptions{stdout: b}, r.chainCmd.ListKeysCommand()); err != nil { + return nil, err + } + + // Make sure that the command output is not the empty keyring message. + // This need to be checked because when the keyring is empty the command + // finishes with exit code 0 and a plain text message. + // This behavior was added to Cosmos SDK v0.46.2. See the link + // https://github.com/cosmos/cosmos-sdk/blob/d01aa5b4a8/client/keys/list.go#L37 + if strings.TrimSpace(b.String()) == msgEmptyKeyring { + return nil, nil + } + + data, err := b.JSONEnsuredBytes() + if err != nil { + return nil, err + } + + var accounts []Account + if err := json.Unmarshal(data, &accounts); err != nil { + return nil, err + } + + return accounts, nil +} + +// CheckAccountExist returns an error if the account already exists in the chain keyring. +func (r Runner) CheckAccountExist(ctx context.Context, name string) error { + accounts, err := r.ListAccounts(ctx) + if err != nil { + return err + } + + // Search for the account name + for _, account := range accounts { + if account.Name == name { + return ErrAccountAlreadyExists + } + } + + return nil +} + +// ShowAccount shows details of an account. 
+func (r Runner) ShowAccount(ctx context.Context, name string) (Account, error) { + b := newBuffer() + opt := []step.Option{ + r.chainCmd.ShowKeyAddressCommand(name), + } + + if r.chainCmd.KeyringPassword() != "" { + input := newBuffer() + _, err := fmt.Fprintln(input, r.chainCmd.KeyringPassword()) + if err != nil { + return Account{}, err + } + opt = append(opt, step.Write(input.Bytes())) + } + + if err := r.run(ctx, runOptions{stdout: b}, opt...); err != nil { + if strings.Contains(err.Error(), "item could not be found") || + strings.Contains(err.Error(), "not a valid name or address") { + return Account{}, ErrAccountDoesNotExist + } + return Account{}, err + } + + return Account{ + Name: name, + Address: strings.TrimSpace(b.String()), + }, nil +} + +// AddGenesisAccount adds account to genesis by its address. +func (r Runner) AddGenesisAccount(ctx context.Context, address, coins string) error { + return r.run(ctx, runOptions{}, r.chainCmd.AddGenesisAccountCommand(address, coins)) +} + +// AddVestingAccount adds vesting account to genesis by its address. +func (r Runner) AddVestingAccount( + ctx context.Context, + address, + originalCoins, + vestingCoins string, + vestingEndTime int64, +) error { + return r.run(ctx, runOptions{}, r.chainCmd.AddVestingAccountCommand(address, originalCoins, vestingCoins, vestingEndTime)) +} diff --git a/ignite/pkg/chaincmd/runner/chain.go b/ignite/pkg/chaincmd/runner/chain.go new file mode 100644 index 0000000..51394ab --- /dev/null +++ b/ignite/pkg/chaincmd/runner/chain.go @@ -0,0 +1,389 @@ +package chaincmdrunner + +import ( + "context" + "encoding/json" + "fmt" + "os" + "path/filepath" + "regexp" + "strings" + "time" + + "github.com/cenkalti/backoff" + + "github.com/ignite/cli/v29/ignite/pkg/chaincmd" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/cosmosver" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// Start starts the blockchain. 
+func (r Runner) Start(ctx context.Context, args ...string) error { + return r.run( + ctx, + runOptions{wrappedStdErrMaxLen: 50000}, + r.chainCmd.StartCommand(args...), + ) +} + +// Init inits the blockchain. +func (r Runner) Init(ctx context.Context, moniker string, args ...string) error { + return r.run(ctx, runOptions{}, r.chainCmd.InitCommand(moniker, args...)) +} + +// KV holds a key, value pair. +type KV struct { + key string + value string +} + +// NewKV returns a new key, value pair. +func NewKV(key, value string) KV { + return KV{key, value} +} + +var gentxRe = regexp.MustCompile(`(?m)"(.+?)"`) + +func (r Runner) InPlace(ctx context.Context, newChainID, newOperatorAddress string, options ...chaincmd.InPlaceOption) error { + runOptions := runOptions{ + stdout: os.Stdout, + stderr: os.Stderr, + } + return r.run( + ctx, + runOptions, + r.chainCmd.TestnetInPlaceCommand(newChainID, newOperatorAddress, options...), + ) +} + +// Initialize config directories & files for a multi-validator testnet locally. +func (r Runner) MultiNode(ctx context.Context, options ...chaincmd.MultiNodeOption) error { + runOptions := runOptions{ + stdout: os.Stdout, + stderr: os.Stderr, + } + return r.run( + ctx, + runOptions, + r.chainCmd.TestnetMultiNodeCommand(options...), + ) +} + +// Gentx generates a genesis tx carrying a self delegation. +func (r Runner) Gentx( + ctx context.Context, + validatorName, + selfDelegation string, + options ...chaincmd.GentxOption, +) (gentxPath string, err error) { + b := newBuffer() + + if err := r.run(ctx, runOptions{ + stdout: b, + stderr: b, + stdin: os.Stdin, + }, r.chainCmd.GentxCommand(validatorName, selfDelegation, options...)); err != nil { + return "", err + } + + return gentxRe.FindStringSubmatch(b.String())[1], nil +} + +// CollectGentxs collects gentxs. +func (r Runner) CollectGentxs(ctx context.Context) error { + return r.run(ctx, runOptions{}, r.chainCmd.CollectGentxsCommand()) +} + +// ValidateGenesis validates genesis. 
+func (r Runner) ValidateGenesis(ctx context.Context) error { + return r.run(ctx, runOptions{}, r.chainCmd.ValidateGenesisCommand()) +} + +// UnsafeReset resets the blockchain database. +func (r Runner) UnsafeReset(ctx context.Context) error { + return r.run(ctx, runOptions{}, r.chainCmd.UnsafeResetCommand()) +} + +// ShowNodeID shows node id. +func (r Runner) ShowNodeID(ctx context.Context) (nodeID string, err error) { + b := newBuffer() + err = r.run(ctx, runOptions{stdout: b}, r.chainCmd.ShowNodeIDCommand()) + nodeID = strings.TrimSpace(b.String()) + return +} + +// NodeStatus keeps info about node's status. +type NodeStatus struct { + ChainID string +} + +// Status returns the node's status. +func (r Runner) Status(ctx context.Context) (NodeStatus, error) { + b := newBuffer() + + if err := r.run(ctx, runOptions{stdout: b, stderr: b}, r.chainCmd.StatusCommand()); err != nil { + return NodeStatus{}, err + } + + var chainID string + + data, err := b.JSONEnsuredBytes() + if err != nil { + return NodeStatus{}, err + } + + version := r.chainCmd.SDKVersion() + switch { + case version.GTE(cosmosver.StargateFortyVersion): + out := struct { + NodeInfo struct { + Network string `json:"network"` + } `json:"NodeInfo"` + }{} + + if err := json.Unmarshal(data, &out); err != nil { + return NodeStatus{}, err + } + + chainID = out.NodeInfo.Network + default: + out := struct { + NodeInfo struct { + Network string `json:"network"` + } `json:"node_info"` + }{} + + if err := json.Unmarshal(data, &out); err != nil { + return NodeStatus{}, err + } + + chainID = out.NodeInfo.Network + } + + return NodeStatus{ + ChainID: chainID, + }, nil +} + +// BankSend sends amount from fromAccount to toAccount. 
+func (r Runner) BankSend(ctx context.Context, fromAccount, toAccount, amount string, options ...chaincmd.BankSendOption) (string, error) { + b := newBuffer() + opt := []step.Option{ + r.chainCmd.BankSendCommand(fromAccount, toAccount, amount, options...), + } + + if r.chainCmd.KeyringPassword() != "" { + input := newBuffer() + _, err := fmt.Fprintln(input, r.chainCmd.KeyringPassword()) + if err != nil { + return "", err + } + _, err = fmt.Fprintln(input, r.chainCmd.KeyringPassword()) + if err != nil { + return "", err + } + _, err = fmt.Fprintln(input, r.chainCmd.KeyringPassword()) + if err != nil { + return "", err + } + opt = append(opt, step.Write(input.Bytes())) + } + + if err := r.run(ctx, runOptions{stdout: b}, opt...); err != nil { + if strings.Contains(err.Error(), "key not found") { + return "", errors.New("account doesn't have any balances") + } + + return "", err + } + + txResult, err := decodeTxResult(b) + if err != nil { + return "", err + } + + if txResult.Code > 0 { + return "", errors.Errorf("cannot send tokens (SDK code %d): %s", txResult.Code, txResult.RawLog) + } + + return txResult.TxHash, nil +} + +// WaitTx waits until a tx is successfully added to a block and can be queried. 
+func (r Runner) WaitTx(ctx context.Context, txHash string, retryDelay time.Duration, maxRetry int) error { + retry := 0 + + // retry querying the request + checkTx := func() error { + b := newBuffer() + if err := r.run(ctx, runOptions{stdout: b}, r.chainCmd.QueryTxCommand(txHash)); err != nil { + // filter not found error and check for max retry + if !strings.Contains(err.Error(), "not found") { + return backoff.Permanent(err) + } + retry++ + if retry == maxRetry { + return backoff.Permanent(errors.Errorf("can't retrieve tx %s", txHash)) + } + return err + } + + // parse tx and check code + txResult, err := decodeTxResult(b) + if err != nil { + return backoff.Permanent(err) + } + if txResult.Code != 0 { + return backoff.Permanent(errors.Errorf("tx %s failed: %s", txHash, txResult.RawLog)) + } + + return nil + } + return backoff.Retry(checkTx, backoff.WithContext(backoff.NewConstantBackOff(retryDelay), ctx)) +} + +// Export exports the state of the chain into the specified file. +func (r Runner) Export(ctx context.Context, exportedFile string) error { + // Make sure the path exists + dir := filepath.Dir(exportedFile) + if err := os.MkdirAll(dir, 0o755); err != nil { + return err + } + + stdout, stderr := newBuffer(), newBuffer() + if err := r.run(ctx, runOptions{stdout: stdout, stderr: stderr}, r.chainCmd.ExportCommand()); err != nil { + return err + } + + // Exported genesis is written on stderr from Cosmos-SDK v0.44.0 + var exportedState []byte + if stdout.Len() > 0 { + exportedState = stdout.Bytes() + } else { + exportedState = stderr.Bytes() + } + + // Save the new state + return os.WriteFile(exportedFile, exportedState, 0o600) +} + +// EventSelector is used to query events. +type EventSelector struct { + typ string + attr string + value string +} + +// NewEventSelector creates a new event selector. +func NewEventSelector(typ, addr, value string) EventSelector { + return EventSelector{typ, addr, value} +} + +// Event represents a TX event. 
+type Event struct { + Type string + Attributes []EventAttribute + Time time.Time +} + +// EventAttribute holds event's attributes. +type EventAttribute struct { + Key string + Value string +} + +// QueryTxByEvents queries tx events by event selectors. +func (r Runner) QueryTxByEvents( + ctx context.Context, + selectors ...EventSelector, +) ([]Event, error) { + if len(selectors) == 0 { + return nil, errors.New("event selector list should be greater than zero") + } + list := make([]string, len(selectors)) + for i, event := range selectors { + list[i] = fmt.Sprintf("%s.%s=%s", event.typ, event.attr, event.value) + } + query := strings.Join(list, "&") + return r.QueryTx(ctx, r.chainCmd.QueryTxEventsCommand(query)) +} + +// QueryTxByQuery queries tx events by event selectors. +func (r Runner) QueryTxByQuery( + ctx context.Context, + selectors ...EventSelector, +) ([]Event, error) { + if len(selectors) == 0 { + return nil, errors.New("event selector list should be greater than zero") + } + list := make([]string, len(selectors)) + for i, query := range selectors { + list[i] = fmt.Sprintf("%s.%s='%s'", query.typ, query.attr, query.value) + } + query := strings.Join(list, " AND ") + return r.QueryTx(ctx, r.chainCmd.QueryTxQueryCommand(query)) +} + +// QueryTx queries tx events/query selectors. +func (r Runner) QueryTx( + ctx context.Context, + option ...step.Option, +) ([]Event, error) { + // execute the command and parse the output. 
+ b := newBuffer() + if err := r.run(ctx, runOptions{stdout: b}, option...); err != nil { + return nil, err + } + + out := struct { + Txs []struct { + Logs []struct { + Events []struct { + Type string `json:"type"` + Attrs []struct { + Key string `json:"key"` + Value string `json:"value"` + } `json:"attributes"` + } `json:"events"` + } `json:"logs"` + TimeStamp string `json:"timestamp"` + } `json:"txs"` + }{} + + data, err := b.JSONEnsuredBytes() + if err != nil { + return nil, err + } + if err := json.Unmarshal(data, &out); err != nil { + return nil, err + } + + var events []Event + for _, tx := range out.Txs { + for _, log := range tx.Logs { + for _, e := range log.Events { + var attrs []EventAttribute + for _, attr := range e.Attrs { + attrs = append(attrs, EventAttribute{ + Key: attr.Key, + Value: attr.Value, + }) + } + + txTime, err := time.Parse(time.RFC3339, tx.TimeStamp) + if err != nil { + return nil, err + } + + events = append(events, Event{ + Type: e.Type, + Attributes: attrs, + Time: txTime, + }) + } + } + } + + return events, nil +} diff --git a/ignite/pkg/chaincmd/runner/runner.go b/ignite/pkg/chaincmd/runner/runner.go new file mode 100644 index 0000000..d0b5c4a --- /dev/null +++ b/ignite/pkg/chaincmd/runner/runner.go @@ -0,0 +1,270 @@ +// Package chaincmdrunner provides high level access to a blockchain's commands. +package chaincmdrunner + +import ( + "bytes" + "context" + "encoding/json" + "io" + "strings" + + "sigs.k8s.io/yaml" + + "github.com/ignite/cli/v29/ignite/pkg/chaincmd" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/truncatedbuffer" +) + +// Runner provides high level access to a blockchain's commands. +type Runner struct { + chainCmd chaincmd.ChainCmd + stdout, stderr io.Writer +} + +// Option configures Runner. +type Option func(r *Runner) + +// Stdout sets stdout for executed commands. 
+func Stdout(w io.Writer) Option { + return func(runner *Runner) { + runner.stdout = w + } +} + +// Stderr sets stderr for executed commands. +func Stderr(w io.Writer) Option { + return func(runner *Runner) { + runner.stderr = w + } +} + +// New creates a new Runner with cc and options. +func New(ctx context.Context, chainCmd chaincmd.ChainCmd, options ...Option) (Runner, error) { + runner := Runner{ + chainCmd: chainCmd, + stdout: io.Discard, + stderr: io.Discard, + } + + applyOptions(&runner, options) + + // auto detect the chain id and get it applied to chaincmd if auto + // detection is enabled. + if chainCmd.IsAutoChainIDDetectionEnabled() { + status, err := runner.Status(ctx) + if err != nil { + return Runner{}, err + } + + runner.chainCmd = runner.chainCmd.Copy(chaincmd.WithChainID(status.ChainID)) + } + + return runner, nil +} + +func applyOptions(r *Runner, options []Option) { + for _, apply := range options { + apply(r) + } +} + +// Copy makes a copy of runner by overwriting its options with given options. +func (r Runner) Copy(options ...Option) Runner { + applyOptions(&r, options) + + return r +} + +// Cmd returns underlying chain cmd. +func (r Runner) Cmd() chaincmd.ChainCmd { + return r.chainCmd +} + +type runOptions struct { + // wrappedStdErrMaxLen determines the maximum length of the wrapped error logs + // this option is used for long-running command to prevent the buffer containing stderr getting too big + // 0 can be used for no maximum length + wrappedStdErrMaxLen int + + // stdout and stderr used to collect a copy of command's outputs. + stdout, stderr io.Writer + + // stdin defines input for the command + stdin io.Reader +} + +// run executes a command. 
+func (r Runner) run(ctx context.Context, runOptions runOptions, stepOptions ...step.Option) error { + // we use a truncated buffer to prevent memory leak + // this is because app currently send logs to StdErr + // therefore if the app successfully starts, the written logs can become extensive + errb := truncatedbuffer.NewTruncatedBuffer(runOptions.wrappedStdErrMaxLen) + + stdout := r.stdout + if runOptions.stdout != nil { + stdout = io.MultiWriter(stdout, runOptions.stdout) + } + + stderr := r.stderr + if runOptions.stderr != nil { + stderr = io.MultiWriter(stderr, runOptions.stderr) + } + + stderr = io.MultiWriter(stderr, errb) + + runnerOptions := []cmdrunner.Option{ + cmdrunner.DefaultStdout(stdout), + cmdrunner.DefaultStderr(stderr), + } + + if runOptions.stdin != nil { + runnerOptions = append(runnerOptions, cmdrunner.DefaultStdin(runOptions.stdin)) + } + + err := cmdrunner. + New(runnerOptions...). + Run(ctx, step.New(stepOptions...)) + + return errors.Wrap(err, errb.GetBuffer().String()) +} + +func newBuffer() *buffer { + return &buffer{ + Buffer: new(bytes.Buffer), + } +} + +// buffer is a bytes.Buffer with additional features. +type buffer struct { + *bytes.Buffer +} + +// JSONEnsuredBytes ensures that encoding format for returned bytes is always +// JSON even if the written data is originally encoded in YAML. +// This method is purposely verbose to trim gibberish output. 
+func (b *buffer) JSONEnsuredBytes() ([]byte, error) { + bz := b.Bytes() + content := strings.TrimSpace(string(bz)) + + // Early detection - check first non-whitespace character + if len(content) > 0 { + firstChar := content[0] + + // Quick check for JSON format (starts with { or [) + if firstChar == '{' || firstChar == '[' { + // Attempt to validate and extract clean JSON + return cleanAndValidateJSON(bz) + } + + // Quick check for YAML format (common indicators) + if firstChar == '-' || strings.HasPrefix(content, "---") || + strings.Contains(content, ":\n") || strings.Contains(content, ": ") { + // Likely YAML, convert to JSON directly + var out any + if err := yaml.Unmarshal(bz, &out); err == nil { + return yaml.YAMLToJSON(bz) + } + } + } + + // If format wasn't immediately obvious, try the more thorough approach + return fallbackFormatDetection(bz) +} + +// cleanAndValidateJSON attempts to extract valid JSON from potentially messy output. +func cleanAndValidateJSON(bz []byte) ([]byte, error) { + // Find the first JSON opening character + startIndex := strings.IndexAny(string(bz), "{[") + if startIndex < 0 { + return bz, nil // No JSON structure found + } + + // Determine matching closing character + opening := bz[startIndex] + var closing byte + if opening == '{' { + closing = '}' + } else { + closing = ']' + } + + endIndex := findMatchingCloseBracket(bz[startIndex:], opening, closing) + if endIndex < 0 { + // no proper closing found, try last instance + endIndex = bytes.LastIndexByte(bz, closing) + if endIndex <= startIndex { + return bz[startIndex:], nil // Return from start to end if no closing found + } + } else { + endIndex += startIndex + } + + // validate JSON + jsonData := bz[startIndex : endIndex+1] + var jsonTest any + if err := json.Unmarshal(jsonData, &jsonTest); err == nil { + return jsonData, nil + } + + // if validation failed, return from start to end + return bz[startIndex:], nil +} + +// findMatchingCloseBracket returns the accounting for 
nested structures. +func findMatchingCloseBracket(data []byte, openChar, closeChar byte) int { + depth := 0 + for i, b := range data { + if b == openChar { + depth++ + } else if b == closeChar { + depth-- + if depth == 0 { + return i // Found matching closing bracket + } + } + } + return -1 // No matching bracket found +} + +// fallbackFormatDetection tries different approaches to detect and convert format. +func fallbackFormatDetection(bz []byte) ([]byte, error) { + // first try to find and extract JSON + startIndex := strings.IndexAny(string(bz), "{[") + if startIndex >= 0 { + result, err := cleanAndValidateJSON(bz) + if err == nil { + return result, nil + } + + // if extraction failed but we found a start, return from there + return bz[startIndex:], nil + } + + // fallback to yaml parsing + var out any + if err := yaml.Unmarshal(bz, &out); err == nil { + return yaml.YAMLToJSON(bz) + } + + // nothing worked, return original + return bz, nil +} + +type txResult struct { + Code int `json:"code"` + RawLog string `json:"raw_log"` + TxHash string `json:"txhash"` +} + +func decodeTxResult(b *buffer) (txResult, error) { + var r txResult + + data, err := b.JSONEnsuredBytes() + if err != nil { + return r, err + } + + return r, json.Unmarshal(data, &r) +} diff --git a/ignite/pkg/chaincmd/runner/runner_test.go b/ignite/pkg/chaincmd/runner/runner_test.go new file mode 100644 index 0000000..5fd6116 --- /dev/null +++ b/ignite/pkg/chaincmd/runner/runner_test.go @@ -0,0 +1,72 @@ +package chaincmdrunner + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestNewKV(t *testing.T) { + kv := NewKV("k", "v") + require.Equal(t, "k", kv.key) + require.Equal(t, "v", kv.value) +} + +func TestFindMatchingCloseBracket(t *testing.T) { + data := []byte(`{"a":{"b":1}} trailing`) + idx := findMatchingCloseBracket(data, '{', '}') + require.Equal(t, 12, idx) + + require.Equal(t, -1, findMatchingCloseBracket([]byte(`{"a":1`), '{', '}')) +} + +func 
TestCleanAndValidateJSON(t *testing.T) { + raw := []byte("logs...\n{\"code\":0,\"raw_log\":\"ok\",\"txhash\":\"ABC\"}\nmore") + got, err := cleanAndValidateJSON(raw) + require.NoError(t, err) + require.Equal(t, `{"code":0,"raw_log":"ok","txhash":"ABC"}`, string(got)) +} + +func TestFallbackFormatDetectionConvertsYAML(t *testing.T) { + raw := []byte("code: 0\nraw_log: ok\ntxhash: ABC\n") + got, err := fallbackFormatDetection(raw) + require.NoError(t, err) + require.JSONEq(t, `{"code":0,"raw_log":"ok","txhash":"ABC"}`, string(got)) +} + +func TestJSONEnsuredBytes(t *testing.T) { + b := newBuffer() + _, err := b.WriteString("noise\n{\"k\":\"v\"}\n") + require.NoError(t, err) + + got, err := b.JSONEnsuredBytes() + require.NoError(t, err) + require.JSONEq(t, `{"k":"v"}`, string(got)) +} + +func TestDecodeTxResult(t *testing.T) { + b := newBuffer() + _, err := b.WriteString("code: 0\nraw_log: ok\ntxhash: HASH\n") + require.NoError(t, err) + + got, err := decodeTxResult(b) + require.NoError(t, err) + require.Equal(t, 0, got.Code) + require.Equal(t, "ok", got.RawLog) + require.Equal(t, "HASH", got.TxHash) +} + +func TestQueryTxByEventsRequiresSelectors(t *testing.T) { + r := Runner{} + events, err := r.QueryTxByEvents(context.Background()) + require.Error(t, err) + require.Nil(t, events) +} + +func TestQueryTxByQueryRequiresSelectors(t *testing.T) { + r := Runner{} + events, err := r.QueryTxByQuery(context.Background()) + require.Error(t, err) + require.Nil(t, events) +} diff --git a/ignite/pkg/chaincmd/runner/simulate.go b/ignite/pkg/chaincmd/runner/simulate.go new file mode 100644 index 0000000..645110b --- /dev/null +++ b/ignite/pkg/chaincmd/runner/simulate.go @@ -0,0 +1,39 @@ +package chaincmdrunner + +import ( + "context" + "os" + + "github.com/cosmos/cosmos-sdk/types/simulation" + + "github.com/ignite/cli/v29/ignite/pkg/chaincmd" +) + +// Simulation run the chain simulation. 
+func (r Runner) Simulation( + ctx context.Context, + appPath, simName string, + enabled bool, + config simulation.Config, + genesisTime int64, +) error { + return r.run(ctx, runOptions{stdout: os.Stdout}, + chaincmd.SimulationCommand( + appPath, + simName, + chaincmd.SimappWithGenesis(config.GenesisFile), + chaincmd.SimappWithParams(config.ParamsFile), + chaincmd.SimappWithExportParamsPath(config.ExportParamsPath), + chaincmd.SimappWithExportParamsHeight(config.ExportParamsHeight), + chaincmd.SimappWithExportStatePath(config.ExportStatePath), + chaincmd.SimappWithExportStatsPath(config.ExportStatsPath), + chaincmd.SimappWithSeed(config.Seed), + chaincmd.SimappWithInitialBlockHeight(config.InitialBlockHeight), + chaincmd.SimappWithNumBlocks(config.NumBlocks), + chaincmd.SimappWithBlockSize(config.BlockSize), + chaincmd.SimappWithLean(config.Lean), + chaincmd.SimappWithCommit(config.Commit), + chaincmd.SimappWithEnable(enabled), + chaincmd.SimappWithGenesisTime(genesisTime), + )) +} diff --git a/ignite/pkg/chaincmd/simulate.go b/ignite/pkg/chaincmd/simulate.go new file mode 100644 index 0000000..f8ca144 --- /dev/null +++ b/ignite/pkg/chaincmd/simulate.go @@ -0,0 +1,185 @@ +package chaincmd + +import ( + "fmt" + "path/filepath" + "strconv" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/gocmd" +) + +const ( + optionSimappGenesis = "-Genesis" + optionSimappParams = "-Params" + optionSimappExportParamsPath = "-ExportParamsPath" + optionSimappExportParamsHeight = "-ExportParamsHeight" + optionSimappExportStatePath = "-ExportStatePath" + optionSimappExportStatsPath = "-ExportStatsPath" + optionSimappSeed = "-Seed" + optionSimappInitialBlockHeight = "-InitialBlockHeight" + optionSimappNumBlocks = "-NumBlocks" + optionSimappBlockSize = "-BlockSize" + optionSimappLean = "-Lean" + optionSimappCommit = "-Commit" + optionSimappEnabled = "-Enabled" + optionSimappGenesisTime = "-GenesisTime" + + commandGoTest = "test" + 
optionGoBenchmem = "-benchmem" + optionGoSimsTags = "-tags='sims'" +) + +// SimappOption for the SimulateCommand. +type SimappOption func([]string) []string + +// SimappWithGenesis provides genesis option for the simapp command. +func SimappWithGenesis(genesis string) SimappOption { + return func(command []string) []string { + if len(genesis) > 0 { + return append(command, optionSimappGenesis, genesis) + } + return command + } +} + +// SimappWithParams provides params option for the simapp command. +func SimappWithParams(params string) SimappOption { + return func(command []string) []string { + if len(params) > 0 { + return append(command, optionSimappParams, params) + } + return command + } +} + +// SimappWithExportParamsPath provides exportParamsPath option for the simapp command. +func SimappWithExportParamsPath(exportParamsPath string) SimappOption { + return func(command []string) []string { + if len(exportParamsPath) > 0 { + return append(command, optionSimappExportParamsPath, exportParamsPath) + } + return command + } +} + +// SimappWithExportParamsHeight provides exportParamsHeight option for the simapp command. +func SimappWithExportParamsHeight(exportParamsHeight int) SimappOption { + return func(command []string) []string { + if exportParamsHeight > 0 { + return append( + command, + optionSimappExportParamsHeight, + strconv.Itoa(exportParamsHeight), + ) + } + return command + } +} + +// SimappWithExportStatePath provides exportStatePath option for the simapp command. +func SimappWithExportStatePath(exportStatePath string) SimappOption { + return func(command []string) []string { + if len(exportStatePath) > 0 { + return append(command, optionSimappExportStatePath, exportStatePath) + } + return command + } +} + +// SimappWithExportStatsPath provides exportStatsPath option for the simapp command. 
+func SimappWithExportStatsPath(exportStatsPath string) SimappOption { + return func(command []string) []string { + if len(exportStatsPath) > 0 { + return append(command, optionSimappExportStatsPath, exportStatsPath) + } + return command + } +} + +// SimappWithSeed provides seed option for the simapp command. +func SimappWithSeed(seed int64) SimappOption { + return func(command []string) []string { + return append(command, optionSimappSeed, strconv.FormatInt(seed, 10)) + } +} + +// SimappWithInitialBlockHeight provides initialBlockHeight option for the simapp command. +func SimappWithInitialBlockHeight(initialBlockHeight int) SimappOption { + return func(command []string) []string { + return append(command, optionSimappBlockSize, strconv.Itoa(initialBlockHeight)) + } +} + +// SimappWithNumBlocks provides numBlocks option for the simapp command. +func SimappWithNumBlocks(numBlocks int) SimappOption { + return func(command []string) []string { + return append(command, optionSimappNumBlocks, strconv.Itoa(numBlocks)) + } +} + +// SimappWithBlockSize provides blockSize option for the simapp command. +func SimappWithBlockSize(blockSize int) SimappOption { + return func(command []string) []string { + return append(command, optionSimappBlockSize, strconv.Itoa(blockSize)) + } +} + +// SimappWithLean provides lean option for the simapp command. +func SimappWithLean(lean bool) SimappOption { + return func(command []string) []string { + if lean { + return append(command, optionSimappLean) + } + return command + } +} + +// SimappWithCommit provides commit option for the simapp command. +func SimappWithCommit(commit bool) SimappOption { + return func(command []string) []string { + if commit { + return append(command, optionSimappCommit) + } + return command + } +} + +// SimappWithEnable provides enable option for the simapp command. 
+func SimappWithEnable(enable bool) SimappOption { + return func(command []string) []string { + if enable { + return append(command, optionSimappEnabled) + } + return command + } +} + +// SimappWithGenesisTime provides genesisTime option for the simapp command. +func SimappWithGenesisTime(genesisTime int64) SimappOption { + return func(command []string) []string { + return append(command, optionSimappGenesisTime, strconv.Itoa(int(genesisTime))) + } +} + +// SimulationCommand returns the cli command for simapp tests. +// simName must be a test defined within the application (defaults to TestFullAppSimulation). +func SimulationCommand(appPath string, simName string, options ...SimappOption) step.Option { + if simName == "" { + simName = "TestFullAppSimulation" + } + + command := []string{ + commandGoTest, + optionGoBenchmem, + fmt.Sprintf("-run=^%s$", simName), + optionGoSimsTags, + filepath.Join(appPath, "app"), + } + + // Apply the options provided by the user + for _, applyOption := range options { + command = applyOption(command) + } + return step.Exec(gocmd.Name(), command...) +} diff --git a/ignite/pkg/chainregistry/asset.go b/ignite/pkg/chainregistry/asset.go new file mode 100644 index 0000000..71548dc --- /dev/null +++ b/ignite/pkg/chainregistry/asset.go @@ -0,0 +1,52 @@ +package chainregistry + +import ( + "encoding/json" + "os" +) + +// AssetList represents the assetlist.json file from the chain registry. 
+// https://raw.githubusercontent.com/cosmos/chain-registry/master/assetlist.schema.json +// https://github.com/cosmos/chain-registry?tab=readme-ov-file#assetlists +type AssetList struct { + ChainName string `json:"chain_name"` + Assets []Asset `json:"assets"` +} + +type Asset struct { + Description string `json:"description"` + DenomUnits []DenomUnit `json:"denom_units"` + Base string `json:"base"` + Name string `json:"name"` + Display string `json:"display"` + Symbol string `json:"symbol"` + LogoURIs LogoURIs `json:"logo_URIs"` + CoingeckoID string `json:"coingecko_id,omitempty"` + Socials Socials `json:"socials,omitempty"` + TypeAsset string `json:"type_asset"` +} + +type DenomUnit struct { + Denom string `json:"denom"` + Exponent int `json:"exponent"` +} + +type LogoURIs struct { + Png string `json:"png"` + Svg string `json:"svg"` +} + +type Socials struct { + Website string `json:"website"` + Twitter string `json:"twitter"` +} + +// SaveJSON saves the assetlist.json to the given out directory. +func (c AssetList) SaveJSON(out string) error { + bz, err := json.MarshalIndent(c, "", " ") + if err != nil { + return err + } + + return os.WriteFile(out, bz, 0o600) +} diff --git a/ignite/pkg/chainregistry/chain.go b/ignite/pkg/chainregistry/chain.go new file mode 100644 index 0000000..82c14b2 --- /dev/null +++ b/ignite/pkg/chainregistry/chain.go @@ -0,0 +1,98 @@ +package chainregistry + +import ( + "encoding/json" + "os" +) + +// Chain represents the chain.json file from the chain registry. 
+// https://raw.githubusercontent.com/cosmos/chain-registry/master/chain.schema.json +type Chain struct { + ChainName string `json:"chain_name"` + Status ChainStatus `json:"status"` + NetworkType NetworkType `json:"network_type"` + Website string `json:"website"` + PrettyName string `json:"pretty_name"` + ChainType ChainType `json:"chain_type"` + ChainID string `json:"chain_id"` + Bech32Prefix string `json:"bech32_prefix"` + DaemonName string `json:"daemon_name"` + NodeHome string `json:"node_home"` + KeyAlgos []KeyAlgos `json:"key_algos"` + Slip44 uint32 `json:"slip44"` + Fees Fees `json:"fees"` + Staking Staking `json:"staking"` + Codebase Codebase `json:"codebase"` + Description string `json:"description"` + APIs APIs `json:"apis"` +} + +type Staking struct { + StakingTokens []StakingToken `json:"staking_tokens"` +} + +type StakingToken struct { + Denom string `json:"denom"` +} + +type Codebase struct { + GitRepo string `json:"git_repo"` + Genesis CodebaseGenesis `json:"genesis"` + RecommendedVersion string `json:"recommended_version"` + CompatibleVersions []string `json:"compatible_versions"` + Consensus CodebaseInfo `json:"consensus"` + Sdk CodebaseInfo `json:"sdk"` + Ibc CodebaseInfo `json:"ibc,omitempty"` + Cosmwasm CodebaseInfoEnabled `json:"cosmwasm,omitempty"` +} + +type CodebaseGenesis struct { + GenesisURL string `json:"genesis_url"` +} + +type CodebaseInfo struct { + Type string `json:"type"` + Version string `json:"version"` + Repo string `json:"repo,omitempty"` + Tag string `json:"tag,omitempty"` +} + +type CodebaseInfoEnabled struct { + Version string `json:"version,omitempty"` + Repo string `json:"repo,omitempty"` + Tag string `json:"tag,omitempty"` + Enabled bool `json:"enabled"` +} + +type Fees struct { + FeeTokens []FeeToken `json:"fee_tokens"` +} + +type FeeToken struct { + Denom string `json:"denom"` + FixedMinGasPrice float64 `json:"fixed_min_gas_price"` + LowGasPrice float64 `json:"low_gas_price"` + AverageGasPrice float64 
`json:"average_gas_price"` + HighGasPrice float64 `json:"high_gas_price"` +} + +type APIs struct { + RPC []APIProvider `json:"rpc"` + Rest []APIProvider `json:"rest"` + Grpc []APIProvider `json:"grpc"` +} + +type APIProvider struct { + Address string `json:"address"` + Provider string `json:"provider"` +} + +// SaveJSON saves the chainJSON to the given out directory. +func (c Chain) SaveJSON(out string) error { + bz, err := json.MarshalIndent(c, "", " ") + if err != nil { + return err + } + + return os.WriteFile(out, bz, 0o600) +} diff --git a/ignite/pkg/chainregistry/chainregistry_test.go b/ignite/pkg/chainregistry/chainregistry_test.go new file mode 100644 index 0000000..7c36364 --- /dev/null +++ b/ignite/pkg/chainregistry/chainregistry_test.go @@ -0,0 +1,51 @@ +package chainregistry + +import ( + "encoding/json" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestChainSaveJSON(t *testing.T) { + path := filepath.Join(t.TempDir(), "chain.json") + in := Chain{ + ChainName: "ignite", + ChainID: "ignite-1", + } + + require.NoError(t, in.SaveJSON(path)) + + raw, err := os.ReadFile(path) + require.NoError(t, err) + var got Chain + require.NoError(t, json.Unmarshal(raw, &got)) + require.Equal(t, in.ChainName, got.ChainName) + require.Equal(t, in.ChainID, got.ChainID) +} + +func TestAssetListSaveJSON(t *testing.T) { + path := filepath.Join(t.TempDir(), "assetlist.json") + in := AssetList{ + ChainName: "ignite", + Assets: []Asset{ + { + Name: "Ignite", + Base: "uignite", + Symbol: "IGNT", + }, + }, + } + + require.NoError(t, in.SaveJSON(path)) + + raw, err := os.ReadFile(path) + require.NoError(t, err) + var got AssetList + require.NoError(t, json.Unmarshal(raw, &got)) + require.Equal(t, in.ChainName, got.ChainName) + require.Len(t, got.Assets, 1) + require.Equal(t, "IGNT", got.Assets[0].Symbol) +} diff --git a/ignite/pkg/chainregistry/consts.go b/ignite/pkg/chainregistry/consts.go new file mode 100644 index 0000000..1b96321 --- 
/dev/null +++ b/ignite/pkg/chainregistry/consts.go @@ -0,0 +1,50 @@ +package chainregistry + +type NetworkType string + +const ( + // NetworkTypeMainnet is the mainnet network type. + NetworkTypeMainnet NetworkType = "mainnet" + + // NetworkTypeTestnet is the testnet network type. + NetworkTypeTestnet NetworkType = "testnet" + + // NetworkTypeDevnet is the devnet network type. + NetworkTypeDevnet NetworkType = "devnet" +) + +type ChainType string + +const ( + // ChainTypeCosmos is the cosmos chain type. + ChainTypeCosmos ChainType = "cosmos" + + // ChainTypeEip155 is the eip155 chain type. + ChainTypeEip155 ChainType = "eip155" +) + +type ChainStatus string + +const ( + // ChainStatusActive is the live chain status. + ChainStatusActive ChainStatus = "live" + + // ChainStatusUpcoming is the upcoming chain status. + ChainStatusUpcoming ChainStatus = "upcoming" + + // ChainStatusKilled is the inactive chain status. + ChainStatusKilled ChainStatus = "killed" +) + +type KeyAlgos string + +const ( + // KeyAlgoSecp256k1 is the secp256k1 key algorithm. + KeyAlgoSecp256k1 KeyAlgos = "secp256k1" + + // KeyAlgosEthSecp256k1 is the secp256k1 key algorithm with ethereum compatibility. + KeyAlgosEthSecp256k1 KeyAlgos = "ethsecp256k1" + + // KeyAlgoEd25519 is the ed25519 key algorithm. + KeyAlgoEd25519 KeyAlgos = "ed25519" +) diff --git a/ignite/pkg/chainregistry/doc.go b/ignite/pkg/chainregistry/doc.go new file mode 100644 index 0000000..ad39d0f --- /dev/null +++ b/ignite/pkg/chainregistry/doc.go @@ -0,0 +1,3 @@ +// package chainregistry is a package that contains the chain.json and assetlist.json structs from the chain registry. +// Useful when parsing or creating chain.json and assetlist.json files. 
+package chainregistry diff --git a/ignite/pkg/checksum/checksum.go b/ignite/pkg/checksum/checksum.go new file mode 100644 index 0000000..1e62f4b --- /dev/null +++ b/ignite/pkg/checksum/checksum.go @@ -0,0 +1,76 @@ +package checksum + +import ( + "bytes" + "crypto/sha256" + "fmt" + "io" + "os" + "path/filepath" + + "github.com/ignite/cli/v29/ignite/pkg/xexec" +) + +// Sum reads files from dirPath, calculates sha256 for each file and creates a new checksum +// file for them in outPath. +func Sum(dirPath, outPath string) error { + var b bytes.Buffer + + files, err := os.ReadDir(dirPath) + if err != nil { + return err + } + + for _, info := range files { + path := filepath.Join(dirPath, info.Name()) + f, err := os.Open(path) + if err != nil { + return err + } + defer f.Close() + + h := sha256.New() + if _, err := io.Copy(h, f); err != nil { + return err + } + + // Note that checksum entry has two spaces as separator to follow + // FIPS-180-2 regarding the character prefix for text file types. + // This is required for tools like sha256sum with a strict verification. + if _, err := b.WriteString(fmt.Sprintf("%x %s\n", h.Sum(nil), info.Name())); err != nil { + return err + } + } + + return os.WriteFile(outPath, b.Bytes(), 0o600) +} + +// Binary returns SHA256 hash of executable file, file is searched by name in PATH. +func Binary(binaryName string) (string, error) { + // get binary path + binaryPath, err := xexec.ResolveAbsPath(binaryName) + if err != nil { + return "", err + } + f, err := os.Open(binaryPath) + if err != nil { + return "", err + } + defer f.Close() + + h := sha256.New() + if _, err := io.Copy(h, f); err != nil { + return "", err + } + + return fmt.Sprintf("%x", h.Sum(nil)), nil +} + +// Strings concatenates all inputs and returns SHA256 hash of them. 
+func Strings(inputs ...string) string { + h := sha256.New() + for _, input := range inputs { + h.Write([]byte(input)) + } + return fmt.Sprintf("%x", h.Sum(nil)) +} diff --git a/ignite/pkg/checksum/checksum_test.go b/ignite/pkg/checksum/checksum_test.go new file mode 100644 index 0000000..b91b840 --- /dev/null +++ b/ignite/pkg/checksum/checksum_test.go @@ -0,0 +1,48 @@ +package checksum + +import ( + "crypto/sha256" + "fmt" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestStrings(t *testing.T) { + h := sha256.Sum256([]byte("abc")) + require.Equal(t, fmt.Sprintf("%x", h[:]), Strings("a", "b", "c")) +} + +func TestSum(t *testing.T) { + dir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(dir, "a.txt"), []byte("alpha"), 0o600)) + require.NoError(t, os.WriteFile(filepath.Join(dir, "b.txt"), []byte("beta"), 0o600)) + + out := filepath.Join(t.TempDir(), "checksums.txt") + require.NoError(t, Sum(dir, out)) + + content, err := os.ReadFile(out) + require.NoError(t, err) + text := string(content) + require.Contains(t, text, " a.txt\n") + require.Contains(t, text, " b.txt\n") +} + +func TestBinary(t *testing.T) { + bin := filepath.Join(t.TempDir(), "fake-bin") + data := []byte("#!/bin/sh\necho test\n") + require.NoError(t, os.WriteFile(bin, data, 0o700)) + + want := sha256.Sum256(data) + got, err := Binary(bin) + require.NoError(t, err) + require.Equal(t, fmt.Sprintf("%x", want[:]), got) +} + +func TestBinaryReturnsErrorForMissingFile(t *testing.T) { + _, err := Binary(strings.TrimSpace(filepath.Join(t.TempDir(), "missing-bin"))) + require.Error(t, err) +} diff --git a/ignite/pkg/clictx/clictx.go b/ignite/pkg/clictx/clictx.go new file mode 100644 index 0000000..8d786dd --- /dev/null +++ b/ignite/pkg/clictx/clictx.go @@ -0,0 +1,34 @@ +package clictx + +import ( + "context" + "os" + "os/signal" +) + +// From creates a new context from ctx that is canceled when an exit signal received. 
+func From(ctx context.Context) context.Context { + var ( + ctxend, cancel = context.WithCancel(ctx) + quit = make(chan os.Signal, 1) + ) + signal.Notify(quit, os.Interrupt) + go func() { + <-quit + cancel() + }() + return ctxend +} + +// Do runs fn and waits for its result unless ctx is canceled. +// Returns fn result or canceled context error. +func Do(ctx context.Context, fn func() error) error { + errc := make(chan error) + go func() { errc <- fn() }() + select { + case err := <-errc: + return err + case <-ctx.Done(): + return ctx.Err() + } +} diff --git a/ignite/pkg/clictx/clictx_test.go b/ignite/pkg/clictx/clictx_test.go new file mode 100644 index 0000000..720b983 --- /dev/null +++ b/ignite/pkg/clictx/clictx_test.go @@ -0,0 +1,55 @@ +package clictx_test + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/clictx" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func TestDo(t *testing.T) { + ctxCanceled, cancel := context.WithCancel(context.Background()) + cancel() + tests := []struct { + name string + ctx context.Context + f func() error + expectedErr string + }{ + { + name: "f returns nil", + ctx: context.Background(), + f: func() error { return nil }, + }, + { + name: "f returns an error", + ctx: context.Background(), + f: func() error { return errors.New("oups") }, + expectedErr: "oups", + }, + { + name: "ctx is canceled", + ctx: ctxCanceled, + f: func() error { + time.Sleep(time.Second) + return nil + }, + expectedErr: context.Canceled.Error(), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := clictx.Do(tt.ctx, tt.f) + + if tt.expectedErr != "" { + require.EqualError(t, err, tt.expectedErr) + return + } + require.NoError(t, err) + }) + } +} diff --git a/ignite/pkg/clidoc/struct.go b/ignite/pkg/clidoc/struct.go new file mode 100644 index 0000000..890393c --- /dev/null +++ b/ignite/pkg/clidoc/struct.go @@ -0,0 +1,135 @@ +package clidoc + 
+import ( + "fmt" + "reflect" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +const listSuffix = "list" + +type ( + // Docs represents a slice of Doc. + Docs []Doc + // Doc represents the struct documentation with tag comments. + Doc struct { + Key string + Type string + Value Docs + Comment string + } +) + +// String converts Docs to a string. +func (d Docs) String() string { + var sb strings.Builder + // Initial call with a negative level to avoid unwanted dash at the top level + d.writeString(&sb, -1) + return strings.TrimSpace(sb.String()) +} + +// writeString appends the contents of Docs to sb's buffer at level. +func (d Docs) writeString(sb *strings.Builder, level int) { + indent := strings.Repeat(" ", level+1) // Two spaces per YAML indentation standard + for _, doc := range d { + sb.WriteString(indent) + switch doc.Type { + case "": + sb.WriteString(fmt.Sprintf("%s: # %s\n", doc.Key, doc.Comment)) + default: + sb.WriteString(fmt.Sprintf("%s: (%s) # %s\n", doc.Key, doc.Type, doc.Comment)) + } + + if len(doc.Value) > 0 { + doc.Value.writeString(sb, level+1) + } + } +} + +// GenDoc to generate documentation from a struct. +func GenDoc(v interface{}) (fields Docs, err error) { + t := reflect.TypeOf(v) + if t.Kind() != reflect.Struct && t.Kind() != reflect.Ptr { + return fields, nil + } + for i := 0; i < t.NumField(); i++ { + var ( + field = t.Field(i) + yaml = field.Tag.Get("yaml") + doc = field.Tag.Get("doc") + ) + + tags := strings.Split(yaml, ",") + if len(tags) == 0 { + return fields, errors.Errorf("no tags found in struct field %s", field.Name) + } + name := tags[0] + if name == "" { + name = strings.ToLower(field.Name) + } + if len(tags) > 1 && strings.Contains(tags[1], "inline") { + elemFields, err := GenDoc(reflect.New(field.Type).Elem().Interface()) + if err != nil { + return nil, err + } + fields = append(fields, elemFields...) 
+ continue + } + + var ( + elemFields Docs + elemType string + ) + switch field.Type.Kind() { //nolint + case reflect.Struct: + elemType = field.Type.Kind().String() + elemFields, err = GenDoc(reflect.New(field.Type).Elem().Interface()) + if err != nil { + return nil, err + } + case reflect.Ptr: + elemType = field.Type.Elem().Kind().String() + elemFields, err = GenDoc(reflect.New(field.Type.Elem()).Elem().Interface()) + if err != nil { + return nil, err + } + case reflect.Slice: + elemType = listName(field.Type.Elem().Kind().String()) + elemFields, err = GenDoc(reflect.New(field.Type.Elem()).Elem().Interface()) + if err != nil { + return nil, err + } + default: + elemType = field.Type.Kind().String() + } + fields = append(fields, Doc{ + Key: name, + Comment: doc, + Value: elemFields, + Type: mapTypes(elemType), + }) + } + + return fields, nil +} + +func listName(name string) string { + return fmt.Sprintf("%s %s", name, listSuffix) +} + +func mapTypes(doc string) string { + docTypes := map[string]string{ + "struct": "", + "map": "key/value", + "slice": listSuffix, + listName("map"): listSuffix, + listName("slice"): listSuffix, + listName("struct"): listSuffix, + } + if docType, ok := docTypes[doc]; ok { + return docType + } + return doc +} diff --git a/ignite/pkg/clidoc/struct_test.go b/ignite/pkg/clidoc/struct_test.go new file mode 100644 index 0000000..084a9a7 --- /dev/null +++ b/ignite/pkg/clidoc/struct_test.go @@ -0,0 +1,221 @@ +package clidoc + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/require" +) + +type ( + build struct { + Main string `yaml:"main,omitempty" doc:"doc of main"` + Binary string `yaml:"binary,omitempty" doc:""` + LDFlags []string `yaml:"ldflags,omitempty"` + Proto proto `yaml:"proto" doc:"doc of proto"` + PtrProto *proto `yaml:"ptr_proto" doc:"doc of pointer proto"` + Protos []proto `yaml:"protos" doc:"doc of protos"` + } + proto struct { + Path string `yaml:"path" doc:"path of proto file"` + ThirdPartyPaths []string 
`yaml:"third_party_paths" doc:"doc of third party paths"` + } +) + +func TestGenDoc(t *testing.T) { + tests := []struct { + name string + v interface{} + want Docs + err error + }{ + { + name: "build struct", + v: build{}, + want: Docs{ + { + Key: "main", + Comment: "doc of main", + Type: "string", + }, + { + Key: "binary", + Type: "string", + }, + { + Key: "ldflags", + Type: listName("string"), + }, + { + Key: "proto", + Value: Docs{ + { + Key: "path", + Comment: "path of proto file", + Type: "string", + }, + { + Key: "third_party_paths", + Comment: "doc of third party paths", + Type: listName("string"), + }, + }, + Comment: "doc of proto", + }, + { + Key: "ptr_proto", + Value: Docs{ + { + Key: "path", + Comment: "path of proto file", + Type: "string", + }, + { + Key: "third_party_paths", + Comment: "doc of third party paths", + Type: listName("string"), + }, + }, + Comment: "doc of pointer proto", + }, + { + Key: "protos", + Type: "list", + Value: Docs{ + { + Key: "path", + Comment: "path of proto file", + Type: "string", + }, + { + Key: "third_party_paths", + Comment: "doc of third party paths", + Type: listName("string"), + }, + }, + Comment: "doc of protos", + }, + }, + }, + { + name: "proto struct", + v: proto{}, + want: Docs{ + { + Key: "path", + Comment: "path of proto file", + Type: "string", + }, + { + Key: "third_party_paths", + Comment: "doc of third party paths", + Type: listName("string"), + }, + }, + }, + { + name: "Invalid struct", + v: []map[string]interface{}{}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := GenDoc(tt.v) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestDocs_String(t *testing.T) { + tests := []struct { + name string + d Docs + want string + }{ + { + name: "many entries", + d: Docs{ + { + Key: "main", + Comment: "doc of main", + }, + { + Key: "binary", 
+ }, + { + Key: "ldflags [array]", + }, + { + Key: "proto", + Value: Docs{ + { + Key: "path", + Comment: "path of proto file", + }, + { + Key: "third_party_paths [array]", + Comment: "doc of third party paths", + }, + }, + Comment: "doc of proto", + }, + { + Key: "protos [array]", + Value: Docs{ + { + Key: "path", + Comment: "path of proto file", + }, + { + Key: "third_party_paths [array]", + Comment: "doc of third party paths", + }, + }, + Comment: "doc of protos", + }, + }, + want: ` +main: # doc of main +binary: # +ldflags [array]: # +proto: # doc of proto + path: # path of proto file + third_party_paths [array]: # doc of third party paths +protos [array]: # doc of protos + path: # path of proto file + third_party_paths [array]: # doc of third party paths`, + }, + { + name: "no entries", + d: Docs{}, + }, + { + name: "two entries", + d: Docs{ + { + Key: "path", + Comment: "path of proto file", + }, + { + Key: "third_party_paths [array]", + Comment: "doc of third party paths", + }, + }, + want: ` +path: # path of proto file +third_party_paths [array]: # doc of third party paths`, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := tt.d.String() + require.Equal(t, strings.TrimSpace(tt.want), strings.TrimSpace(got)) + }) + } +} diff --git a/ignite/pkg/cliui/bubbleconfirm/confirm.go b/ignite/pkg/cliui/bubbleconfirm/confirm.go new file mode 100644 index 0000000..72b90d1 --- /dev/null +++ b/ignite/pkg/cliui/bubbleconfirm/confirm.go @@ -0,0 +1,111 @@ +package bubbleconfirm + +import ( + "fmt" + + tea "github.com/charmbracelet/bubbletea" + "github.com/charmbracelet/lipgloss" +) + +// confirmation result values. +const ( + Undecided = iota + Yes + No +) + +var ( + // styles for the confirmation dialog. 
+ questionStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("230")) + cursorStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("212")) + yesStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("42")) + noStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("9")) +) + +// Model represents the bubbletea model for a confirmation prompt. +type Model struct { + Question string + cursor int + choice int + done bool +} + +// NewModel creates a new confirmation model with the given question. +func NewModel(question string) Model { + return Model{ + Question: question, + cursor: 0, // 0 = yes, 1 = no + choice: Undecided, + } +} + +// Init initializes the model. +func (m Model) Init() tea.Cmd { + return nil +} + +// Update handles messages and updates the model. +func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + switch msg := msg.(type) { //nolint:gocritic // more readable than if-else + case tea.KeyMsg: + switch msg.String() { + case "ctrl+c", "q", "esc": + m.done = true + m.choice = No + return m, tea.Quit + case "left", "h": + if m.cursor > 0 { + m.cursor-- + } + case "right", "l": + if m.cursor < 1 { + m.cursor++ + } + case "enter", " ": + // set choice based on cursor position + m.done = true + if m.cursor == 0 { + m.choice = Yes + } else { + m.choice = No + } + return m, tea.Quit + case "y", "Y": + m.done = true + m.choice = Yes + return m, tea.Quit + case "n", "N": + m.done = true + m.choice = No + return m, tea.Quit + } + } + return m, nil +} + +// View renders the confirmation prompt. 
+func (m Model) View() string { + if m.done { + return "" + } + + question := questionStyle.Render(m.Question) + yes := "Yes" + no := "No" + + // apply styles based on cursor position + if m.cursor == 0 { + yes = cursorStyle.Render("[") + yesStyle.Render(yes) + cursorStyle.Render("]") + no = "[ " + no + " ]" + } else { + yes = "[ " + yes + " ]" + no = cursorStyle.Render("[") + noStyle.Render(no) + cursorStyle.Render("]") + } + + return fmt.Sprintf("%s\n%s %s\n", question, yes, no) +} + +// Choice returns the selected choice (Yes, No, or Undecided). +func (m Model) Choice() int { + return m.choice +} diff --git a/ignite/pkg/cliui/bubbleconfirm/confirm_test.go b/ignite/pkg/cliui/bubbleconfirm/confirm_test.go new file mode 100644 index 0000000..4792757 --- /dev/null +++ b/ignite/pkg/cliui/bubbleconfirm/confirm_test.go @@ -0,0 +1,119 @@ +package bubbleconfirm + +import ( + "testing" + + tea "github.com/charmbracelet/bubbletea" + "github.com/spf13/pflag" + "github.com/stretchr/testify/require" +) + +func TestNewModel(t *testing.T) { + m := NewModel("Continue?") + + require.Equal(t, "Continue?", m.Question) + require.Equal(t, 0, m.cursor) + require.Equal(t, Undecided, m.Choice()) +} + +func TestModelUpdateNavigationAndSelect(t *testing.T) { + m := NewModel("Question") + + next, _ := m.Update(tea.KeyMsg{Type: tea.KeyRight}) + m = next.(Model) + require.Equal(t, 1, m.cursor) + + next, cmd := m.Update(tea.KeyMsg{Type: tea.KeyEnter}) + m = next.(Model) + require.Equal(t, No, m.Choice()) + require.NotNil(t, cmd) +} + +func TestModelUpdateDirectYesNoChoices(t *testing.T) { + m := NewModel("Question") + + next, cmd := m.Update(tea.KeyMsg{Type: tea.KeyRunes, Runes: []rune{'y'}}) + m = next.(Model) + require.Equal(t, Yes, m.Choice()) + require.NotNil(t, cmd) + + m = NewModel("Question") + next, cmd = m.Update(tea.KeyMsg{Type: tea.KeyRunes, Runes: []rune{'n'}}) + m = next.(Model) + require.Equal(t, No, m.Choice()) + require.NotNil(t, cmd) +} + +func TestNewQuestionOptions(t 
*testing.T) { + var answer string + + q := NewQuestion( + "Name", + &answer, + DefaultAnswer("alice"), + Required(), + HideAnswer(), + GetConfirmation(), + ) + + require.Equal(t, "Name", q.question) + require.Equal(t, "alice", q.defaultAnswer) + require.True(t, q.required) + require.True(t, q.hidden) + require.True(t, q.shouldConfirm) + require.Equal(t, &answer, q.answer) +} + +func TestInputModelUpdateRequiredValidation(t *testing.T) { + m := inputModel{ + Question: "Name", + Required: true, + } + + next, _ := m.Update(tea.KeyMsg{Type: tea.KeyEnter}) + m = next.(inputModel) + + require.False(t, m.done) + require.Equal(t, "this information is required", m.Error) +} + +func TestInputModelUpdateTypingAndBackspace(t *testing.T) { + m := inputModel{ + Question: "Name", + } + + next, _ := m.Update(tea.KeyMsg{Type: tea.KeyRunes, Runes: []rune{'a'}}) + m = next.(inputModel) + require.Equal(t, "a", m.Value) + require.Equal(t, 1, m.cursorPos) + + next, _ = m.Update(tea.KeyMsg{Type: tea.KeyBackspace}) + m = next.(inputModel) + require.Equal(t, "", m.Value) + require.Equal(t, 0, m.cursorPos) +} + +func TestValuesFromFlagsOrAskUsesProvidedFlagValues(t *testing.T) { + fs := pflag.NewFlagSet("test", pflag.ContinueOnError) + fs.String("username", "", "username") + fs.String("region", "", "region") + require.NoError(t, fs.Set("username", "alice")) + require.NoError(t, fs.Set("region", "earth")) + + values, err := ValuesFromFlagsOrAsk( + fs, + "", + NewFlag("username", true), + NewFlag("region", false), + ) + require.NoError(t, err) + require.Equal(t, "alice", values["username"]) + require.Equal(t, "earth", values["region"]) +} + +func TestValuesFromFlagsOrAskReturnsErrorForUndefinedFlag(t *testing.T) { + fs := pflag.NewFlagSet("test", pflag.ContinueOnError) + + _, err := ValuesFromFlagsOrAsk(fs, "", NewFlag("missing", true)) + require.Error(t, err) +} diff --git a/ignite/pkg/cliui/bubbleconfirm/question.go b/ignite/pkg/cliui/bubbleconfirm/question.go new file mode 100644 index 
0000000..42186f4 --- /dev/null +++ b/ignite/pkg/cliui/bubbleconfirm/question.go @@ -0,0 +1,339 @@ +package bubbleconfirm + +import ( + "context" + "fmt" + "reflect" + "strings" + + tea "github.com/charmbracelet/bubbletea" + "github.com/charmbracelet/lipgloss" + "github.com/spf13/pflag" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +var ( + // styles for the question input. + activeStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("212")) + promptStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("99")) + placeholderStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("240")) + errorStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("9")) +) + +// ErrInterrupted is returned when the input process is interrupted. +var ErrInterrupted = errors.New("interrupted") + +// ErrConfirmationFailed is returned when second answer is not the same with first one. +var ErrConfirmationFailed = errors.New("failed to confirm, your answers were different") + +// Question holds information on what to ask user and where +// the answer stored at. +type Question struct { + question string + defaultAnswer interface{} + answer interface{} + hidden bool + shouldConfirm bool + required bool +} + +// Option configures Question. +type Option func(*Question) + +// DefaultAnswer sets a default answer to Question. +func DefaultAnswer(answer interface{}) Option { + return func(q *Question) { + q.defaultAnswer = answer + } +} + +// Required marks the answer as required. +func Required() Option { + return func(q *Question) { + q.required = true + } +} + +// HideAnswer hides the answer to prevent secret information being leaked. +func HideAnswer() Option { + return func(q *Question) { + q.hidden = true + } +} + +// GetConfirmation prompts confirmation for the given answer. +func GetConfirmation() Option { + return func(q *Question) { + q.shouldConfirm = true + } +} + +// NewQuestion creates a new question. 
+func NewQuestion(question string, answer interface{}, options ...Option) Question { + q := Question{ + question: question, + answer: answer, + } + for _, o := range options { + o(&q) + } + return q +} + +// inputModel represents the bubbletea model for an input prompt. +type inputModel struct { + Question string + Value string + Hidden bool + Required bool + DefaultValue string + Error string + cursorPos int + done bool +} + +// Init initializes the input model. +func (m inputModel) Init() tea.Cmd { + return nil +} + +// Update handles messages and updates the input model. +func (m inputModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + switch msg := msg.(type) { //nolint:gocritic // more readable than if-else + case tea.KeyMsg: + switch msg.String() { + case "ctrl+c", "esc": + m.done = true + return m, tea.Quit + case "enter": + // validate if input is required + if m.Required && strings.TrimSpace(m.Value) == "" { + m.Error = "this information is required" + return m, nil + } + + m.done = true + return m, tea.Quit + case "backspace": + if m.cursorPos > 0 { + m.Value = m.Value[:m.cursorPos-1] + m.Value[m.cursorPos:] + m.cursorPos-- + } + case "left": + if m.cursorPos > 0 { + m.cursorPos-- + } + case "right": + if m.cursorPos < len(m.Value) { + m.cursorPos++ + } + case "home": + m.cursorPos = 0 + case "end": + m.cursorPos = len(m.Value) + default: + // only accept printable characters + if len(msg.Runes) == 1 { + m.Value = m.Value[:m.cursorPos] + string(msg.Runes) + m.Value[m.cursorPos:] + m.cursorPos++ + m.Error = "" + } + } + } + return m, nil +} + +// View renders the input prompt. 
+func (m inputModel) View() string { + if m.done { + return "" + } + + question := m.Question + if !m.Required { + question += " (optional)" + } + question = questionStyle.Render(question) + + var input string + if m.Hidden { + // show asterisks for hidden input + input = strings.Repeat("*", len(m.Value)) + } else { + input = m.Value + } + + // show cursor position + var display string + if m.Value == "" && m.DefaultValue != "" { //nolint:gocritic // more readable than switch + // show default value as placeholder + display = placeholderStyle.Render(m.DefaultValue) + } else if m.cursorPos < len(input) { + display = input[:m.cursorPos] + activeStyle.Render(string(input[m.cursorPos])) + input[m.cursorPos+1:] + } else { + display = input + activeStyle.Render("_") + } + + prompt := fmt.Sprintf("%s\n%s ", question, promptStyle.Render("›")) + + if m.Error != "" { + return prompt + display + "\n" + errorStyle.Render(m.Error) + } + + return prompt + display +} + +func ask(q Question) error { + // prepare default value as string + defaultValue := "" + if q.defaultAnswer != nil { + defaultValue = fmt.Sprintf("%v", q.defaultAnswer) + } + + // create and init the model + m := inputModel{ + Question: q.question, + Hidden: q.hidden, + Required: q.required, + DefaultValue: defaultValue, + } + + // run the bubbletea program + p := tea.NewProgram(&m) + result, err := p.Run() + if err != nil { + return err + } + + finalModel := result.(inputModel) + if !finalModel.done { + return ErrInterrupted + } + + // if empty and we have a default, use the default + value := finalModel.Value + if value == "" && defaultValue != "" { + value = defaultValue + } + + // convert the string value to the target type and store it + switch ptr := q.answer.(type) { + case *string: + *ptr = value + case *int: + var i int + _, err := fmt.Sscanf(value, "%d", &i) + if err == nil { + *ptr = i + } + case *float64: + var f float64 + _, err := fmt.Sscanf(value, "%f", &f) + if err == nil { + *ptr = f + } + case 
*bool: + *ptr = strings.ToLower(value) == "true" || value == "1" || strings.ToLower(value) == "yes" || strings.ToLower(value) == "y" + default: + // use reflection for other types + v := reflect.ValueOf(ptr).Elem() + if v.Kind() == reflect.String { + v.SetString(value) + } + } + + return nil +} + +// Ask asks questions and collect answers. +func Ask(question ...Question) (err error) { + defer func() { + if errors.Is(err, ErrInterrupted) { + err = context.Canceled + } + }() + + for _, q := range question { + if err := ask(q); err != nil { + return err + } + + if q.shouldConfirm { + var secondAnswer string + + var options []Option + if q.required { + options = append(options, Required()) + } + if q.hidden { + options = append(options, HideAnswer()) + } + if err := ask(NewQuestion("Confirm "+q.question, &secondAnswer, options...)); err != nil { + return err + } + + t := reflect.TypeOf(secondAnswer) + compAnswer := reflect.ValueOf(q.answer).Elem().Convert(t).String() + if secondAnswer != compAnswer { + return ErrConfirmationFailed + } + } + } + return nil +} + +// Flag represents a cmd flag. +type Flag struct { + Name string + IsRequired bool +} + +// NewFlag creates a new flag. +func NewFlag(name string, isRequired bool) Flag { + return Flag{name, isRequired} +} + +// ValuesFromFlagsOrAsk returns values of flags within map[string]string where map's +// key is the name of the flag and value is flag's value. +// when provided, values are collected through command otherwise they're asked by prompting user. +// title used as a message while prompting. 
+func ValuesFromFlagsOrAsk(fset *pflag.FlagSet, title string, flags ...Flag) (values map[string]string, err error) { + values = make(map[string]string) + + answers := make(map[string]*string) + var questions []Question + + for _, f := range flags { + flag := fset.Lookup(f.Name) + if flag == nil { + return nil, errors.Errorf("flag %q is not defined", f.Name) + } + if value, _ := fset.GetString(f.Name); value != "" { + values[f.Name] = value + continue + } + + var value string + answers[f.Name] = &value + + var options []Option + if f.IsRequired { + options = append(options, Required()) + } + questions = append(questions, NewQuestion(flag.Usage, &value, options...)) + } + + if len(questions) > 0 && title != "" { + fmt.Println(title) + } + if err := Ask(questions...); err != nil { + return values, err + } + + for name, answer := range answers { + values[name] = *answer + } + + return values, nil +} diff --git a/ignite/pkg/cliui/clispinner/clispinner.go b/ignite/pkg/cliui/clispinner/clispinner.go new file mode 100644 index 0000000..148d249 --- /dev/null +++ b/ignite/pkg/cliui/clispinner/clispinner.go @@ -0,0 +1,77 @@ +package clispinner + +import ( + "io" + "os" + + "golang.org/x/term" +) + +// DefaultText defines the default spinner text. +const DefaultText = "Initializing..." + +type ( + Spinner interface { + SetText(text string) Spinner + SetPrefix(text string) Spinner + SetCharset(charset []string) Spinner + SetColor(color string) Spinner + Start() Spinner + Stop() Spinner + IsActive() bool + Writer() io.Writer + } + + Option func(*Options) + + Options struct { + writer io.Writer + text string + charset []string + } +) + +// WithWriter configures an output for a spinner. +func WithWriter(w io.Writer) Option { + return func(options *Options) { + options.writer = w + } +} + +// WithText configures the spinner text. +func WithText(text string) Option { + return func(options *Options) { + options.text = text + } +} + +// WithCharset configures the spinner charset. 
+func WithCharset(charset []string) Option { + return func(options *Options) { + options.charset = charset + } +} + +// New creates a new spinner. +func New(options ...Option) Spinner { + o := Options{} + for _, apply := range options { + apply(&o) + } + + if isRunningInTerminal(o.writer) { + return newTermSpinner(o) + } + return newSimpleSpinner(o) +} + +// isRunningInTerminal check if the writer file descriptor is a terminal. +func isRunningInTerminal(w io.Writer) bool { + if w == nil { + return term.IsTerminal(int(os.Stdout.Fd())) + } + if f, ok := w.(*os.File); ok { + return term.IsTerminal(int(f.Fd())) + } + return false +} diff --git a/ignite/pkg/cliui/clispinner/clispinner_test.go b/ignite/pkg/cliui/clispinner/clispinner_test.go new file mode 100644 index 0000000..307bb96 --- /dev/null +++ b/ignite/pkg/cliui/clispinner/clispinner_test.go @@ -0,0 +1,121 @@ +package clispinner + +import ( + "bytes" + "fmt" + "os" + "sync" + "testing" + "time" + + "github.com/stretchr/testify/require" +) + +func TestNewUsesSimpleSpinnerForNonTerminalWriter(t *testing.T) { + s := New(WithWriter(&bytes.Buffer{})) + _, ok := s.(*SimpleSpinner) + require.True(t, ok) +} + +func TestIsRunningInTerminalFalseForNonFileWriter(t *testing.T) { + require.False(t, isRunningInTerminal(&bytes.Buffer{})) +} + +func TestIsRunningInTerminalFalseForRegularFile(t *testing.T) { + f, err := os.CreateTemp(t.TempDir(), "spinner-writer-*") + require.NoError(t, err) + t.Cleanup(func() { + require.NoError(t, f.Close()) + }) + + require.False(t, isRunningInTerminal(f)) +} + +func TestNewSimpleSpinnerDefaults(t *testing.T) { + s := newSimpleSpinner(Options{}) + + require.Equal(t, DefaultText, s.text) + require.Equal(t, simpleCharset, s.charset) + require.Equal(t, os.Stdout, s.writer) +} + +func TestSimpleSpinnerSetters(t *testing.T) { + s := newSimpleSpinner(Options{}) + + require.Same(t, s, s.SetText("text")) + require.Same(t, s, s.SetPrefix("prefix")) + require.Same(t, s, s.SetCharset([]string{"1", 
"2"})) + require.Same(t, s, s.SetColor("red")) + + require.Equal(t, "text", s.text) + require.Equal(t, "prefix", s.prefix) + require.Equal(t, []string{"1", "2"}, s.charset) + require.Equal(t, "red", s.color) +} + +func TestSimpleSpinnerStartAndStop(t *testing.T) { + oldRefreshRate := simpleRefreshRate + oldColor := simpleColor + simpleRefreshRate = time.Millisecond + simpleColor = func(i ...interface{}) string { return fmt.Sprint(i...) } + t.Cleanup(func() { + simpleRefreshRate = oldRefreshRate + simpleColor = oldColor + }) + + out := &safeBuffer{} + s := newSimpleSpinner(Options{ + writer: out, + text: "working", + charset: []string{"."}, + }) + + require.False(t, s.IsActive()) + s.Start() + require.True(t, s.IsActive()) + + require.Eventually(t, func() bool { + return out.Len() > 0 + }, 200*time.Millisecond, 5*time.Millisecond) + + s.Stop() + require.False(t, s.IsActive()) +} + +func TestNewTermSpinnerDefaultsAndSetters(t *testing.T) { + var out bytes.Buffer + s := newTermSpinner(Options{ + writer: &out, + text: "booting", + charset: []string{"A", "B"}, + }) + + require.Equal(t, []string{"A", "B"}, s.charset) + require.Equal(t, &out, s.Writer()) + require.Equal(t, " booting", s.sp.Suffix) + + require.Same(t, s, s.SetText("running")) + require.Same(t, s, s.SetPrefix("ignite")) + require.Same(t, s, s.SetCharset([]string{"X"})) + require.Same(t, s, s.SetColor("green")) + + require.Equal(t, "ignite ", s.sp.Prefix) + require.Equal(t, " running", s.sp.Suffix) +} + +type safeBuffer struct { + mu sync.Mutex + buf bytes.Buffer +} + +func (b *safeBuffer) Write(p []byte) (int, error) { + b.mu.Lock() + defer b.mu.Unlock() + return b.buf.Write(p) +} + +func (b *safeBuffer) Len() int { + b.mu.Lock() + defer b.mu.Unlock() + return b.buf.Len() +} diff --git a/ignite/pkg/cliui/clispinner/simple.go b/ignite/pkg/cliui/clispinner/simple.go new file mode 100644 index 0000000..6197a37 --- /dev/null +++ b/ignite/pkg/cliui/clispinner/simple.go @@ -0,0 +1,162 @@ +package clispinner + 
+import ( + "fmt" + "io" + "os" + "sync" + "time" + + "github.com/briandowns/spinner" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" +) + +var ( + simpleCharset = spinner.CharSets[4] + simpleRefreshRate = time.Millisecond * 300 + simpleColor = colors.Spinner +) + +type SimpleSpinner struct { + mu sync.Mutex + writer io.Writer + charset []string + text string + prefix string + color string + active bool + stopChan chan struct{} +} + +// newSimpleSpinner creates a new simple spinner. +func newSimpleSpinner(o Options) *SimpleSpinner { + text := o.text + if text == "" { + text = DefaultText + } + + charset := o.charset + if len(charset) == 0 { + charset = simpleCharset + } + + writer := o.writer + if writer == nil { + writer = os.Stdout + } + + return &SimpleSpinner{ + charset: charset, + text: text, + writer: writer, + } +} + +// SetText sets the text for the spinner. +func (s *SimpleSpinner) SetText(text string) Spinner { + s.mu.Lock() + s.text = text + s.mu.Unlock() + return s +} + +// SetPrefix sets the prefix for the spinner. +func (s *SimpleSpinner) SetPrefix(prefix string) Spinner { + s.mu.Lock() + s.prefix = prefix + s.mu.Unlock() + return s +} + +// SetCharset sets the charset for the spinner. +func (s *SimpleSpinner) SetCharset(charset []string) Spinner { + s.mu.Lock() + s.charset = charset + s.mu.Unlock() + return s +} + +// SetColor sets the color for the spinner (if color functionality is added). +func (s *SimpleSpinner) SetColor(color string) Spinner { + s.mu.Lock() + s.color = color + s.mu.Unlock() + return s +} + +// Start begins the spinner animation. 
+func (s *SimpleSpinner) Start() Spinner { + s.mu.Lock() + if s.active { + s.mu.Unlock() + return s // Do nothing if already active + } + s.active = true + s.stopChan = make(chan struct{}) + stop := s.stopChan + + writer := s.writer + s.mu.Unlock() + + // Start the animation loop in a separate goroutine + go func(stop <-chan struct{}) { + ticker := time.NewTicker(simpleRefreshRate) + defer ticker.Stop() + + index := 0 + for { + select { + case <-stop: // Stop the spinner + _, _ = fmt.Fprintf(writer, "\r\033[K") // Clear the spinner's line + return + case <-ticker.C: // Update the spinner on each tick + s.mu.Lock() + charset := s.charset + if len(charset) == 0 { + charset = simpleCharset + } + frame := charset[index] + str := fmt.Sprintf("\r%s%s %s", s.prefix, simpleColor(frame), s.text) + _, _ = fmt.Fprint(writer, str) // Update the spinner in the same line + index++ + if index >= len(charset) { + index = 0 + } + s.mu.Unlock() + } + } + }(stop) + return s +} + +// Stop ends the spinner animation. +func (s *SimpleSpinner) Stop() Spinner { + s.mu.Lock() + if !s.active { + s.mu.Unlock() + return s // Do nothing if already inactive + } + stop := s.stopChan + s.active = false + s.stopChan = nil + s.mu.Unlock() + + if stop != nil { + close(stop) + } + fmt.Print("\r") // Clear spinner line on stop + return s +} + +// IsActive returns whether the spinner is currently active. +func (s *SimpleSpinner) IsActive() bool { + s.mu.Lock() + defer s.mu.Unlock() + return s.active +} + +// Writer returns the spinner writer. 
func (s *SimpleSpinner) Writer() io.Writer {
	return s.writer
}
diff --git a/ignite/pkg/cliui/clispinner/terminal.go b/ignite/pkg/cliui/clispinner/terminal.go
new file mode 100644
index 0000000..0357638
--- /dev/null
+++ b/ignite/pkg/cliui/clispinner/terminal.go
@@ -0,0 +1,100 @@
package clispinner

import (
	"io"
	"time"

	"github.com/briandowns/spinner"
)

var (
	terminalCharset     = spinner.CharSets[4]
	terminalRefreshRate = time.Millisecond * 200
	terminalColor       = "blue"
)

// TermSpinner wraps briandowns/spinner for interactive terminal output.
type TermSpinner struct {
	sp      *spinner.Spinner
	charset []string
}

// newTermSpinner creates a new terminal spinner.
// Missing options fall back to DefaultText, the default terminal charset
// and the spinner library's own default writer.
func newTermSpinner(o Options) *TermSpinner {
	text := o.text
	if text == "" {
		text = DefaultText
	}

	charset := o.charset
	if len(charset) == 0 {
		charset = terminalCharset
	}

	spOptions := []spinner.Option{
		spinner.WithColor(terminalColor),
		spinner.WithSuffix(" " + text),
	}

	if o.writer != nil {
		spOptions = append(spOptions, spinner.WithWriter(o.writer))
	}

	return &TermSpinner{
		sp: spinner.New(charset, terminalRefreshRate, spOptions...),
		charset: charset,
	}
}

// SetText sets the text for spinner.
func (s *TermSpinner) SetText(text string) Spinner {
	s.sp.Lock()
	s.sp.Suffix = " " + text
	s.sp.Unlock()
	return s
}

// SetPrefix sets the prefix for spinner.
func (s *TermSpinner) SetPrefix(text string) Spinner {
	s.sp.Lock()
	s.sp.Prefix = text + " "
	s.sp.Unlock()
	return s
}

// SetCharset sets the charset for spinner.
func (s *TermSpinner) SetCharset(charset []string) Spinner {
	s.sp.UpdateCharSet(charset)
	return s
}

// SetColor sets the color for spinner.
func (s *TermSpinner) SetColor(color string) Spinner {
	_ = s.sp.Color(color)
	return s
}

// Start starts spinning.
func (s *TermSpinner) Start() Spinner {
	s.sp.Start()
	return s
}

// Stop stops spinning.
+func (s *TermSpinner) Stop() Spinner { + s.sp.Stop() + s.sp.Prefix = "" + _ = s.sp.Color(terminalColor) + s.sp.UpdateCharSet(s.charset) + s.sp.Stop() + return s +} + +// IsActive returns whether the spinner is currently active. +func (s *TermSpinner) IsActive() bool { + return s.sp.Active() +} + +// Writer returns the spinner writer. +func (s *TermSpinner) Writer() io.Writer { + return s.sp.Writer +} diff --git a/ignite/pkg/cliui/cliui.go b/ignite/pkg/cliui/cliui.go new file mode 100644 index 0000000..0771b49 --- /dev/null +++ b/ignite/pkg/cliui/cliui.go @@ -0,0 +1,339 @@ +package cliui + +import ( + "fmt" + "io" + "os" + "sync" + + tea "github.com/charmbracelet/bubbletea" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/bubbleconfirm" + "github.com/ignite/cli/v29/ignite/pkg/cliui/clispinner" + "github.com/ignite/cli/v29/ignite/pkg/cliui/entrywriter" + uilog "github.com/ignite/cli/v29/ignite/pkg/cliui/log" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/events" +) + +type sessionOptions struct { + stdin io.ReadCloser + stdout io.WriteCloser + stderr io.WriteCloser + + spinnerStart bool + spinnerText string + + ignoreEvents bool + verbosity uilog.Verbosity +} + +// Session controls command line interaction with users. +type Session struct { + options sessionOptions + ev events.Bus + spinner clispinner.Spinner + out uilog.Output + wg *sync.WaitGroup + ended bool + skipUI bool +} + +// Option configures session options. +type Option func(s *Session) + +// WithStdout sets the starndard output for the session. +func WithStdout(stdout io.WriteCloser) Option { + return func(s *Session) { + s.options.stdout = stdout + } +} + +// WithStderr sets base stderr for a Session. +func WithStderr(stderr io.WriteCloser) Option { + return func(s *Session) { + s.options.stderr = stderr + } +} + +// WithStdin sets the starndard input for the session. 
+func WithStdin(stdin io.ReadCloser) Option { + return func(s *Session) { + s.options.stdin = stdin + } +} + +// WithVerbosity sets a verbosity level for the Session. +func WithVerbosity(v uilog.Verbosity) Option { + return func(s *Session) { + s.options.verbosity = v + } +} + +// IgnoreEvents configures the session to avoid displaying events. +// This is a compatibility option to be able to use the session and +// the events bus when models are used to manage CLI UI. The session +// won't handle the events when this option is present. +func IgnoreEvents() Option { + return func(s *Session) { + s.options.ignoreEvents = true + } +} + +// StartSpinner forces spinner to be spinning right after creation. +func StartSpinner() Option { + return func(s *Session) { + s.options.spinnerStart = true + } +} + +// StartSpinnerWithText forces spinner to be spinning right after creation +// with a custom status text. +func StartSpinnerWithText(text string) Option { + return func(s *Session) { + s.options.spinnerStart = true + s.options.spinnerText = text + } +} + +func WithoutUserInteraction(yes bool) Option { + return func(s *Session) { + s.skipUI = yes + } +} + +// New creates a new Session. +func New(options ...Option) *Session { + session := Session{ + ev: events.NewBus(), + wg: &sync.WaitGroup{}, + options: sessionOptions{ + stdin: os.Stdin, + stdout: os.Stdout, + stderr: os.Stderr, + spinnerText: clispinner.DefaultText, + }, + } + + for _, apply := range options { + apply(&session) + } + + logOptions := []uilog.Option{ + uilog.WithStdout(session.options.stdout), + uilog.WithStderr(session.options.stderr), + } + + if session.options.verbosity == uilog.VerbosityVerbose { + logOptions = append(logOptions, uilog.Verbose()) + } + + session.out = uilog.NewOutput(logOptions...) 
+ + if session.options.spinnerStart { + session.StartSpinner(session.options.spinnerText) + } + + // The main loop that prints the events uses a wait group to block + // the session end until all the events are printed. + if !session.options.ignoreEvents { + session.wg.Add(1) + go session.handleEvents() + } + + return &session +} + +// EventBus returns the event bus of the session. +func (s Session) EventBus() events.Bus { + return s.ev +} + +// Verbosity returns the verbosity level for the session output. +func (s Session) Verbosity() uilog.Verbosity { + return s.options.verbosity +} + +// NewOutput returns a new logging output bound to the session. +// The new output will use the session's verbosity, stderr and stdout. +// Label and color arguments are used to prefix the output when the +// session verbosity is verbose. +func (s Session) NewOutput(label, color string) uilog.Output { + options := []uilog.Option{ + uilog.WithStdout(s.options.stdout), + uilog.WithStderr(s.options.stderr), + } + + if s.options.verbosity == uilog.VerbosityVerbose { + options = append(options, uilog.CustomVerbose(label, color)) + } + + return uilog.NewOutput(options...) +} + +// SpinnerMessage change the spinner message. +func (s *Session) SpinnerMessage(text string) { + s.StopSpinner() + s.StartSpinner(text) +} + +// StartSpinner starts the spinner. +func (s *Session) StartSpinner(text string) { + if s.options.ignoreEvents { + return + } + + // Verbose mode must not render the spinner but instead + // it should just print the text to display next to the + // app label otherwise the verbose logs would be printed + // with an invalid format. + if s.options.verbosity == uilog.VerbosityVerbose { + fmt.Fprint(s.out.Stdout(), text) + return + } + + if s.spinner == nil { + s.spinner = clispinner.New(clispinner.WithWriter(s.out.Stdout())) + } + + s.spinner.SetText(text).Start() +} + +// StopSpinner stops the spinner. 
+func (s Session) StopSpinner() { + if s.spinner == nil { + return + } + + s.spinner.Stop() +} + +// PauseSpinner pauses spinner and returns a function to restart the spinner. +func (s Session) PauseSpinner() (restart func()) { + isActive := s.spinner != nil && s.spinner.IsActive() + if isActive { + s.spinner.Stop() + } + + return func() { + if isActive { + s.spinner.Start() + } + } +} + +// Printf prints formatted arbitrary message. +func (s Session) Printf(format string, a ...interface{}) error { + defer s.PauseSpinner()() + _, err := fmt.Fprintf(s.out.Stdout(), format, a...) + return err +} + +// Println prints arbitrary message with line break. +func (s Session) Println(messages ...interface{}) error { + defer s.PauseSpinner()() + _, err := fmt.Fprintln(s.out.Stdout(), messages...) + return err +} + +// Print prints arbitrary message. +func (s Session) Print(messages ...interface{}) error { + defer s.PauseSpinner()() + _, err := fmt.Fprint(s.out.Stdout(), messages...) + return err +} + +// Ask asks questions in the terminal and collect answers. +func (s Session) Ask(questions ...bubbleconfirm.Question) error { + // If the flag yes was set true, we skip the user interaction + if s.skipUI { + return nil + } + defer s.PauseSpinner()() + // TODO provide writer from the session + return bubbleconfirm.Ask(questions...) +} + +// ErrAbort is returned when the user aborts the operation. +var ErrAbort = errors.New("aborted or not confirmed") + +// AskConfirm asks a yes/no question using a bubbletea dialog. 
+func (s Session) AskConfirm(message string) error { + if s.skipUI { + return nil + } + + defer s.PauseSpinner()() + + // Create and run the bubbletea program + p := tea.NewProgram(bubbleconfirm.NewModel(message)) + + // Run the program + m, err := p.Run() + if err != nil { + return err + } + + // Type assert to our model + confirmModel, ok := m.(bubbleconfirm.Model) + if !ok { + return errors.New("could not assert type to bubbleconfirm.Model") + } + + // Check the result + if confirmModel.Choice() != bubbleconfirm.Yes { + return ErrAbort + } + + return nil +} + +// PrintTable prints table data. +func (s Session) PrintTable(header []string, entries ...[]string) error { + defer s.PauseSpinner()() + return entrywriter.MustWrite(s.out.Stdout(), header, entries...) +} + +// End finishes the session by stopping the spinner and the event bus. +// Once the session is ended it should not be used anymore. +func (s *Session) End() { + if s.ended { + return + } + + s.StopSpinner() + s.ev.Stop() + s.wg.Wait() + s.ended = true +} + +func (s *Session) handleEvents() { + defer s.wg.Done() + + stdout := s.out.Stdout() + + for e := range s.ev.Events() { + switch e.ProgressIndication { + case events.IndicationStart: + s.StartSpinner(e.String()) + case events.IndicationUpdate: + if s.spinner == nil { + // When the spinner is not initialized print the event + fmt.Fprintf(stdout, "%s\n", e) + } else { + // Otherwise update the spinner with a new text + s.spinner.SetText(e.String()) + } + case events.IndicationFinish: + s.StopSpinner() + fmt.Fprintf(stdout, "%s\n", e) + case events.IndicationNone: + fallthrough + default: + // The text printed here won't be removed when the spinner stops + resume := s.PauseSpinner() + fmt.Fprintf(stdout, "%s\n", e) + resume() + } + } +} diff --git a/ignite/pkg/cliui/cliui_test.go b/ignite/pkg/cliui/cliui_test.go new file mode 100644 index 0000000..9ddfdc1 --- /dev/null +++ b/ignite/pkg/cliui/cliui_test.go @@ -0,0 +1,93 @@ +package cliui + +import ( + 
"bytes" + "io" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/clispinner" + uilog "github.com/ignite/cli/v29/ignite/pkg/cliui/log" + "github.com/ignite/cli/v29/ignite/pkg/xio" +) + +type fakeSpinner struct { + active bool +} + +func (f *fakeSpinner) SetText(string) clispinner.Spinner { return f } +func (f *fakeSpinner) SetPrefix(string) clispinner.Spinner { return f } +func (f *fakeSpinner) SetCharset([]string) clispinner.Spinner { return f } +func (f *fakeSpinner) SetColor(string) clispinner.Spinner { return f } +func (f *fakeSpinner) Start() clispinner.Spinner { + f.active = true + return f +} + +func (f *fakeSpinner) Stop() clispinner.Spinner { + f.active = false + return f +} +func (f *fakeSpinner) IsActive() bool { return f.active } +func (f *fakeSpinner) Writer() io.Writer { + return io.Discard +} + +func TestNewWithOptions(t *testing.T) { + var outBuf, errBuf bytes.Buffer + session := New( + WithStdout(xio.NopWriteCloser(&outBuf)), + WithStderr(xio.NopWriteCloser(&errBuf)), + WithStdin(io.NopCloser(strings.NewReader(""))), + IgnoreEvents(), + WithVerbosity(uilog.VerbosityVerbose), + ) + t.Cleanup(session.End) + + require.EqualValues(t, uilog.VerbosityVerbose, session.Verbosity()) +} + +func TestAskAndAskConfirmSkipUI(t *testing.T) { + session := New(IgnoreEvents(), WithoutUserInteraction(true)) + t.Cleanup(session.End) + + require.NoError(t, session.Ask()) + require.NoError(t, session.AskConfirm("continue?")) +} + +func TestPauseSpinner(t *testing.T) { + session := New(IgnoreEvents()) + t.Cleanup(session.End) + + sp := &fakeSpinner{active: true} + session.spinner = sp + + restart := session.PauseSpinner() + require.False(t, sp.IsActive()) + + restart() + require.True(t, sp.IsActive()) +} + +func TestStartSpinnerVerboseWritesText(t *testing.T) { + var outBuf bytes.Buffer + session := Session{ + options: sessionOptions{ + verbosity: uilog.VerbosityVerbose, + }, + out: 
uilog.NewOutput(uilog.WithStdout(xio.NopWriteCloser(&outBuf))), + } + + session.StartSpinner("working") + require.Contains(t, outBuf.String(), "working") +} + +func TestEndIsIdempotent(t *testing.T) { + session := New(IgnoreEvents()) + session.End() + require.True(t, session.ended) + session.End() + require.True(t, session.ended) +} diff --git a/ignite/pkg/cliui/colors/colors.go b/ignite/pkg/cliui/colors/colors.go new file mode 100644 index 0000000..75b98f8 --- /dev/null +++ b/ignite/pkg/cliui/colors/colors.go @@ -0,0 +1,77 @@ +package colors + +import ( + "fmt" + + "github.com/charmbracelet/lipgloss" +) + +const ( + Yellow = "#c4a000" + Red = "#ef2929" + Green = "#4e9a06" + Magenta = "#75507b" + Cyan = "#34e2e2" + White = "#FFFFFF" + HiBlue = "#729FCF" + Blue = "#0a2fc4" +) + +var ( + info = lipgloss.NewStyle().Foreground(lipgloss.Color(Yellow)) + infof = lipgloss.NewStyle().Foreground(lipgloss.Color(Yellow)) + err = lipgloss.NewStyle().Foreground(lipgloss.Color(Red)) + success = lipgloss.NewStyle().Foreground(lipgloss.Color(Green)) + modified = lipgloss.NewStyle().Foreground(lipgloss.Color(Magenta)) + name = lipgloss.NewStyle().Bold(true) + mnemonic = lipgloss.NewStyle().Foreground(lipgloss.Color(HiBlue)) + spinner = lipgloss.NewStyle().Foreground(lipgloss.Color(Blue)) + faint = lipgloss.NewStyle().Faint(true) +) + +// SprintFunc returns a function to apply a foreground color to any number of texts. +// The returned function receives strings as arguments with the text that should be colorized. +// Color specifies a color by hex or ANSI value. 
+func SprintFunc(color string) func(i ...interface{}) string { + return func(i ...interface{}) string { + style := lipgloss.NewStyle().Foreground(lipgloss.Color(color)) + return style.Render(fmt.Sprint(i...)) + } +} + +func Info(i ...interface{}) string { + return info.Render(fmt.Sprint(i...)) +} + +func Infof(format string, i ...interface{}) string { + return infof.Render(fmt.Sprintf(format, i...)) +} + +func Error(i ...interface{}) string { + return err.Render(fmt.Sprint(i...)) +} + +func Success(i ...interface{}) string { + return success.Render(fmt.Sprint(i...)) +} + +func Modified(i ...interface{}) string { + return modified.Render(fmt.Sprint(i...)) +} + +func Name(i ...interface{}) string { + return name.Render(fmt.Sprint(i...)) +} + +func Mnemonic(i ...interface{}) string { + return mnemonic.Render(fmt.Sprint(i...)) +} + +func Spinner(i ...interface{}) string { + return spinner.Render(fmt.Sprint(i...)) +} + +// Faint styles the text using a dimmer shade for the foreground color. +func Faint(i ...interface{}) string { + return faint.Render(fmt.Sprint(i...)) +} diff --git a/ignite/pkg/cliui/colors/colors_test.go b/ignite/pkg/cliui/colors/colors_test.go new file mode 100644 index 0000000..e72b6ec --- /dev/null +++ b/ignite/pkg/cliui/colors/colors_test.go @@ -0,0 +1,24 @@ +package colors + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestSprintFunc(t *testing.T) { + got := SprintFunc(Green)("ignite") + require.Contains(t, got, "ignite") +} + +func TestFormatters(t *testing.T) { + require.Contains(t, Info("a"), "a") + require.Contains(t, Infof("%s", "b"), "b") + require.Contains(t, Error("c"), "c") + require.Contains(t, Success("d"), "d") + require.Contains(t, Modified("e"), "e") + require.Contains(t, Name("f"), "f") + require.Contains(t, Mnemonic("g"), "g") + require.Contains(t, Spinner("h"), "h") + require.Contains(t, Faint("i"), "i") +} diff --git a/ignite/pkg/cliui/entrywriter/entrywriter.go 
b/ignite/pkg/cliui/entrywriter/entrywriter.go new file mode 100644 index 0000000..4e26451 --- /dev/null +++ b/ignite/pkg/cliui/entrywriter/entrywriter.go @@ -0,0 +1,65 @@ +package entrywriter + +import ( + "fmt" + "io" + "text/tabwriter" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xstrings" +) + +const ( + None = "-" +) + +var ErrInvalidFormat = errors.New("invalid entry format") + +// MustWrite writes into out the tabulated entries and panic if the entry format is invalid. +func MustWrite(out io.Writer, header []string, entries ...[]string) error { + err := Write(out, header, entries...) + if errors.Is(err, ErrInvalidFormat) { + panic(err) + } + return err +} + +// Write writes into out the tabulated entries. +func Write(out io.Writer, header []string, entries ...[]string) error { + w := &tabwriter.Writer{} + w.Init(out, 0, 8, 0, '\t', 0) + + formatLine := func(line []string, title bool) (formatted string) { + for _, cell := range line { + if title { + cell = xstrings.Title(cell) + } + formatted += fmt.Sprintf("%s \t", cell) + } + return formatted + } + + if len(header) == 0 { + return errors.Wrap(ErrInvalidFormat, "empty header") + } + + // write header + if _, err := fmt.Fprintln(w, formatLine(header, true)); err != nil { + return err + } + + // write entries + for i, entry := range entries { + if len(entry) != len(header) { + return errors.Wrapf(ErrInvalidFormat, "entry %d doesn't match header length", i) + } + if _, err := fmt.Fprint(w, formatLine(entry, false)+"\n"); err != nil { + return err + } + } + + if _, err := fmt.Fprintln(w); err != nil { + return err + } + return w.Flush() +} diff --git a/ignite/pkg/cliui/entrywriter/entrywriter_test.go b/ignite/pkg/cliui/entrywriter/entrywriter_test.go new file mode 100644 index 0000000..d778531 --- /dev/null +++ b/ignite/pkg/cliui/entrywriter/entrywriter_test.go @@ -0,0 +1,40 @@ +package entrywriter_test + +import ( + "io" + "testing" + + 
"github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/entrywriter" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +type WriterWithError struct{} + +func (WriterWithError) Write(_ []byte) (n int, err error) { + return 0, errors.New("writer with error") +} + +func TestWrite(t *testing.T) { + header := []string{"foobar", "bar", "foo"} + + entries := [][]string{ + {"foo", "bar", "foobar"}, + {"bar", "foobar", "foo"}, + {"foobar", "foo", "bar"}, + } + + require.NoError(t, entrywriter.Write(io.Discard, header, entries...)) + require.NoError(t, entrywriter.Write(io.Discard, header), "should allow no entry") + + err := entrywriter.Write(io.Discard, []string{}) + require.ErrorIs(t, err, entrywriter.ErrInvalidFormat, "should prevent no header") + + entries[0] = []string{"foo", "bar"} + err = entrywriter.Write(io.Discard, header, entries...) + require.ErrorIs(t, err, entrywriter.ErrInvalidFormat, "should prevent entry length mismatch") + + var wErr WriterWithError + require.Error(t, entrywriter.Write(wErr, header, entries...), "should catch writer errors") +} diff --git a/ignite/pkg/cliui/icons/icon.go b/ignite/pkg/cliui/icons/icon.go new file mode 100644 index 0000000..e55c8a2 --- /dev/null +++ b/ignite/pkg/cliui/icons/icon.go @@ -0,0 +1,23 @@ +package icons + +import ( + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" +) + +var ( + Earth = "🌍" + CD = "💿" + User = "👤" + Tada = "🎉" + Survey = "💬" + Announcement = "🗣️" + + // OK is an OK mark. + OK = colors.SprintFunc(colors.Green)("✔") + // NotOK is a red cross mark. + NotOK = colors.SprintFunc(colors.Red)("✘") + // Bullet is a bullet mark. + Bullet = colors.SprintFunc(colors.Yellow)("⋆") + // Info is an info mark. 
+ Info = colors.SprintFunc(colors.Yellow)("𝓲") +) diff --git a/ignite/pkg/cliui/icons/icon_test.go b/ignite/pkg/cliui/icons/icon_test.go new file mode 100644 index 0000000..8ffa989 --- /dev/null +++ b/ignite/pkg/cliui/icons/icon_test.go @@ -0,0 +1,21 @@ +package icons + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestIconsAreInitialized(t *testing.T) { + require.NotEmpty(t, Earth) + require.NotEmpty(t, CD) + require.NotEmpty(t, User) + require.NotEmpty(t, Tada) + require.NotEmpty(t, Survey) + require.NotEmpty(t, Announcement) + + require.Contains(t, OK, "✔") + require.Contains(t, NotOK, "✘") + require.Contains(t, Bullet, "⋆") + require.Contains(t, Info, "𝓲") +} diff --git a/ignite/pkg/cliui/lineprefixer/lineprefixer.go b/ignite/pkg/cliui/lineprefixer/lineprefixer.go new file mode 100644 index 0000000..6044beb --- /dev/null +++ b/ignite/pkg/cliui/lineprefixer/lineprefixer.go @@ -0,0 +1,48 @@ +// Package lineprefixer is a helpers to add prefixes to new lines. +package lineprefixer + +import ( + "bytes" + "io" +) + +// Writer is a prefixed line writer. +type Writer struct { + prefix func() string + w io.Writer + shouldPrefix bool +} + +// NewWriter returns a new Writer that adds prefixes to each line +// written. It then writes prefixed data stream into w. +func NewWriter(w io.Writer, prefix func() string) *Writer { + return &Writer{ + prefix: prefix, + w: w, + shouldPrefix: true, + } +} + +// Write implements io.Writer. +func (p *Writer) Write(b []byte) (n int, err error) { + var ( + numBytes = len(b) + lastChar = b[numBytes-1] + newLine = byte('\n') + snewLine = []byte{newLine} + replaceCount = bytes.Count(b, snewLine) + prefix = []byte(p.prefix()) + ) + if lastChar == newLine { + replaceCount-- + } + b = bytes.Replace(b, snewLine, append(snewLine, prefix...), replaceCount) + if p.shouldPrefix { + b = append(prefix, b...) 
+ } + p.shouldPrefix = lastChar == newLine + if _, err := p.w.Write(b); err != nil { + return 0, err + } + return numBytes, nil +} diff --git a/ignite/pkg/cliui/lineprefixer/lineprefixer_test.go b/ignite/pkg/cliui/lineprefixer/lineprefixer_test.go new file mode 100644 index 0000000..7f0946d --- /dev/null +++ b/ignite/pkg/cliui/lineprefixer/lineprefixer_test.go @@ -0,0 +1,27 @@ +package lineprefixer + +import ( + "bytes" + "io" + "strings" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestWriter(t *testing.T) { + logs := `hello, +this +is +Starport!` + buf := bytes.Buffer{} + w := NewWriter(&buf, func() string { return "[TENDERMINT] " }) + _, err := io.Copy(w, strings.NewReader(logs)) + require.NoError(t, err) + require.Equal(t, `[TENDERMINT] hello, +[TENDERMINT] this +[TENDERMINT] is +[TENDERMINT] Starport!`, + buf.String(), + ) +} diff --git a/ignite/pkg/cliui/log/output.go b/ignite/pkg/cliui/log/output.go new file mode 100644 index 0000000..71a4424 --- /dev/null +++ b/ignite/pkg/cliui/log/output.go @@ -0,0 +1,146 @@ +package uilog + +import ( + "io" + "os" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/cliui/lineprefixer" + "github.com/ignite/cli/v29/ignite/pkg/cliui/prefixgen" + "github.com/ignite/cli/v29/ignite/pkg/xio" +) + +const ( + defaultVerboseLabel = "ignite" + defaultVerboseLabelColor = colors.Red +) + +// Verbosity enumerates possible verbosity levels for CLI output. +type Verbosity uint8 + +const ( + VerbositySilent = iota + VerbosityDefault + VerbosityVerbose +) + +// Outputer defines an interface for logging output creation. +type Outputer interface { + // NewOutput returns a new logging output. + NewOutput(label, color string) Output + + // Verbosity returns the current verbosity level for the logging output. + Verbosity() Verbosity +} + +// Output stores writers for standard output and error. 
+type Output struct { + verbosity Verbosity + stdout io.WriteCloser + stderr io.WriteCloser +} + +// Stdout returns the standard output writer. +func (o Output) Stdout() io.WriteCloser { + return o.stdout +} + +// Stderr returns the standard error writer. +func (o Output) Stderr() io.WriteCloser { + return o.stderr +} + +// Verbosity returns the log output verbosity. +func (o Output) Verbosity() Verbosity { + return o.verbosity +} + +type option struct { + stdout io.WriteCloser + stderr io.WriteCloser + verbosity Verbosity + verboseLabel string + verboseLabelColor string +} + +// Option configures log output options. +type Option func(*option) + +// Verbose changes the log output to be prefixed with "ignite". +func Verbose() Option { + return func(o *option) { + o.verbosity = VerbosityVerbose + o.verboseLabel = defaultVerboseLabel + o.verboseLabelColor = defaultVerboseLabelColor + } +} + +// CustomVerbose changes the log output to be prefixed with a custom label. +func CustomVerbose(label, color string) Option { + return func(o *option) { + o.verbosity = VerbosityVerbose + o.verboseLabel = label + o.verboseLabelColor = color + } +} + +// Silent creates a log output that doesn't print any of the written lines. +func Silent() Option { + return func(o *option) { + o.verbosity = VerbositySilent + } +} + +// WithStdout sets a custom writer to use instead of the default `os.Stdout`. +func WithStdout(r io.WriteCloser) Option { + return func(o *option) { + o.stdout = r + } +} + +// WithStderr sets a custom writer to use instead of the default `os.Stderr`. +func WithStderr(r io.WriteCloser) Option { + return func(o *option) { + o.stderr = r + } +} + +// NewOutput creates a new log output. +// By default, the new output uses the default OS stdout and stderr to +// initialize the outputs with a default verbosity that doesn't change +// the output. 
+func NewOutput(options ...Option) (out Output) { + o := option{ + verbosity: VerbosityDefault, + stdout: os.Stdout, + stderr: os.Stderr, + } + + for _, apply := range options { + apply(&o) + } + + out.verbosity = o.verbosity + + switch o.verbosity { + case VerbositySilent: + out.stdout = xio.NopWriteCloser(io.Discard) + out.stderr = xio.NopWriteCloser(io.Discard) + case VerbosityVerbose: + // Function to add a custom prefix to each log output + prefixer := func(w io.Writer) *lineprefixer.Writer { + options := prefixgen.Common(prefixgen.Color(o.verboseLabelColor)) + prefix := prefixgen.New(o.verboseLabel, options...).Gen() + + return lineprefixer.NewWriter(w, func() string { return prefix }) + } + + out.stdout = xio.NopWriteCloser(prefixer(o.stdout)) + out.stderr = xio.NopWriteCloser(prefixer(o.stderr)) + default: + out.stdout = o.stdout + out.stderr = o.stderr + } + + return out +} diff --git a/ignite/pkg/cliui/log/output_test.go b/ignite/pkg/cliui/log/output_test.go new file mode 100644 index 0000000..b25fa9b --- /dev/null +++ b/ignite/pkg/cliui/log/output_test.go @@ -0,0 +1,60 @@ +package uilog + +import ( + "bytes" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/xio" +) + +func TestNewOutputDefault(t *testing.T) { + var outBuf, errBuf bytes.Buffer + out := NewOutput( + WithStdout(xio.NopWriteCloser(&outBuf)), + WithStderr(xio.NopWriteCloser(&errBuf)), + ) + + _, err := out.Stdout().Write([]byte("stdout")) + require.NoError(t, err) + _, err = out.Stderr().Write([]byte("stderr")) + require.NoError(t, err) + + require.EqualValues(t, VerbosityDefault, out.Verbosity()) + require.Equal(t, "stdout", outBuf.String()) + require.Equal(t, "stderr", errBuf.String()) +} + +func TestNewOutputSilent(t *testing.T) { + var outBuf, errBuf bytes.Buffer + out := NewOutput( + WithStdout(xio.NopWriteCloser(&outBuf)), + WithStderr(xio.NopWriteCloser(&errBuf)), + Silent(), + ) + + _, err := out.Stdout().Write([]byte("stdout")) + 
require.NoError(t, err) + _, err = out.Stderr().Write([]byte("stderr")) + require.NoError(t, err) + + require.EqualValues(t, VerbositySilent, out.Verbosity()) + require.Empty(t, outBuf.String()) + require.Empty(t, errBuf.String()) +} + +func TestNewOutputVerbose(t *testing.T) { + var outBuf bytes.Buffer + out := NewOutput( + WithStdout(xio.NopWriteCloser(&outBuf)), + CustomVerbose("ignite", "red"), + ) + + _, err := out.Stdout().Write([]byte("hello\n")) + require.NoError(t, err) + + require.EqualValues(t, VerbosityVerbose, out.Verbosity()) + require.Contains(t, outBuf.String(), "hello") + require.Contains(t, outBuf.String(), "IGNITE") +} diff --git a/ignite/pkg/cliui/model/events.go b/ignite/pkg/cliui/model/events.go new file mode 100644 index 0000000..21be39b --- /dev/null +++ b/ignite/pkg/cliui/model/events.go @@ -0,0 +1,255 @@ +package cliuimodel + +import ( + "container/list" + "fmt" + "strings" + "time" + + "github.com/charmbracelet/bubbles/spinner" + tea "github.com/charmbracelet/bubbletea" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + "github.com/ignite/cli/v29/ignite/pkg/events" +) + +// EventMsg defines a message for events. +type EventMsg struct { + events.Event + + Start time.Time + Duration time.Duration +} + +// NewStatusEvents returns a new events model. +func NewStatusEvents(bus events.Provider, maxHistory int) StatusEvents { + return StatusEvents{ + events: list.New(), + spinner: NewSpinner(), + maxHistory: maxHistory, + bus: bus, + } +} + +// StatusEvents defines a model for status events. +// The model renders a view that can be divided in three sections. +// The first one displays the "static" events which are the ones +// that are not status events. The second section displays a spinner +// with the status event that is in progress, and the third one +// displays a list with the past status events. 
+type StatusEvents struct { + static []events.Event + events *list.List + spinner spinner.Model + maxHistory int + bus events.Provider +} + +func (m *StatusEvents) ClearEvents() { + m.static = nil + m.events.Init() +} + +func (m StatusEvents) Wait() tea.Cmd { + return tea.Batch(m.spinner.Tick, m.WaitEvent) +} + +func (m StatusEvents) WaitEvent() tea.Msg { + e := <-m.bus.Events() + + return EventMsg{ + Start: time.Now(), + Event: e, + } +} + +func (m StatusEvents) Update(msg tea.Msg) (StatusEvents, tea.Cmd) { + var cmd tea.Cmd + + switch msg := msg.(type) { + case EventMsg: + if msg.InProgress() { + // Save the duration of the current ongoing event before setting a new one + if e := m.events.Front(); e != nil { + evt := e.Value.(EventMsg) + evt.Duration = time.Since(evt.Start) + e.Value = evt + } + + // Add the event to the queue + m.events.PushFront(msg) + + // Only show a reduced history of events + if m.events.Len() > m.maxHistory { + m.events.Remove(m.events.Back()) + } + } else { + // Events that have no progress status are considered static, + // so they will be printed without the spinner and won't be + // removed from the output until the view is removed. 
+ m.static = append(m.static, msg.Event) + } + + // Return a command to wait for the next event + cmd = m.Wait() + default: + // Update the spinner state and get a new tick command + m.spinner, cmd = m.spinner.Update(msg) + } + + return m, cmd +} + +func (m StatusEvents) View() string { + var view strings.Builder + + // Display static events first + for _, evt := range m.static { + view.WriteString(evt.String()) + + if !strings.HasSuffix(evt.Message, "\n") { + view.WriteRune('\n') + } + } + + // Make sure there is a line between the static and status events + if m.static != nil && m.events.Len() > 0 { + view.WriteRune('\n') + } + + // Display status events + if m.events.Len() > 0 { + for e := m.events.Front(); e != nil; e = e.Next() { + evt := e.Value.(EventMsg) + + // The first event is displayed using a spinner + if e.Prev() == nil { + fmt.Fprintf(&view, "%s%s\n", m.spinner.View(), evt) + + if e.Next() != nil { + view.WriteRune('\n') + } + + continue + } + + // Display finished status event + d := evt.Duration.Round(time.Second) + s := strings.TrimSuffix(evt.String(), "...") + + fmt.Fprintf(&view, "%s %s %s\n", icons.OK, s, colors.Faint(d.String())) + } + } + + return view.String() +} + +// NewEvents returns a new events model. +func NewEvents(bus events.Provider) Events { + return Events{ + events: list.New(), + bus: bus, + spinner: NewSpinner(), + } +} + +// Events defines a model for events. +// The model renders a view that prints all received events one after +// the other. Status events are displayed with a spinner and removed +// from the list once they finish. +type Events struct { + events *list.List + bus events.Provider + spinner spinner.Model +} + +func (m *Events) ClearEvents() { + m.events.Init() +} + +func (m Events) Wait() tea.Cmd { + // Check if the last added event is a status event + // and if so make sure that the spinner is updated. 
+ if e := m.events.Back(); e != nil { + if evt := e.Value.(events.Event); evt.InProgress() { + return tea.Batch(m.spinner.Tick, m.WaitEvent) + } + } + + // By default, just wait until the next event is received + return m.WaitEvent +} + +func (m Events) WaitEvent() tea.Msg { + e := <-m.bus.Events() + + return EventMsg{ + Event: e, + Start: time.Now(), + } +} + +func (m Events) Update(msg tea.Msg) (Events, tea.Cmd) { + var cmd tea.Cmd + + switch msg := msg.(type) { + case EventMsg: + // Remove the last event if is a status one. + // Status events must always be the last event in the list so the + // spinner is displayed at the bottom and not in between events. + // They are removed when another status event is received. + if e := m.events.Back(); e != nil { + if evt := e.Value.(events.Event); evt.InProgress() { + m.events.Remove(e) + } + } + + // Append event at the end of the list + m.events.PushBack(msg.Event) + + // Return a command to wait for the next event + cmd = m.Wait() + default: + // Update the spinner state and get a new tick command + m.spinner, cmd = m.spinner.Update(msg) + } + + return m, cmd +} + +func (m Events) View() string { + var ( + view strings.Builder + group string + ) + + // Display the list of events + for e := m.events.Front(); e != nil; e = e.Next() { + evt := e.Value.(events.Event) + + // Add an empty line when the event group changes but omit it + // for the first event to avoid adding an initial empty line. + if group != evt.Group && e.Prev() != nil { + // Update the group being displayed + group = evt.Group + + view.WriteRune('\n') + } + + if e.Next() == nil && evt.InProgress() { + // When the event is the last one and is a status event display a spinner... 
+ fmt.Fprintf(&view, "\n%s%s", m.spinner.View(), evt) + } else { + // Otherwise display the event without the spinner + view.WriteString(evt.String()) + } + + // Make sure that events have an EOL, so they are displayed right below each other + if !strings.HasSuffix(evt.Message, "\n") { + view.WriteRune('\n') + } + } + + return view.String() +} diff --git a/ignite/pkg/cliui/model/events_test.go b/ignite/pkg/cliui/model/events_test.go new file mode 100644 index 0000000..e6c4108 --- /dev/null +++ b/ignite/pkg/cliui/model/events_test.go @@ -0,0 +1,88 @@ +package cliuimodel_test + +import ( + "fmt" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + cliuimodel "github.com/ignite/cli/v29/ignite/pkg/cliui/model" + "github.com/ignite/cli/v29/ignite/pkg/events" +) + +func TestStatusEventsView(t *testing.T) { + // Arrange + spinner := cliuimodel.NewSpinner() + queue := []string{"Event 1...", "Event 2..."} + model := cliuimodel.NewStatusEvents(dummyEventsProvider{}, len(queue)) + want := fmt.Sprintf( + "Static event\n\n%s%s\n\n%s %s %s\n", + spinner.View(), + queue[1], + icons.OK, + strings.TrimSuffix(queue[0], "..."), + colors.Faint("0s"), + ) + + // Arrange: Update model with status events + for _, s := range queue { + model, _ = model.Update(cliuimodel.EventMsg{ + Event: events.New(s, events.ProgressStart()), + Start: time.Now(), + }) + } + + // Arrange: Add one static event + model, _ = model.Update(cliuimodel.EventMsg{ + Event: events.New("Static event"), + }) + + // Act + view := model.View() + + // Assert + require.Equal(t, want, view) +} + +func TestEventsView(t *testing.T) { + // Arrange + spinner := cliuimodel.NewSpinner() + model := cliuimodel.NewEvents(dummyEventsProvider{}) + queue := []string{"Event 1", "Event 2"} + want := fmt.Sprintf( + "%s\n%s\n\n%sStatus Event...\n", + queue[0], + queue[1], + spinner.View(), + ) + + // 
Arrange: Update model with events + for _, s := range queue { + model, _ = model.Update(cliuimodel.EventMsg{ + Event: events.New(s), + }) + } + + // Arrange: Add one status event + model, _ = model.Update(cliuimodel.EventMsg{ + Event: events.New("Status Event...", events.ProgressStart()), + }) + + // Act + view := model.View() + + // Assert + require.Equal(t, want, view) +} + +type dummyEventsProvider struct{} + +func (dummyEventsProvider) Events() <-chan events.Event { + c := make(chan events.Event) + close(c) + return c +} diff --git a/ignite/pkg/cliui/model/model.go b/ignite/pkg/cliui/model/model.go new file mode 100644 index 0000000..e5123a4 --- /dev/null +++ b/ignite/pkg/cliui/model/model.go @@ -0,0 +1,29 @@ +package cliuimodel + +import ( + "fmt" + + "github.com/muesli/reflow/indent" +) + +const ( + defaultIndent = 2 +) + +type ( + // ErrorMsg defines a message for errors. + ErrorMsg struct { + Error error + } + + // QuitMsg defines a message for stopping the command. + QuitMsg struct{} +) + +// FormatView formats a model view padding and indentation. +func FormatView(view string) string { + // Indent the view lines + view = indent.String(view, defaultIndent) + // Add top and bottom paddings + return fmt.Sprintf("\n%s\n", view) +} diff --git a/ignite/pkg/cliui/model/spinner.go b/ignite/pkg/cliui/model/spinner.go new file mode 100644 index 0000000..b2c44d9 --- /dev/null +++ b/ignite/pkg/cliui/model/spinner.go @@ -0,0 +1,25 @@ +package cliuimodel + +import ( + "time" + + "github.com/charmbracelet/bubbles/spinner" + "github.com/charmbracelet/lipgloss" +) + +// ColorSpinner defines the foreground color for the spinner. +const ColorSpinner = "#3465A4" + +// Spinner defines the spinner model animation. +var Spinner = spinner.Spinner{ + Frames: []string{"◢ ", "◣ ", "◤ ", "◥ "}, + FPS: time.Second / 5, +} + +// NewSpinner returns a new spinner model. 
+// NewSpinner returns a new spinner model.
+// It uses the package-level Spinner frames and colors them with ColorSpinner.
+func NewSpinner() spinner.Model {
+	s := spinner.New()
+	s.Spinner = Spinner
+	s.Style = lipgloss.NewStyle().Foreground(lipgloss.Color(ColorSpinner))
+	return s
+}
diff --git a/ignite/pkg/cliui/prefixgen/prefixgen.go b/ignite/pkg/cliui/prefixgen/prefixgen.go
new file mode 100644
index 0000000..615d66c
--- /dev/null
+++ b/ignite/pkg/cliui/prefixgen/prefixgen.go
+// Package prefixgen is a prefix generation helper for log messages
+// and any other kind.
+package prefixgen
+
+import (
+	"fmt"
+	"strings"
+
+	"github.com/ignite/cli/v29/ignite/pkg/cliui/colors"
+)
+
+// Prefixer generates prefixes.
+type Prefixer struct {
+	format           string // fmt.Sprintf-style template the prefix is built from
+	color            string // optional color applied to the final prefix
+	left, right      string // decorations placed before/after the formatted text
+	convertUppercase bool   // whether the prefix is uppercased before coloring
+}
+
+// Option configures Prefixer.
+type Option func(p *Prefixer)
+
+// Color sets color to the prefix.
+func Color(color string) Option {
+	return func(p *Prefixer) {
+		p.color = color
+	}
+}
+
+// SquareBrackets adds square brackets to the prefix.
+func SquareBrackets() Option {
+	return func(p *Prefixer) {
+		p.left = "["
+		p.right = "]"
+	}
+}
+
+// SpaceRight adds a right space to the prefix.
+// Note it appends to the right decoration, so combined with SquareBrackets
+// the space goes after the closing bracket: "[LABEL] ".
+func SpaceRight() Option {
+	return func(p *Prefixer) {
+		p.right += " "
+	}
+}
+
+// Uppercase formats the prefix to uppercase.
+func Uppercase() Option {
+	return func(p *Prefixer) {
+		p.convertUppercase = true
+	}
+}
+
+// Common holds some common prefix options and extends those
+// options by given options. The caller's options are appended last,
+// so they can override the common decorations.
+func Common(options ...Option) []Option {
+	return append([]Option{
+		SquareBrackets(),
+		SpaceRight(),
+		Uppercase(),
+	}, options...)
+}
+
+// New creates a new Prefixer with format and options.
+// Format is an fmt.Sprintf() like format to dynamically create prefix texts
+// as needed.
+func New(format string, options ...Option) *Prefixer {
+	p := &Prefixer{
+		format: format,
+	}
+	for _, o := range options {
+		o(p)
+	}
+	return p
+}
+
+// Gen generates a new prefix by applying s to format given during New().
+func (p *Prefixer) Gen(s ...interface{}) string { + format := p.format + format = p.left + format + format += p.right + prefix := fmt.Sprintf(format, s...) + if p.convertUppercase { + prefix = strings.ToUpper(prefix) + } + if p.color != "" { + return colors.SprintFunc(p.color)(prefix) + } + return prefix +} diff --git a/ignite/pkg/cliui/prefixgen/prefixgen_test.go b/ignite/pkg/cliui/prefixgen/prefixgen_test.go new file mode 100644 index 0000000..de3f67b --- /dev/null +++ b/ignite/pkg/cliui/prefixgen/prefixgen_test.go @@ -0,0 +1,23 @@ +package prefixgen + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestGen(t *testing.T) { + cases := []struct { + expected string + given string + }{ + {"[TENDERMINT] ", New("Tendermint", Common()...).Gen()}, + {"Tendermint", New("Tendermint").Gen()}, + {"appd", New("%sd").Gen("app")}, + } + for _, tt := range cases { + t.Run(tt.expected, func(t *testing.T) { + require.Equal(t, tt.expected, tt.given) + }) + } +} diff --git a/ignite/pkg/cliui/view/accountview/account.go b/ignite/pkg/cliui/view/accountview/account.go new file mode 100644 index 0000000..ec8c9fb --- /dev/null +++ b/ignite/pkg/cliui/view/accountview/account.go @@ -0,0 +1,76 @@ +package accountview + +import ( + "fmt" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" +) + +var ( + fmtExistingAccount = "%s %s's account address: %s\n" + fmtNewAccount = "%s Added account %s with address %s and mnemonic:\n%s\n" +) + +type Option func(*Account) + +type Account struct { + Name string + Address string + Mnemonic string +} + +func WithMnemonic(mnemonic string) Option { + return func(a *Account) { + a.Mnemonic = mnemonic + } +} + +func NewAccount(name, address string, options ...Option) Account { + a := Account{ + Name: name, + Address: address, + } + + for _, apply := range options { + apply(&a) + } + + return a +} + +func (a Account) String() string { + name := colors.Name(a.Name) 
+ + // The account is new when the mnemonic is available + if a.Mnemonic != "" { + return fmt.Sprintf(fmtNewAccount, icons.OK, name, a.Address, colors.Mnemonic(a.Mnemonic)) + } + + return fmt.Sprintf(fmtExistingAccount, icons.User, name, a.Address) +} + +type Accounts []Account + +func (a Accounts) String() string { + b := strings.Builder{} + + for i, acc := range a { + // Make sure accounts are separated by an + // empty line when the mnemonic is available. + if i > 0 && acc.Mnemonic != "" { + b.WriteRune('\n') + } + + b.WriteString(acc.String()) + } + + b.WriteRune('\n') + + return b.String() +} + +func (a Accounts) Append(acc Account) Accounts { + return append(a, acc) +} diff --git a/ignite/pkg/cliui/view/accountview/account_test.go b/ignite/pkg/cliui/view/accountview/account_test.go new file mode 100644 index 0000000..58e821e --- /dev/null +++ b/ignite/pkg/cliui/view/accountview/account_test.go @@ -0,0 +1,37 @@ +package accountview_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/view/accountview" +) + +func TestAccountString(t *testing.T) { + tests := []struct { + name string + account accountview.Account + want string + }{ + { + name: "new account (mnemonic available) to string is not idented", + account: accountview.NewAccount("alice", "cosmos193he38n21khnmb2", accountview.WithMnemonic("person estate daughter box chimney clay bronze ring story truck make excess ring frame desk start food leader sleep predict item rifle stem boy")), + want: "✔ Added account alice with address cosmos193he38n21khnmb2 and mnemonic:\nperson estate daughter box chimney clay bronze ring story truck make excess ring frame desk start food leader sleep predict item rifle stem boy\n", + }, + { + name: "existent account to string is not idented", + account: accountview.NewAccount("alice", "cosmos193he38n21khnmb2"), + want: "👤 alice's account address: cosmos193he38n21khnmb2\n", + }, + } + + for _, tt := range tests { + 
t.Run(tt.name, func(t *testing.T) { + result := tt.account.String() + + assert.NotEmpty(t, result) + assert.Equal(t, tt.want, result) + }) + } +} diff --git a/ignite/pkg/cliui/view/errorview/error.go b/ignite/pkg/cliui/view/errorview/error.go new file mode 100644 index 0000000..8a3503f --- /dev/null +++ b/ignite/pkg/cliui/view/errorview/error.go @@ -0,0 +1,28 @@ +package errorview + +import ( + "strings" + + "github.com/muesli/reflow/wordwrap" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" +) + +func NewError(err error) Error { + return Error{err} +} + +type Error struct { + Err error +} + +func (e Error) String() string { + s := strings.TrimSpace(e.Err.Error()) + + w := wordwrap.NewWriter(80) + w.Breakpoints = []rune{' '} + _, _ = w.Write([]byte(s)) + _ = w.Close() + + return colors.Error(w.String()) +} diff --git a/ignite/pkg/cliui/view/errorview/error_test.go b/ignite/pkg/cliui/view/errorview/error_test.go new file mode 100644 index 0000000..ae7baad --- /dev/null +++ b/ignite/pkg/cliui/view/errorview/error_test.go @@ -0,0 +1,16 @@ +package errorview + +import ( + stdErrors "errors" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestErrorString(t *testing.T) { + view := NewError(stdErrors.New(" hello world ")) + out := view.String() + + require.Contains(t, out, "hello world") + require.NotContains(t, out, " hello world ") +} diff --git a/ignite/pkg/cmdrunner/cmdrunner.go b/ignite/pkg/cmdrunner/cmdrunner.go new file mode 100644 index 0000000..cbe68f1 --- /dev/null +++ b/ignite/pkg/cmdrunner/cmdrunner.go @@ -0,0 +1,261 @@ +package cmdrunner + +import ( + "context" + "fmt" + "io" + "os" + "os/exec" + "strings" + + "golang.org/x/sync/errgroup" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/env" + "github.com/ignite/cli/v29/ignite/pkg/goenv" +) + +// Runner is an object to run commands. 
+// Runner executes step.Step commands with shared defaults for IO streams,
+// working directory, parallelism and termination signal.
+type Runner struct {
+	endSignal   os.Signal
+	stdout      io.Writer
+	stderr      io.Writer
+	stdin       io.Reader
+	workdir     string
+	runParallel bool
+	debug       bool
+}
+
+// Option defines option to run commands.
+type Option func(*Runner)
+
+// DefaultStdout provides the default stdout for the commands to run.
+func DefaultStdout(writer io.Writer) Option {
+	return func(r *Runner) {
+		r.stdout = writer
+	}
+}
+
+// DefaultStderr provides the default stderr for the commands to run.
+func DefaultStderr(writer io.Writer) Option {
+	return func(r *Runner) {
+		r.stderr = writer
+	}
+}
+
+// DefaultStdin provides the default stdin for the commands to run.
+func DefaultStdin(reader io.Reader) Option {
+	return func(r *Runner) {
+		r.stdin = reader
+	}
+}
+
+// DefaultWorkdir provides the default working directory for the commands to run.
+func DefaultWorkdir(path string) Option {
+	return func(r *Runner) {
+		r.workdir = path
+	}
+}
+
+// RunParallel allows commands to run concurrently.
+func RunParallel() Option {
+	return func(r *Runner) {
+		r.runParallel = true
+	}
+}
+
+// EndSignal configures s to be signaled to the processes to end them.
+func EndSignal(s os.Signal) Option {
+	return func(r *Runner) {
+		r.endSignal = s
+	}
+}
+
+// EnableDebug makes the runner print each step's command line before running it.
+func EnableDebug() Option {
+	return func(r *Runner) {
+		r.debug = true
+	}
+}
+
+// New returns a new command runner.
+// Defaults: end signal is os.Interrupt and debug mode follows env.IsDebug().
+func New(options ...Option) *Runner {
+	runner := &Runner{
+		endSignal: os.Interrupt,
+		debug:     env.IsDebug(),
+	}
+	for _, apply := range options {
+		apply(runner)
+	}
+	return runner
+}
+
+// Run blocks until all steps have completed their executions.
+// Steps run sequentially unless RunParallel was set, in which case each
+// step's Wait/PostExecs phase runs in an errgroup goroutine.
+func (r *Runner) Run(ctx context.Context, steps ...*step.Step) error {
+	if len(steps) == 0 {
+		return nil
+	}
+	g, ctx := errgroup.WithContext(ctx)
+	for i, step := range steps {
+		// NOTE(review): an earlier comment here said "copy s to a new
+		// variable" but no copy is made; with Go >= 1.22 loop variables
+		// are per-iteration so capturing `step` in the goroutines below
+		// is safe — confirm the module's minimum Go version.
+		if r.debug {
+			var cd string
+			if step.Workdir != "" {
+				cd = fmt.Sprintf("cd %s;", step.Workdir)
+			}
+			fmt.Printf("Step %d: %s%s %s %s\n", i, cd, strings.Join(step.Env, " "),
+				step.Exec.Command,
+				strings.Join(step.Exec.Args, " "))
+		}
+		if err := ctx.Err(); err != nil {
+			return err
+		}
+		if err := step.PreExec(); err != nil {
+			return err
+		}
+		runPostExecs := func(processErr error) error {
+			// if context is canceled, then we can ignore exit error of the
+			// process because it should be exited because of the cancellation.
+			var err error
+			ctxErr := ctx.Err()
+			if ctxErr != nil {
+				err = ctxErr
+			} else {
+				err = processErr
+			}
+			for _, exec := range step.PostExecs {
+				if err := exec(err); err != nil {
+					return err
+				}
+			}
+			// NOTE(review): when PostExecs is non-empty the original
+			// process/context error is deliberately not returned unless a
+			// hook fails; hooks are expected to inspect and translate the
+			// error themselves — confirm callers rely on this.
+			if len(step.PostExecs) > 0 {
+				return nil
+			}
+			return err
+		}
+		command := r.newCommand(step)
+		startErr := command.Start()
+		if startErr != nil {
+			if err := runPostExecs(startErr); err != nil {
+				return err
+			}
+			continue
+		}
+		// Forward the configured end signal to the process on cancellation.
+		go func() {
+			<-ctx.Done()
+			command.Signal(r.endSignal)
+		}()
+		if err := step.InExec(); err != nil {
+			return err
+		}
+		if len(step.WriteData) > 0 {
+			if _, err := command.Write(step.WriteData); err != nil {
+				return err
+			}
+		}
+		if r.runParallel {
+			g.Go(func() error {
+				return runPostExecs(command.Wait())
+			})
+		} else if err := runPostExecs(command.Wait()); err != nil {
+			return err
+		}
+	}
+	return g.Wait()
+}
+
+// Executor represents a command to execute.
+type Executor interface {
+	Wait() error
+	Start() error
+	Signal(os.Signal)
+	Write(data []byte) (n int, err error)
+}
+
+// dummyExecutor is an executor that does nothing.
+// It is returned for steps with an empty command so Run can treat every
+// step uniformly.
+type dummyExecutor struct{}
+
+func (e *dummyExecutor) Start() error { return nil }
+
+func (e *dummyExecutor) Wait() error { return nil }
+
+func (e *dummyExecutor) Signal(os.Signal) {}
+
+func (e *dummyExecutor) Write([]byte) (int, error) { return 0, nil }
+
+// cmdSignal is an executor with signal processing.
+// cmdSignal wraps exec.Cmd for steps that supply their own stdin reader.
+type cmdSignal struct {
+	*exec.Cmd
+}
+
+func (e *cmdSignal) Signal(s os.Signal) { _ = e.Cmd.Process.Signal(s) }
+
+// Write is a no-op: when a custom stdin reader is attached the command's
+// input comes from that reader, so any WriteData is silently discarded.
+func (e *cmdSignal) Write([]byte) (n int, err error) { return 0, nil }
+
+// cmdSignalWithWriter is an executor with signal processing and that can write into stdin.
+type cmdSignalWithWriter struct {
+	*exec.Cmd
+	w io.WriteCloser
+}
+
+func (e *cmdSignalWithWriter) Signal(s os.Signal) { _ = e.Cmd.Process.Signal(s) }
+
+// Write writes data into the command's stdin pipe and then closes it, so
+// the process sees EOF after a single payload; it cannot be called twice.
+func (e *cmdSignalWithWriter) Write(data []byte) (n int, err error) {
+	defer e.w.Close()
+	return e.w.Write(data)
+}
+
+// newCommand returns a new command to execute.
+// Step-level IO/workdir settings take precedence over the runner defaults,
+// and the environment is the process environment extended with the step's
+// Env entries plus a PATH that includes the Go binary directory.
+func (r *Runner) newCommand(step *step.Step) Executor {
+	// Return a dummy executor in case of an empty command
+	if step.Exec.Command == "" {
+		return &dummyExecutor{}
+	}
+	var (
+		stdout = step.Stdout
+		stderr = step.Stderr
+		stdin  = step.Stdin
+		dir    = step.Workdir
+	)
+
+	// Define standard input and outputs
+	if stdout == nil {
+		stdout = r.stdout
+	}
+	if stderr == nil {
+		stderr = r.stderr
+	}
+	if stdin == nil {
+		stdin = r.stdin
+	}
+	if dir == "" {
+		dir = r.workdir
+	}
+
+	// Initialize command
+	command := exec.Command(step.Exec.Command, step.Exec.Args...) //nolint:gosec
+	command.Stdout = stdout
+	command.Stderr = stderr
+	command.Dir = dir
+	command.Env = append(os.Environ(), step.Env...)
+	command.Env = append(command.Env, Env("PATH", goenv.Path()))
+
+	// If a custom stdin is provided it will be as the stdin for the command
+	if stdin != nil {
+		command.Stdin = stdin
+		return &cmdSignal{command}
+	}
+
+	// If no custom stdin, the executor can write into the stdin of the program
+	writer, err := command.StdinPipe()
+	if err != nil {
+		// TODO do not panic
+		// NOTE(review): StdinPipe only fails when Stdin is already set or
+		// the process already started — neither happens on this path, so
+		// the panic should be unreachable; still worth returning an error
+		// once the Executor construction can be made fallible.
+		panic(err)
+	}
+	return &cmdSignalWithWriter{command, writer}
+}
+
+// Env returns a new env var value from key and val.
+func Env(key, val string) string { + return fmt.Sprintf("%s=%s", key, val) +} diff --git a/ignite/pkg/cmdrunner/cmdrunner_test.go b/ignite/pkg/cmdrunner/cmdrunner_test.go new file mode 100644 index 0000000..b74dd48 --- /dev/null +++ b/ignite/pkg/cmdrunner/cmdrunner_test.go @@ -0,0 +1,123 @@ +package cmdrunner + +import ( + "bytes" + "context" + stdErrors "errors" + "os" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/goenv" +) + +func TestNewAppliesOptions(t *testing.T) { + stdout := &bytes.Buffer{} + stderr := &bytes.Buffer{} + stdin := strings.NewReader("stdin") + + r := New( + DefaultStdout(stdout), + DefaultStderr(stderr), + DefaultStdin(stdin), + DefaultWorkdir("/tmp/work"), + RunParallel(), + EndSignal(os.Kill), + EnableDebug(), + ) + + require.Equal(t, stdout, r.stdout) + require.Equal(t, stderr, r.stderr) + require.Equal(t, stdin, r.stdin) + require.Equal(t, "/tmp/work", r.workdir) + require.True(t, r.runParallel) + require.Equal(t, os.Kill, r.endSignal) + require.True(t, r.debug) +} + +func TestEnv(t *testing.T) { + require.Equal(t, "KEY=value", Env("KEY", "value")) +} + +func TestNewCommandReturnsDummyExecutorForEmptyCommand(t *testing.T) { + executor := New().newCommand(step.New()) + _, ok := executor.(*dummyExecutor) + require.True(t, ok) +} + +func TestNewCommandUsesDefaultsWhenStepDoesNotProvideIO(t *testing.T) { + stdout := &bytes.Buffer{} + stderr := &bytes.Buffer{} + r := New( + DefaultStdout(stdout), + DefaultStderr(stderr), + DefaultWorkdir("/tmp/work"), + ) + + executor := r.newCommand(step.New( + step.Exec("echo", "hello"), + step.Env("A=B"), + )) + + cmdExecutor, ok := executor.(*cmdSignalWithWriter) + require.True(t, ok) + require.Equal(t, stdout, cmdExecutor.Stdout) + require.Equal(t, stderr, cmdExecutor.Stderr) + require.Equal(t, "/tmp/work", cmdExecutor.Dir) + require.Contains(t, cmdExecutor.Env, "A=B") + require.Contains(t, 
cmdExecutor.Env, Env("PATH", goenv.Path())) +} + +func TestNewCommandWithCustomStdinReturnsCmdSignal(t *testing.T) { + stdin := strings.NewReader("input") + + executor := New().newCommand(step.New( + step.Exec("echo"), + step.Stdin(stdin), + )) + + cmdExecutor, ok := executor.(*cmdSignal) + require.True(t, ok) + require.Equal(t, stdin, cmdExecutor.Stdin) +} + +func TestRunWithoutStepsReturnsNil(t *testing.T) { + err := New().Run(context.Background()) + require.NoError(t, err) +} + +func TestRunReturnsPreExecError(t *testing.T) { + expectedErr := stdErrors.New("pre exec error") + + err := New().Run(context.Background(), step.New( + step.PreExec(func() error { return expectedErr }), + )) + + require.ErrorIs(t, err, expectedErr) +} + +func TestRunReturnsStartErrorWithoutPostExec(t *testing.T) { + err := New().Run(context.Background(), step.New( + step.Exec("this-command-does-not-exist-cmdrunner-test"), + )) + + require.Error(t, err) +} + +func TestRunCanHandleStartErrorInPostExec(t *testing.T) { + var receivedErr error + + err := New().Run(context.Background(), step.New( + step.Exec("this-command-does-not-exist-cmdrunner-test"), + step.PostExec(func(err error) error { + receivedErr = err + return nil + }), + )) + + require.NoError(t, err) + require.Error(t, receivedErr) +} diff --git a/ignite/pkg/cmdrunner/exec/exec.go b/ignite/pkg/cmdrunner/exec/exec.go new file mode 100644 index 0000000..4c0aa56 --- /dev/null +++ b/ignite/pkg/cmdrunner/exec/exec.go @@ -0,0 +1,86 @@ +// Package exec provides easy access to command execution for basic uses. +package exec + +import ( + "bytes" + "context" + "fmt" + "os/exec" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// ExitError is an alias to exec.ExitError. 
+type ExitError = exec.ExitError + +type execConfig struct { + stepOptions []step.Option + includeStdLogsToError bool +} + +type Option func(*execConfig) + +func StepOption(o step.Option) Option { + return func(c *execConfig) { + c.stepOptions = append(c.stepOptions, o) + } +} + +func IncludeStdLogsToError() Option { + return func(c *execConfig) { + c.includeStdLogsToError = true + } +} + +// Exec executes a command with args, it's a shortcut func for basic command executions. +func Exec(ctx context.Context, fullCommand []string, options ...Option) error { + errb := &bytes.Buffer{} + logs := &bytes.Buffer{} + + c := &execConfig{ + stepOptions: []step.Option{ + step.Exec(fullCommand[0], fullCommand[1:]...), + step.Stdout(logs), + step.Stderr(errb), + }, + } + + for _, apply := range options { + apply(c) + } + + err := cmdrunner.New().Run(ctx, step.New(c.stepOptions...)) + if err != nil { + return &Error{ + Err: errors.Wrap(err, errb.String()), + Command: strings.Join(fullCommand, " "), + StdLogs: logs.String(), + includeStdLogsToError: c.includeStdLogsToError, + } + } + + return nil +} + +// Error provides detailed errors from the executed program. +type Error struct { + Err error + Command string + StdLogs string // collected logs from code generation tools. 
+ includeStdLogsToError bool +} + +func (e *Error) Unwrap() error { + return e.Err +} + +func (e *Error) Error() string { + message := fmt.Sprintf("error while running command %s: %s", e.Command, e.Err.Error()) + if e.includeStdLogsToError && strings.TrimSpace(e.StdLogs) != "" { + return fmt.Sprintf("%s\n\n%s", message, e.StdLogs) + } + return message +} diff --git a/ignite/pkg/cmdrunner/exec/exec_test.go b/ignite/pkg/cmdrunner/exec/exec_test.go new file mode 100644 index 0000000..a27377d --- /dev/null +++ b/ignite/pkg/cmdrunner/exec/exec_test.go @@ -0,0 +1,46 @@ +package exec + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" +) + +func TestExecSuccess(t *testing.T) { + err := Exec(context.Background(), []string{"go", "version"}) + require.NoError(t, err) +} + +func TestExecReturnsDetailedError(t *testing.T) { + err := Exec(context.Background(), []string{"command-that-does-not-exist-ignite-test"}) + require.Error(t, err) + + var detailed *Error + require.ErrorAs(t, err, &detailed) + require.Contains(t, detailed.Error(), "error while running command") + require.Contains(t, detailed.Error(), "command-that-does-not-exist-ignite-test") +} + +func TestExecIncludesStdLogsWhenConfigured(t *testing.T) { + if _, err := os.Stat("/bin/sh"); err != nil { + t.Skip("/bin/sh not available") + } + + err := Exec( + context.Background(), + []string{"/bin/sh", "-c", "echo stdout-log; exit 1"}, + IncludeStdLogsToError(), + ) + require.Error(t, err) + require.Contains(t, err.Error(), "stdout-log") +} + +func TestStepOptionAddsStepOption(t *testing.T) { + cfg := &execConfig{} + StepOption(step.Workdir("/tmp"))(cfg) + require.Len(t, cfg.stepOptions, 1) +} diff --git a/ignite/pkg/cmdrunner/step/step.go b/ignite/pkg/cmdrunner/step/step.go new file mode 100644 index 0000000..eec7803 --- /dev/null +++ b/ignite/pkg/cmdrunner/step/step.go @@ -0,0 +1,118 @@ +package step + +import ( + "io" +) + +type 
Step struct { + Exec Execution + PreExec func() error + InExec func() error + PostExecs []func(error) error + Stdout io.Writer + Stderr io.Writer + Stdin io.Reader + Workdir string + Env []string + WriteData []byte +} + +type Option func(*Step) + +type Options []Option + +func NewOptions() Options { + return Options{} +} + +func (o Options) Add(options ...Option) Options { + return append(o, options...) +} + +func New(options ...Option) *Step { + s := &Step{ + PreExec: func() error { return nil }, + InExec: func() error { return nil }, + PostExecs: make([]func(error) error, 0), + } + for _, o := range options { + o(s) + } + return s +} + +type Execution struct { + Command string + Args []string +} + +func Exec(command string, args ...string) Option { + return func(s *Step) { + s.Exec = Execution{command, args} + } +} + +func PreExec(hook func() error) Option { + return func(s *Step) { + s.PreExec = hook + } +} + +func InExec(hook func() error) Option { + return func(s *Step) { + s.InExec = hook + } +} + +func PostExec(hook func(exitErr error) error) Option { // *os.ExitError + return func(s *Step) { + s.PostExecs = append(s.PostExecs, hook) + } +} + +func Stdout(w io.Writer) Option { + return func(s *Step) { + s.Stdout = w + } +} + +func Stderr(w io.Writer) Option { + return func(s *Step) { + s.Stderr = w + } +} + +func Stdin(r io.Reader) Option { + return func(s *Step) { + s.Stdin = r + } +} + +func Workdir(path string) Option { + return func(s *Step) { + s.Workdir = path + } +} + +func Env(e ...string) Option { + return func(s *Step) { + s.Env = e + } +} + +func Write(data []byte) Option { + return func(s *Step) { + s.WriteData = data + } +} + +type Steps []*Step + +func NewSteps(steps ...*Step) Steps { + return steps +} + +func (s *Steps) Add(steps ...*Step) Steps { + *s = append(*s, steps...) 
+ return *s +} diff --git a/ignite/pkg/cmdrunner/step/step_test.go b/ignite/pkg/cmdrunner/step/step_test.go new file mode 100644 index 0000000..7c93acd --- /dev/null +++ b/ignite/pkg/cmdrunner/step/step_test.go @@ -0,0 +1,64 @@ +package step + +import ( + "bytes" + "errors" + "strings" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestNewDefaults(t *testing.T) { + s := New() + require.NoError(t, s.PreExec()) + require.NoError(t, s.InExec()) + require.Empty(t, s.PostExecs) +} + +func TestNewAppliesOptions(t *testing.T) { + stdout := &bytes.Buffer{} + stderr := &bytes.Buffer{} + stdin := strings.NewReader("stdin") + postErr := errors.New("post") + + s := New( + Exec("go", "version"), + Stdout(stdout), + Stderr(stderr), + Stdin(stdin), + Workdir("/tmp/work"), + Env("A=B", "C=D"), + Write([]byte("payload")), + PreExec(func() error { return postErr }), + InExec(func() error { return postErr }), + PostExec(func(err error) error { return err }), + ) + + require.Equal(t, Execution{Command: "go", Args: []string{"version"}}, s.Exec) + require.Equal(t, stdout, s.Stdout) + require.Equal(t, stderr, s.Stderr) + require.Equal(t, stdin, s.Stdin) + require.Equal(t, "/tmp/work", s.Workdir) + require.Equal(t, []string{"A=B", "C=D"}, s.Env) + require.Equal(t, []byte("payload"), s.WriteData) + require.ErrorIs(t, s.PreExec(), postErr) + require.ErrorIs(t, s.InExec(), postErr) + require.Len(t, s.PostExecs, 1) + require.ErrorIs(t, s.PostExecs[0](postErr), postErr) +} + +func TestOptionsAdd(t *testing.T) { + options := NewOptions().Add(Exec("go"), Workdir("/tmp")) + require.Len(t, options, 2) +} + +func TestStepsAdd(t *testing.T) { + s1 := New(Exec("one")) + s2 := New(Exec("two")) + steps := NewSteps(s1) + got := (&steps).Add(s2) + require.Len(t, got, 2) + require.Equal(t, "one", got[0].Exec.Command) + require.Equal(t, "two", got[1].Exec.Command) +} diff --git a/ignite/pkg/confile/confile.go b/ignite/pkg/confile/confile.go new file mode 100644 index 0000000..e86386c --- 
+// Load loads the content of the config file into v if the file exists on path,
+// otherwise nothing is loaded into v and no error is returned.
+func (c *ConfigFile) Save(v interface{}) error { + if err := os.MkdirAll(filepath.Dir(c.path), 0o755); err != nil { + return err + } + file, err := os.OpenFile(c.path, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0o644) + if err != nil { + return err + } + defer file.Close() + return c.creator.Create(file).Encode(v) +} diff --git a/ignite/pkg/confile/confile_test.go b/ignite/pkg/confile/confile_test.go new file mode 100644 index 0000000..ef9dad3 --- /dev/null +++ b/ignite/pkg/confile/confile_test.go @@ -0,0 +1,53 @@ +package confile + +import ( + "io" + "os" + "strings" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestAll(t *testing.T) { + cases := []struct { + name string + ec EncodingCreator + content string + }{ + {"json", DefaultJSONEncodingCreator, `{"hello":"world"}`}, + {"yaml", DefaultYAMLEncodingCreator, `hello: world`}, + {"toml", DefaultTOMLEncodingCreator, `hello = "world"`}, + } + + type data struct { + Hello string `json:"hello" yaml:"hello" toml:"hello"` + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + file, err := os.CreateTemp("", "") + require.NoError(t, err) + defer func() { + file.Close() + os.Remove(file.Name()) + }() + + _, err = io.Copy(file, strings.NewReader(tt.content)) + require.NoError(t, err) + + cf := New(tt.ec, file.Name()) + var d data + require.NoError(t, cf.Load(&d)) + require.Equal(t, "world", d.Hello) + + d.Hello = "cosmos" + require.NoError(t, cf.Save(d)) + + cf2 := New(tt.ec, file.Name()) + var d2 data + require.NoError(t, cf2.Load(&d2)) + require.Equal(t, "cosmos", d2.Hello) + }) + } +} diff --git a/ignite/pkg/confile/encoding.go b/ignite/pkg/confile/encoding.go new file mode 100644 index 0000000..a9445f7 --- /dev/null +++ b/ignite/pkg/confile/encoding.go @@ -0,0 +1,75 @@ +package confile + +import ( + "encoding/json" + "io" + + "github.com/goccy/go-yaml" + "github.com/pelletier/go-toml" +) + +// EncodingCreator defines a constructor to create an EncodingCreator from +// an io.ReadWriter. 
+// NewEncoding returns a new EncodeDecoder implementation from e and d.
+type TOMLEncodingCreator struct{} + +func (e *TOMLEncodingCreator) Create(rw io.ReadWriter) EncodeDecoder { + return NewEncoding(toml.NewEncoder(rw), toml.NewDecoder(rw)) +} diff --git a/ignite/pkg/cosmosaccount/cosmosaccount.go b/ignite/pkg/cosmosaccount/cosmosaccount.go new file mode 100644 index 0000000..fde63e8 --- /dev/null +++ b/ignite/pkg/cosmosaccount/cosmosaccount.go @@ -0,0 +1,369 @@ +package cosmosaccount + +import ( + "bufio" + "encoding/hex" + "fmt" + "os" + + dkeyring "github.com/99designs/keyring" + "github.com/cosmos/go-bip39" + + addresscodec "cosmossdk.io/core/address" + + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/codec/address" + "github.com/cosmos/cosmos-sdk/codec/types" + cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" + "github.com/cosmos/cosmos-sdk/crypto/hd" + "github.com/cosmos/cosmos-sdk/crypto/keyring" + sdktypes "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +const ( + // KeyringServiceName used for the name of keyring in OS backend. + KeyringServiceName = "ignite" + + // DefaultAccount is the name of the default account. + DefaultAccount = "default" +) + +// KeyringHome used to store account related data. +var KeyringHome = os.ExpandEnv("$HOME/.ignite/accounts") + +var ErrAccountExists = errors.New("account already exists") + +const ( + CoinTypeCosmos = sdktypes.CoinType + AccountPrefixCosmos = "cosmos" +) + +// KeyringBackend is the backend for where keys are stored. +type KeyringBackend string + +const ( + // KeyringTest is the test keyring backend. With this backend, your keys will be + // stored under your app's data dir. + KeyringTest KeyringBackend = "test" + + // KeyringOS is the OS keyring backend. With this backend, your keys will be + // stored in your operating system's secured keyring. 
+ KeyringOS KeyringBackend = "os" + + // KeyringMemory is in memory keyring backend, your keys will be stored in application memory. + KeyringMemory KeyringBackend = "memory" +) + +// Registry for accounts. +type Registry struct { + homePath string + keyringServiceName string + keyringBackend KeyringBackend + addressCodec addresscodec.Codec + coinType uint32 + + Keyring keyring.Keyring +} + +// Option configures your registry. +type Option func(*Registry) + +func WithHome(path string) Option { + return func(c *Registry) { + c.homePath = path + } +} + +func WithKeyringServiceName(name string) Option { + return func(c *Registry) { + c.keyringServiceName = name + } +} + +func WithKeyringBackend(backend KeyringBackend) Option { + return func(c *Registry) { + c.keyringBackend = backend + } +} + +func WithBech32Prefix(prefix string) Option { + return func(c *Registry) { + c.addressCodec = address.NewBech32Codec(prefix) + } +} + +func WithCoinType(coinType uint32) Option { + return func(c *Registry) { + c.coinType = coinType + } +} + +// New creates a new registry to manage accounts. 
+// EnsureDefaultAccount ensures that the default account exists.
+func (r Registry) EnsureDefaultAccount() error { + _, err := r.GetByName(DefaultAccount) + + var accErr *AccountDoesNotExistError + if errors.As(err, &accErr) { + _, _, err = r.Create(DefaultAccount) + return err + } + + return err +} + +// Create creates a new account with name. +func (r Registry) Create(name string) (acc Account, mnemonic string, err error) { + if _, err = r.GetByName(name); err == nil { + return Account{}, "", ErrAccountExists + } + var accErr *AccountDoesNotExistError + if !errors.As(err, &accErr) { + return Account{}, "", err + } + entropySeed, err := bip39.NewEntropy(256) + if err != nil { + return Account{}, "", err + } + mnemonic, err = bip39.NewMnemonic(entropySeed) + if err != nil { + return Account{}, "", err + } + algo, err := r.algo() + if err != nil { + return Account{}, "", err + } + record, err := r.Keyring.NewAccount(name, mnemonic, "", r.hdPath(), algo) + if err != nil { + return Account{}, "", err + } + + acc = Account{ + Name: name, + Record: record, + } + + return acc, mnemonic, nil +} + +// Import imports an existing account with name and passphrase and secret where secret can be a +// mnemonic or a private key. +func (r Registry) Import(name, secret, passphrase string) (Account, error) { + _, err := r.GetByName(name) + if err == nil { + return Account{}, ErrAccountExists + } + var accErr *AccountDoesNotExistError + if !errors.As(err, &accErr) { + return Account{}, err + } + + if bip39.IsMnemonicValid(secret) { + algo, err := r.algo() + if err != nil { + return Account{}, err + } + _, err = r.Keyring.NewAccount(name, secret, passphrase, r.hdPath(), algo) + if err != nil { + return Account{}, err + } + } else if err := r.Keyring.ImportPrivKey(name, secret, passphrase); err != nil { + return Account{}, err + } + + return r.GetByName(name) +} + +// Export exports an account as a private key. 
+func (r Registry) Export(name, passphrase string) (key string, err error) { + if _, err = r.GetByName(name); err != nil { + return "", err + } + + return r.Keyring.ExportPrivKeyArmor(name, passphrase) +} + +// ExportHex exports an account as a private key in hex. +func (r Registry) ExportHex(name, passphrase string) (hex string, err error) { + if _, err = r.GetByName(name); err != nil { + return "", err + } + + return unsafeExportPrivKeyHex(r.Keyring, name, passphrase) +} + +func unsafeExportPrivKeyHex(kr keyring.Keyring, uid, passphrase string) (privKey string, err error) { + priv, err := kr.ExportPrivKeyArmor(uid, passphrase) + if err != nil { + return "", err + } + + return hex.EncodeToString([]byte(priv)), nil +} + +// GetByName returns an account by its name. +func (r Registry) GetByName(name string) (Account, error) { + record, err := r.Keyring.Key(name) + if errors.Is(err, dkeyring.ErrKeyNotFound) || errors.Is(err, sdkerrors.ErrKeyNotFound) { + return Account{}, &AccountDoesNotExistError{name} + } + if err != nil { + return Account{}, err + } + + acc := Account{ + Name: name, + Record: record, + } + + return acc, nil +} + +// GetByAddress returns an account by its address. +func (r Registry) GetByAddress(address string) (Account, error) { + sdkAddr, err := r.addressCodec.StringToBytes(address) + if err != nil { + return Account{}, err + } + record, err := r.Keyring.KeyByAddress(sdktypes.AccAddress(sdkAddr)) + if errors.Is(err, dkeyring.ErrKeyNotFound) || errors.Is(err, sdkerrors.ErrKeyNotFound) { + return Account{}, &AccountDoesNotExistError{address} + } + if err != nil { + return Account{}, err + } + return Account{ + Name: record.Name, + Record: record, + }, nil +} + +// List lists all accounts. 
+func (r Registry) List() ([]Account, error) { + records, err := r.Keyring.List() + if err != nil { + return nil, err + } + + var accounts []Account + + for _, record := range records { + accounts = append(accounts, Account{ + Name: record.Name, + Record: record, + }) + } + + return accounts, nil +} + +// DeleteByName deletes an account by name. +func (r Registry) DeleteByName(name string) error { + err := r.Keyring.Delete(name) + if errors.Is(err, dkeyring.ErrKeyNotFound) { + return &AccountDoesNotExistError{name} + } + return err +} + +func (r Registry) hdPath() string { + return hd.CreateHDPath(r.coinType, 0, 0).String() +} + +func (r Registry) algo() (keyring.SignatureAlgo, error) { + algos, _ := r.Keyring.SupportedAlgorithms() + return keyring.NewSigningAlgoFromString(string(hd.Secp256k1Type), algos) +} + +type AccountDoesNotExistError struct { + Name string +} + +func (e *AccountDoesNotExistError) Error() string { + return fmt.Sprintf("account %q does not exist", e.Name) +} diff --git a/ignite/pkg/cosmosaccount/cosmosaccount_test.go b/ignite/pkg/cosmosaccount/cosmosaccount_test.go new file mode 100644 index 0000000..47a7279 --- /dev/null +++ b/ignite/pkg/cosmosaccount/cosmosaccount_test.go @@ -0,0 +1,72 @@ +package cosmosaccount_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosaccount" +) + +const testAccountName = "myTestAccount" + +func TestRegistry(t *testing.T) { + tmpDir := t.TempDir() + registry, err := cosmosaccount.New(cosmosaccount.WithHome(tmpDir)) + require.NoError(t, err) + + account, mnemonic, err := registry.Create(testAccountName) + require.NoError(t, err) + require.Equal(t, testAccountName, account.Name) + require.NotEmpty(t, account.Record.PubKey.Value) + + getAccount, err := registry.GetByName(testAccountName) + require.NoError(t, err) + require.Equal(t, getAccount, account) + + sdkaddr, _ := account.Record.GetAddress() + addr := sdkaddr.String() + getAccount, err = 
registry.GetByAddress(addr) + require.NoError(t, err) + require.Equal(t, getAccount.Record.PubKey, account.Record.PubKey) + require.Equal(t, getAccount.Name, testAccountName) + require.Equal(t, getAccount.Name, account.Name) + require.Equal(t, getAccount.Name, account.Record.Name) + + addr, err = account.Address("cosmos") + require.NoError(t, err) + getAccount, err = registry.GetByAddress(addr) + require.NoError(t, err) + require.Equal(t, getAccount.Record.PubKey, account.Record.PubKey) + require.Equal(t, getAccount.Name, testAccountName) + require.Equal(t, getAccount.Name, account.Name) + require.Equal(t, getAccount.Name, account.Record.Name) + + secondTmpDir := t.TempDir() + secondRegistry, err := cosmosaccount.New(cosmosaccount.WithHome(secondTmpDir)) + require.NoError(t, err) + + importedAccount, err := secondRegistry.Import(testAccountName, mnemonic, "") + require.NoError(t, err) + require.Equal(t, testAccountName, importedAccount.Name) + require.Equal(t, importedAccount.Record.PubKey, account.Record.PubKey) + + _, _, err = registry.Create("another one") + require.NoError(t, err) + list, err := registry.List() + require.NoError(t, err) + require.Equal(t, 2, len(list)) + + err = registry.DeleteByName(testAccountName) + require.NoError(t, err) + afterDeleteList, err := registry.List() + require.NoError(t, err) + require.Equal(t, 1, len(afterDeleteList)) + + _, err = registry.GetByName(testAccountName) + var expectedErr *cosmosaccount.AccountDoesNotExistError + require.ErrorAs(t, err, &expectedErr) + + _, err = registry.GetByAddress(addr) + require.ErrorAs(t, err, &expectedErr) +} diff --git a/ignite/pkg/cosmosanalysis/app/app.go b/ignite/pkg/cosmosanalysis/app/app.go new file mode 100644 index 0000000..514dba6 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/app/app.go @@ -0,0 +1,413 @@ +package app + +import ( + "context" + "go/ast" + "go/parser" + "go/token" + "os" + "path/filepath" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis" + 
+// FindRegisteredModules returns all the modules registered in the chain root.
+			// The modules registered by Cosmos SDK `runtime.App` are included
+			// when the app registers API modules through the `App` instance.
+	// When the runtime package doesn't exist, it means this is an older Cosmos SDK version,
+	// so all the module API registrations are defined within the user's app.
+ path := filepath.Join(cosmosPath, "runtime", "app.go") + if _, err := os.Stat(path); os.IsNotExist(err) { + return modules, nil + } + + f, _, err := xast.ParseFile(path) + if err != nil { + return nil, err + } + + imports := goanalysis.FormatImports(f) + err = xast.Inspect(f, func(n ast.Node) error { + if pkgs := findRegisterAPIRoutesRegistrations(n); pkgs != nil { + for _, p := range pkgs { + if m := imports[p]; m != "" { + modules = append(modules, m) + } + } + return xast.ErrStop + } + return nil + }) + if err != nil { + return nil, err + } + return modules, nil +} + +func discoverIBCModules(ibcPath string) ([]string, error) { + f, _, err := xast.ParseFile(ibcPath) + if err != nil { + return nil, err + } + + var ( + names []string + imports = goanalysis.FormatImports(f) + ) + err = xast.Inspect(f, func(n ast.Node) error { + fn, _ := n.(*ast.FuncDecl) + if fn == nil { + return nil + } + + if fn.Name.Name != "RegisterIBC" && fn.Name.Name != "AddIBCModuleManager" { + return nil + } + + for _, stmt := range fn.Body.List { + x, _ := stmt.(*ast.AssignStmt) + if x == nil { + continue + } + + if len(x.Rhs) == 0 { + continue + } + + c, _ := x.Rhs[0].(*ast.CompositeLit) + if c == nil { + continue + } + + s, _ := c.Type.(*ast.SelectorExpr) + if s == nil || s.Sel.Name != "AppModule" { + continue + } + + if m, _ := s.X.(*ast.Ident); m != nil { + names = append(names, m.Name) + } + } + + return xast.ErrStop + }) + if err != nil { + return nil, err + } + + var modules []string + for _, n := range names { + modules = append(modules, imports[n]) + } + return modules, nil +} + +func resolveCosmosPackagePath(chainRoot string) (string, error) { + modFile, err := gomodule.ParseAt(chainRoot) + if err != nil { + return "", err + } + + deps, err := gomodule.ResolveDependencies(modFile, false) + if err != nil { + return "", err + } + + var pkg string + for _, dep := range deps { + // dependencies are resolved, so we need to check for possible SDK forks + if 
cosmosver.CosmosSDKModulePathPattern.MatchString(dep.Path) { + pkg = dep.String() + break + } + } + + if pkg == "" { + return "", errors.New("cosmos-sdk package version not found") + } + + m, err := gomodule.FindModule(context.Background(), chainRoot, pkg) + if err != nil { + return "", err + } + return m.Dir, nil +} + +func findRegisterAPIRoutesRegistrations(n ast.Node) []string { + funcLitType, ok := n.(*ast.FuncDecl) + if !ok { + return nil + } + + if funcLitType.Name.Name != registerRoutesMethod { + return nil + } + + var packagesRegistered []string + for _, stmt := range funcLitType.Body.List { + exprStmt, ok := stmt.(*ast.ExprStmt) + if !ok { + continue + } + exprCall, ok := exprStmt.X.(*ast.CallExpr) + if !ok { + continue + } + exprFun, ok := exprCall.Fun.(*ast.SelectorExpr) + if !ok || exprFun.Sel.Name != "RegisterGRPCGatewayRoutes" { + continue + } + identType, ok := exprFun.X.(*ast.Ident) + if !ok { + continue + } + pkgName := identType.Name + if pkgName == "" { + continue + } + packagesRegistered = append(packagesRegistered, identType.Name) + } + return packagesRegistered +} + +func removeKeeperPkgPath(pkg string) string { + path := strings.TrimSuffix(pkg, "/keeper") + path = strings.TrimSuffix(path, "/controller") + return strings.TrimSuffix(path, "/host") +} + +func isRuntimeAppCalled(fn *ast.FuncDecl) bool { + if fn.Name.Name != registerRoutesMethod { + return false + } + + for _, stmt := range fn.Body.List { + exprStmt, ok := stmt.(*ast.ExprStmt) + if !ok { + continue + } + + exprCall, ok := exprStmt.X.(*ast.CallExpr) + if !ok { + continue + } + + exprFun, ok := exprCall.Fun.(*ast.SelectorExpr) + if !ok || exprFun.Sel.Name != registerRoutesMethod { + continue + } + + exprSel, ok := exprFun.X.(*ast.SelectorExpr) + if !ok || exprSel.Sel.Name != "App" { + continue + } + + identType, ok := exprSel.X.(*ast.Ident) + if !ok || identType.Name != "app" { + continue + } + + return true + } + + return false +} diff --git 
a/ignite/pkg/cosmosanalysis/app/app_test.go b/ignite/pkg/cosmosanalysis/app/app_test.go new file mode 100644 index 0000000..fb3b066 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/app/app_test.go @@ -0,0 +1,238 @@ +package app + +import ( + _ "embed" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/goanalysis" + "github.com/ignite/cli/v29/ignite/pkg/xast" +) + +var ( + //go:embed testdata/app_minimal.go + AppMinimalFile []byte + //go:embed testdata/app_generic.go + AppGenericFile []byte + //go:embed testdata/no_app.go + NoAppFile []byte + //go:embed testdata/two_app.go + TwoAppFile []byte + //go:embed testdata/app_di.go + AppDepinject []byte +) + +func TestCheckKeeper(t *testing.T) { + tests := []struct { + name string + appFile []byte + keeperName string + expectedError string + }{ + { + name: "minimal app", + appFile: AppMinimalFile, + keeperName: "FooKeeper", + }, + { + name: "generic app", + appFile: AppGenericFile, + keeperName: "FooKeeper", + }, + { + name: "no app", + appFile: NoAppFile, + keeperName: "FooKeeper", + expectedError: "app.go should contain a single app (got 0)", + }, + { + name: "two apps", + appFile: TwoAppFile, + keeperName: "FooKeeper", + expectedError: "app.go should contain a single app (got 2)", + }, + { + name: "app depinject", + appFile: AppDepinject, + keeperName: "FooKeeper", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tmpDir := t.TempDir() + tmpFile := filepath.Join(tmpDir, "app.go") + err := os.WriteFile(tmpFile, tt.appFile, 0o644) + require.NoError(t, err) + + err = CheckKeeper(tmpDir, tt.keeperName) + + if tt.expectedError != "" { + require.EqualError(t, err, tt.expectedError) + return + } + require.NoError(t, err) + }) + } +} + +func TestFindRegisteredModules(t *testing.T) { + basicModules := []string{ + "github.com/cosmos/cosmos-sdk/x/auth", + "github.com/cosmos/cosmos-sdk/x/bank", + "github.com/cosmos/cosmos-sdk/x/staking", + 
"github.com/cosmos/cosmos-sdk/x/gov", + "github.com/username/test/x/foo", + "github.com/cosmos/cosmos-sdk/x/auth/tx", + "github.com/cosmos/cosmos-sdk/client/grpc/tmservice", + "github.com/cosmos/cosmos-sdk/client/grpc/node", + } + + cases := []struct { + name string + path string + expectedModules []string + }{ + { + name: "new basic manager with only a app.go", + path: "testdata/modules/single_app", + expectedModules: append(basicModules, "github.com/cosmos/ibc-go/v7/modules/core"), + }, + { + name: "with runtime api routes", + path: "testdata/modules/runtime", + expectedModules: basicModules, + }, + { + name: "with app_config.go file", + path: "testdata/modules/app_config", + expectedModules: []string{ + "cosmossdk.io/x/circuit", + "cosmossdk.io/x/evidence", + "cosmossdk.io/x/feegrant/module", + "cosmossdk.io/x/upgrade", + "github.com/cosmos/cosmos-sdk/x/auth", + "github.com/cosmos/cosmos-sdk/x/auth/tx", + "github.com/cosmos/cosmos-sdk/x/auth/tx/config", + "github.com/cosmos/cosmos-sdk/x/auth/vesting", + "github.com/cosmos/cosmos-sdk/x/authz/module", + "github.com/cosmos/cosmos-sdk/x/bank", + "github.com/cosmos/cosmos-sdk/x/consensus", + "github.com/cosmos/cosmos-sdk/x/distribution", + "github.com/cosmos/cosmos-sdk/x/group/module", + "github.com/cosmos/cosmos-sdk/x/mint", + "github.com/cosmos/cosmos-sdk/x/params", + "github.com/cosmos/cosmos-sdk/x/slashing", + "github.com/cosmos/cosmos-sdk/x/staking", + "github.com/ignite/mars/x/mars", + "github.com/cosmos/cosmos-sdk/x/gov", + "github.com/username/test/x/foo", + "github.com/cosmos/cosmos-sdk/client/grpc/tmservice", + "github.com/cosmos/cosmos-sdk/client/grpc/node", + }, + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + got, err := FindRegisteredModules(tt.path) + require.NoError(t, err) + require.ElementsMatch(t, tt.expectedModules, got) + }) + } +} + +func TestDiscoverModules(t *testing.T) { + basicModules := []string{ + "github.com/cosmos/cosmos-sdk/x/auth", + 
"github.com/cosmos/cosmos-sdk/x/bank", + "github.com/cosmos/cosmos-sdk/x/staking", + "github.com/cosmos/cosmos-sdk/x/gov", + "github.com/username/test/x/foo", + "github.com/cosmos/cosmos-sdk/x/auth/tx", + "github.com/cosmos/cosmos-sdk/client/grpc/tmservice", + "github.com/cosmos/cosmos-sdk/client/grpc/node", + } + + cases := []struct { + name string + path string + expectedModules []string + }{ + { + name: "new basic manager with only a app.go", + path: "testdata/modules/single_app", + expectedModules: append(basicModules, "github.com/cosmos/ibc-go/v7/modules/core"), + }, + { + name: "with app_config.go file", + path: "testdata/modules/app_config", + expectedModules: basicModules, + }, + { + name: "with runtime api routes", + path: "testdata/modules/runtime", + expectedModules: basicModules, + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + appPkg, _, err := xast.ParseDir(tt.path) + require.NoError(t, err) + + got := make([]string, 0) + for _, f := range appPkg.Files { + fileImports := goanalysis.FormatImports(f) + modules, err := DiscoverModules(f, tt.path, fileImports) + require.NoError(t, err) + if modules != nil { + got = append(got, modules...) 
+ } + } + require.ElementsMatch(t, tt.expectedModules, got) + }) + } +} + +func Test_removeKeeperPkgPath(t *testing.T) { + tests := []struct { + name string + arg string + want string + }{ + { + name: "test controller keeper", + arg: "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts/controller/keeper", + want: "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts", + }, + { + name: "test controller", + arg: "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts/controller", + want: "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts", + }, + { + name: "test keeper", + arg: "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts/keeper", + want: "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts", + }, + { + name: "test controller keeper", + arg: "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts/host/controller/keeper", + want: "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts", + }, + { + name: "test host controller keeper", + arg: "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts/controller/host/keeper", + want: "github.com/cosmos/ibc-go/v5/modules/apps/27-interchain-accounts/controller", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := removeKeeperPkgPath(tt.arg) + require.Equal(t, tt.want, got) + }) + } +} diff --git a/ignite/pkg/cosmosanalysis/app/testdata/app_di.go b/ignite/pkg/cosmosanalysis/app/testdata/app_di.go new file mode 100644 index 0000000..4a13641 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/app/testdata/app_di.go @@ -0,0 +1,409 @@ +//go:build !hack-to-no-include-deps-in-ignite + +package simapp + +import ( + _ "embed" + "io" + "os" + "path/filepath" + + "cosmossdk.io/client/v2/autocli" + "cosmossdk.io/depinject" + "cosmossdk.io/log" + storetypes "cosmossdk.io/store/types" + "cosmossdk.io/x/evidence" + evidencekeeper "cosmossdk.io/x/evidence/keeper" + feegrantkeeper "cosmossdk.io/x/feegrant/keeper" 
+ feegrantmodule "cosmossdk.io/x/feegrant/module" + nftkeeper "cosmossdk.io/x/nft/keeper" + nftmodule "cosmossdk.io/x/nft/module" + "cosmossdk.io/x/upgrade" + upgradekeeper "cosmossdk.io/x/upgrade/keeper" + dbm "github.com/cosmos/cosmos-db" + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/runtime" + "github.com/cosmos/cosmos-sdk/server" + "github.com/cosmos/cosmos-sdk/server/api" + "github.com/cosmos/cosmos-sdk/server/config" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + "github.com/cosmos/cosmos-sdk/x/auth" + authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" + authsims "github.com/cosmos/cosmos-sdk/x/auth/simulation" + _ "github.com/cosmos/cosmos-sdk/x/auth/tx/config" // import for side-effects + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + "github.com/cosmos/cosmos-sdk/x/auth/vesting" + authzkeeper "github.com/cosmos/cosmos-sdk/x/authz/keeper" + authzmodule "github.com/cosmos/cosmos-sdk/x/authz/module" + "github.com/cosmos/cosmos-sdk/x/bank" + bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" + consensus "github.com/cosmos/cosmos-sdk/x/consensus" + consensuskeeper "github.com/cosmos/cosmos-sdk/x/consensus/keeper" + distr "github.com/cosmos/cosmos-sdk/x/distribution" + distrkeeper "github.com/cosmos/cosmos-sdk/x/distribution/keeper" + "github.com/cosmos/cosmos-sdk/x/genutil" + genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" + "github.com/cosmos/cosmos-sdk/x/gov" + govclient "github.com/cosmos/cosmos-sdk/x/gov/client" + govkeeper "github.com/cosmos/cosmos-sdk/x/gov/keeper" + groupkeeper "github.com/cosmos/cosmos-sdk/x/group/keeper" + groupmodule "github.com/cosmos/cosmos-sdk/x/group/module" + "github.com/cosmos/cosmos-sdk/x/mint" + mintkeeper 
"github.com/cosmos/cosmos-sdk/x/mint/keeper" + "github.com/cosmos/cosmos-sdk/x/params" + paramsclient "github.com/cosmos/cosmos-sdk/x/params/client" + paramskeeper "github.com/cosmos/cosmos-sdk/x/params/keeper" + paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" + "github.com/cosmos/cosmos-sdk/x/slashing" + slashingkeeper "github.com/cosmos/cosmos-sdk/x/slashing/keeper" + "github.com/cosmos/cosmos-sdk/x/staking" + stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper" + abci "github.com/tendermint/tendermint/abci/types" +) + +var ( + // DefaultNodeHome default home directories for the application daemon + DefaultNodeHome string + + // ModuleBasics defines the module BasicManager is in charge of setting up basic, + // non-dependant module elements, such as codec registration + // and genesis verification. + ModuleBasics = module.NewBasicManager( + auth.AppModuleBasic{}, + genutil.NewAppModuleBasic(genutiltypes.DefaultMessageValidator), + bank.AppModuleBasic{}, + staking.AppModuleBasic{}, + mint.AppModuleBasic{}, + distr.AppModuleBasic{}, + gov.NewAppModuleBasic( + []govclient.ProposalHandler{ + paramsclient.ProposalHandler, + }, + ), + params.AppModuleBasic{}, + slashing.AppModuleBasic{}, + feegrantmodule.AppModuleBasic{}, + upgrade.AppModuleBasic{}, + evidence.AppModuleBasic{}, + authzmodule.AppModuleBasic{}, + groupmodule.AppModuleBasic{}, + vesting.AppModuleBasic{}, + nftmodule.AppModuleBasic{}, + consensus.AppModuleBasic{}, + ) +) + +var ( + _ runtime.AppI = (*SimApp)(nil) + _ servertypes.Application = (*SimApp)(nil) +) + +// SimApp extends an ABCI application, but with most of its parameters exported. +// They are exported for convenience in creating helper functions, as object +// capabilities aren't needed for testing. 
+type SimApp struct { + *runtime.App + legacyAmino *codec.LegacyAmino + appCodec codec.Codec + txConfig client.TxConfig + interfaceRegistry codectypes.InterfaceRegistry + autoCliOpts autocli.AppOptions + + // keepers + AccountKeeper authkeeper.AccountKeeper + BankKeeper bankkeeper.Keeper + StakingKeeper *stakingkeeper.Keeper + SlashingKeeper slashingkeeper.Keeper + MintKeeper mintkeeper.Keeper + DistrKeeper distrkeeper.Keeper + GovKeeper *govkeeper.Keeper + UpgradeKeeper *upgradekeeper.Keeper + ParamsKeeper paramskeeper.Keeper + AuthzKeeper authzkeeper.Keeper + EvidenceKeeper evidencekeeper.Keeper + FeeGrantKeeper feegrantkeeper.Keeper + GroupKeeper groupkeeper.Keeper + NFTKeeper nftkeeper.Keeper + ConsensusParamsKeeper consensuskeeper.Keeper + FooKeeper foo.keeper + + // simulation manager + sm *module.SimulationManager +} + +func init() { + userHomeDir, err := os.UserHomeDir() + if err != nil { + panic(err) + } + + DefaultNodeHome = filepath.Join(userHomeDir, ".simapp") +} + +// NewSimApp returns a reference to an initialized SimApp. +func NewSimApp( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *SimApp { + var ( + app = &SimApp{} + appBuilder *runtime.AppBuilder + + // merge the AppConfig and other configuration in one config + appConfig = depinject.Configs( + AppConfig, + depinject.Supply( + // supply the application options + appOpts, + + // ADVANCED CONFIGURATION + + // + // AUTH + // + // For providing a custom function required in auth to generate custom account types + // add it below. By default the auth module uses simulation.RandomGenesisAccounts. + // + // authtypes.RandomGenesisAccountsFn(simulation.RandomGenesisAccounts), + + // For providing a custom a base account type add it below. + // By default the auth module uses authtypes.ProtoBaseAccount(). 
+ // + // func() sdk.AccountI { return authtypes.ProtoBaseAccount() }, + + // + // MINT + // + + // For providing a custom inflation function for x/mint add here your + // custom function that implements the minttypes.InflationCalculationFn + // interface. + ), + ) + ) + + if err := depinject.Inject(appConfig, + &appBuilder, + &app.appCodec, + &app.legacyAmino, + &app.txConfig, + &app.interfaceRegistry, + &app.autoCliOpts, + &app.AccountKeeper, + &app.BankKeeper, + &app.StakingKeeper, + &app.SlashingKeeper, + &app.MintKeeper, + &app.DistrKeeper, + &app.GovKeeper, + &app.UpgradeKeeper, + &app.ParamsKeeper, + &app.AuthzKeeper, + &app.EvidenceKeeper, + &app.FeeGrantKeeper, + &app.GroupKeeper, + &app.NFTKeeper, + &app.ConsensusParamsKeeper, + ); err != nil { + panic(err) + } + + // Below we could construct and set an application specific mempool and + // ABCI 1.0 PrepareProposal and ProcessProposal handlers. These defaults are + // already set in the SDK's BaseApp, this shows an example of how to override + // them. + // + // Example: + // + // app.App = appBuilder.Build(...) + // nonceMempool := mempool.NewSenderNonceMempool() + // abciPropHandler := NewDefaultProposalHandler(nonceMempool, app.App.BaseApp) + // + // app.App.BaseApp.SetMempool(nonceMempool) + // app.App.BaseApp.SetPrepareProposal(abciPropHandler.PrepareProposalHandler()) + // app.App.BaseApp.SetProcessProposal(abciPropHandler.ProcessProposalHandler()) + // + // Alternatively, you can construct BaseApp options, append those to + // baseAppOptions and pass them to the appBuilder. + // + // Example: + // + // prepareOpt = func(app *baseapp.BaseApp) { + // abciPropHandler := baseapp.NewDefaultProposalHandler(nonceMempool, app) + // app.SetPrepareProposal(abciPropHandler.PrepareProposalHandler()) + // } + // baseAppOptions = append(baseAppOptions, prepareOpt) + + app.App = appBuilder.Build(logger, db, traceStore, baseAppOptions...) 
+ + // register streaming services + if err := app.RegisterStreamingServices(appOpts, app.kvStoreKeys()); err != nil { + panic(err) + } + + /**** Module Options ****/ + + // RegisterUpgradeHandlers is used for registering any on-chain upgrades. + app.RegisterUpgradeHandlers() + + // create the simulation manager and define the order of the modules for deterministic simulations + // + // NOTE: this is not required apps that don't use the simulator for fuzz testing + // transactions + overrideModules := map[string]module.AppModuleSimulation{ + authtypes.ModuleName: auth.NewAppModule(app.appCodec, app.AccountKeeper, authsims.RandomGenesisAccounts, app.GetSubspace(authtypes.ModuleName)), + } + app.sm = module.NewSimulationManagerFromAppModules(app.ModuleManager.Modules, overrideModules) + + app.sm.RegisterStoreDecoders() + + // A custom InitChainer can be set if extra pre-init-genesis logic is required. + // By default, when using app wiring enabled module, this is not required. + // For instance, the upgrade module will set automatically the module version map in its init genesis thanks to app wiring. + // However, when registering a module manually (i.e. that does not support app wiring), the module version map + // must be set manually as follow. The upgrade module will de-duplicate the module version map. 
+ // + // app.SetInitChainer(func(ctx sdk.Context, req abci.RequestInitChain) abci.ResponseInitChain { + // app.UpgradeKeeper.SetModuleVersionMap(ctx, app.ModuleManager.GetVersionMap()) + // return app.App.InitChainer(ctx, req) + // }) + + if err := app.Load(loadLatest); err != nil { + panic(err) + } + + return app +} + +// Name returns the name of the App +func (app *SimApp) Name() string { return app.BaseApp.Name() } + +func (app *SimApp) BeginBlocker(sdk.Context, abci.RequestBeginBlock) abci.ResponseBeginBlock { + return abci.ResponseBeginBlock{} +} + +func (app *SimApp) EndBlocker(sdk.Context, abci.RequestEndBlock) abci.ResponseEndBlock { + return abci.ResponseEndBlock{} +} + +// LegacyAmino returns SimApp's amino codec. +// +// NOTE: This is solely to be used for testing purposes as it may be desirable +// for modules to register their own custom testing types. +func (app *SimApp) LegacyAmino() *codec.LegacyAmino { + return app.legacyAmino +} + +// AppCodec returns SimApp's app codec. +// +// NOTE: This is solely to be used for testing purposes as it may be desirable +// for modules to register their own custom testing types. +func (app *SimApp) AppCodec() codec.Codec { + return app.appCodec +} + +// InterfaceRegistry returns SimApp's InterfaceRegistry +func (app *SimApp) InterfaceRegistry() codectypes.InterfaceRegistry { + return app.interfaceRegistry +} + +// TxConfig returns SimApp's TxConfig +func (app *SimApp) TxConfig() client.TxConfig { + return app.txConfig +} + +// AutoCliOpts returns the autocli options for the app. +func (app *SimApp) AutoCliOpts() autocli.AppOptions { + return app.autoCliOpts +} + +// GetKey returns the KVStoreKey for the provided store key. +func (app *SimApp) GetKey(storeKey string) *storetypes.KVStoreKey { + sk := app.UnsafeFindStoreKey(storeKey) + kvStoreKey, ok := sk.(*storetypes.KVStoreKey) + if !ok { + return nil + } + return kvStoreKey +} + +// GetMemKey returns the MemoryStoreKey for the provided store key. 
+func (app *SimApp) GetMemKey(storeKey string) *storetypes.MemoryStoreKey { + key, ok := app.UnsafeFindStoreKey(storeKey).(*storetypes.MemoryStoreKey) + if !ok { + return nil + } + + return key +} + +// kvStoreKeys returns all the kv store keys registered inside App. +func (app *SimApp) kvStoreKeys() map[string]*storetypes.KVStoreKey { + keys := make(map[string]*storetypes.KVStoreKey) + for _, k := range app.GetStoreKeys() { + if kv, ok := k.(*storetypes.KVStoreKey); ok { + keys[kv.Name()] = kv + } + } + + return keys +} + +// GetSubspace returns a param subspace for a given module name. +func (app *SimApp) GetSubspace(moduleName string) paramstypes.Subspace { + subspace, _ := app.ParamsKeeper.GetSubspace(moduleName) + return subspace +} + +// SimulationManager implements the SimulationApp interface +func (app *SimApp) SimulationManager() *module.SimulationManager { + return app.sm +} + +// RegisterAPIRoutes registers all application module routes with the provided +// API server. +func (app *SimApp) RegisterAPIRoutes(apiSvr *api.Server, apiConfig config.APIConfig) { + app.App.RegisterAPIRoutes(apiSvr, apiConfig) + // register swagger API in app.go so that other applications can override easily + if err := server.RegisterSwaggerAPI(apiSvr.ClientCtx, apiSvr.Router, apiConfig.Swagger); err != nil { + panic(err) + } +} + +// GetMaccPerms returns a copy of the module account permissions +// +// NOTE: This is solely to be used for testing purposes. +func GetMaccPerms() map[string][]string { + dup := make(map[string][]string) + for _, perms := range moduleAccPerms { + dup[perms.Account] = perms.Permissions + } + + return dup +} + +// BlockedAddresses returns all the app's blocked account addresses. 
+func BlockedAddresses() map[string]bool { + result := make(map[string]bool) + + if len(blockAccAddrs) > 0 { + for _, addr := range blockAccAddrs { + result[addr] = true + } + } else { + for addr := range GetMaccPerms() { + result[addr] = true + } + } + + return result +} diff --git a/ignite/pkg/cosmosanalysis/app/testdata/app_generic.go b/ignite/pkg/cosmosanalysis/app/testdata/app_generic.go new file mode 100644 index 0000000..72370c0 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/app/testdata/app_generic.go @@ -0,0 +1,22 @@ +package foo + +import ( + "github.com/cosmos/cosmos-sdk/runtime" + sdk "github.com/cosmos/cosmos-sdk/types" + abci "github.com/tendermint/tendermint/abci/types" +) + +type Foo[T any] struct { + *runtime.App + + FooKeeper foo.keeper + i T +} + +func (f Foo[T]) BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) abci.ResponseBeginBlock { + return f.App.BeginBlocker(ctx, req) +} + +func (f Foo[T]) EndBlocker(ctx sdk.Context, req abci.RequestEndBlock) abci.ResponseEndBlock { + return f.App.EndBlocker(ctx, req) +} diff --git a/ignite/pkg/cosmosanalysis/app/testdata/app_minimal.go b/ignite/pkg/cosmosanalysis/app/testdata/app_minimal.go new file mode 100644 index 0000000..a6654ea --- /dev/null +++ b/ignite/pkg/cosmosanalysis/app/testdata/app_minimal.go @@ -0,0 +1,21 @@ +package foo + +import ( + "github.com/cosmos/cosmos-sdk/baseapp" + sdk "github.com/cosmos/cosmos-sdk/types" + abci "github.com/tendermint/tendermint/abci/types" +) + +type Foo struct { + *baseapp.BaseApp + + FooKeeper foo.keeper +} + +func (f Foo) BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) abci.ResponseBeginBlock { + return f.BaseApp.BeginBlocker(ctx, req) +} + +func (f Foo) EndBlocker(ctx sdk.Context, req abci.RequestEndBlock) abci.ResponseEndBlock { + return f.BaseApp.EndBlocker(ctx, req) +} diff --git a/ignite/pkg/cosmosanalysis/app/testdata/modules/app_config/app.go b/ignite/pkg/cosmosanalysis/app/testdata/modules/app_config/app.go new file mode 100644 index 
0000000..2b20bf0 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/app/testdata/modules/app_config/app.go @@ -0,0 +1,107 @@ +package app + +import ( + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/runtime" + "github.com/cosmos/cosmos-sdk/server" + "github.com/cosmos/cosmos-sdk/server/api" + "github.com/cosmos/cosmos-sdk/server/config" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" + bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" + govkeeper "github.com/cosmos/cosmos-sdk/x/gov/keeper" + paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" + stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper" + "github.com/gogo/protobuf/codec" + abci "github.com/tendermint/tendermint/abci/types" + fookeeper "github.com/username/test/x/foo/keeper" +) + +type Foo struct { + runtime.App + + AuthKeeper authkeeper.Keeper + BankKeeper bankkeeper.Keeper + StakingKeeper stakingkeeper.Keeper + GovKeeper govkeeper.Keeper + FooKeeper fookeeper.Keeper +} + +func (Foo) Name() string { + return "foo" +} + +func (Foo) BeginBlocker(sdk.Context, abci.RequestBeginBlock) abci.ResponseBeginBlock { + return abci.ResponseBeginBlock{} +} + +func (Foo) EndBlocker(sdk.Context, abci.RequestEndBlock) abci.ResponseEndBlock { + return abci.ResponseEndBlock{} +} + +func (Foo) RegisterAPIRoutes(apiSvr *api.Server, apiConfig config.APIConfig) { + _ = apiSvr.ClientCtx +} + +func (Foo) GetKey(storeKey string) *storetypes.KVStoreKey { return nil } + +func (Foo) TxConfig() client.TxConfig { return nil } + +func (Foo) AppCodec() codec.Codec { + return app.appCodec +} + +// GetKey returns the KVStoreKey for the provided store key. 
+func (Foo) GetKey(storeKey string) *storetypes.KVStoreKey { + sk := app.UnsafeFindStoreKey(storeKey) + kvStoreKey, ok := sk.(*storetypes.KVStoreKey) + if !ok { + return nil + } + return kvStoreKey +} + +// GetMemKey returns the MemoryStoreKey for the provided store key. +func (Foo) GetMemKey(storeKey string) *storetypes.MemoryStoreKey { + key, ok := app.UnsafeFindStoreKey(storeKey).(*storetypes.MemoryStoreKey) + if !ok { + return nil + } + + return key +} + +// kvStoreKeys returns all the kv store keys registered inside App. +func (Foo) kvStoreKeys() map[string]*storetypes.KVStoreKey { + keys := make(map[string]*storetypes.KVStoreKey) + for _, k := range app.GetStoreKeys() { + if kv, ok := k.(*storetypes.KVStoreKey); ok { + keys[kv.Name()] = kv + } + } + + return keys +} + +// GetSubspace returns a param subspace for a given module name. +func (Foo) GetSubspace(moduleName string) paramstypes.Subspace { + subspace, _ := app.ParamsKeeper.GetSubspace(moduleName) + return subspace +} + +// SimulationManager implements the SimulationApp interface +func (Foo) SimulationManager() *module.SimulationManager { + return app.sm +} + +// RegisterAPIRoutes registers all application module routes with the provided +// API server. 
+func (Foo) RegisterAPIRoutes(apiSvr *api.Server, apiConfig config.APIConfig) { + app.App.RegisterAPIRoutes(apiSvr, apiConfig) + // register swagger API in app.go so that other applications can override easily + if err := server.RegisterSwaggerAPI(apiSvr.ClientCtx, apiSvr.Router, apiConfig.Swagger); err != nil { + panic(err) + } +} diff --git a/ignite/pkg/cosmosanalysis/app/testdata/modules/app_config/app_config.go b/ignite/pkg/cosmosanalysis/app/testdata/modules/app_config/app_config.go new file mode 100644 index 0000000..3e8bbbe --- /dev/null +++ b/ignite/pkg/cosmosanalysis/app/testdata/modules/app_config/app_config.go @@ -0,0 +1,297 @@ +package app + +import ( + "time" + + runtimev1alpha1 "cosmossdk.io/api/cosmos/app/runtime/v1alpha1" + appv1alpha1 "cosmossdk.io/api/cosmos/app/v1alpha1" + authmodulev1 "cosmossdk.io/api/cosmos/auth/module/v1" + authzmodulev1 "cosmossdk.io/api/cosmos/authz/module/v1" + bankmodulev1 "cosmossdk.io/api/cosmos/bank/module/v1" + circuitmodulev1 "cosmossdk.io/api/cosmos/circuit/module/v1" + consensusmodulev1 "cosmossdk.io/api/cosmos/consensus/module/v1" + distrmodulev1 "cosmossdk.io/api/cosmos/distribution/module/v1" + evidencemodulev1 "cosmossdk.io/api/cosmos/evidence/module/v1" + feegrantmodulev1 "cosmossdk.io/api/cosmos/feegrant/module/v1" + genutilmodulev1 "cosmossdk.io/api/cosmos/genutil/module/v1" + govmodulev1 "cosmossdk.io/api/cosmos/gov/module/v1" + groupmodulev1 "cosmossdk.io/api/cosmos/group/module/v1" + mintmodulev1 "cosmossdk.io/api/cosmos/mint/module/v1" + paramsmodulev1 "cosmossdk.io/api/cosmos/params/module/v1" + slashingmodulev1 "cosmossdk.io/api/cosmos/slashing/module/v1" + stakingmodulev1 "cosmossdk.io/api/cosmos/staking/module/v1" + txconfigv1 "cosmossdk.io/api/cosmos/tx/config/v1" + upgrademodulev1 "cosmossdk.io/api/cosmos/upgrade/module/v1" + vestingmodulev1 "cosmossdk.io/api/cosmos/vesting/module/v1" + "cosmossdk.io/depinject" + "cosmossdk.io/depinject/appconfig" + _ "cosmossdk.io/x/circuit" // import for 
side-effects + circuittypes "cosmossdk.io/x/circuit/types" + _ "cosmossdk.io/x/evidence" // import for side-effects + evidencetypes "cosmossdk.io/x/evidence/types" + "cosmossdk.io/x/feegrant" + _ "cosmossdk.io/x/feegrant/module" // import for side-effects + _ "cosmossdk.io/x/upgrade" // import for side-effects + upgradetypes "cosmossdk.io/x/upgrade/types" + "github.com/cosmos/cosmos-sdk/runtime" + "github.com/cosmos/cosmos-sdk/types/module" + _ "github.com/cosmos/cosmos-sdk/x/auth/tx/config" // import for side-effects + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + _ "github.com/cosmos/cosmos-sdk/x/auth/vesting" // import for side-effects + vestingtypes "github.com/cosmos/cosmos-sdk/x/auth/vesting/types" + "github.com/cosmos/cosmos-sdk/x/authz" + _ "github.com/cosmos/cosmos-sdk/x/authz/module" // import for side-effects + _ "github.com/cosmos/cosmos-sdk/x/bank" // import for side-effects + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + _ "github.com/cosmos/cosmos-sdk/x/consensus" // import for side-effects + consensustypes "github.com/cosmos/cosmos-sdk/x/consensus/types" + _ "github.com/cosmos/cosmos-sdk/x/distribution" // import for side-effects + distrtypes "github.com/cosmos/cosmos-sdk/x/distribution/types" + "github.com/cosmos/cosmos-sdk/x/genutil" + genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" + "github.com/cosmos/cosmos-sdk/x/gov" + govtypes "github.com/cosmos/cosmos-sdk/x/gov/types" + "github.com/cosmos/cosmos-sdk/x/group" + _ "github.com/cosmos/cosmos-sdk/x/group/module" // import for side-effects + _ "github.com/cosmos/cosmos-sdk/x/mint" // import for side-effects + minttypes "github.com/cosmos/cosmos-sdk/x/mint/types" + _ "github.com/cosmos/cosmos-sdk/x/params" // import for side-effects + paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" + _ "github.com/cosmos/cosmos-sdk/x/slashing" // import for side-effects + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + _ 
"github.com/cosmos/cosmos-sdk/x/staking" // import for side-effects + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + capabilitytypes "github.com/cosmos/ibc-go/modules/capability/types" + icatypes "github.com/cosmos/ibc-go/v8/modules/apps/27-interchain-accounts/types" + ibcfeetypes "github.com/cosmos/ibc-go/v8/modules/apps/29-fee/types" + ibctransfertypes "github.com/cosmos/ibc-go/v8/modules/apps/transfer/types" + ibcexported "github.com/cosmos/ibc-go/v8/modules/core/exported" + "google.golang.org/protobuf/types/known/durationpb" + + _ "github.com/ignite/mars/x/mars" // import for side-effects + marsmoduletypes "github.com/ignite/mars/x/mars/types" +) + +var ( + // NOTE: The genutils module must occur after staking so that pools are + // properly initialized with tokens from genesis accounts. + // NOTE: The genutils module must also occur after auth so that it can access the params from auth. + // NOTE: Capability module must occur first so that it can initialize any capabilities + // so that other modules that want to create or claim capabilities afterwards in InitChain + // can do so safely. + genesisModuleOrder = []string{ + // cosmos sdk modules + authtypes.ModuleName, + banktypes.ModuleName, + distrtypes.ModuleName, + stakingtypes.ModuleName, + slashingtypes.ModuleName, + govtypes.ModuleName, + minttypes.ModuleName, + genutiltypes.ModuleName, + evidencetypes.ModuleName, + authz.ModuleName, + feegrant.ModuleName, + group.ModuleName, + paramstypes.ModuleName, + upgradetypes.ModuleName, + vestingtypes.ModuleName, + circuittypes.ModuleName, + // ibc modules + // capabilitytypes.ModuleName, + // ibcexported.ModuleName, + // ibctransfertypes.ModuleName, + // icatypes.ModuleName, + // chain modules + marsmoduletypes.ModuleName, + } + + // During begin block slashing happens after distr.BeginBlocker so that + // there is nothing left over in the validator fee pool, so as to keep the + // CanWithdrawInvariant invariant. 
+ // NOTE: staking module is required if HistoricalEntries param > 0 + // NOTE: capability module's beginblocker must come before any modules using capabilities (e.g. IBC) + beginBlockers = []string{ + // cosmos sdk modules + upgradetypes.ModuleName, + minttypes.ModuleName, + distrtypes.ModuleName, + slashingtypes.ModuleName, + evidencetypes.ModuleName, + stakingtypes.ModuleName, + authz.ModuleName, + // ibc modules + capabilitytypes.ModuleName, + ibcexported.ModuleName, + ibctransfertypes.ModuleName, + icatypes.ModuleName, + ibcfeetypes.ModuleName, + // chain modules + marsmoduletypes.ModuleName, + } + + endBlockers = []string{ + // cosmos sdk modules + govtypes.ModuleName, + stakingtypes.ModuleName, + feegrant.ModuleName, + group.ModuleName, + // ibc modules + ibcexported.ModuleName, + ibctransfertypes.ModuleName, + capabilitytypes.ModuleName, + icatypes.ModuleName, + ibcfeetypes.ModuleName, + // chain modules + marsmoduletypes.ModuleName, + } + + // module account permissions + moduleAccPerms = []*authmodulev1.ModuleAccountPermission{ + {Account: authtypes.FeeCollectorName}, + {Account: distrtypes.ModuleName}, + {Account: minttypes.ModuleName, Permissions: []string{authtypes.Minter}}, + {Account: stakingtypes.BondedPoolName, Permissions: []string{authtypes.Burner, stakingtypes.ModuleName}}, + {Account: stakingtypes.NotBondedPoolName, Permissions: []string{authtypes.Burner, stakingtypes.ModuleName}}, + {Account: govtypes.ModuleName, Permissions: []string{authtypes.Burner}}, + {Account: ibctransfertypes.ModuleName, Permissions: []string{authtypes.Minter, authtypes.Burner}}, + {Account: ibcfeetypes.ModuleName}, + {Account: icatypes.ModuleName}, + } + + // blocked account addresses + blockAccAddrs = []string{ + authtypes.FeeCollectorName, + distrtypes.ModuleName, + minttypes.ModuleName, + stakingtypes.BondedPoolName, + stakingtypes.NotBondedPoolName, + // We allow the following module accounts to receive funds: + // govtypes.ModuleName + } + + // AppConfig 
application configuration (used by depinject) + AppConfig = depinject.Configs(appconfig.Compose(&appv1alpha1.Config{ + Modules: []*appv1alpha1.ModuleConfig{ + { + Name: runtime.ModuleName, + Config: appconfig.WrapAny(&runtimev1alpha1.Module{ + AppName: Name, + BeginBlockers: beginBlockers, + EndBlockers: endBlockers, + InitGenesis: genesisModuleOrder, + OverrideStoreKeys: []*runtimev1alpha1.StoreKeyConfig{ + { + ModuleName: authtypes.ModuleName, + KvStoreKey: "acc", + }, + }, + // When ExportGenesis is not specified, the export genesis module order + // is equal to the init genesis order + // ExportGenesis: genesisModuleOrder, + // Uncomment if you want to set a custom migration order here. + // OrderMigrations: nil, + }), + }, + { + Name: authtypes.ModuleName, + Config: appconfig.WrapAny(&authmodulev1.Module{ + Bech32Prefix: AccountAddressPrefix, + ModuleAccountPermissions: moduleAccPerms, + // By default modules authority is the governance module. This is configurable with the following: + // Authority: "group", // A custom module authority can be set using a module name + // Authority: "cosmos1cwwv22j5ca08ggdv9c2uky355k908694z577tv", // or a specific address + }), + }, + { + Name: vestingtypes.ModuleName, + Config: appconfig.WrapAny(&vestingmodulev1.Module{}), + }, + { + Name: banktypes.ModuleName, + Config: appconfig.WrapAny(&bankmodulev1.Module{ + BlockedModuleAccountsOverride: blockAccAddrs, + }), + }, + { + Name: stakingtypes.ModuleName, + Config: appconfig.WrapAny(&stakingmodulev1.Module{ + // NOTE: specifying a prefix is only necessary when using bech32 addresses + // If not specfied, the auth Bech32Prefix appended with "valoper" and "valcons" is used by default + Bech32PrefixValidator: AccountAddressPrefix + "valoper", + Bech32PrefixConsensus: AccountAddressPrefix + "valcons", + }), + }, + { + Name: slashingtypes.ModuleName, + Config: appconfig.WrapAny(&slashingmodulev1.Module{}), + }, + { + Name: paramstypes.ModuleName, + Config: 
appconfig.WrapAny(¶msmodulev1.Module{}), + }, + { + Name: "tx", + Config: appconfig.WrapAny(&txconfigv1.Config{}), + }, + { + Name: genutiltypes.ModuleName, + Config: appconfig.WrapAny(&genutilmodulev1.Module{}), + }, + { + Name: authz.ModuleName, + Config: appconfig.WrapAny(&authzmodulev1.Module{}), + }, + { + Name: upgradetypes.ModuleName, + Config: appconfig.WrapAny(&upgrademodulev1.Module{}), + }, + { + Name: distrtypes.ModuleName, + Config: appconfig.WrapAny(&distrmodulev1.Module{}), + }, + { + Name: evidencetypes.ModuleName, + Config: appconfig.WrapAny(&evidencemodulev1.Module{}), + }, + { + Name: minttypes.ModuleName, + Config: appconfig.WrapAny(&mintmodulev1.Module{}), + }, + { + Name: group.ModuleName, + Config: appconfig.WrapAny(&groupmodulev1.Module{ + MaxExecutionPeriod: durationpb.New(time.Second * 1209600), + MaxMetadataLen: 255, + }), + }, + { + Name: feegrant.ModuleName, + Config: appconfig.WrapAny(&feegrantmodulev1.Module{}), + }, + { + Name: govtypes.ModuleName, + Config: appconfig.WrapAny(&govmodulev1.Module{}), + }, + { + Name: consensustypes.ModuleName, + Config: appconfig.WrapAny(&consensusmodulev1.Module{}), + }, + { + Name: circuittypes.ModuleName, + Config: appconfig.WrapAny(&circuitmodulev1.Module{}), + }, + { + Name: marsmoduletypes.ModuleName, + Config: appconfig.WrapAny(&marsmoduletypes.Module{}), + }, + }, + }), + depinject.Supply( + // supply custom module basics + map[string]module.AppModuleBasic{ + genutiltypes.ModuleName: genutil.NewAppModuleBasic(genutiltypes.DefaultMessageValidator), + govtypes.ModuleName: gov.NewAppModuleBasic(getGovProposalHandlers()), + }, + )) +) diff --git a/ignite/pkg/cosmosanalysis/app/testdata/modules/app_config/go.mod b/ignite/pkg/cosmosanalysis/app/testdata/modules/app_config/go.mod new file mode 100644 index 0000000..6a5fab3 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/app/testdata/modules/app_config/go.mod @@ -0,0 +1,30 @@ +module app + +go 1.20 + +require ( + cosmossdk.io/api v0.3.1 + 
cosmossdk.io/core v0.5.1 + cosmossdk.io/depinject v1.0.0-alpha.3 + cosmossdk.io/errors v1.0.0-beta.7 + cosmossdk.io/math v1.0.1 + github.com/bufbuild/buf v1.32.1 + github.com/cometbft/cometbft v0.37.2 + github.com/cometbft/cometbft-db v0.8.0 + github.com/cosmos/cosmos-proto v1.0.0-beta.2 + github.com/cosmos/cosmos-sdk v0.47.3 + github.com/cosmos/gogoproto v1.4.10 + github.com/cosmos/ibc-go/v7 v7.2.0 + github.com/golang/protobuf v1.5.4 + github.com/gorilla/mux v1.8.0 + github.com/grpc-ecosystem/grpc-gateway v1.16.0 + github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.2 + github.com/spf13/cast v1.5.1 + github.com/spf13/cobra v1.8.0 + github.com/spf13/pflag v1.0.5 + github.com/stretchr/testify v1.9.0 + google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 + google.golang.org/grpc v1.64.0 + google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0 + google.golang.org/protobuf v1.34.1 +) diff --git a/ignite/pkg/cosmosanalysis/app/testdata/modules/runtime/app.go b/ignite/pkg/cosmosanalysis/app/testdata/modules/runtime/app.go new file mode 100644 index 0000000..1c5c052 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/app/testdata/modules/runtime/app.go @@ -0,0 +1,131 @@ +package app + +import ( + "cosmossdk.io/api/tendermint/abci" + "cosmossdk.io/client/v2/autocli" + "github.com/cosmos/cosmos-sdk/client" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/runtime" + "github.com/cosmos/cosmos-sdk/server" + "github.com/cosmos/cosmos-sdk/server/api" + "github.com/cosmos/cosmos-sdk/server/config" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + "github.com/cosmos/cosmos-sdk/x/auth" + authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" + "github.com/cosmos/cosmos-sdk/x/bank" + bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" + "github.com/cosmos/cosmos-sdk/x/gov" + govclient "github.com/cosmos/cosmos-sdk/x/gov/client" + govkeeper 
"github.com/cosmos/cosmos-sdk/x/gov/keeper" + paramsclient "github.com/cosmos/cosmos-sdk/x/params/client" + paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" + "github.com/cosmos/cosmos-sdk/x/staking" + stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper" + "github.com/gogo/protobuf/codec" + foomodule "github.com/username/test/x/foo" + fookeeper "github.com/username/test/x/foo/keeper" +) + +// App modules are defined as NewBasicManager arguments +var ModuleBasics = module.NewBasicManager( + auth.AppModuleBasic{}, + bank.AppModuleBasic{}, + staking.AppModuleBasic{}, + gov.NewAppModuleBasic([]govclient.ProposalHandler{ + paramsclient.ProposalHandler, + }), + foomodule.AppModuleBasic{}, +) + +type Foo struct { + *runtime.App + + AuthKeeper authkeeper.Keeper + BankKeeper bankkeeper.Keeper + StakingKeeper stakingkeeper.Keeper + GovKeeper govkeeper.Keeper + FooKeeper fookeeper.Keeper +} + +func (Foo) Name() string { return "foo" } +func (Foo) InterfaceRegistry() codectypes.InterfaceRegistry { return nil } +func (Foo) TxConfig() client.TxConfig { return nil } +func (Foo) AutoCliOpts() autocli.AppOptions { return autocli.AppOptions{} } + +func (Foo) BeginBlocker(sdk.Context, abci.RequestBeginBlock) abci.ResponseBeginBlock { + return abci.ResponseBeginBlock{} +} + +func (Foo) EndBlocker(sdk.Context, abci.RequestEndBlock) abci.ResponseEndBlock { + return abci.ResponseEndBlock{} +} + +func (app *Foo) RegisterAPIRoutes(s *api.Server, cfg config.APIConfig) { + // This module should be discovered + foomodule.RegisterGRPCGatewayRoutes(s.ClientCtx, s.GRPCGatewayRouter) + // Runtime app modules for the current Cosmos SDK should be discovered too + app.App.RegisterAPIRoutes(apiSvr, apiConfig) +} + +func (Foo) GetKey(storeKey string) *storetypes.KVStoreKey { return nil } + +func (Foo) TxConfig() client.TxConfig { return nil } + +func (Foo) AppCodec() codec.Codec { + return app.appCodec +} + +// GetKey returns the KVStoreKey for the provided store key. 
+func (Foo) GetKey(storeKey string) *storetypes.KVStoreKey { + sk := app.UnsafeFindStoreKey(storeKey) + kvStoreKey, ok := sk.(*storetypes.KVStoreKey) + if !ok { + return nil + } + return kvStoreKey +} + +// GetMemKey returns the MemoryStoreKey for the provided store key. +func (Foo) GetMemKey(storeKey string) *storetypes.MemoryStoreKey { + key, ok := app.UnsafeFindStoreKey(storeKey).(*storetypes.MemoryStoreKey) + if !ok { + return nil + } + + return key +} + +// kvStoreKeys returns all the kv store keys registered inside App. +func (Foo) kvStoreKeys() map[string]*storetypes.KVStoreKey { + keys := make(map[string]*storetypes.KVStoreKey) + for _, k := range app.GetStoreKeys() { + if kv, ok := k.(*storetypes.KVStoreKey); ok { + keys[kv.Name()] = kv + } + } + + return keys +} + +// GetSubspace returns a param subspace for a given module name. +func (Foo) GetSubspace(moduleName string) paramstypes.Subspace { + subspace, _ := app.ParamsKeeper.GetSubspace(moduleName) + return subspace +} + +// SimulationManager implements the SimulationApp interface +func (Foo) SimulationManager() *module.SimulationManager { + return app.sm +} + +// RegisterAPIRoutes registers all application module routes with the provided +// API server. 
+func (Foo) RegisterAPIRoutes(apiSvr *api.Server, apiConfig config.APIConfig) { + app.App.RegisterAPIRoutes(apiSvr, apiConfig) + // register swagger API in app.go so that other applications can override easily + if err := server.RegisterSwaggerAPI(apiSvr.ClientCtx, apiSvr.Router, apiConfig.Swagger); err != nil { + panic(err) + } +} diff --git a/ignite/pkg/cosmosanalysis/app/testdata/modules/runtime/go.mod b/ignite/pkg/cosmosanalysis/app/testdata/modules/runtime/go.mod new file mode 100644 index 0000000..1720d96 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/app/testdata/modules/runtime/go.mod @@ -0,0 +1,37 @@ +module app + +go 1.20 + +require ( + cosmossdk.io/api v0.3.1 + cosmossdk.io/core v0.5.1 + cosmossdk.io/depinject v1.0.0-alpha.3 + cosmossdk.io/errors v1.0.0-beta.7 + cosmossdk.io/math v1.0.1 + github.com/bufbuild/buf v1.32.1 + github.com/cometbft/cometbft v0.37.2 + github.com/cometbft/cometbft-db v0.8.0 + github.com/cosmos/cosmos-proto v1.0.0-beta.2 + github.com/cosmos/cosmos-sdk v0.47.3 + github.com/cosmos/gogoproto v1.4.10 + github.com/cosmos/ibc-go/v7 v7.2.0 + github.com/golang/protobuf v1.5.4 + github.com/gorilla/mux v1.8.0 + github.com/grpc-ecosystem/grpc-gateway v1.16.0 + github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.2 + github.com/spf13/cast v1.5.1 + github.com/spf13/cobra v1.8.0 + github.com/spf13/pflag v1.0.5 + github.com/stretchr/testify v1.9.0 + google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 + google.golang.org/grpc v1.64.0 + google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0 + google.golang.org/protobuf v1.34.1 +) + +replace ( + // use cosmos fork of keyring + github.com/99designs/keyring => github.com/cosmos/keyring v1.2.0 + // replace broken goleveldb + github.com/syndtr/goleveldb => github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 +) diff --git a/ignite/pkg/cosmosanalysis/app/testdata/modules/single_app/app.go b/ignite/pkg/cosmosanalysis/app/testdata/modules/single_app/app.go new file mode 100644 index 
0000000..f419888 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/app/testdata/modules/single_app/app.go @@ -0,0 +1,109 @@ +package app + +import ( + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/server" + "github.com/cosmos/cosmos-sdk/server/api" + "github.com/cosmos/cosmos-sdk/server/config" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" + bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" + govkeeper "github.com/cosmos/cosmos-sdk/x/gov/keeper" + paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" + stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper" + ibckeeper "github.com/cosmos/ibc-go/v7/modules/core/keeper" + "github.com/gogo/protobuf/codec" + abci "github.com/tendermint/tendermint/abci/types" + fookeeper "github.com/username/test/x/foo/keeper" +) + +type Foo struct { + baseapp.BaseApp + + AuthKeeper authkeeper.Keeper + BankKeeper bankkeeper.Keeper + StakingKeeper stakingkeeper.Keeper + GovKeeper govkeeper.Keeper + FooKeeper fookeeper.Keeper + ibckeeper ibckeeper.Keeper +} + +func (Foo) Name() string { + return "foo" +} + +func (Foo) GetKey(storeKey string) *storetypes.KVStoreKey { return nil } + +func (Foo) TxConfig() client.TxConfig { return nil } + +func (Foo) BeginBlocker(sdk.Context, abci.RequestBeginBlock) abci.ResponseBeginBlock { + return abci.ResponseBeginBlock{} +} + +func (Foo) EndBlocker(sdk.Context, abci.RequestEndBlock) abci.ResponseEndBlock { + return abci.ResponseEndBlock{} +} + +func (Foo) RegisterAPIRoutes(apiSvr *api.Server, apiConfig config.APIConfig) { + _ = apiSvr.ClientCtx +} + +func (Foo) AppCodec() codec.Codec { + return app.appCodec +} + +// GetKey returns the KVStoreKey for the provided store key. 
+func (Foo) GetKey(storeKey string) *storetypes.KVStoreKey { + sk := app.UnsafeFindStoreKey(storeKey) + kvStoreKey, ok := sk.(*storetypes.KVStoreKey) + if !ok { + return nil + } + return kvStoreKey +} + +// GetMemKey returns the MemoryStoreKey for the provided store key. +func (Foo) GetMemKey(storeKey string) *storetypes.MemoryStoreKey { + key, ok := app.UnsafeFindStoreKey(storeKey).(*storetypes.MemoryStoreKey) + if !ok { + return nil + } + + return key +} + +// kvStoreKeys returns all the kv store keys registered inside App. +func (Foo) kvStoreKeys() map[string]*storetypes.KVStoreKey { + keys := make(map[string]*storetypes.KVStoreKey) + for _, k := range app.GetStoreKeys() { + if kv, ok := k.(*storetypes.KVStoreKey); ok { + keys[kv.Name()] = kv + } + } + + return keys +} + +// GetSubspace returns a param subspace for a given module name. +func (Foo) GetSubspace(moduleName string) paramstypes.Subspace { + subspace, _ := app.ParamsKeeper.GetSubspace(moduleName) + return subspace +} + +// SimulationManager implements the SimulationApp interface +func (Foo) SimulationManager() *module.SimulationManager { + return app.sm +} + +// RegisterAPIRoutes registers all application module routes with the provided +// API server. 
+func (Foo) RegisterAPIRoutes(apiSvr *api.Server, apiConfig config.APIConfig) { + app.App.RegisterAPIRoutes(apiSvr, apiConfig) + // register swagger API in app.go so that other applications can override easily + if err := server.RegisterSwaggerAPI(apiSvr.ClientCtx, apiSvr.Router, apiConfig.Swagger); err != nil { + panic(err) + } +} diff --git a/ignite/pkg/cosmosanalysis/app/testdata/modules/single_app/go.mod b/ignite/pkg/cosmosanalysis/app/testdata/modules/single_app/go.mod new file mode 100644 index 0000000..6a5fab3 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/app/testdata/modules/single_app/go.mod @@ -0,0 +1,30 @@ +module app + +go 1.20 + +require ( + cosmossdk.io/api v0.3.1 + cosmossdk.io/core v0.5.1 + cosmossdk.io/depinject v1.0.0-alpha.3 + cosmossdk.io/errors v1.0.0-beta.7 + cosmossdk.io/math v1.0.1 + github.com/bufbuild/buf v1.32.1 + github.com/cometbft/cometbft v0.37.2 + github.com/cometbft/cometbft-db v0.8.0 + github.com/cosmos/cosmos-proto v1.0.0-beta.2 + github.com/cosmos/cosmos-sdk v0.47.3 + github.com/cosmos/gogoproto v1.4.10 + github.com/cosmos/ibc-go/v7 v7.2.0 + github.com/golang/protobuf v1.5.4 + github.com/gorilla/mux v1.8.0 + github.com/grpc-ecosystem/grpc-gateway v1.16.0 + github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.2 + github.com/spf13/cast v1.5.1 + github.com/spf13/cobra v1.8.0 + github.com/spf13/pflag v1.0.5 + github.com/stretchr/testify v1.9.0 + google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 + google.golang.org/grpc v1.64.0 + google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0 + google.golang.org/protobuf v1.34.1 +) diff --git a/ignite/pkg/cosmosanalysis/app/testdata/no_app.go b/ignite/pkg/cosmosanalysis/app/testdata/no_app.go new file mode 100644 index 0000000..e402a91 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/app/testdata/no_app.go @@ -0,0 +1,5 @@ +package foo + +type Bar struct { + FooKeeper foo.keeper +} diff --git a/ignite/pkg/cosmosanalysis/app/testdata/two_app.go 
b/ignite/pkg/cosmosanalysis/app/testdata/two_app.go new file mode 100644 index 0000000..1e607f6 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/app/testdata/two_app.go @@ -0,0 +1,36 @@ +package foo + +import ( + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/runtime" + sdk "github.com/cosmos/cosmos-sdk/types" + abci "github.com/tendermint/tendermint/abci/types" +) + +type Foo struct { + *runtime.App + + FooKeeper foo.keeper +} + +func (f Foo) BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) abci.ResponseBeginBlock { + return f.App.BeginBlocker(ctx, req) +} + +func (f Foo) EndBlocker(ctx sdk.Context, req abci.RequestEndBlock) abci.ResponseEndBlock { + return f.App.EndBlocker(ctx, req) +} + +type Bar struct { + *baseapp.BaseApp + + FooKeeper foo.keeper +} + +func (f Bar) BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) abci.ResponseBeginBlock { + return app.mm.BeginBlock(ctx, req) +} + +func (f Bar) EndBlocker(ctx sdk.Context, req abci.RequestEndBlock) abci.ResponseEndBlock { + return app.mm.EndBlock(ctx, req) +} diff --git a/ignite/pkg/cosmosanalysis/cosmosanalysis.go b/ignite/pkg/cosmosanalysis/cosmosanalysis.go new file mode 100644 index 0000000..8598bec --- /dev/null +++ b/ignite/pkg/cosmosanalysis/cosmosanalysis.go @@ -0,0 +1,453 @@ +// Package cosmosanalysis provides a toolset for statically analysing Cosmos SDK's +// source code and blockchain source codes based on the Cosmos SDK +package cosmosanalysis + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "os" + "path/filepath" + "strings" + + "golang.org/x/mod/modfile" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosver" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gomodule" +) + +const ( + tendermintModulePath = "github.com/cometbft/cometbft" + appFileName = "app.go" + defaultAppFilePath = "app/" + appFileName +) + +var AppEmbeddedTypes = []string{ + "github.com/cosmos/cosmos-sdk/runtime.App", + 
"github.com/cosmos/cosmos-sdk/baseapp.BaseApp", +} + +// implementation tracks the implementation of an interface for a given struct. +type implementation map[string]bool + +// DeepFindImplementation functions the same as FindImplementation, but walks recursively through the folder structure +// Useful if implementations might be in sub folders. +func DeepFindImplementation(modulePath string, interfaceList []string) (found []string, err error) { + err = filepath.Walk(modulePath, + func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if !info.IsDir() { + return nil + } + + currFound, err := FindImplementation(path, interfaceList) + if err != nil { + return err + } + + found = append(found, currFound...) + return nil + }) + if err != nil { + return nil, err + } + + return found, nil +} + +// FindImplementation finds the name of all types that implement the provided interface. +func FindImplementation(modulePath string, interfaceList []string) (found []string, err error) { + // parse go packages/files under path + fset := token.NewFileSet() + + pkgs, err := parser.ParseDir(fset, modulePath, nil, 0) + if err != nil { + return nil, err + } + for _, pkg := range pkgs { + var files []*ast.File + for _, f := range pkg.Files { + files = append(files, f) + } + found = append(found, findImplementationInFiles(files, interfaceList)...) + } + + return found, nil +} + +// FindImplementationInFile find all struct implements the interfaceList into an ast.File. +func FindImplementationInFile(n ast.Node, interfaceList []string) (found []string) { + // collect all structs under path to find out the ones that satisfies the implementation + structImplementations := make(map[string]implementation) + + findImplementation(n, func(methodName, structName string) bool { + // mark the implementation that this struct satisfies. 
+ if _, ok := structImplementations[structName]; !ok { + structImplementations[structName] = newImplementation(interfaceList) + } + + structImplementations[structName][methodName] = true + + return true + }) + + for name, impl := range structImplementations { + if checkImplementation(impl) { + found = append(found, name) + } + } + + return found +} + +// findImplementationInFiles find all struct implements the interfaceList into a list of ast.File. +func findImplementationInFiles(files []*ast.File, interfaceList []string) (found []string) { + // collect all structs under path to find out the ones that satisfies the implementation + structImplementations := make(map[string]implementation) + + for _, f := range files { + findImplementation(f, func(methodName, structName string) bool { + // mark the implementation that this struct satisfies. + if _, ok := structImplementations[structName]; !ok { + structImplementations[structName] = newImplementation(interfaceList) + } + + structImplementations[structName][methodName] = true + + return true + }) + } + + for name, impl := range structImplementations { + if checkImplementation(impl) { + found = append(found, name) + } + } + + return found +} + +// findImplementation parse the ast.Node and call the callback if is a struct implementation. +func findImplementation(f ast.Node, endCallback func(methodName, structName string) bool) { + ast.Inspect(f, func(n ast.Node) bool { + // look for struct methods. + methodDecl, ok := n.(*ast.FuncDecl) + if !ok { + return true + } + + // not a method. + if methodDecl.Recv == nil { + return true + } + + methodName := methodDecl.Name.Name + + // find the struct name that method belongs to. 
+ t := methodDecl.Recv.List[0].Type + var ident *ast.Ident + switch t := t.(type) { + case *ast.Ident: + // method with a value receiver + ident = t + case *ast.IndexExpr: + // generic method with a value receiver + ident = t.X.(*ast.Ident) + case *ast.StarExpr: + switch t := t.X.(type) { + case *ast.Ident: + // method with a pointer receiver + ident = t + case *ast.IndexExpr: + // generic method with a pointer receiver + ident = t.X.(*ast.Ident) + default: + return true + } + default: + return true + } + structName := ident.Name + + if endCallback != nil { + return endCallback(methodName, structName) + } + return true + }) +} + +// newImplementation returns a new object to parse implementation of an interface. +func newImplementation(interfaceList []string) implementation { + impl := make(implementation) + for _, m := range interfaceList { + impl[m] = false + } + return impl +} + +// checkImplementation checks if the entire implementation is satisfied. +func checkImplementation(r implementation) bool { + for _, ok := range r { + if !ok { + return false + } + } + return true +} + +// FindEmbed finds the name of all types that embed one of the target types in a given module path. +// targetEmbeddedTypes should be a list of fully qualified type names (e.g., "package/path.TypeName"). +func FindEmbed(modulePath string, targetEmbeddedTypes []string) (found []string, err error) { + // parse go packages/files under path + fset := token.NewFileSet() + + pkgs, err := parser.ParseDir(fset, modulePath, nil, 0) + if err != nil { + return nil, err + } + + for _, pkg := range pkgs { + for _, fileNode := range pkg.Files { + foundStructs := findStructsEmbeddingInFile(fileNode, targetEmbeddedTypes) + found = append(found, foundStructs...) + } + } + + // Deduplicate results as a struct might be found in multiple files of the same package (though unlikely for structs) + // or if the same struct name exists in different packages (FindEmbed currently doesn't qualify by package). 
+ if len(found) > 0 { + uniqueNamesMap := make(map[string]struct{}) + var uniqueResult []string + for _, name := range found { + if _, exists := uniqueNamesMap[name]; !exists { + uniqueNamesMap[name] = struct{}{} + uniqueResult = append(uniqueResult, name) + } + } + return uniqueResult, nil + } + + return found, nil +} + +// FindEmbedInFile finds all struct names in a given AST node that embed one of the target types. +// The AST node is expected to be an *ast.File. +// targetEmbeddedTypes should be a list of fully qualified type names (e.g., "package/path.TypeName"). +func FindEmbedInFile(n ast.Node, targetEmbeddedTypes []string) (found []string) { + fileNode, ok := n.(*ast.File) + if !ok { + return nil + } + + return findStructsEmbeddingInFile(fileNode, targetEmbeddedTypes) +} + +// findStructsEmbeddingInFile checks if any struct in the given AST file embeds one of the target types. +// targetTypes should be fully qualified (e.g., "package/path.TypeName"). +func findStructsEmbeddingInFile(fileNode *ast.File, targetEmbeddedTypes []string) (foundStructNames []string) { + // activeTargets maps local package name to a set of expected TypeNames from that package + activeTargets := make(map[string]map[string]struct{}) + + for _, targetFQN := range targetEmbeddedTypes { + dotIndex := strings.LastIndex(targetFQN, ".") + if dotIndex == -1 || dotIndex == 0 || dotIndex == len(targetFQN)-1 { + continue // invalid format + } + expectedImportPath := targetFQN[:dotIndex] + expectedTypeName := targetFQN[dotIndex+1:] + + for _, imp := range fileNode.Imports { + importPath := strings.Trim(imp.Path.Value, `"`) + if importPath == expectedImportPath { + localPkgName := "" + if imp.Name != nil { // alias used + localPkgName = imp.Name.Name + } else { + // default name (last part of the path) + // this is a common heuristic, e.g. 
"github.com/cosmos/cosmos-sdk/runtime" -> "runtime" + pathParts := strings.Split(importPath, "/") + localPkgName = pathParts[len(pathParts)-1] + } + + if _, ok := activeTargets[localPkgName]; !ok { + activeTargets[localPkgName] = make(map[string]struct{}) + } + activeTargets[localPkgName][expectedTypeName] = struct{}{} + break // found the import for this target, move to next targetFQN + } + } + } + + if len(activeTargets) == 0 { + return nil // none of the target packages are imported in this file + } + + ast.Inspect(fileNode, func(n ast.Node) bool { + typeSpec, ok := n.(*ast.TypeSpec) + if !ok { + return true + } + + structType, ok := typeSpec.Type.(*ast.StructType) + if !ok { + return true + } + + for _, field := range structType.Fields.List { + if len(field.Names) == 0 { // embedded field + var selExpr *ast.SelectorExpr + fieldType := field.Type + + if starExpr, isStar := fieldType.(*ast.StarExpr); isStar { + fieldType = starExpr.X // unwrap pointer + } + + if se, isSel := fieldType.(*ast.SelectorExpr); isSel { + selExpr = se + } else { + continue + } + + pkgIdent, okIdent := selExpr.X.(*ast.Ident) + if !okIdent { + continue + } + + pkgNameInCode := pkgIdent.Name + typeNameInCode := selExpr.Sel.Name + + if expectedTypeNamesSet, pkgFound := activeTargets[pkgNameInCode]; pkgFound { + if _, typeFound := expectedTypeNamesSet[typeNameInCode]; typeFound { + foundStructNames = append(foundStructNames, typeSpec.Name.Name) + } + } + } + } + return true + }) + + // deduplicate if a struct somehow embeds multiple (or the same) target type + if len(foundStructNames) > 0 { + uniqueNamesMap := make(map[string]struct{}) + var uniqueResult []string + for _, name := range foundStructNames { + if _, exists := uniqueNamesMap[name]; !exists { + uniqueNamesMap[name] = struct{}{} + uniqueResult = append(uniqueResult, name) + } + } + return uniqueResult + } + + return foundStructNames +} + +// ErrPathNotChain is returned by IsChainPath() when path is not a chain path. 
+type ErrPathNotChain struct { + path string + err error +} + +func (e ErrPathNotChain) Error() string { + return fmt.Sprintf("%s not a chain path: %v", e.path, e.err) +} + +// IsChainPath returns nil if path contains a cosmos chain. +func IsChainPath(path string) error { + errf := func(err error) error { + return ErrPathNotChain{path: path, err: err} + } + modFile, err := gomodule.ParseAt(path) + if err != nil { + return errf(err) + } + if err := ValidateGoMod(modFile); err != nil { + return errf(err) + } + return nil +} + +// ValidateGoMod check if the cosmos-sdk and the tendermint packages are imported. +func ValidateGoMod(module *modfile.File) error { + moduleCheck := map[string]bool{ + cosmosver.CosmosModulePath: true, + tendermintModulePath: true, + } + + for _, r := range module.Require { + delete(moduleCheck, r.Mod.Path) + } + for m := range moduleCheck { + return errors.Errorf("invalid go module, missing %s package dependency", m) + } + return nil +} + +// FindAppFilePath Looks for the app file that embeds the runtime.App or baseapp.BaseApp types. +func FindAppFilePath(chainRoot string) (path string, err error) { + var foundAppStructFiles []string + err = filepath.Walk(chainRoot, func(currentPath string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if info.IsDir() || filepath.Ext(info.Name()) != ".go" { + return nil + } + + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, currentPath, nil, 0) + if err != nil { + // log or handle error, e.g. 
by returning nil to continue walking + return nil + } + + structNames := findStructsEmbeddingInFile(f, AppEmbeddedTypes) + if len(structNames) > 0 { + foundAppStructFiles = append(foundAppStructFiles, currentPath) + } + return nil + }) + if err != nil { + return "", err + } + + numFound := len(foundAppStructFiles) + if numFound == 0 { + return "", errors.New("app.go file cannot be found") + } + + if numFound == 1 { + return foundAppStructFiles[0], nil + } + + // multiple files found, prefer one named appFileName ("app.go") + appFilePath := "" + for _, p := range foundAppStructFiles { + if filepath.Base(p) == appFileName { + if appFilePath != "" { + // more than one app.go found among candidates, fallback to default + return getDefaultAppFile(chainRoot) + } + appFilePath = p + } + } + + if appFilePath != "" { + return appFilePath, nil + } + + // no app.go found among the candidates, or multiple candidates and none are app.go, + // fallback to default app path logic + return getDefaultAppFile(chainRoot) +} + +// getDefaultAppFile returns the default app.go file path for a chain. 
+func getDefaultAppFile(chainRoot string) (string, error) { + path := filepath.Join(chainRoot, defaultAppFilePath) + _, err := os.Stat(path) + return path, errors.Wrap(err, "cannot locate your app.go") +} diff --git a/ignite/pkg/cosmosanalysis/cosmosanalysis_test.go b/ignite/pkg/cosmosanalysis/cosmosanalysis_test.go new file mode 100644 index 0000000..5603441 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/cosmosanalysis_test.go @@ -0,0 +1,374 @@ +package cosmosanalysis_test + +import ( + "go/ast" + "go/parser" + "go/token" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis" + "github.com/ignite/cli/v29/ignite/pkg/gomodule" +) + +var ( + expectedInterface = []string{"foo", "bar", "foobar"} + + file1 = []byte(` +package foo + +type Foo struct {} +func (f Foo) foo() {} +func (f Foo) bar() {} +func (f Foo) foobar() {} + +type Bar struct {} +func (b *Bar) foo() {} +func (b *Bar) bar() {} +func (b *Bar) barfoo() {} +`) + + file2 = []byte(` +package foo + +type Foobar struct {} +func (f Foobar) foo() {} +func (f Foobar) bar() {} +func (f Foobar) foobar() {} +func (f Foobar) barfoo() {} + +type Generic[T any] struct { + i T +} +func (Generic[T]) foo(){} +func (Generic[T]) bar() {} +func (Generic[T]) foobar() {} + +type GenericP[T any] struct { + i T +} +func (*GenericP[T]) foo(){} +func (*GenericP[T]) bar() {} +func (*GenericP[T]) foobar() {} +`) + + noImplementation = []byte(` +package foo +type Foo struct {} +func (f Foo) nofoo() {} +func (f Foo) nobar() {} +func (f Foo) nofoobar() {} +`) + + partialImplementation = []byte(` +package foo +type Foo struct {} +func (f Foo) foo() {} +func (f Foo) bar() {} +`) + restOfImplementation = []byte(` +package foo +func (f Foo) foobar() {} +`) + + appFile = []byte(` +package app + +import ( + runtime "github.com/cosmos/cosmos-sdk/runtime" +) + +type App struct { + *runtime.App +}`) + appTestFile = []byte(` +package app_test + +import ( + runtime 
"github.com/cosmos/cosmos-sdk/runtime" +) + +type App struct { + *runtime.App +}`) + + appFileSDKv47 = []byte(` +package app + +import "github.com/cosmos/cosmos-sdk/baseapp" + +type App struct { + baseapp.BaseApp +}`) + + embeddedTypeFile = []byte(` +package foo + +import ( + "github.com/cosmos/cosmos-sdk/baseapp" + runtime "github.com/cosmos/cosmos-sdk/runtime" +) + +type App1 struct { + *runtime.App +} + +type App2 struct { + baseapp.BaseApp +} + +type NotApp struct{} + +// AppPointer uses a pointer to an embedded type +type AppPointer struct { + *runtime.App +} + +// AppNoEmbed has the type but doesn't embed it +type AppNoEmbed struct { + a runtime.App +} + +// OtherEmbed embeds a different type from a target package +type OtherEmbed struct { + *runtime.Server +} +`) + appModuleGoMod = []byte(` +module example.com/foo + +go 1.19 + +require ( + github.com/cosmos/cosmos-sdk v0.47.0 +)`) +) + +func TestFindImplementation(t *testing.T) { + tmpDir := t.TempDir() + + f1 := filepath.Join(tmpDir, "1.go") + err := os.WriteFile(f1, file1, 0o644) + require.NoError(t, err) + + f2 := filepath.Join(tmpDir, "2.go") + err = os.WriteFile(f2, file2, 0o644) + require.NoError(t, err) + + // find in dir + found, err := cosmosanalysis.FindImplementation(tmpDir, expectedInterface) + require.NoError(t, err) + require.ElementsMatch(t, found, []string{"Foo", "Foobar", "Generic", "GenericP"}) + + // empty directory + emptyDir := t.TempDir() + found, err = cosmosanalysis.FindImplementation(emptyDir, expectedInterface) + require.NoError(t, err) + require.Empty(t, found) + + // can't provide file + _, err = cosmosanalysis.FindImplementation(filepath.Join(tmpDir, "1.go"), expectedInterface) + require.Error(t, err) +} + +func TestFindImplementationInSpreadInMultipleFiles(t *testing.T) { + tmpDir := t.TempDir() + + f1 := filepath.Join(tmpDir, "1.go") + err := os.WriteFile(f1, partialImplementation, 0o644) + require.NoError(t, err) + f2 := filepath.Join(tmpDir, "2.go") + err = os.WriteFile(f2, 
restOfImplementation, 0o644) + require.NoError(t, err) + + found, err := cosmosanalysis.FindImplementation(tmpDir, expectedInterface) + require.NoError(t, err) + require.Len(t, found, 1) + require.Contains(t, found, "Foo") +} + +func TestFindImplementationNotFound(t *testing.T) { + tmpDir1 := t.TempDir() + tmpDir2 := t.TempDir() + + noImplFile := filepath.Join(tmpDir1, "1.go") + err := os.WriteFile(noImplFile, noImplementation, 0o644) + require.NoError(t, err) + partialImplFile := filepath.Join(tmpDir2, "2.go") + err = os.WriteFile(partialImplFile, partialImplementation, 0o644) + require.NoError(t, err) + + // No implementation + found, err := cosmosanalysis.FindImplementation(tmpDir1, expectedInterface) + require.NoError(t, err) + require.Len(t, found, 0) + + // Partial implementation + found, err = cosmosanalysis.FindImplementation(tmpDir2, expectedInterface) + require.NoError(t, err) + require.Len(t, found, 0) +} + +func TestFindAppFilePath(t *testing.T) { + tmpDir1 := t.TempDir() + tmpDir2 := t.TempDir() + + appFolder1 := filepath.Join(tmpDir1, "app") + appFolder2 := filepath.Join(tmpDir1, "myOwnAppDir") + appFolder3 := filepath.Join(tmpDir2, "sdk47AppDir") + err := os.Mkdir(appFolder1, 0o700) + require.NoError(t, err) + err = os.Mkdir(appFolder2, 0o700) + require.NoError(t, err) + err = os.Mkdir(appFolder3, 0o700) + require.NoError(t, err) + + // No file + _, err = cosmosanalysis.FindAppFilePath(tmpDir1) + require.Equal(t, "app.go file cannot be found", err.Error()) + + // Only one file with app implementation + myOwnAppFilePath := filepath.Join(appFolder2, "my_own_app.go") + err = os.WriteFile(myOwnAppFilePath, appFile, 0o644) + require.NoError(t, err) + pathFound, err := cosmosanalysis.FindAppFilePath(tmpDir1) + require.NoError(t, err) + require.Equal(t, myOwnAppFilePath, pathFound) + + // With a test file added + appTestFilePath := filepath.Join(appFolder2, "my_own_app_test.go") + err = os.WriteFile(appTestFilePath, appTestFile, 0o644) + require.NoError(t, 
err) + _, err = cosmosanalysis.FindAppFilePath(tmpDir1) + require.Error(t, err) + require.Contains(t, err.Error(), "cannot locate your app.go") + + // With an additional app file (that is app.go) + appFilePath := filepath.Join(appFolder1, "app.go") + err = os.WriteFile(appFilePath, appFile, 0o644) + require.NoError(t, err) + pathFound, err = cosmosanalysis.FindAppFilePath(tmpDir1) + require.NoError(t, err) + require.Equal(t, appFilePath, pathFound) + + // With two app.go files + extraAppFilePath := filepath.Join(appFolder2, "app.go") + err = os.WriteFile(extraAppFilePath, appFile, 0o644) + require.NoError(t, err) + pathFound, err = cosmosanalysis.FindAppFilePath(tmpDir1) + require.NoError(t, err) + require.Equal(t, filepath.Join(appFolder1, "app.go"), pathFound) + + // With an app.go file from a Cosmos SDK v0.47 app + sdk47AppFilePath := filepath.Join(appFolder3, "app_sdk_47.go") + err = os.WriteFile(sdk47AppFilePath, appFileSDKv47, 0o644) + require.NoError(t, err) + pathFound, err = cosmosanalysis.FindAppFilePath(tmpDir2) + require.NoError(t, err) + require.Equal(t, sdk47AppFilePath, pathFound) +} + +func TestIsChainPath(t *testing.T) { + err := cosmosanalysis.IsChainPath(".") + require.ErrorAs(t, err, &cosmosanalysis.ErrPathNotChain{}) + + err = cosmosanalysis.IsChainPath("testdata/chain") + require.NoError(t, err) + + // testdata/chain-sdk-fork is a chain using a fork of the Cosmos SDK + // so it should still be considered as a chain as ValidateGoMod + // does not resolve the module file replacement. 
+ err = cosmosanalysis.IsChainPath("testdata/chain-sdk-fork") + require.NoError(t, err) +} + +func TestValidateGoMod(t *testing.T) { + modFile, err := gomodule.ParseAt("testdata/chain") + require.NoError(t, err) + err = cosmosanalysis.ValidateGoMod(modFile) + require.NoError(t, err) + + modFile, err = gomodule.ParseAt("testdata/chain-sdk-fork") + require.NoError(t, err) + err = cosmosanalysis.ValidateGoMod(modFile) + require.NoError(t, err) +} + +func TestFindEmbed(t *testing.T) { + tmpDir := t.TempDir() + + // Create a dummy go.mod for the test package + modPath := filepath.Join(tmpDir, "go.mod") + err := os.WriteFile(modPath, appModuleGoMod, 0o644) + require.NoError(t, err) + + // Create the test file + filePath := filepath.Join(tmpDir, "app.go") + err = os.WriteFile(filePath, embeddedTypeFile, 0o644) + require.NoError(t, err) + + targets := []string{ + "github.com/cosmos/cosmos-sdk/runtime.App", + "github.com/cosmos/cosmos-sdk/baseapp.BaseApp", + } + + found, err := cosmosanalysis.FindEmbed(tmpDir, targets) + require.NoError(t, err) + require.ElementsMatch(t, []string{"App1", "App2", "AppPointer"}, found) + + // Test with a directory that doesn't contain the target embeds + emptyDir := t.TempDir() + modPathEmpty := filepath.Join(emptyDir, "go.mod") + err = os.WriteFile(modPathEmpty, appModuleGoMod, 0o644) + require.NoError(t, err) + otherFilePath := filepath.Join(emptyDir, "other.go") + err = os.WriteFile(otherFilePath, []byte(`package foo; type Bar struct{}`), 0o644) + require.NoError(t, err) + + foundEmpty, err := cosmosanalysis.FindEmbed(emptyDir, targets) + require.NoError(t, err) + require.Empty(t, foundEmpty) + + // Test with non-existent directory + _, err = cosmosanalysis.FindEmbed(filepath.Join(tmpDir, "nonexistent"), targets) + require.Error(t, err) // Expect an error because parser.ParseDir will fail +} + +func TestFindEmbedInFile(t *testing.T) { + tmpDir := t.TempDir() + filePath := filepath.Join(tmpDir, "app.go") + err := os.WriteFile(filePath, 
embeddedTypeFile, 0o644) + require.NoError(t, err) + + fset := token.NewFileSet() + fileNode, err := parser.ParseFile(fset, filePath, nil, 0) + require.NoError(t, err) + + targets := []string{ + "github.com/cosmos/cosmos-sdk/runtime.App", + "github.com/cosmos/cosmos-sdk/baseapp.BaseApp", + "github.com/cosmos/cosmos-sdk/runtime.Server", // To test OtherEmbed + } + + found := cosmosanalysis.FindEmbedInFile(fileNode, targets) + require.ElementsMatch(t, []string{"App1", "App2", "AppPointer", "OtherEmbed"}, found) + + // Test with a node that is not an *ast.File (though the function expects it) + // Create a simple ident node + identNode := ast.NewIdent("SomeIdent") + foundNotFile := cosmosanalysis.FindEmbedInFile(identNode, targets) + require.Empty(t, foundNotFile) // Expect empty as it's not a file node + + // Test with a file that doesn't import/embed the target types + otherContent := `package bar; type Bar struct{}` + otherFilePath := filepath.Join(tmpDir, "other.go") + err = os.WriteFile(otherFilePath, []byte(otherContent), 0o644) + require.NoError(t, err) + otherFileNode, err := parser.ParseFile(fset, otherFilePath, nil, 0) + require.NoError(t, err) + foundOther := cosmosanalysis.FindEmbedInFile(otherFileNode, targets) + require.Empty(t, foundOther) +} diff --git a/ignite/pkg/cosmosanalysis/module/module.go b/ignite/pkg/cosmosanalysis/module/module.go new file mode 100644 index 0000000..c3e8b7d --- /dev/null +++ b/ignite/pkg/cosmosanalysis/module/module.go @@ -0,0 +1,419 @@ +package module + +import ( + "context" + "fmt" + "path" + "path/filepath" + "strings" + + "golang.org/x/mod/semver" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis" + "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis/app" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gomodule" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis" + "github.com/ignite/cli/v29/ignite/pkg/xstrings" +) + +// Msgs is a module import path-sdk msgs pair. 
+type Msgs map[string][]string + +// Module keeps metadata about a Cosmos SDK module. +type Module struct { + // Name of the module. + Name string `json:"name,omitempty"` + + // GoModulePath of the app where the module is defined. + GoModulePath string `json:"go_module_path,omitempty"` + + // Pkg holds the proto package info. + Pkg protoanalysis.Package `json:"package,omitempty"` + + // Msgs is a list of sdk.Msg implementation of the module. + Msgs []Msg `json:"messages,omitempty"` + + // HTTPQueries is a list of module queries. + HTTPQueries []HTTPQuery `json:"http_queries,omitempty"` + + // Types is a list of proto types that might be used by module. + Types []Type `json:"types,omitempty"` +} + +// Msg keeps metadata about an sdk.Msg implementation. +type Msg struct { + // Name of the type. + Name string `json:"name,omitempty"` + + // URI of the type. + URI string `json:"uri,omitempty"` + + // FilePath is the path of the proto file where message is defined. + FilePath string `json:"file_path,omitempty"` +} + +// HTTPQuery is an sdk Query. +type HTTPQuery struct { + // Name of the RPC func. + Name string `json:"name,omitempty"` + + // RequestType is the type of the request. + RequestType string `json:"request_type,omitempty"` + + // ResponseType is the type of the response. + ResponseType string `json:"response_type,omitempty"` + + // FullName of the query with service name and rpc func name. + FullName string `json:"full_name,omitempty"` + + // Rules keeps info about configured HTTP rules of RPC functions. + Rules []protoanalysis.HTTPRule `json:"rules,omitempty"` + + // Paginated indicates that the query is using pagination. + Paginated bool `json:"paginated,omitempty"` + + // FilePath is the path of the .proto file where message is defined at. + FilePath string `json:"file_path,omitempty"` +} + +// Type is a proto type that might be used by module. +type Type struct { + // Name of the type. 
+ Name string `json:"name,omitempty"` + + // FilePath is the path of the .proto file where message is defined at. + FilePath string `json:"file_path,omitempty"` +} + +type moduleDiscoverer struct { + sourcePath string + protoPath string + basegopath string + registeredModules []string +} + +// IsCosmosSDKPackage check if a Go import path is a Cosmos SDK package. +// These type of package have the "cosmossdk.io/x" prefix or "github.com/cosmos/cosmos-sdk" prefix. +func IsCosmosSDKPackage(path string) bool { + return strings.Contains(path, "cosmossdk.io/x/") || strings.Contains(path, "github.com/cosmos/cosmos-sdk") +} + +// Discover discovers and returns modules and their types that are registered in the app +// chainRoot is the root path of the chain +// sourcePath is the root path of the go module which the proto dir is from +// +// Discovery algorithm make use of registered modules and proto definitions to find relevant +// registered modules. It does so by: +// 1. Getting all the registered Go modules from the app. +// 2. Parsing the proto files to find services and messages. +// 3. Check if the proto services are implemented in any of the registered modules. +func Discover(ctx context.Context, chainRoot, sourcePath string, options ...DiscoverOption) ([]Module, error) { + var o discoverOptions + for _, apply := range options { + apply(&o) + } + + // find out base Go import path of the blockchain. + gm, err := gomodule.ParseAt(sourcePath) + if err != nil { + if errors.Is(err, gomodule.ErrGoModNotFound) { + return []Module{}, nil + } + return nil, err + } + + // Find all the modules registered by the app + registeredModules, err := app.FindRegisteredModules(chainRoot) + if err != nil { + return nil, err + } + + // Go import path of the app module + basegopath := gm.Module.Mod.Path + + // Keep the custom app's modules and filter out the third + // party ones that are not defined within the app. 
+ appModules := make([]string, 0) + for _, m := range registeredModules { + if strings.HasPrefix(m, basegopath) { + appModules = append(appModules, m) + } + } + + if len(appModules) == 0 { + return []Module{}, nil + } + + // Switch the proto path for "cosmossdk.io" module packages to the official Cosmos + // SDK package because the module packages doesn't contain the proto files. These + // files are only available from the Cosmos SDK package. + var protoPath string + if o.sdkDir != "" && IsCosmosSDKPackage(sourcePath) { + protoPath = switchCosmosSDKPackagePath(sourcePath, o.sdkDir) + } else { + protoPath = filepath.Join(sourcePath, o.protoDir) + } + + md := &moduleDiscoverer{ + protoPath: protoPath, + sourcePath: sourcePath, + basegopath: basegopath, + registeredModules: appModules, + } + + // Find proto packages that belong to modules under x/. + pkgs, err := md.findModuleProtoPkgs(ctx) + if err != nil { + return nil, err + } + + if len(pkgs) == 0 { + return []Module{}, nil + } + + var modules []Module + + for _, pkg := range pkgs { + m, err := md.discover(pkg) + if err != nil { + return nil, err + } + + if m.Name == "" { + continue + } + + modules = append(modules, m) + } + + return modules, nil +} + +// IsRootPath checks if a Go import path is a custom app module. +// Custom app modules are defined inside the "x" directory. +func IsRootPath(path string) bool { + return filepath.Base(filepath.Dir(path)) == "x" +} + +// RootPath returns the Go import path of a custom app module. +// An empty string is returned when the path doesn't belong to a custom module. +func RootPath(path string) string { + for !IsRootPath(path) { + if path = filepath.Dir(path); path == "." { + return "" + } + } + + return path +} + +// RootGoImportPath returns a Go import path with the version suffix removed. 
+func RootGoImportPath(importPath string) string { + if p, v := path.Split(importPath); semver.IsValid(v) { + return strings.TrimRight(p, "/") + } + + return importPath +} + +// discover discovers and sdk module by a proto pkg. +func (d *moduleDiscoverer) discover(pkg protoanalysis.Package) (Module, error) { + // Check if the proto package services are implemented + // by any of the modules registered by the app. + if ok, err := d.isPkgFromRegisteredModule(pkg); err != nil || !ok { + return Module{}, err + } + + if len(pkg.Services) == 0 { + return Module{}, nil + } + + m := Module{ + Name: pkg.ModuleName(), + GoModulePath: d.basegopath, + Pkg: pkg, + } + + // isType whether if protomsg can be added as an any Type to Module. + isType := func(protomsg protoanalysis.Message) bool { + // do not use GenesisState type. + if protomsg.Name == "GenesisState" { + return false + } + + // do not use if used as a request/return type of RPC. + for _, s := range pkg.Services { + for _, q := range s.RPCFuncs { + if q.RequestType == protomsg.Name || q.ReturnsType == protomsg.Name { + return false + } + } + } + + return true + } + + // fill types. + for _, protomsg := range pkg.Messages { + if !isType(protomsg) { + continue + } + + m.Types = append(m.Types, Type{ + Name: protomsg.Name, + FilePath: protomsg.Path, + }) + } + + // fill queries & messages. + for _, s := range pkg.Services { + for _, q := range s.RPCFuncs { + pkgmsg, ok := pkg.FindMessageByName(q.RequestType) + if !ok { + // no msg found in the proto defs corresponds to discovered sdk message. + // if it cannot be found, nothing to worry about, this means that it is used + // only internally and not open for actual use. + continue + } + + switch s.Name { + case "Msg": + + m.Msgs = append(m.Msgs, Msg{ + Name: q.RequestType, + URI: fmt.Sprintf("%s.%s", pkg.Name, q.RequestType), + FilePath: pkgmsg.Path, + }) + case "Query", "Service": + // no http rules means this query is not exposed as a REST endpoint. 
+ if len(q.HTTPRules) == 0 { + continue + } + + // check if the query is paginated. + isPaginated := false + for _, hr := range q.HTTPRules { + if hr.IsPaginated() { + isPaginated = true + break + } + } + + m.HTTPQueries = append(m.HTTPQueries, HTTPQuery{ + Name: q.Name, + FullName: s.Name + q.Name, + Rules: q.HTTPRules, + Paginated: isPaginated, + FilePath: pkgmsg.Path, + RequestType: q.RequestType, + ResponseType: q.ReturnsType, + }) + } + } + } + + return m, nil +} + +func (d *moduleDiscoverer) findModuleProtoPkgs(ctx context.Context) ([]protoanalysis.Package, error) { + // find out all proto packages inside blockchain. + allprotopkgs, err := protoanalysis.Parse(ctx, nil, d.protoPath) + if err != nil { + return nil, err + } + + // Remove version suffix from the Go import path if it exists. + // Proto files might omit the version in the Go import path even + // when the app module is using versioning. + basegopath := RootGoImportPath(d.basegopath) + + // filter out proto packages that do not represent x/ modules of blockchain. + var xprotopkgs []protoanalysis.Package + for _, pkg := range allprotopkgs { + if !strings.HasPrefix(pkg.GoImportPath(), basegopath) { + continue + } + + xprotopkgs = append(xprotopkgs, pkg) + } + + return xprotopkgs, nil +} + +// Checks if the proto package is implemented by any of the modules registered by the app. +func (d moduleDiscoverer) isPkgFromRegisteredModule(pkg protoanalysis.Package) (bool, error) { + // Get the Go module import defined by the proto package + goModuleImport := pkg.GoImportPath() + + // Try to get the Go import path of the custom app module that should implement + // the package RPC services. When the import path doesn't import a package + // from the standard "x" folder use the path defined by the proto package. + // Using the custom app module root path guarantees that if the RPC services + // implementation exists in the module it will always be found. 
+ if p := RootPath(goModuleImport); p != "" { + goModuleImport = p + } + + // Get a Go import path with the version suffix removed + rootGoPath := RootGoImportPath(d.basegopath) + + for _, m := range d.registeredModules { + // Extract the relative module path from the Go import path + implRelPath := strings.TrimPrefix(m, d.basegopath) + + // Handle the case where the Go module has a version + // suffix and the registered module doesn't. + if implRelPath == m { + implRelPath = strings.TrimPrefix(m, rootGoPath) + } + + // Absolute path to the app module + implPath := filepath.Join(d.sourcePath, implRelPath) + + for _, s := range pkg.Services { + // List of the RPC service method names defined by the current proto service + methods := make([]string, len(s.RPCFuncs)) + for i, rpcFunc := range s.RPCFuncs { + methods[i] = rpcFunc.Name + } + + // Find the Go implementation of the service defined in the proto package + found, err := cosmosanalysis.DeepFindImplementation(implPath, methods) + if err != nil { + return false, err + } + + // Sometimes the registered module definition is located in a different + // directory branch from where the RPC implementation is defined. In this + // case search the RPC implementation in all custom app module files. 
+ if len(found) == 0 && strings.HasPrefix(m, goModuleImport) { + altImplRelPath := strings.TrimPrefix(goModuleImport, d.basegopath) + if altImplRelPath == goModuleImport { + altImplRelPath = strings.TrimPrefix(goModuleImport, rootGoPath) + } + + altImplPath := filepath.Join(d.sourcePath, altImplRelPath) + + found, err = cosmosanalysis.DeepFindImplementation(altImplPath, methods) + if err != nil { + return false, err + } + } + + if len(found) > 0 { + return true, nil + } + } + } + + return false, nil +} + +func switchCosmosSDKPackagePath(srcPath, sdkDir string) string { + modName := xstrings.StringBetween(srcPath, "/x/", "@") + if modName == "" { + return srcPath + } + return filepath.Join(sdkDir, "proto", "cosmos", modName) +} diff --git a/ignite/pkg/cosmosanalysis/module/module_test.go b/ignite/pkg/cosmosanalysis/module/module_test.go new file mode 100644 index 0000000..183e3f3 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/module/module_test.go @@ -0,0 +1,345 @@ +package module_test + +import ( + "context" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis/module" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis" +) + +func newModule(relChainPath, goImportPath string) module.Module { + return module.Module{ + Name: "mars", + GoModulePath: goImportPath, + Pkg: protoanalysis.Package{ + Name: "tendermint.planet.mars", + Path: filepath.Join(relChainPath, "proto/planet/mars"), + Files: protoanalysis.Files{ + protoanalysis.File{ + Path: filepath.Join(relChainPath, "proto/planet/mars/mars.proto"), + Dependencies: []string{ + "cosmos/base/query/v1beta1/pagination.proto", + "google/api/annotations.proto", + }, + }, + }, + GoImportName: "github.com/tendermint/planet/x/mars/types", + Messages: []protoanalysis.Message{ + { + Name: "MsgMyMessageRequest", + Path: filepath.Join(relChainPath, "proto/planet/mars/mars.proto"), + HighestFieldNumber: 1, + Fields: map[string]string{ + "mytypefield": 
"string", + }, + }, + { + Name: "MsgMyMessageResponse", + Path: filepath.Join(relChainPath, "proto/planet/mars/mars.proto"), + HighestFieldNumber: 1, + Fields: map[string]string{ + "mytypefield": "string", + }, + }, + { + Name: "MsgBarRequest", + Path: filepath.Join(relChainPath, "proto/planet/mars/mars.proto"), + HighestFieldNumber: 1, + Fields: map[string]string{ + "mytypefield": "string", + }, + }, + { + Name: "MsgBarResponse", + Path: filepath.Join(relChainPath, "proto/planet/mars/mars.proto"), + HighestFieldNumber: 1, + Fields: map[string]string{ + "mytypefield": "string", + }, + }, + { + Name: "QueryMyQueryRequest", + Path: filepath.Join(relChainPath, "proto/planet/mars/mars.proto"), + HighestFieldNumber: 2, + Fields: map[string]string{ + "mytypefield": "string", + "pagination": "cosmos.base.query.v1beta1.PageRequest", + }, + }, + { + Name: "QueryMyQueryResponse", + Path: filepath.Join(relChainPath, "proto/planet/mars/mars.proto"), + HighestFieldNumber: 1, + Fields: map[string]string{"pagination": "cosmos.base.query.v1beta1.PageResponse"}, + }, + { + Name: "QueryFooRequest", + Path: filepath.Join(relChainPath, "proto/planet/mars/mars.proto"), + HighestFieldNumber: 0, + Fields: map[string]string{}, + }, + { + Name: "QueryFooResponse", + Path: filepath.Join(relChainPath, "proto/planet/mars/mars.proto"), + HighestFieldNumber: 1, + Fields: map[string]string{"bar": "string"}, + }, + }, + Services: []protoanalysis.Service{ + { + Name: "Msg", + RPCFuncs: []protoanalysis.RPCFunc{ + { + Name: "MyMessage", + RequestType: "MsgMyMessageRequest", + ReturnsType: "MsgMyMessageResponse", + }, + { + Name: "Bar", + RequestType: "MsgBarRequest", + ReturnsType: "MsgBarResponse", + }, + }, + }, + { + Name: "Query", + RPCFuncs: []protoanalysis.RPCFunc{ + { + Name: "MyQuery", + RequestType: "QueryMyQueryRequest", + ReturnsType: "QueryMyQueryResponse", + HTTPRules: []protoanalysis.HTTPRule{ + { + Endpoint: "/tendermint/mars/my_query/{mytypefield}", + Params: []string{"mytypefield"}, 
+ HasQuery: true, + QueryFields: map[string]string{ + "pagination": "cosmos.base.query.v1beta1.PageRequest", + }, + HasBody: false, + }, + }, + }, + { + Name: "Foo", + RequestType: "QueryFooRequest", + ReturnsType: "QueryFooResponse", + HTTPRules: []protoanalysis.HTTPRule{ + { + Endpoint: "/tendermint/mars/foo", + HasQuery: false, + HasBody: false, + }, + }, + }, + }, + }, + }, + }, + Msgs: []module.Msg{ + { + Name: "MsgMyMessageRequest", + URI: "tendermint.planet.mars.MsgMyMessageRequest", + FilePath: filepath.Join(relChainPath, "proto/planet/mars/mars.proto"), + }, + { + Name: "MsgBarRequest", + URI: "tendermint.planet.mars.MsgBarRequest", + FilePath: filepath.Join(relChainPath, "proto/planet/mars/mars.proto"), + }, + }, + HTTPQueries: []module.HTTPQuery{ + { + Name: "MyQuery", + FullName: "QueryMyQuery", + RequestType: "QueryMyQueryRequest", + ResponseType: "QueryMyQueryResponse", + Rules: []protoanalysis.HTTPRule{ + { + Endpoint: "/tendermint/mars/my_query/{mytypefield}", + Params: []string{"mytypefield"}, + HasQuery: true, + QueryFields: map[string]string{ + "pagination": "cosmos.base.query.v1beta1.PageRequest", + }, + HasBody: false, + }, + }, + Paginated: true, + FilePath: filepath.Join(relChainPath, "proto/planet/mars/mars.proto"), + }, + { + Name: "Foo", + FullName: "QueryFoo", + RequestType: "QueryFooRequest", + ResponseType: "QueryFooResponse", + Rules: []protoanalysis.HTTPRule{ + { + Endpoint: "/tendermint/mars/foo", + HasQuery: false, + HasBody: false, + }, + }, + FilePath: filepath.Join(relChainPath, "proto/planet/mars/mars.proto"), + }, + }, + Types: []module.Type(nil), + } +} + +func TestDiscover(t *testing.T) { + ctx := context.Background() + sourcePath := "testdata/planet" + testModule := newModule(sourcePath, "github.com/tendermint/planet") + + tests := []struct { + name, sourcePath, protoDir string + want []module.Module + }{ + { + name: "test valid", + sourcePath: sourcePath, + protoDir: "proto", + want: []module.Module{testModule}, + }, { + 
name: "test no proto folder", + sourcePath: sourcePath, + protoDir: "", + want: []module.Module{testModule}, + }, { + name: "test invalid proto folder", + sourcePath: sourcePath, + protoDir: "invalid", + want: []module.Module{}, + }, { + name: "test invalid folder", + sourcePath: "testdata/invalid", + protoDir: "", + want: []module.Module{}, + }, { + name: "test invalid main and proto folder", + sourcePath: "../../..", + protoDir: "proto", + want: []module.Module{}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + modules, err := module.Discover(ctx, sourcePath, tt.sourcePath, module.WithProtoDir(tt.protoDir)) + + require.NoError(t, err) + require.Equal(t, tt.want, modules) + }) + } +} + +func TestDiscoverWithAppV2(t *testing.T) { + ctx := context.Background() + sourcePath := "testdata/earth" + testModule := newModule(sourcePath, "github.com/tendermint/planet") + + tests := []struct { + name, protoDir string + want []module.Module + }{ + { + name: "test valid", + protoDir: "proto", + want: []module.Module{testModule}, + }, { + name: "test valid with version suffix", + protoDir: "proto", + want: []module.Module{testModule}, + }, { + name: "test no proto folder", + protoDir: "", + want: []module.Module{testModule}, + }, { + name: "test invalid proto folder", + protoDir: "invalid", + want: []module.Module{}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + modules, err := module.Discover(ctx, sourcePath, sourcePath, module.WithProtoDir(tt.protoDir)) + + require.NoError(t, err) + require.Equal(t, tt.want, modules) + }) + } +} + +func TestIsRootPath(t *testing.T) { + cases := []struct { + name, path string + want bool + }{ + { + name: "custom module import path", + path: "github.com/chain/x/my_module", + want: true, + }, + { + name: "generic import path", + path: "github.com/username/project", + want: false, + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + require.Equal(t, tt.want, 
module.IsRootPath(tt.path)) + }) + } +} + +func TestRootPath(t *testing.T) { + cases := []struct { + name, path, want string + }{ + { + name: "custom module import path", + path: "github.com/username/chain/x/my_module/child/folder", + want: "github.com/username/chain/x/my_module", + }, + { + name: "generic import path", + path: "github.com/username/project/child/folder", + want: "", + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + require.Equal(t, tt.want, module.RootPath(tt.path)) + }) + } +} + +func TestRootGoImportPath(t *testing.T) { + cases := []struct { + name, path, want string + }{ + { + name: "go import path with version suffix", + path: "github.com/username/chain/v2", + want: "github.com/username/chain", + }, + { + name: "go import path without version suffix", + path: "github.com/username/chain", + want: "github.com/username/chain", + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + require.Equal(t, tt.want, module.RootGoImportPath(tt.path)) + }) + } +} diff --git a/ignite/pkg/cosmosanalysis/module/options.go b/ignite/pkg/cosmosanalysis/module/options.go new file mode 100644 index 0000000..b1051d8 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/module/options.go @@ -0,0 +1,22 @@ +package module + +// DiscoverOption configures calls to Discovery function. +type DiscoverOption func(*discoverOptions) + +type discoverOptions struct { + protoDir, sdkDir string +} + +// WithProtoDir sets the relative proto directory path. +func WithProtoDir(path string) DiscoverOption { + return func(o *discoverOptions) { + o.protoDir = path + } +} + +// WithSDKDir sets the absolute directory path to the Cosmos SDK Go package. 
+func WithSDKDir(path string) DiscoverOption { + return func(o *discoverOptions) { + o.sdkDir = path + } +} diff --git a/ignite/pkg/cosmosanalysis/module/testdata/earth/app/app.go b/ignite/pkg/cosmosanalysis/module/testdata/earth/app/app.go new file mode 100644 index 0000000..d4078ec --- /dev/null +++ b/ignite/pkg/cosmosanalysis/module/testdata/earth/app/app.go @@ -0,0 +1,23 @@ +package app + +import ( + "github.com/cosmos/cosmos-sdk/runtime" + "github.com/cosmos/cosmos-sdk/types/module" + authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" + bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" + govkeeper "github.com/cosmos/cosmos-sdk/x/gov/keeper" + stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper" + marskeeper "github.com/tendermint/planet/x/mars/keeper" +) + +type Foo struct { + *runtime.App + + AuthKeeper authkeeper.Keeper + BankKeeper bankkeeper.Keeper + StakingKeeper stakingkeeper.Keeper + GovKeeper govkeeper.Keeper + MarsKeeper marskeeper.Keeper +} + +var ModuleBasics = module.NewBasicManager(foo.AppModuleBasic{}) diff --git a/ignite/pkg/cosmosanalysis/module/testdata/earth/app/app_config.go b/ignite/pkg/cosmosanalysis/module/testdata/earth/app/app_config.go new file mode 100644 index 0000000..c825a7e --- /dev/null +++ b/ignite/pkg/cosmosanalysis/module/testdata/earth/app/app_config.go @@ -0,0 +1,304 @@ +package app + +import ( + "time" + + runtimev1alpha1 "cosmossdk.io/api/cosmos/app/runtime/v1alpha1" + appv1alpha1 "cosmossdk.io/api/cosmos/app/v1alpha1" + authmodulev1 "cosmossdk.io/api/cosmos/auth/module/v1" + authzmodulev1 "cosmossdk.io/api/cosmos/authz/module/v1" + bankmodulev1 "cosmossdk.io/api/cosmos/bank/module/v1" + circuitmodulev1 "cosmossdk.io/api/cosmos/circuit/module/v1" + consensusmodulev1 "cosmossdk.io/api/cosmos/consensus/module/v1" + distrmodulev1 "cosmossdk.io/api/cosmos/distribution/module/v1" + evidencemodulev1 "cosmossdk.io/api/cosmos/evidence/module/v1" + feegrantmodulev1 "cosmossdk.io/api/cosmos/feegrant/module/v1" + 
genutilmodulev1 "cosmossdk.io/api/cosmos/genutil/module/v1" + govmodulev1 "cosmossdk.io/api/cosmos/gov/module/v1" + groupmodulev1 "cosmossdk.io/api/cosmos/group/module/v1" + mintmodulev1 "cosmossdk.io/api/cosmos/mint/module/v1" + paramsmodulev1 "cosmossdk.io/api/cosmos/params/module/v1" + slashingmodulev1 "cosmossdk.io/api/cosmos/slashing/module/v1" + stakingmodulev1 "cosmossdk.io/api/cosmos/staking/module/v1" + txconfigv1 "cosmossdk.io/api/cosmos/tx/config/v1" + upgrademodulev1 "cosmossdk.io/api/cosmos/upgrade/module/v1" + vestingmodulev1 "cosmossdk.io/api/cosmos/vesting/module/v1" + "cosmossdk.io/depinject" + "cosmossdk.io/depinject/appconfig" + _ "cosmossdk.io/x/circuit" // import for side-effects + circuittypes "cosmossdk.io/x/circuit/types" + _ "cosmossdk.io/x/evidence" // import for side-effects + evidencetypes "cosmossdk.io/x/evidence/types" + "cosmossdk.io/x/feegrant" + _ "cosmossdk.io/x/feegrant/module" // import for side-effects + _ "cosmossdk.io/x/upgrade" // import for side-effects + upgradetypes "cosmossdk.io/x/upgrade/types" + "github.com/cosmos/cosmos-sdk/runtime" + "github.com/cosmos/cosmos-sdk/types/module" + _ "github.com/cosmos/cosmos-sdk/x/auth/tx/config" // import for side-effects + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + _ "github.com/cosmos/cosmos-sdk/x/auth/vesting" // import for side-effects + vestingtypes "github.com/cosmos/cosmos-sdk/x/auth/vesting/types" + "github.com/cosmos/cosmos-sdk/x/authz" + _ "github.com/cosmos/cosmos-sdk/x/authz/module" // import for side-effects + _ "github.com/cosmos/cosmos-sdk/x/bank" // import for side-effects + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + _ "github.com/cosmos/cosmos-sdk/x/consensus" // import for side-effects + consensustypes "github.com/cosmos/cosmos-sdk/x/consensus/types" + _ "github.com/cosmos/cosmos-sdk/x/distribution" // import for side-effects + distrtypes "github.com/cosmos/cosmos-sdk/x/distribution/types" + "github.com/cosmos/cosmos-sdk/x/genutil" + 
genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" + "github.com/cosmos/cosmos-sdk/x/gov" + govtypes "github.com/cosmos/cosmos-sdk/x/gov/types" + "github.com/cosmos/cosmos-sdk/x/group" + _ "github.com/cosmos/cosmos-sdk/x/group/module" // import for side-effects + _ "github.com/cosmos/cosmos-sdk/x/mint" // import for side-effects + minttypes "github.com/cosmos/cosmos-sdk/x/mint/types" + _ "github.com/cosmos/cosmos-sdk/x/params" // import for side-effects + paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" + _ "github.com/cosmos/cosmos-sdk/x/slashing" // import for side-effects + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + _ "github.com/cosmos/cosmos-sdk/x/staking" // import for side-effects + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + _ "github.com/cosmos/ibc-go/modules/capability" // import for side-effects + capabilitytypes "github.com/cosmos/ibc-go/modules/capability/types" + _ "github.com/cosmos/ibc-go/v8/modules/apps/27-interchain-accounts" // import for side-effects + icatypes "github.com/cosmos/ibc-go/v8/modules/apps/27-interchain-accounts/types" + _ "github.com/cosmos/ibc-go/v8/modules/apps/29-fee" // import for side-effects + ibcfeetypes "github.com/cosmos/ibc-go/v8/modules/apps/29-fee/types" + ibctransfertypes "github.com/cosmos/ibc-go/v8/modules/apps/transfer/types" + ibcexported "github.com/cosmos/ibc-go/v8/modules/core/exported" + "google.golang.org/protobuf/types/known/durationpb" + + _ "github.com/tendermint/mars/x/mars" // import for side-effects + marsmoduletypes "github.com/tendermint/mars/x/mars/types" +) + +var ( + // NOTE: The genutils module must occur after staking so that pools are + // properly initialized with tokens from genesis accounts. + // NOTE: The genutils module must also occur after auth so that it can access the params from auth. 
+ // NOTE: Capability module must occur first so that it can initialize any capabilities + // so that other modules that want to create or claim capabilities afterwards in InitChain + // can do so safely. + genesisModuleOrder = []string{ + // cosmos sdk modules + authtypes.ModuleName, + banktypes.ModuleName, + distrtypes.ModuleName, + stakingtypes.ModuleName, + slashingtypes.ModuleName, + govtypes.ModuleName, + minttypes.ModuleName, + genutiltypes.ModuleName, + evidencetypes.ModuleName, + authz.ModuleName, + feegrant.ModuleName, + group.ModuleName, + paramstypes.ModuleName, + upgradetypes.ModuleName, + vestingtypes.ModuleName, + circuittypes.ModuleName, + // ibc modules + // capabilitytypes.ModuleName, + // ibcexported.ModuleName, + // ibctransfertypes.ModuleName, + // icatypes.ModuleName, + // chain modules + marsmoduletypes.ModuleName, + } + + // During begin block slashing happens after distr.BeginBlocker so that + // there is nothing left over in the validator fee pool, so as to keep the + // CanWithdrawInvariant invariant. + // NOTE: staking module is required if HistoricalEntries param > 0 + // NOTE: capability module's beginblocker must come before any modules using capabilities (e.g. 
IBC) + beginBlockers = []string{ + // cosmos sdk modules + minttypes.ModuleName, + distrtypes.ModuleName, + slashingtypes.ModuleName, + evidencetypes.ModuleName, + stakingtypes.ModuleName, + authz.ModuleName, + // ibc modules + capabilitytypes.ModuleName, + ibcexported.ModuleName, + ibctransfertypes.ModuleName, + icatypes.ModuleName, + ibcfeetypes.ModuleName, + // chain modules + marsmoduletypes.ModuleName, + } + + endBlockers = []string{ + // cosmos sdk modules + govtypes.ModuleName, + stakingtypes.ModuleName, + feegrant.ModuleName, + group.ModuleName, + // ibc modules + ibcexported.ModuleName, + ibctransfertypes.ModuleName, + capabilitytypes.ModuleName, + icatypes.ModuleName, + ibcfeetypes.ModuleName, + // chain modules + marsmoduletypes.ModuleName, + } + + preBlockers = []string{ + upgradetypes.ModuleName, + } + + // module account permissions + moduleAccPerms = []*authmodulev1.ModuleAccountPermission{ + {Account: authtypes.FeeCollectorName}, + {Account: distrtypes.ModuleName}, + {Account: minttypes.ModuleName, Permissions: []string{authtypes.Minter}}, + {Account: stakingtypes.BondedPoolName, Permissions: []string{authtypes.Burner, stakingtypes.ModuleName}}, + {Account: stakingtypes.NotBondedPoolName, Permissions: []string{authtypes.Burner, stakingtypes.ModuleName}}, + {Account: govtypes.ModuleName, Permissions: []string{authtypes.Burner}}, + {Account: ibctransfertypes.ModuleName, Permissions: []string{authtypes.Minter, authtypes.Burner}}, + {Account: ibcfeetypes.ModuleName}, + {Account: icatypes.ModuleName}, + } + + // blocked account addresses + blockAccAddrs = []string{ + authtypes.FeeCollectorName, + distrtypes.ModuleName, + minttypes.ModuleName, + stakingtypes.BondedPoolName, + stakingtypes.NotBondedPoolName, + // We allow the following module accounts to receive funds: + // govtypes.ModuleName + } + + // AppConfig application configuration (used by depinject) + AppConfig = depinject.Configs(appconfig.Compose(&appv1alpha1.Config{ + Modules: 
[]*appv1alpha1.ModuleConfig{ + { + Name: runtime.ModuleName, + Config: appconfig.WrapAny(&runtimev1alpha1.Module{ + AppName: Name, + PreBlockers: preBlockers, + BeginBlockers: beginBlockers, + EndBlockers: endBlockers, + InitGenesis: genesisModuleOrder, + OverrideStoreKeys: []*runtimev1alpha1.StoreKeyConfig{ + { + ModuleName: authtypes.ModuleName, + KvStoreKey: "acc", + }, + }, + // When ExportGenesis is not specified, the export genesis module order + // is equal to the init genesis order + // ExportGenesis: genesisModuleOrder, + // Uncomment if you want to set a custom migration order here. + // OrderMigrations: nil, + }), + }, + { + Name: authtypes.ModuleName, + Config: appconfig.WrapAny(&authmodulev1.Module{ + Bech32Prefix: AccountAddressPrefix, + ModuleAccountPermissions: moduleAccPerms, + // By default modules authority is the governance module. This is configurable with the following: + // Authority: "group", // A custom module authority can be set using a module name + // Authority: "cosmos1cwwv22j5ca08ggdv9c2uky355k908694z577tv", // or a specific address + }), + }, + { + Name: vestingtypes.ModuleName, + Config: appconfig.WrapAny(&vestingmodulev1.Module{}), + }, + { + Name: banktypes.ModuleName, + Config: appconfig.WrapAny(&bankmodulev1.Module{ + BlockedModuleAccountsOverride: blockAccAddrs, + }), + }, + { + Name: stakingtypes.ModuleName, + Config: appconfig.WrapAny(&stakingmodulev1.Module{ + // NOTE: specifying a prefix is only necessary when using bech32 addresses + // If not specfied, the auth Bech32Prefix appended with "valoper" and "valcons" is used by default + Bech32PrefixValidator: AccountAddressPrefix + "valoper", + Bech32PrefixConsensus: AccountAddressPrefix + "valcons", + }), + }, + { + Name: slashingtypes.ModuleName, + Config: appconfig.WrapAny(&slashingmodulev1.Module{}), + }, + { + Name: paramstypes.ModuleName, + Config: appconfig.WrapAny(¶msmodulev1.Module{}), + }, + { + Name: "tx", + Config: appconfig.WrapAny(&txconfigv1.Config{}), + }, + { 
+ Name: genutiltypes.ModuleName, + Config: appconfig.WrapAny(&genutilmodulev1.Module{}), + }, + { + Name: authz.ModuleName, + Config: appconfig.WrapAny(&authzmodulev1.Module{}), + }, + { + Name: upgradetypes.ModuleName, + Config: appconfig.WrapAny(&upgrademodulev1.Module{}), + }, + { + Name: distrtypes.ModuleName, + Config: appconfig.WrapAny(&distrmodulev1.Module{}), + }, + { + Name: evidencetypes.ModuleName, + Config: appconfig.WrapAny(&evidencemodulev1.Module{}), + }, + { + Name: minttypes.ModuleName, + Config: appconfig.WrapAny(&mintmodulev1.Module{}), + }, + { + Name: group.ModuleName, + Config: appconfig.WrapAny(&groupmodulev1.Module{ + MaxExecutionPeriod: durationpb.New(time.Second * 1209600), + MaxMetadataLen: 255, + }), + }, + { + Name: feegrant.ModuleName, + Config: appconfig.WrapAny(&feegrantmodulev1.Module{}), + }, + { + Name: govtypes.ModuleName, + Config: appconfig.WrapAny(&govmodulev1.Module{}), + }, + { + Name: consensustypes.ModuleName, + Config: appconfig.WrapAny(&consensusmodulev1.Module{}), + }, + { + Name: circuittypes.ModuleName, + Config: appconfig.WrapAny(&circuitmodulev1.Module{}), + }, + { + Name: marsmoduletypes.ModuleName, + Config: appconfig.WrapAny(&marsmoduletypes.Module{}), + }, + }, + }), + depinject.Supply( + // supply custom module basics + map[string]module.AppModuleBasic{ + genutiltypes.ModuleName: genutil.NewAppModuleBasic(genutiltypes.DefaultMessageValidator), + govtypes.ModuleName: gov.NewAppModuleBasic(getGovProposalHandlers()), + }, + )) +) diff --git a/ignite/pkg/cosmosanalysis/module/testdata/earth/go.mod b/ignite/pkg/cosmosanalysis/module/testdata/earth/go.mod new file mode 100644 index 0000000..e4102d2 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/module/testdata/earth/go.mod @@ -0,0 +1,3 @@ +module github.com/tendermint/planet + +go 1.18 diff --git a/ignite/pkg/cosmosanalysis/module/testdata/earth/proto/planet/mars/mars.proto b/ignite/pkg/cosmosanalysis/module/testdata/earth/proto/planet/mars/mars.proto new file mode 
100644 index 0000000..f6267cb --- /dev/null +++ b/ignite/pkg/cosmosanalysis/module/testdata/earth/proto/planet/mars/mars.proto @@ -0,0 +1,52 @@ +syntax = "proto3"; +package tendermint.planet.mars; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "google/api/annotations.proto"; +option go_package = "github.com/tendermint/planet/x/mars/types"; + +service Msg { + rpc MyMessage(MsgMyMessageRequest) returns (MsgMyMessageResponse); + + rpc Bar(MsgBarRequest) returns (MsgBarResponse); +} + +message MsgMyMessageRequest { + string mytypefield = 1; +} + +message MsgMyMessageResponse { + string mytypefield = 1; +} + +message MsgBarRequest { + string mytypefield = 1; +} + +message MsgBarResponse { + string mytypefield = 1; +} + +service Query { + rpc MyQuery(QueryMyQueryRequest) returns (QueryMyQueryResponse) { + option (google.api.http).get = "/tendermint/mars/my_query/{mytypefield}"; + } + + rpc Foo(QueryFooRequest) returns (QueryFooResponse) { + option (google.api.http).get = "/tendermint/mars/foo"; + } +} + +message QueryMyQueryRequest { + string mytypefield = 1; + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +message QueryMyQueryResponse { + cosmos.base.query.v1beta1.PageResponse pagination = 1; +} + +message QueryFooRequest {} + +message QueryFooResponse { + string bar = 1; +} diff --git a/ignite/pkg/cosmosanalysis/module/testdata/earth/x/mars/keeper/query_my_query.go b/ignite/pkg/cosmosanalysis/module/testdata/earth/x/mars/keeper/query_my_query.go new file mode 100644 index 0000000..73428e1 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/module/testdata/earth/x/mars/keeper/query_my_query.go @@ -0,0 +1,17 @@ +package keeper + +import ( + "context" + + "github.com/tendermint/planet/x/mars/types" +) + +type Keeper struct{} + +func (k Keeper) MyQuery(goCtx context.Context, req *types.QueryMyQueryRequest) (*types.QueryMyQueryResponse, error) { + return nil, nil +} + +func (k Keeper) Foo(goCtx context.Context, req *types.QueryFooRequest) 
(*types.QueryFooResponse, error) { + return nil, nil +} diff --git a/ignite/pkg/cosmosanalysis/module/testdata/earth/x/mars/types/types.go b/ignite/pkg/cosmosanalysis/module/testdata/earth/x/mars/types/types.go new file mode 100644 index 0000000..8996191 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/module/testdata/earth/x/mars/types/types.go @@ -0,0 +1,8 @@ +package types + +type ( + QueryMyQueryRequest struct{} + QueryMyQueryResponse struct{} + QueryFooRequest struct{} + QueryFooResponse struct{} +) diff --git a/ignite/pkg/cosmosanalysis/module/testdata/planet/app/app.go b/ignite/pkg/cosmosanalysis/module/testdata/planet/app/app.go new file mode 100644 index 0000000..f0afa54 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/module/testdata/planet/app/app.go @@ -0,0 +1,20 @@ +package app + +import ( + "github.com/cosmos/cosmos-sdk/baseapp" + authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" + bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" + govkeeper "github.com/cosmos/cosmos-sdk/x/gov/keeper" + stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper" + marskeeper "github.com/tendermint/planet/x/mars/keeper" +) + +type Foo struct { + baseapp.BaseApp + + AuthKeeper authkeeper.Keeper + BankKeeper bankkeeper.Keeper + StakingKeeper stakingkeeper.Keeper + GovKeeper govkeeper.Keeper + MarsKeeper marskeeper.Keeper +} diff --git a/ignite/pkg/cosmosanalysis/module/testdata/planet/go.mod b/ignite/pkg/cosmosanalysis/module/testdata/planet/go.mod new file mode 100644 index 0000000..5fd9ffd --- /dev/null +++ b/ignite/pkg/cosmosanalysis/module/testdata/planet/go.mod @@ -0,0 +1,4 @@ +module github.com/tendermint/planet + +go 1.16 + diff --git a/ignite/pkg/cosmosanalysis/module/testdata/planet/proto/planet/mars/mars.proto b/ignite/pkg/cosmosanalysis/module/testdata/planet/proto/planet/mars/mars.proto new file mode 100644 index 0000000..f6267cb --- /dev/null +++ b/ignite/pkg/cosmosanalysis/module/testdata/planet/proto/planet/mars/mars.proto @@ -0,0 +1,52 @@ +syntax 
= "proto3"; +package tendermint.planet.mars; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "google/api/annotations.proto"; +option go_package = "github.com/tendermint/planet/x/mars/types"; + +service Msg { + rpc MyMessage(MsgMyMessageRequest) returns (MsgMyMessageResponse); + + rpc Bar(MsgBarRequest) returns (MsgBarResponse); +} + +message MsgMyMessageRequest { + string mytypefield = 1; +} + +message MsgMyMessageResponse { + string mytypefield = 1; +} + +message MsgBarRequest { + string mytypefield = 1; +} + +message MsgBarResponse { + string mytypefield = 1; +} + +service Query { + rpc MyQuery(QueryMyQueryRequest) returns (QueryMyQueryResponse) { + option (google.api.http).get = "/tendermint/mars/my_query/{mytypefield}"; + } + + rpc Foo(QueryFooRequest) returns (QueryFooResponse) { + option (google.api.http).get = "/tendermint/mars/foo"; + } +} + +message QueryMyQueryRequest { + string mytypefield = 1; + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +message QueryMyQueryResponse { + cosmos.base.query.v1beta1.PageResponse pagination = 1; +} + +message QueryFooRequest {} + +message QueryFooResponse { + string bar = 1; +} diff --git a/ignite/pkg/cosmosanalysis/module/testdata/planet/x/mars/keeper/query_my_query.go b/ignite/pkg/cosmosanalysis/module/testdata/planet/x/mars/keeper/query_my_query.go new file mode 100644 index 0000000..73428e1 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/module/testdata/planet/x/mars/keeper/query_my_query.go @@ -0,0 +1,17 @@ +package keeper + +import ( + "context" + + "github.com/tendermint/planet/x/mars/types" +) + +type Keeper struct{} + +func (k Keeper) MyQuery(goCtx context.Context, req *types.QueryMyQueryRequest) (*types.QueryMyQueryResponse, error) { + return nil, nil +} + +func (k Keeper) Foo(goCtx context.Context, req *types.QueryFooRequest) (*types.QueryFooResponse, error) { + return nil, nil +} diff --git a/ignite/pkg/cosmosanalysis/module/testdata/planet/x/mars/types/types.go 
b/ignite/pkg/cosmosanalysis/module/testdata/planet/x/mars/types/types.go new file mode 100644 index 0000000..8996191 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/module/testdata/planet/x/mars/types/types.go @@ -0,0 +1,8 @@ +package types + +type ( + QueryMyQueryRequest struct{} + QueryMyQueryResponse struct{} + QueryFooRequest struct{} + QueryFooResponse struct{} +) diff --git a/ignite/pkg/cosmosanalysis/testdata/chain-sdk-fork/go.mod b/ignite/pkg/cosmosanalysis/testdata/chain-sdk-fork/go.mod new file mode 100644 index 0000000..b586c36 --- /dev/null +++ b/ignite/pkg/cosmosanalysis/testdata/chain-sdk-fork/go.mod @@ -0,0 +1,170 @@ +module my-new-chain + +go 1.23 + +require ( + cosmossdk.io/api v0.7.2 + github.com/cometbft/cometbft v0.38.1 + github.com/cometbft/cometbft-db v0.8.0 + github.com/cosmos/cosmos-sdk v0.47.3 + github.com/cosmos/gogoproto v1.4.11 + github.com/cosmos/ibc-go/v7 v7.1.0 + github.com/golang/protobuf v1.5.3 + github.com/gorilla/mux v1.8.1 + github.com/grpc-ecosystem/grpc-gateway v1.16.0 + github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.2 + github.com/spf13/cast v1.5.1 + github.com/spf13/cobra v1.8.0 + github.com/spf13/pflag v1.0.5 + github.com/stretchr/testify v1.8.4 + google.golang.org/genproto v0.0.0-20231016165738-49dd2c1f3d0b + google.golang.org/grpc v1.59.0 + gopkg.in/yaml.v2 v2.4.0 +) + +require ( + cloud.google.com/go v0.110.8 // indirect + cloud.google.com/go/compute v1.23.1 // indirect + cloud.google.com/go/compute/metadata v0.2.3 // indirect + cloud.google.com/go/iam v1.1.3 // indirect + cloud.google.com/go/storage v1.30.1 // indirect + cosmossdk.io/core v0.11.0 // indirect + cosmossdk.io/depinject v1.0.0-alpha.4 // indirect + cosmossdk.io/errors v1.0.0 // indirect + cosmossdk.io/math v1.2.0 // indirect + cosmossdk.io/tools/rosetta v0.2.1 // indirect + filippo.io/edwards25519 v1.0.0 // indirect + github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect + github.com/99designs/keyring v1.2.1 // indirect + 
github.com/ChainSafe/go-schnorrkel v0.0.0-20200405005733-88cbf1b4c40d // indirect + github.com/armon/go-metrics v0.4.1 // indirect + github.com/aws/aws-sdk-go v1.44.203 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect + github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 // indirect + github.com/btcsuite/btcd/btcec/v2 v2.3.2 // indirect + github.com/cenkalti/backoff/v4 v4.2.1 // indirect + github.com/cespare/xxhash v1.1.0 // indirect + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/chzyer/readline v1.5.1 // indirect + github.com/cockroachdb/apd/v2 v2.0.2 // indirect + github.com/coinbase/rosetta-sdk-go/types v1.0.0 // indirect + github.com/confio/ics23/go v0.9.0 // indirect + github.com/cosmos/btcutil v1.0.5 // indirect + github.com/cosmos/cosmos-proto v1.0.0-beta.3 // indirect + github.com/cosmos/go-bip39 v1.0.0 // indirect + github.com/cosmos/gogogateway v1.2.0 // indirect + github.com/cosmos/iavl v1.0.0 // indirect + github.com/cosmos/ics23/go v0.10.0 // indirect + github.com/cosmos/ledger-cosmos-go v0.13.3 // indirect + github.com/cosmos/rosetta-sdk-go v0.10.0 // indirect + github.com/creachadair/taskgroup v0.4.2 // indirect + github.com/danieljoos/wincred v1.1.2 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 // indirect + github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect + github.com/dgraph-io/badger/v2 v2.2007.4 // indirect + github.com/dgraph-io/ristretto v0.1.1 // indirect + github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/dvsekhvalnov/jose2go v1.5.0 // indirect + github.com/felixge/httpsnoop v1.0.2 // indirect + github.com/fsnotify/fsnotify v1.6.0 // indirect + github.com/ghodss/yaml v1.0.0 // indirect + github.com/go-kit/kit v0.13.0 // 
indirect + github.com/go-kit/log v0.2.1 // indirect + github.com/go-logfmt/logfmt v0.6.0 // indirect + github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect + github.com/gogo/googleapis v1.4.1 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/glog v1.1.2 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/mock v1.6.0 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/google/btree v1.1.2 // indirect + github.com/google/go-cmp v0.6.0 // indirect + github.com/google/orderedcode v0.0.1 // indirect + github.com/google/uuid v1.3.1 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.1 // indirect + github.com/googleapis/gax-go/v2 v2.12.0 // indirect + github.com/gorilla/handlers v1.5.1 // indirect + github.com/gorilla/websocket v1.5.1 // indirect + github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect + github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect + github.com/gtank/merlin v0.1.1 // indirect + github.com/gtank/ristretto255 v0.1.2 // indirect + github.com/hashicorp/go-cleanhttp v0.5.2 // indirect + github.com/hashicorp/go-getter v1.7.1 // indirect + github.com/hashicorp/go-immutable-radix v1.3.1 // indirect + github.com/hashicorp/go-safetemp v1.0.0 // indirect + github.com/hashicorp/go-version v1.6.0 // indirect + github.com/hashicorp/golang-lru v1.0.2 // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/hdevalence/ed25519consensus v0.1.0 // indirect + github.com/huandu/skiplist v1.2.0 // indirect + github.com/improbable-eng/grpc-web v0.15.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/jmhodges/levigo v1.0.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/compress v1.17.2 // indirect + github.com/lib/pq v1.10.7 // indirect + 
github.com/libp2p/go-buffer-pool v0.1.0 // indirect + github.com/magiconair/properties v1.8.7 // indirect + github.com/manifoldco/promptui v0.9.0 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect + github.com/mimoo/StrobeGo v0.0.0-20210601165009-122bf33a46e0 // indirect + github.com/minio/highwayhash v1.0.2 // indirect + github.com/mitchellh/go-homedir v1.1.0 // indirect + github.com/mitchellh/go-testing-interface v1.14.1 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/mtibben/percent v0.2.1 // indirect + github.com/pelletier/go-toml/v2 v2.1.0 // indirect + github.com/petermattis/goid v0.0.0-20230904192822-1876fd5063bc // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/prometheus/client_golang v1.17.0 // indirect + github.com/prometheus/client_model v0.5.0 // indirect + github.com/prometheus/common v0.45.0 // indirect + github.com/prometheus/procfs v0.12.0 // indirect + github.com/rakyll/statik v0.1.7 // indirect + github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect + github.com/rs/cors v1.10.1 // indirect + github.com/sasha-s/go-deadlock v0.3.1 // indirect + github.com/spf13/afero v1.10.0 // indirect + github.com/spf13/jwalterweatherman v1.1.0 // indirect + github.com/spf13/viper v1.17.0 // indirect + github.com/subosito/gotenv v1.6.0 // indirect + github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d // indirect + github.com/tecbot/gorocksdb v0.0.0-20191217155057-f0fad39f321c // indirect + github.com/tendermint/go-amino v0.16.0 // indirect + github.com/tidwall/btree v1.7.0 // indirect + github.com/ulikunitz/xz v0.5.11 // indirect + github.com/zondax/hid v0.9.2 // indirect + 
github.com/zondax/ledger-go v0.14.3 // indirect + go.etcd.io/bbolt v1.3.7 // indirect + go.opencensus.io v0.24.0 // indirect + golang.org/x/crypto v0.16.0 // indirect + golang.org/x/exp v0.0.0-20231006140011-7918f672742d // indirect + golang.org/x/net v0.19.0 // indirect + golang.org/x/oauth2 v0.12.0 // indirect + golang.org/x/sys v0.15.0 // indirect + golang.org/x/term v0.15.0 // indirect + golang.org/x/text v0.14.0 // indirect + golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect + google.golang.org/api v0.143.0 // indirect + google.golang.org/appengine v1.6.7 // indirect + google.golang.org/protobuf v1.31.0 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + nhooyr.io/websocket v1.8.7 // indirect + pgregory.net/rapid v1.1.0 // indirect + sigs.k8s.io/yaml v1.3.0 // indirect +) + +replace github.com/syndtr/goleveldb => github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 + +replace github.com/cosmos/cosmos-sdk => github.com/rollkit/cosmos-sdk v0.50.1-rollkit-v0.11.6-no-fraud-proofs diff --git a/ignite/pkg/cosmosanalysis/testdata/chain/go.mod b/ignite/pkg/cosmosanalysis/testdata/chain/go.mod new file mode 100644 index 0000000..6dfe6aa --- /dev/null +++ b/ignite/pkg/cosmosanalysis/testdata/chain/go.mod @@ -0,0 +1,168 @@ +module my-new-chain + +go 1.19 + +require ( + cosmossdk.io/api v0.3.1 + github.com/cometbft/cometbft v0.37.2 + github.com/cometbft/cometbft-db v0.7.0 + github.com/cosmos/cosmos-sdk v0.47.3 + github.com/cosmos/gogoproto v1.4.10 + github.com/cosmos/ibc-go/v7 v7.1.0 + github.com/golang/protobuf v1.5.3 + github.com/gorilla/mux v1.8.0 + github.com/grpc-ecosystem/grpc-gateway v1.16.0 + github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.2 + github.com/spf13/cast v1.5.0 + github.com/spf13/cobra v1.6.1 + github.com/spf13/pflag v1.0.5 + github.com/stretchr/testify v1.8.2 + google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4 + google.golang.org/grpc v1.55.0 + gopkg.in/yaml.v2 v2.4.0 +) + 
+require ( + cloud.google.com/go v0.110.0 // indirect + cloud.google.com/go/compute v1.18.0 // indirect + cloud.google.com/go/compute/metadata v0.2.3 // indirect + cloud.google.com/go/iam v0.12.0 // indirect + cloud.google.com/go/storage v1.29.0 // indirect + cosmossdk.io/core v0.5.1 // indirect + cosmossdk.io/depinject v1.0.0-alpha.3 // indirect + cosmossdk.io/errors v1.0.0-beta.7 // indirect + cosmossdk.io/math v1.0.1 // indirect + cosmossdk.io/tools/rosetta v0.2.1 // indirect + filippo.io/edwards25519 v1.0.0 // indirect + github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect + github.com/99designs/keyring v1.2.1 // indirect + github.com/ChainSafe/go-schnorrkel v0.0.0-20200405005733-88cbf1b4c40d // indirect + github.com/armon/go-metrics v0.4.1 // indirect + github.com/aws/aws-sdk-go v1.44.203 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect + github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 // indirect + github.com/btcsuite/btcd/btcec/v2 v2.3.2 // indirect + github.com/cenkalti/backoff/v4 v4.1.3 // indirect + github.com/cespare/xxhash v1.1.0 // indirect + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/chzyer/readline v1.5.1 // indirect + github.com/cockroachdb/apd/v2 v2.0.2 // indirect + github.com/coinbase/rosetta-sdk-go/types v1.0.0 // indirect + github.com/confio/ics23/go v0.9.0 // indirect + github.com/cosmos/btcutil v1.0.5 // indirect + github.com/cosmos/cosmos-proto v1.0.0-beta.2 // indirect + github.com/cosmos/go-bip39 v1.0.0 // indirect + github.com/cosmos/gogogateway v1.2.0 // indirect + github.com/cosmos/iavl v0.20.0 // indirect + github.com/cosmos/ics23/go v0.10.0 // indirect + github.com/cosmos/ledger-cosmos-go v0.12.1 // indirect + github.com/cosmos/rosetta-sdk-go v0.10.0 // indirect + github.com/creachadair/taskgroup v0.4.2 // indirect + github.com/danieljoos/wincred v1.1.2 // indirect + 
github.com/davecgh/go-spew v1.1.1 // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 // indirect + github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect + github.com/dgraph-io/badger/v2 v2.2007.4 // indirect + github.com/dgraph-io/ristretto v0.1.1 // indirect + github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/dvsekhvalnov/jose2go v1.5.0 // indirect + github.com/felixge/httpsnoop v1.0.2 // indirect + github.com/fsnotify/fsnotify v1.6.0 // indirect + github.com/ghodss/yaml v1.0.0 // indirect + github.com/go-kit/kit v0.12.0 // indirect + github.com/go-kit/log v0.2.1 // indirect + github.com/go-logfmt/logfmt v0.6.0 // indirect + github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect + github.com/gogo/googleapis v1.4.1 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/glog v1.1.0 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/mock v1.6.0 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/google/btree v1.1.2 // indirect + github.com/google/go-cmp v0.5.9 // indirect + github.com/google/orderedcode v0.0.1 // indirect + github.com/google/uuid v1.3.0 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.2.3 // indirect + github.com/googleapis/gax-go/v2 v2.7.0 // indirect + github.com/gorilla/handlers v1.5.1 // indirect + github.com/gorilla/websocket v1.5.0 // indirect + github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 // indirect + github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect + github.com/gtank/merlin v0.1.1 // indirect + github.com/gtank/ristretto255 v0.1.2 // indirect + github.com/hashicorp/go-cleanhttp v0.5.2 // indirect + github.com/hashicorp/go-getter v1.7.1 // indirect + github.com/hashicorp/go-immutable-radix v1.3.1 // indirect + github.com/hashicorp/go-safetemp v1.0.0 // 
indirect + github.com/hashicorp/go-version v1.6.0 // indirect + github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/hdevalence/ed25519consensus v0.1.0 // indirect + github.com/huandu/skiplist v1.2.0 // indirect + github.com/improbable-eng/grpc-web v0.15.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/jmhodges/levigo v1.0.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/compress v1.16.3 // indirect + github.com/lib/pq v1.10.7 // indirect + github.com/libp2p/go-buffer-pool v0.1.0 // indirect + github.com/magiconair/properties v1.8.7 // indirect + github.com/manifoldco/promptui v0.9.0 // indirect + github.com/mattn/go-isatty v0.0.18 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect + github.com/mimoo/StrobeGo v0.0.0-20210601165009-122bf33a46e0 // indirect + github.com/minio/highwayhash v1.0.2 // indirect + github.com/mitchellh/go-homedir v1.1.0 // indirect + github.com/mitchellh/go-testing-interface v1.14.1 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/mtibben/percent v0.2.1 // indirect + github.com/pelletier/go-toml/v2 v2.0.7 // indirect + github.com/petermattis/goid v0.0.0-20230317030725-371a4b8eda08 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/prometheus/client_golang v1.14.0 // indirect + github.com/prometheus/client_model v0.3.0 // indirect + github.com/prometheus/common v0.42.0 // indirect + github.com/prometheus/procfs v0.9.0 // indirect + github.com/rakyll/statik v0.1.7 // indirect + github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect + github.com/rs/cors 
v1.8.3 // indirect + github.com/sasha-s/go-deadlock v0.3.1 // indirect + github.com/spf13/afero v1.9.3 // indirect + github.com/spf13/jwalterweatherman v1.1.0 // indirect + github.com/spf13/viper v1.15.0 // indirect + github.com/subosito/gotenv v1.4.2 // indirect + github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d // indirect + github.com/tecbot/gorocksdb v0.0.0-20191217155057-f0fad39f321c // indirect + github.com/tendermint/go-amino v0.16.0 // indirect + github.com/tidwall/btree v1.6.0 // indirect + github.com/ulikunitz/xz v0.5.11 // indirect + github.com/zondax/hid v0.9.1 // indirect + github.com/zondax/ledger-go v0.14.1 // indirect + go.etcd.io/bbolt v1.3.7 // indirect + go.opencensus.io v0.24.0 // indirect + golang.org/x/crypto v0.8.0 // indirect + golang.org/x/exp v0.0.0-20230515195305-f3d0a9c9a5cc // indirect + golang.org/x/net v0.9.0 // indirect + golang.org/x/oauth2 v0.6.0 // indirect + golang.org/x/sys v0.7.0 // indirect + golang.org/x/term v0.7.0 // indirect + golang.org/x/text v0.9.0 // indirect + golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect + google.golang.org/api v0.110.0 // indirect + google.golang.org/appengine v1.6.7 // indirect + google.golang.org/protobuf v1.30.0 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + nhooyr.io/websocket v1.8.6 // indirect + pgregory.net/rapid v0.5.5 // indirect + sigs.k8s.io/yaml v1.3.0 // indirect +) + +replace github.com/syndtr/goleveldb => github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 diff --git a/ignite/pkg/cosmosbuf/buf.go b/ignite/pkg/cosmosbuf/buf.go new file mode 100644 index 0000000..5c99f1b --- /dev/null +++ b/ignite/pkg/cosmosbuf/buf.go @@ -0,0 +1,379 @@ +package cosmosbuf + +import ( + "bytes" + "context" + "fmt" + "maps" + "path/filepath" + "strings" + + "github.com/gobwas/glob" + "golang.org/x/sync/errgroup" + + "github.com/ignite/cli/v29/ignite/pkg/cache" + 
"github.com/ignite/cli/v29/ignite/pkg/cmdrunner/exec" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/dircache" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xos" +) + +const ( + flagTemplate = "template" + flagOutput = "output" + flagErrorFormat = "error-format" + flagLogFormat = "log-format" + flagWorkspace = "workspace" + flagBufGenYaml = "buf-gen-yaml" + flagIncludeImports = "include-imports" + flagIncludeWellKnownTypes = "include-wkt" + flagWrite = "write" + flagPath = "path" + fmtJSON = "json" + bufGenPrefix = "buf.gen." + + // CMD*** are the buf commands. + CMDBuf = "buf" + CMDGenerate Command = "generate" + CMDExport Command = "export" + CMDFormat Command = "format" + CMDConfig Command = "config" + CMDDep Command = "dep" + + specCacheNamespace = "generate.buf" +) + +var ( + commands = map[Command]struct{}{ + CMDGenerate: {}, + CMDExport: {}, + CMDFormat: {}, + CMDConfig: {}, + CMDDep: {}, + } + + // ErrInvalidCommand indicates an invalid command name. + ErrInvalidCommand = errors.New("invalid command name") + + // ErrProtoFilesNotFound indicates that no ".proto" files were found. + ErrProtoFilesNotFound = errors.New("no proto files found") +) + +type ( + // Command represents a high level command under buf. + Command string + + // Buf represents the buf application structure. + Buf struct { + cache dircache.Cache + } + + // genOptions used to configure code generation. + genOptions struct { + excluded []glob.Glob + flags map[string]string + fileByFile bool + includeImports bool + includeWKT bool + moduleName string + } + + // GenOption configures code generation. 
+ GenOption func(*genOptions) +) + +func newGenOptions() genOptions { + return genOptions{ + flags: make(map[string]string), + excluded: make([]glob.Glob, 0), + fileByFile: false, + includeWKT: false, + includeImports: false, + moduleName: "", + } +} + +// WithFlag provides flag options for the buf generate command. +func WithFlag(flag, value string) GenOption { + return func(o *genOptions) { + o.flags[flag] = value + } +} + +// ExcludeFiles exclude file names from the generate command using glob. +func ExcludeFiles(patterns ...string) GenOption { + return func(o *genOptions) { + for _, pattern := range patterns { + o.excluded = append(o.excluded, glob.MustCompile(pattern)) + } + } +} + +// IncludeImports also generate all imports except for Well-Known Types. +func IncludeImports() GenOption { + return func(o *genOptions) { + o.includeImports = true + } +} + +// IncludeWKT also generate Well-Known Types. +// Cannot be set without IncludeImports. +func IncludeWKT() GenOption { + return func(o *genOptions) { + o.includeImports = true + o.includeWKT = true + } +} + +// WithModuleName sets the module name to filter protos for. +func WithModuleName(value string) GenOption { + return func(o *genOptions) { + o.moduleName = value + } +} + +// FileByFile runs the generate command for each proto file. +func FileByFile() GenOption { + return func(o *genOptions) { + o.fileByFile = true + } +} + +// New creates a new Buf based on the installed binary. +func New(cacheStorage cache.Storage, goModPath string) (Buf, error) { + bufCacheDir := filepath.Join(CMDBuf, goModPath) + c, err := dircache.New(cacheStorage, bufCacheDir, specCacheNamespace) + if err != nil { + return Buf{}, err + } + + return Buf{ + cache: c, + }, nil +} + +func cmd() []string { + return []string{"go", "tool", "github.com/bufbuild/buf/cmd/buf"} +} + +// String returns the command name. +func (c Command) String() string { + return string(c) +} + +// Update updates module dependencies. 
+// By default updates all dependencies unless one or more dependencies are specified. +func (b Buf) Update(ctx context.Context, modDir string) error { + files, err := xos.FindFiles(modDir, xos.WithExtension(xos.ProtoFile)) + if err != nil { + return err + } + if len(files) == 0 { + return errors.Errorf("%w: %s", ErrProtoFilesNotFound, modDir) + } + + cmd, err := b.command(CMDDep, nil, "update", modDir) + if err != nil { + return err + } + return b.runCommand(ctx, cmd...) +} + +// Migrate runs the buf Migrate command for the files in the app directory. +func (b Buf) Migrate(ctx context.Context, protoDir string) error { + yamlFiles, err := xos.FindFiles(protoDir, + xos.WithExtension(xos.YMLFile), + xos.WithExtension(xos.YAMLFile), + xos.WithPrefix(bufGenPrefix), + ) + if err != nil { + return err + } + + flags := map[string]string{ + flagWorkspace: ".", + } + + if len(yamlFiles) > 0 { + flags[flagBufGenYaml] = strings.Join(yamlFiles, ",") + } + + cmd, err := b.command(CMDConfig, flags, "migrate") + if err != nil { + return err + } + + return b.runCommand(ctx, cmd...) +} + +// Export runs the buf Export command for the files in the proto directory. +func (b Buf) Export(ctx context.Context, protoDir, output string) error { + files, err := xos.FindFiles(protoDir, xos.WithExtension(xos.ProtoFile)) + if err != nil { + return err + } + if len(files) == 0 { + return errors.Errorf("%w: %s", ErrProtoFilesNotFound, protoDir) + } + + flags := map[string]string{ + flagOutput: output, + } + cmd, err := b.command(CMDExport, flags, protoDir) + if err != nil { + return err + } + + return b.runCommand(ctx, cmd...) +} + +// Format runs the buf Format command for the files in the provided path. +func (b Buf) Format(ctx context.Context, path string) error { + flags := map[string]string{ + flagWrite: "true", + } + cmd, err := b.command(CMDFormat, flags, path) + if err != nil { + return err + } + + return b.runCommand(ctx, cmd...) 
+} + +// Generate runs the buf Generate command for each file into the proto directory. +func (b Buf) Generate( + ctx context.Context, + protoPath, + output, + template string, + options ...GenOption, +) (err error) { + opts := newGenOptions() + for _, apply := range options { + apply(&opts) + } + modulePath := protoPath + if opts.moduleName != "" { + path := append([]string{protoPath}, strings.Split(opts.moduleName, ".")...) + modulePath = filepath.Join(path...) + } + // find all proto files into the path. + foundFiles, err := xos.FindFiles(modulePath, xos.WithExtension(xos.ProtoFile)) + if err != nil || len(foundFiles) == 0 { + return err + } + + // check if already exist a cache for the template. + key, err := b.cache.CopyTo(protoPath, output, template) + if err != nil && !errors.Is(err, dircache.ErrCacheNotFound) { + return err + } else if err == nil { + return nil + } + + // remove excluded and cached files. + protoFiles := make([]string, 0) + for _, file := range foundFiles { + okExclude := false + for _, g := range opts.excluded { + if g.Match(file) { + okExclude = true + break + } + } + if !okExclude { + protoFiles = append(protoFiles, file) + } + } + if len(protoFiles) == 0 { + return nil + } + + flags := map[string]string{ + flagTemplate: template, + flagOutput: output, + flagErrorFormat: fmtJSON, + flagLogFormat: fmtJSON, + } + maps.Copy(flags, opts.flags) + if opts.includeImports { + flags[flagIncludeImports] = "true" + } + if opts.includeWKT { + flags[flagIncludeWellKnownTypes] = "true" + } + + if !opts.fileByFile { + cmd, err := b.command(CMDGenerate, flags, protoPath) + if err != nil { + return err + } + for _, file := range protoFiles { + cmd = append(cmd, fmt.Sprintf("--%s=%s", flagPath, file)) + } + if err := b.runCommand(ctx, cmd...); err != nil { + return err + } + } else { + g, ctx := errgroup.WithContext(ctx) + for _, file := range protoFiles { + cmd, err := b.command(CMDGenerate, flags, file) + if err != nil { + return err + } + + g.Go(func() 
error { + cmd := cmd + return b.runCommand(ctx, cmd...) + }) + } + if err := g.Wait(); err != nil { + return err + } + } + + return b.cache.Save(output, key) +} + +// runCommand run the buf CLI command. +func (b Buf) runCommand(ctx context.Context, cmd ...string) error { + execOpts := []exec.Option{ + exec.IncludeStdLogsToError(), + } + return exec.Exec(ctx, cmd, execOpts...) +} + +// command generate the buf CLI command. +func (b Buf) command( + c Command, + flags map[string]string, + args ...string, +) ([]string, error) { + if _, ok := commands[c]; !ok { + return nil, errors.Errorf("%w: %s", ErrInvalidCommand, c) + } + + command := append( + cmd(), + c.String(), + ) + command = append(command, args...) + + for flag, value := range flags { + command = append(command, + fmt.Sprintf("--%s=%s", flag, value), + ) + } + return command, nil +} + +// Version runs the buf Version command. +func Version(ctx context.Context) (string, error) { + bufOut := &bytes.Buffer{} + if err := exec.Exec(ctx, append(cmd(), "--version"), exec.StepOption(step.Stdout(bufOut))); err != nil { + return "", err + } + + return strings.TrimSpace(bufOut.String()), nil +} diff --git a/ignite/pkg/cosmosbuf/buf_test.go b/ignite/pkg/cosmosbuf/buf_test.go new file mode 100644 index 0000000..02367af --- /dev/null +++ b/ignite/pkg/cosmosbuf/buf_test.go @@ -0,0 +1,65 @@ +package cosmosbuf + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestNewGenOptionsDefaults(t *testing.T) { + opts := newGenOptions() + require.Empty(t, opts.flags) + require.Empty(t, opts.excluded) + require.False(t, opts.fileByFile) + require.False(t, opts.includeImports) + require.False(t, opts.includeWKT) + require.Empty(t, opts.moduleName) +} + +func TestGenOptions(t *testing.T) { + opts := newGenOptions() + + WithFlag("foo", "bar")(&opts) + ExcludeFiles("*.proto")(&opts) + IncludeImports()(&opts) + FileByFile()(&opts) + WithModuleName("ignite.chain")(&opts) + + require.Equal(t, "bar", 
opts.flags["foo"]) + require.Len(t, opts.excluded, 1) + require.True(t, opts.includeImports) + require.True(t, opts.fileByFile) + require.Equal(t, "ignite.chain", opts.moduleName) + + IncludeWKT()(&opts) + require.True(t, opts.includeImports) + require.True(t, opts.includeWKT) +} + +func TestCommandString(t *testing.T) { + require.Equal(t, "generate", CMDGenerate.String()) +} + +func TestCommandReturnsErrorForInvalidCommand(t *testing.T) { + _, err := Buf{}.command(Command("invalid"), nil) + require.Error(t, err) + require.ErrorIs(t, err, ErrInvalidCommand) +} + +func TestCommandBuildsExpectedArguments(t *testing.T) { + flags := map[string]string{ + "template": "buf.gen.yaml", + "output": "out", + } + + got, err := Buf{}.command(CMDGenerate, flags, "proto") + require.NoError(t, err) + require.GreaterOrEqual(t, len(got), 4) + require.Equal(t, []string{"go", "tool", "github.com/bufbuild/buf/cmd/buf", "generate"}, got[:4]) + require.Contains(t, got, "proto") + + joined := strings.Join(got, " ") + require.Contains(t, joined, "--template=buf.gen.yaml") + require.Contains(t, joined, "--output=out") +} diff --git a/ignite/pkg/cosmosclient/bank.go b/ignite/pkg/cosmosclient/bank.go new file mode 100644 index 0000000..9b61975 --- /dev/null +++ b/ignite/pkg/cosmosclient/bank.go @@ -0,0 +1,41 @@ +package cosmosclient + +import ( + "context" + + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/query" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosaccount" +) + +func (c Client) BankBalances(ctx context.Context, address string, pagination *query.PageRequest) (sdk.Coins, error) { + defer c.lockBech32Prefix()() + + req := &banktypes.QueryAllBalancesRequest{ + Address: address, + Pagination: pagination, + } + + resp, err := c.bankQueryClient.AllBalances(ctx, req) + if err != nil { + return nil, rpcError(c.nodeAddress, err) + } + return resp.Balances, nil +} + +func (c Client) BankSendTx(ctx 
context.Context, fromAccount cosmosaccount.Account, toAddress string, amount sdk.Coins) (TxService, error) { + addr, err := fromAccount.Address(c.bech32Prefix) + if err != nil { + return TxService{}, err + } + + msg := &banktypes.MsgSend{ + FromAddress: addr, + ToAddress: toAddress, + Amount: amount, + } + + return c.CreateTx(ctx, fromAccount, msg) +} diff --git a/ignite/pkg/cosmosclient/bank_test.go b/ignite/pkg/cosmosclient/bank_test.go new file mode 100644 index 0000000..ee48c3d --- /dev/null +++ b/ignite/pkg/cosmosclient/bank_test.go @@ -0,0 +1,41 @@ +package cosmosclient_test + +import ( + "context" + "testing" + + "cosmossdk.io/math" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/query" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestClientBankBalances(t *testing.T) { + var ( + ctx = context.Background() + address = "address" + pagination = &query.PageRequest{Offset: 1} + expectedBalances = sdk.NewCoins( + sdk.NewCoin("token", math.NewInt(1000)), + sdk.NewCoin("stake", math.NewInt(2000)), + ) + ) + c := newClient(t, func(s suite) { + req := &banktypes.QueryAllBalancesRequest{ + Address: address, + Pagination: pagination, + } + + s.bankQueryClient.EXPECT().AllBalances(ctx, req). 
+ Return(&banktypes.QueryAllBalancesResponse{ + Balances: expectedBalances, + }, nil) + }) + + balances, err := c.BankBalances(ctx, address, pagination) + + require.NoError(t, err) + assert.Equal(t, expectedBalances, balances) +} diff --git a/ignite/pkg/cosmosclient/consensus.go b/ignite/pkg/cosmosclient/consensus.go new file mode 100644 index 0000000..b9dfe7f --- /dev/null +++ b/ignite/pkg/cosmosclient/consensus.go @@ -0,0 +1,62 @@ +package cosmosclient + +import ( + "context" + "encoding/base64" + "time" + + "github.com/cometbft/cometbft/libs/bytes" + cmtproto "github.com/cometbft/cometbft/proto/tendermint/types" + tmtypes "github.com/cometbft/cometbft/types" +) + +// ConsensusInfo is the validator consensus info. +type ConsensusInfo struct { + Timestamp string `json:"Timestamp"` + Root string `json:"Root"` + NextValidatorsHash string `json:"NextValidatorsHash"` + ValidatorSet *cmtproto.ValidatorSet `json:"ValidatorSet"` +} + +// ConsensusInfo returns the appropriate tendermint consensus state by given height +// and the validator set for the next height. 
+func (c Client) ConsensusInfo(ctx context.Context, height int64) (ConsensusInfo, error) { + node, err := c.Context().GetNode() + if err != nil { + return ConsensusInfo{}, err + } + + commit, err := node.Commit(ctx, &height) + if err != nil { + return ConsensusInfo{}, err + } + + var ( + page = 1 + count = 10_000 + ) + validators, err := node.Validators(ctx, &height, &page, &count) + if err != nil { + return ConsensusInfo{}, err + } + + protoValset, err := tmtypes.NewValidatorSet(validators.Validators).ToProto() + if err != nil { + return ConsensusInfo{}, err + } + + heightNext := height + 1 + validatorsNext, err := node.Validators(ctx, &heightNext, &page, &count) + if err != nil { + return ConsensusInfo{}, err + } + + hash := tmtypes.NewValidatorSet(validatorsNext.Validators).Hash() + + return ConsensusInfo{ + Timestamp: commit.Time.Format(time.RFC3339Nano), + NextValidatorsHash: bytes.HexBytes(hash).String(), + Root: base64.StdEncoding.EncodeToString(commit.AppHash), + ValidatorSet: protoValset, + }, nil +} diff --git a/ignite/pkg/cosmosclient/cosmosclient.go b/ignite/pkg/cosmosclient/cosmosclient.go new file mode 100644 index 0000000..b2aa548 --- /dev/null +++ b/ignite/pkg/cosmosclient/cosmosclient.go @@ -0,0 +1,864 @@ +// Package cosmosclient provides a standalone client to connect to Cosmos SDK chains. 
+package cosmosclient + +import ( + "context" + "encoding/hex" + "io" + "net/url" + "os" + "path/filepath" + "strconv" + "strings" + "sync" + "time" + + "github.com/cenkalti/backoff" + gogogrpc "github.com/cosmos/gogoproto/grpc" + "github.com/cosmos/gogoproto/proto" + prototypes "github.com/cosmos/gogoproto/types" + + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/client/tx" + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" + sdktypes "github.com/cosmos/cosmos-sdk/types" + txtypes "github.com/cosmos/cosmos-sdk/types/tx" + "github.com/cosmos/cosmos-sdk/types/tx/signing" + authtx "github.com/cosmos/cosmos-sdk/x/auth/tx" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + staking "github.com/cosmos/cosmos-sdk/x/staking/types" + + rpcclient "github.com/cometbft/cometbft/rpc/client" + rpchttp "github.com/cometbft/cometbft/rpc/client/http" + ctypes "github.com/cometbft/cometbft/rpc/core/types" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosaccount" + "github.com/ignite/cli/v29/ignite/pkg/cosmosfaucet" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +var ( + // FaucetTransferEnsureDuration is the duration that BroadcastTx will wait when a faucet transfer + // is triggered prior to broadcasting but transfer's tx is not committed in the state yet. + FaucetTransferEnsureDuration = time.Second * 40 + + // ErrInvalidBlockHeight is returned when a block height value is not valid. + ErrInvalidBlockHeight = errors.New("block height must be greater than 0") + + errCannotRetrieveFundsFromFaucet = errors.New("cannot retrieve funds from faucet") +) + +const ( + // GasAuto allows to calculate gas automatically when sending transaction. 
+ GasAuto = "auto" + + defaultNodeAddress = "http://localhost:26657" + defaultGasAdjustment = 1.0 + defaultGasLimit = 300000 + + defaultFaucetAddress = "http://localhost:4500" + defaultFaucetDenom = "token" + defaultFaucetMinAmount = 100 + + defaultTXsPerPage = 30 + + searchHeight = "tx.height" + + orderAsc = "asc" +) + +// FaucetClient allows to mock the cosmosfaucet.Client. +// +//go:generate mockery --srcpkg . --name FaucetClient --structname FaucetClient --filename faucet_client.go --with-expecter +type FaucetClient interface { + Transfer(context.Context, cosmosfaucet.TransferRequest) (cosmosfaucet.TransferResponse, error) +} + +// Gasometer allows mocking the tx.CalculateGas func. +// +//go:generate mockery --srcpkg . --name Gasometer --filename gasometer.go --with-expecter +type Gasometer interface { + CalculateGas(clientCtx gogogrpc.ClientConn, txf tx.Factory, msgs ...sdktypes.Msg) (*txtypes.SimulateResponse, uint64, error) +} + +// Signer allows mocking the tx.Sign func. +// +//go:generate mockery --srcpkg . --name Signer --filename signer.go --with-expecter +type Signer interface { + Sign(ctx context.Context, txf tx.Factory, name string, txBuilder client.TxBuilder, overwriteSig bool) error +} + +// Client is a client to access your chain by querying and broadcasting transactions. +type Client struct { + // RPC is Tendermint RPC. + RPC rpcclient.Client + + // TxFactory is a Cosmos SDK tx factory. + TxFactory tx.Factory + + // context is a Cosmos SDK client context. + context client.Context + + // AccountRegistry is the registry to access accounts. 
+ AccountRegistry cosmosaccount.Registry + + accountRetriever client.AccountRetriever + bankQueryClient banktypes.QueryClient + faucetClient FaucetClient + gasometer Gasometer + signer Signer + + bech32Prefix string + + nodeAddress string + out io.Writer + chainID string + + useFaucet bool + faucetAddress string + faucetDenom string + faucetMinAmount uint64 + + homePath string + keyringServiceName string + keyringBackend cosmosaccount.KeyringBackend + keyringDir string + + gas string + gasPrices string + gasAdjustment float64 + fees string + generateOnly bool +} + +// Option configures your client. +// Option, are global to the client and affect all transactions. +// If you want to override a global option on a transaction, use the TxOptions struct. +type Option func(*Client) + +// WithHome sets the data dir of your chain. This option is used to access your chain's +// file based keyring which is only needed when you deal with creating and signing transactions. +// when it is not provided, your data dir will be assumed as `$HOME/.your-chain-id`. +func WithHome(path string) Option { + return func(c *Client) { + c.homePath = path + } +} + +// WithKeyringServiceName used as the keyring name when you are using OS keyring backend. +// by default, it is `cosmos`. +func WithKeyringServiceName(name string) Option { + return func(c *Client) { + c.keyringServiceName = name + } +} + +// WithKeyringBackend sets your keyring backend. By default, it is `test`. +func WithKeyringBackend(backend cosmosaccount.KeyringBackend) Option { + return func(c *Client) { + c.keyringBackend = backend + } +} + +// WithKeyringDir sets the directory of the keyring. By default, it uses cosmosaccount.KeyringHome. +func WithKeyringDir(keyringDir string) Option { + return func(c *Client) { + c.keyringDir = keyringDir + } +} + +// WithNodeAddress sets the node address of your chain. When this option is not provided +// `http://localhost:26657` is used as default. 
+func WithNodeAddress(addr string) Option { + return func(c *Client) { + c.nodeAddress = addr + } +} + +// Deprecated: use WithBech32Prefix instead. +var WithAddressPrefix = WithBech32Prefix + +// WithBech32Prefix sets the address prefix on the client. +func WithBech32Prefix(prefix string) Option { + return func(c *Client) { + c.bech32Prefix = prefix + } +} + +// WithUseFaucet sets the faucet address on the client. +func WithUseFaucet(faucetAddress, denom string, minAmount uint64) Option { + return func(c *Client) { + c.useFaucet = true + c.faucetAddress = faucetAddress + if denom != "" { + c.faucetDenom = denom + } + if minAmount != 0 { + c.faucetMinAmount = minAmount + } + } +} + +// WithGas sets an explicit gas-limit on transactions. +// Set to "auto" to calculate automatically. +func WithGas(gas string) Option { + return func(c *Client) { + c.gas = gas + } +} + +// WithGasPrices sets the price per gas (e.g. 0.1uatom). +func WithGasPrices(gasPrices string) Option { + return func(c *Client) { + c.gasPrices = gasPrices + } +} + +// WithGasAdjustment sets the gas adjustment. +func WithGasAdjustment(gasAdjustment float64) Option { + return func(c *Client) { + c.gasAdjustment = gasAdjustment + } +} + +// WithFees sets the fees (e.g. 10uatom) on the client. +// It will be used for all transactions if not overridden on the transaction options. +func WithFees(fees string) Option { + return func(c *Client) { + c.fees = fees + } +} + +// WithGenerateOnly tells if txs will be generated only. +func WithGenerateOnly(generateOnly bool) Option { + return func(c *Client) { + c.generateOnly = generateOnly + } +} + +// WithRPCClient sets a tendermint RPC client. +// Already set by default. +func WithRPCClient(rpc rpcclient.Client) Option { + return func(c *Client) { + c.RPC = rpc + } +} + +// WithAccountRetriever sets the account retriever +// Already set by default. 
+func WithAccountRetriever(accountRetriever client.AccountRetriever) Option { + return func(c *Client) { + c.accountRetriever = accountRetriever + } +} + +// WithBankQueryClient sets the bank query client. +// Already set by default. +func WithBankQueryClient(bankQueryClient banktypes.QueryClient) Option { + return func(c *Client) { + c.bankQueryClient = bankQueryClient + } +} + +// WithFaucetClient sets the faucet client. +// Already set by default. +func WithFaucetClient(faucetClient FaucetClient) Option { + return func(c *Client) { + c.faucetClient = faucetClient + } +} + +// WithGasometer sets the gasometer. +// Already set by default. +func WithGasometer(gasometer Gasometer) Option { + return func(c *Client) { + c.gasometer = gasometer + } +} + +// WithSigner sets the signer. +// Already set by default. +func WithSigner(signer Signer) Option { + return func(c *Client) { + c.signer = signer + } +} + +// New creates a new client with given options. +func New(ctx context.Context, options ...Option) (Client, error) { + c := Client{ + nodeAddress: defaultNodeAddress, + keyringBackend: cosmosaccount.KeyringTest, + bech32Prefix: cosmosaccount.AccountPrefixCosmos, + faucetAddress: defaultFaucetAddress, + faucetDenom: defaultFaucetDenom, + faucetMinAmount: defaultFaucetMinAmount, + out: io.Discard, + gas: strconv.Itoa(defaultGasLimit), + } + + var err error + + for _, apply := range options { + apply(&c) + } + + if c.RPC == nil { + if c.RPC, err = rpchttp.New(c.nodeAddress, "/websocket"); err != nil { + return Client{}, err + } + } + // Wrap RPC client to have more contextualized errors + c.RPC = rpcWrapper{ + Client: c.RPC, + nodeAddress: c.nodeAddress, + } + + statusResp, err := c.RPC.Status(ctx) + if err != nil { + return Client{}, err + } + + c.chainID = statusResp.NodeInfo.Network + + if c.homePath == "" { + home, err := os.UserHomeDir() + if err != nil { + return Client{}, err + } + c.homePath = filepath.Join(home, "."+c.chainID) + } + + if c.keyringDir == "" { + 
c.keyringDir = c.homePath + } + + c.AccountRegistry, err = cosmosaccount.New( + cosmosaccount.WithKeyringServiceName(c.keyringServiceName), + cosmosaccount.WithKeyringBackend(c.keyringBackend), + cosmosaccount.WithHome(c.keyringDir), + cosmosaccount.WithBech32Prefix(c.bech32Prefix), + ) + if err != nil { + return Client{}, err + } + + c.context = c.newContext() + c.TxFactory = newFactory(c.context) + + if c.accountRetriever == nil { + c.accountRetriever = authtypes.AccountRetriever{} + } + if c.bankQueryClient == nil { + c.bankQueryClient = banktypes.NewQueryClient(c.context) + } + if c.faucetClient == nil { + c.faucetClient = cosmosfaucet.NewClient(c.faucetAddress) + } + if c.gasometer == nil { + c.gasometer = gasometer{} + } + if c.signer == nil { + c.signer = signer{} + } + // set address prefix in SDK global config + c.SetConfigAddressPrefix() + + return c, nil +} + +// LatestBlockHeight returns the latest block height of the app. +func (c Client) LatestBlockHeight(ctx context.Context) (int64, error) { + resp, err := c.Status(ctx) + if err != nil { + return 0, err + } + return resp.SyncInfo.LatestBlockHeight, nil +} + +// WaitForNextBlock waits until next block is committed. +// It reads the current block height and then waits for another block to be +// committed, or returns an error if ctx is canceled. +func (c Client) WaitForNextBlock(ctx context.Context) error { + return c.WaitForNBlocks(ctx, 1) +} + +// WaitForNBlocks reads the current block height and then waits for another n +// blocks to be committed, or returns an error if ctx is canceled. +func (c Client) WaitForNBlocks(ctx context.Context, n int64) error { + start, err := c.LatestBlockHeight(ctx) + if err != nil { + return err + } + return c.WaitForBlockHeight(ctx, start+n) +} + +// WaitForBlockHeight waits until block height h is committed, or returns an +// error if ctx is canceled. 
+func (c Client) WaitForBlockHeight(ctx context.Context, h int64) error { + ticker := time.NewTicker(time.Second) + defer ticker.Stop() + + for { + latestHeight, err := c.LatestBlockHeight(ctx) + if err != nil { + return err + } + if latestHeight >= h { + return nil + } + select { + case <-ctx.Done(): + return errors.Wrap(ctx.Err(), "timeout exceeded waiting for block") + case <-ticker.C: + } + } +} + +// WaitForTx requests the tx from hash, if not found, waits for next block and +// tries again. Returns an error if ctx is canceled. +func (c Client) WaitForTx(ctx context.Context, hash string) (*ctypes.ResultTx, error) { + bz, err := hex.DecodeString(hash) + if err != nil { + return nil, errors.Wrapf(err, "unable to decode tx hash '%s'", hash) + } + for { + resp, err := c.RPC.Tx(ctx, bz, false) + if err != nil { + if strings.Contains(err.Error(), "not found") { + // Tx not found, wait for next block and try again + err := c.WaitForNextBlock(ctx) + if err != nil { + return nil, errors.Wrap(err, "waiting for next block") + } + continue + } + return nil, errors.Wrapf(err, "fetching tx '%s'", hash) + } + // Tx found + return resp, nil + } +} + +// Account returns the account with name or address equal to nameOrAddress. +func (c Client) Account(nameOrAddress string) (cosmosaccount.Account, error) { + defer c.lockBech32Prefix()() + + acc, err := c.AccountRegistry.GetByName(nameOrAddress) + if err == nil { + return acc, nil + } + return c.AccountRegistry.GetByAddress(nameOrAddress) +} + +// Address returns the account address from account name. +func (c Client) Address(accountName string) (string, error) { + a, err := c.AccountRegistry.GetByName(accountName) + if err != nil { + return "", err + } + return a.Address(c.bech32Prefix) +} + +// Context returns client context. +func (c Client) Context() client.Context { + return c.context +} + +// SetConfigAddressPrefix sets the account prefix in the SDK global config. 
+func (c Client) SetConfigAddressPrefix() { + // TODO find a better way if possible. + // https://github.com/ignite/cli/issues/2744 + mconf.Lock() + defer mconf.Unlock() + config := sdktypes.GetConfig() + config.SetBech32PrefixForAccount(c.bech32Prefix, c.bech32Prefix+"pub") +} + +// Response of your broadcasted transaction. +type Response struct { + Codec codec.Codec + + // TxResponse is the underlying tx response. + *sdktypes.TxResponse +} + +// Decode decodes the proto func response defined in your Msg service into your message type. +// message needs to be a pointer. and you need to provide the correct proto message(struct) type to the Decode func. +// +// e.g., for the following CreateChain func the type would be: `types.MsgCreateChainResponse`. +// +// ```proto +// +// service Msg { +// rpc CreateChain(MsgCreateChain) returns (MsgCreateChainResponse); +// } +// +// ``` +// +//nolint:godot,nolintlint +func (r Response) Decode(message proto.Message) error { + data, err := hex.DecodeString(r.Data) + if err != nil { + return err + } + + var txMsgData sdktypes.TxMsgData + if err := r.Codec.Unmarshal(data, &txMsgData); err != nil { + return err + } + + // check deprecated Data + if len(txMsgData.Data) != 0 { + resData := txMsgData.Data[0] + return prototypes.UnmarshalAny(&prototypes.Any{ + // TODO get type url dynamically(basically remove `+ "Response"`) after the following issue has solved. + // https://github.com/ignite/cli/issues/2098 + // https://github.com/cosmos/cosmos-sdk/issues/10496 + TypeUrl: resData.MsgType + "Response", + Value: resData.Data, + }, message) + } + + resData := txMsgData.MsgResponses[0] + return prototypes.UnmarshalAny(&prototypes.Any{ + TypeUrl: resData.TypeUrl, + Value: resData.Value, + }, message) +} + +// Status returns the node status. +func (c Client) Status(ctx context.Context) (*ctypes.ResultStatus, error) { + return c.RPC.Status(ctx) +} + +// protects sdktypes.Config. 
+var mconf sync.Mutex + +func (c Client) lockBech32Prefix() (unlockFn func()) { + mconf.Lock() + config := sdktypes.GetConfig() + config.SetBech32PrefixForAccount(c.bech32Prefix, c.bech32Prefix+"pub") + return mconf.Unlock +} + +func (c Client) BroadcastTx(ctx context.Context, account cosmosaccount.Account, msgs ...sdktypes.Msg) (Response, error) { + txService, err := c.CreateTx(ctx, account, msgs...) + if err != nil { + return Response{}, err + } + + return txService.Broadcast(ctx) +} + +// CreateTxWithOptions creates a transaction with the given options. +// Options override global client options. +func (c Client) CreateTxWithOptions(ctx context.Context, account cosmosaccount.Account, options TxOptions, msgs ...sdktypes.Msg) (TxService, error) { + defer c.lockBech32Prefix()() + + if c.useFaucet && !c.generateOnly { + addr, err := account.Address(c.bech32Prefix) + if err != nil { + return TxService{}, errors.WithStack(err) + } + if err := c.makeSureAccountHasTokens(ctx, addr); err != nil { + return TxService{}, err + } + } + + sdkaddr, err := account.Record.GetAddress() + if err != nil { + return TxService{}, errors.WithStack(err) + } + + clientCtx := c.context. + WithFromName(account.Name). + WithFromAddress(sdkaddr) + + txf, err := c.prepareFactory(clientCtx) + if err != nil { + return TxService{}, err + } + + if options.Memo != "" { + txf = txf.WithMemo(options.Memo) + } + + txf = txf.WithFees(c.fees) + if options.Fees != "" { + txf = txf.WithFees(options.Fees) + } + + if options.GasLimit != 0 { + txf = txf.WithGas(options.GasLimit) + } else { + if c.gasAdjustment != 0 && c.gasAdjustment != defaultGasAdjustment { + txf = txf.WithGasAdjustment(c.gasAdjustment) + } + + var gas uint64 + if c.gas != "" && c.gas != GasAuto { + gas, err = strconv.ParseUint(c.gas, 10, 64) + if err != nil { + return TxService{}, errors.WithStack(err) + } + } else { + _, gas, err = c.gasometer.CalculateGas(clientCtx, txf, msgs...) 
+ if err != nil { + return TxService{}, errors.WithStack(err) + } + // the simulated gas can vary from the actual gas needed for a real transaction + // we add an amount to ensure sufficient gas is provided + gas += 20000 + } + + txf = txf.WithGas(gas) + } + + if c.gasPrices != "" { + txf = txf.WithGasPrices(c.gasPrices) + } + + txUnsigned, err := txf.BuildUnsignedTx(msgs...) + if err != nil { + return TxService{}, errors.WithStack(err) + } + + txUnsigned.SetFeeGranter(clientCtx.FeeGranter) + + return TxService{ + client: c, + clientContext: clientCtx, + txBuilder: txUnsigned, + txFactory: txf, + }, nil +} + +func (c Client) CreateTx(ctx context.Context, account cosmosaccount.Account, msgs ...sdktypes.Msg) (TxService, error) { + return c.CreateTxWithOptions(ctx, account, TxOptions{}, msgs...) +} + +// GetBlockTXs returns the transactions in a block. +// The list of transactions can be empty if there are no transactions in the block +// at the moment this method is called. +// Tendermint might index a limited number of block so trying to fetch transactions +// from a block that is not indexed would return an error. +func (c Client) GetBlockTXs(ctx context.Context, height int64) (txs []TX, err error) { + if height == 0 { + return nil, ErrInvalidBlockHeight + } + + r, err := c.RPC.Block(ctx, &height) + if err != nil { + return nil, errors.Errorf("failed to fetch block %d: %w", height, err) + } + + query := createTxSearchByHeightQuery(height) + + // TODO: improve to fetch pages in parallel (requires fetching page 1 to calculate n. 
of pages) + page := 1 + perPage := defaultTXsPerPage + blockTime := r.Block.Time + for { + res, err := c.RPC.TxSearch(ctx, query, false, &page, &perPage, orderAsc) + if err != nil { + return nil, err + } + + for _, tx := range res.Txs { + txs = append(txs, TX{ + BlockTime: blockTime, + Raw: tx, + }) + } + + // Stop when the last page is fetched + if res.TotalCount <= (page * perPage) { + break + } + + page++ + } + + return txs, nil +} + +// CollectTXs collects transactions from multiple consecutive blocks. +// Transactions from a single block are send to the channel only if all transactions +// from that block are collected successfully. +// Blocks are traversed sequentially starting from a height until the latest block height +// available at the moment this method is called. +// The channel might contain the transactions collected successfully up until that point +// when an error is returned. +func (c Client) CollectTXs(ctx context.Context, fromHeight int64, tc chan<- []TX) error { + defer close(tc) + + latestHeight, err := c.LatestBlockHeight(ctx) + if err != nil { + return errors.Errorf("failed to fetch latest block height: %w", err) + } + + if fromHeight == 0 { + fromHeight = 1 + } + + for height := fromHeight; height <= latestHeight; height++ { + txs, err := c.GetBlockTXs(ctx, height) + if err != nil { + return err + } + + // Ignore blocks without transactions + if txs == nil { + continue + } + + // Make sure that collection finishes if the context + // is done when the transactions channel is full + select { + case <-ctx.Done(): + return ctx.Err() + case tc <- txs: + } + } + + return nil +} + +// makeSureAccountHasTokens makes sure the address has a positive balance. +// It requests funds from the faucet if the address has an empty balance. +func (c *Client) makeSureAccountHasTokens(ctx context.Context, address string) error { + if err := c.checkAccountBalance(ctx, address); err == nil { + return nil + } + + // request coins from the faucet. 
+	faucetResp, err := c.faucetClient.Transfer(ctx, cosmosfaucet.TransferRequest{AccountAddress: address})
+	if err != nil {
+		return errors.Wrap(errCannotRetrieveFundsFromFaucet, err.Error())
+	}
+	if faucetResp.Error != "" {
+		return errors.Wrap(errCannotRetrieveFundsFromFaucet, faucetResp.Error)
+	}
+
+	// make sure funds are retrieved.
+	ctx, cancel := context.WithTimeout(ctx, FaucetTransferEnsureDuration)
+	defer cancel()
+
+	// Poll the balance once per second until it reaches the required
+	// minimum or the timeout above expires.
+	return backoff.Retry(func() error {
+		return c.checkAccountBalance(ctx, address)
+	}, backoff.WithContext(backoff.NewConstantBackOff(time.Second), ctx))
+}
+
+// checkAccountBalance returns nil when the account's balance in the faucet
+// denom is at least the configured minimum amount, or an error otherwise.
+func (c *Client) checkAccountBalance(ctx context.Context, address string) error {
+	resp, err := c.bankQueryClient.Balance(ctx, &banktypes.QueryBalanceRequest{
+		Address: address,
+		Denom:   c.faucetDenom,
+	})
+	if err != nil {
+		return err
+	}
+
+	// Guard against a nil Balance in the response to avoid a panic; a nil
+	// balance is treated the same as an insufficient one.
+	if resp.Balance != nil && resp.Balance.Amount.Uint64() >= c.faucetMinAmount {
+		return nil
+	}
+
+	return errors.Errorf("account has not enough %q balance, min. required amount: %d", c.faucetDenom, c.faucetMinAmount)
+}
+
+// handleBroadcastResult inspects a broadcast response and its error and reports whether the broadcast failed.
+func handleBroadcastResult(resp *sdktypes.TxResponse, err error) error { + if err != nil { + if strings.Contains(err.Error(), "not found") { + return errors.New("make sure that your account has enough balance") + } + return err + } + + if resp.Code > 0 { + return errors.Errorf("error code: '%d' msg: '%s'", resp.Code, resp.RawLog) + } + return nil +} + +func (c *Client) prepareFactory(clientCtx client.Context) (tx.Factory, error) { + var ( + from = clientCtx.GetFromAddress() + txf = c.TxFactory + ) + + if err := c.accountRetriever.EnsureExists(clientCtx, from); err != nil { + return txf, errors.WithStack(err) + } + + initNum, initSeq := txf.AccountNumber(), txf.Sequence() + if initNum == 0 || initSeq == 0 { + num, seq, err := c.accountRetriever.GetAccountNumberSequence(clientCtx, from) + if err != nil { + return txf, errors.WithStack(err) + } + + if initNum == 0 { + txf = txf.WithAccountNumber(num) + } + + if initSeq == 0 { + txf = txf.WithSequence(seq) + } + } + + return txf, nil +} + +func (c Client) newContext() client.Context { + var ( + amino = codec.NewLegacyAmino() + interfaceRegistry = codectypes.NewInterfaceRegistry() + marshaler = codec.NewProtoCodec(interfaceRegistry) + txConfig = authtx.NewTxConfig(marshaler, authtx.DefaultSignModes) + ) + + authtypes.RegisterInterfaces(interfaceRegistry) + cryptocodec.RegisterInterfaces(interfaceRegistry) + sdktypes.RegisterInterfaces(interfaceRegistry) + staking.RegisterInterfaces(interfaceRegistry) + cryptocodec.RegisterInterfaces(interfaceRegistry) + banktypes.RegisterInterfaces(interfaceRegistry) + + return client.Context{}. + WithChainID(c.chainID). + WithInterfaceRegistry(interfaceRegistry). + WithCodec(marshaler). + WithTxConfig(txConfig). + WithLegacyAmino(amino). + WithInput(os.Stdin). + WithOutput(c.out). + WithAccountRetriever(c.accountRetriever). + WithBroadcastMode(flags.BroadcastSync). + WithHomeDir(c.homePath). + WithClient(c.RPC). + WithSkipConfirmation(true). + WithKeyring(c.AccountRegistry.Keyring). 
+ WithGenerateOnly(c.generateOnly) +} + +func newFactory(clientCtx client.Context) tx.Factory { + return tx.Factory{}. + WithChainID(clientCtx.ChainID). + WithKeybase(clientCtx.Keyring). + WithGas(defaultGasLimit). + WithGasAdjustment(defaultGasAdjustment). + WithSignMode(signing.SignMode_SIGN_MODE_UNSPECIFIED). + WithAccountRetriever(clientCtx.AccountRetriever). + WithTxConfig(clientCtx.TxConfig) +} + +func createTxSearchByHeightQuery(height int64) string { + params := url.Values{} + params.Set(searchHeight, strconv.FormatInt(height, 10)) + return params.Encode() +} diff --git a/ignite/pkg/cosmosclient/cosmosclient_test.go b/ignite/pkg/cosmosclient/cosmosclient_test.go new file mode 100644 index 0000000..27ed70a --- /dev/null +++ b/ignite/pkg/cosmosclient/cosmosclient_test.go @@ -0,0 +1,1040 @@ +package cosmosclient_test + +import ( + "bufio" + "context" + "encoding/hex" + "fmt" + "io" + "os" + "testing" + "time" + + "cosmossdk.io/math" + "github.com/cometbft/cometbft/p2p" + ctypes "github.com/cometbft/cometbft/rpc/core/types" + tmtypes "github.com/cometbft/cometbft/types" + "github.com/cosmos/cosmos-sdk/client/flags" + sdktypes "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/tx/signing" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosaccount" + "github.com/ignite/cli/v29/ignite/pkg/cosmosclient" + "github.com/ignite/cli/v29/ignite/pkg/cosmosclient/mocks" + "github.com/ignite/cli/v29/ignite/pkg/cosmosclient/testutil" + "github.com/ignite/cli/v29/ignite/pkg/cosmosfaucet" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +const ( + defaultFaucetDenom = "token" + defaultFaucetMinAmount = 100 +) + +type suite struct { + rpcClient *mocks.RPCClient + accountRetriever *mocks.AccountRetriever + bankQueryClient *mocks.BankQueryClient + gasometer *mocks.Gasometer + 
faucetClient *mocks.FaucetClient + signer *mocks.Signer +} + +func newClient(t *testing.T, setup func(suite), opts ...cosmosclient.Option) cosmosclient.Client { + t.Helper() + + s := suite{ + rpcClient: mocks.NewRPCClient(t), + accountRetriever: mocks.NewAccountRetriever(t), + bankQueryClient: mocks.NewBankQueryClient(t), + gasometer: mocks.NewGasometer(t), + faucetClient: mocks.NewFaucetClient(t), + signer: mocks.NewSigner(t), + } + // Because rpcClient is passed as argument inside clientContext of mocked + // methods, we must EXPECT a call to String (because testify/mock is calling + // String() on mocked methods' args) + s.rpcClient.EXPECT().String().Return("plop").Maybe() + // cosmosclient.New always makes a call to Status + s.rpcClient.EXPECT().Status(mock.Anything). + Return(&ctypes.ResultStatus{ + NodeInfo: p2p.DefaultNodeInfo{Network: "mychain"}, + }, nil).Once() + if setup != nil { + setup(s) + } + opts = append(opts, []cosmosclient.Option{ + cosmosclient.WithKeyringBackend(cosmosaccount.KeyringMemory), + cosmosclient.WithRPCClient(s.rpcClient), + cosmosclient.WithAccountRetriever(s.accountRetriever), + cosmosclient.WithBankQueryClient(s.bankQueryClient), + cosmosclient.WithGasometer(s.gasometer), + cosmosclient.WithFaucetClient(s.faucetClient), + cosmosclient.WithSigner(s.signer), + }...) + c, err := cosmosclient.New(context.Background(), opts...) 
+ require.NoError(t, err) + return c +} + +func TestNew(t *testing.T) { + c := newClient(t, nil) + + ctx := c.Context() + require.Equal(t, "mychain", ctx.ChainID) + require.NotNil(t, ctx.InterfaceRegistry) + require.NotNil(t, ctx.Codec) + require.NotNil(t, ctx.TxConfig) + require.NotNil(t, ctx.LegacyAmino) + require.Equal(t, bufio.NewReader(os.Stdin), ctx.Input) + require.Equal(t, io.Discard, ctx.Output) + require.NotNil(t, ctx.AccountRetriever) + require.Equal(t, flags.BroadcastSync, ctx.BroadcastMode) + home, err := os.UserHomeDir() + require.NoError(t, err) + require.Equal(t, home+"/.mychain", ctx.HomeDir) + require.NotNil(t, ctx.Client) + require.True(t, ctx.SkipConfirm) + require.Equal(t, c.AccountRegistry.Keyring, ctx.Keyring) + require.False(t, ctx.GenerateOnly) + txf := c.TxFactory + require.Equal(t, "mychain", txf.ChainID()) + require.Equal(t, c.AccountRegistry.Keyring, txf.Keybase()) + require.EqualValues(t, 300000, txf.Gas()) + require.Equal(t, 1.0, txf.GasAdjustment()) + require.Equal(t, signing.SignMode_SIGN_MODE_UNSPECIFIED, txf.SignMode()) + require.NotNil(t, txf.AccountRetriever()) +} + +func TestClientWaitForBlockHeight(t *testing.T) { + targetBlockHeight := int64(42) + tests := []struct { + name string + timeout time.Duration + expectedError string + setup func(suite) + }{ + { + name: "ok: no wait", + setup: func(s suite) { + s.rpcClient.EXPECT().Status(mock.Anything).Return(&ctypes.ResultStatus{ + SyncInfo: ctypes.SyncInfo{LatestBlockHeight: targetBlockHeight}, + }, nil) + }, + }, + { + name: "ok: wait 1 time", + timeout: time.Second * 2, // must exceed the wait loop duration + setup: func(s suite) { + s.rpcClient.EXPECT().Status(mock.Anything).Return(&ctypes.ResultStatus{ + SyncInfo: ctypes.SyncInfo{LatestBlockHeight: targetBlockHeight - 1}, + }, nil).Once() + s.rpcClient.EXPECT().Status(mock.Anything).Return(&ctypes.ResultStatus{ + SyncInfo: ctypes.SyncInfo{LatestBlockHeight: targetBlockHeight}, + }, nil).Once() + }, + }, + { + name: "fail: 
wait expired", + timeout: time.Millisecond, + expectedError: "timeout exceeded waiting for block: context deadline exceeded", + setup: func(s suite) { + s.rpcClient.EXPECT().Status(mock.Anything).Return(&ctypes.ResultStatus{ + SyncInfo: ctypes.SyncInfo{LatestBlockHeight: targetBlockHeight - 1}, + }, nil) + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := newClient(t, tt.setup) + ctx, cancel := context.WithTimeout(context.Background(), tt.timeout) + defer cancel() + + err := c.WaitForBlockHeight(ctx, targetBlockHeight) + + if tt.expectedError != "" { + require.EqualError(t, err, tt.expectedError) + return + } + require.NoError(t, err) + }) + } +} + +func TestClientWaitForTx(t *testing.T) { + var ( + ctx = context.Background() + hash = "abcd" + hashBytes, _ = hex.DecodeString(hash) + result = &ctypes.ResultTx{ + Hash: hashBytes, + } + ) + tests := []struct { + name string + hash string + expectedError string + expectedResult *ctypes.ResultTx + setup func(suite) + }{ + { + name: "fail: hash not in hex format", + hash: "zzz", + expectedError: "unable to decode tx hash 'zzz': encoding/hex: invalid byte: U+007A 'z'", + }, + { + name: "ok: tx found immediately", + hash: hash, + expectedResult: result, + setup: func(s suite) { + s.rpcClient.EXPECT().Tx(ctx, hashBytes, false).Return(result, nil) + }, + }, + { + name: "fail: tx returns an unexpected error", + hash: hash, + expectedError: "fetching tx 'abcd': error while requesting node 'http://localhost:26657': oups", + setup: func(s suite) { + s.rpcClient.EXPECT().Tx(ctx, hashBytes, false).Return(nil, errors.New("oups")) + }, + }, + { + name: "ok: tx found after 1 block", + hash: hash, + expectedResult: result, + setup: func(s suite) { + // tx is not found + s.rpcClient.EXPECT().Tx(ctx, hashBytes, false).Return(nil, errors.New("tx abcd not found")).Once() + // wait for next block + s.rpcClient.EXPECT().Status(ctx).Return(&ctypes.ResultStatus{ + SyncInfo: 
ctypes.SyncInfo{LatestBlockHeight: 1}, + }, nil).Once() + s.rpcClient.EXPECT().Status(ctx).Return(&ctypes.ResultStatus{ + SyncInfo: ctypes.SyncInfo{LatestBlockHeight: 2}, + }, nil).Once() + // next block reached, check tx again, this time it's found. + s.rpcClient.EXPECT().Tx(ctx, hashBytes, false).Return(result, nil).Once() + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := newClient(t, tt.setup) + + res, err := c.WaitForTx(ctx, tt.hash) + + if tt.expectedError != "" { + require.EqualError(t, err, tt.expectedError) + return + } + require.NoError(t, err) + require.Equal(t, tt.expectedResult, res) + }) + } +} + +func TestClientAccount(t *testing.T) { + var ( + accountName = "bob" + passphrase = "passphrase" + ) + r, err := cosmosaccount.NewInMemory() + require.NoError(t, err) + expectedAccount, _, err := r.Create(accountName) + require.NoError(t, err) + expectedAddr, err := expectedAccount.Address("cosmos") + require.NoError(t, err) + // Export created account to we can import it in the Client below. 
+ key, err := r.Export(accountName, passphrase) + require.NoError(t, err) + + tests := []struct { + name string + addressOrName string + expectedError string + }{ + { + name: "ok: find by name", + addressOrName: expectedAccount.Name, + }, + { + name: "ok: find by address", + addressOrName: expectedAddr, + }, + { + name: "fail: name not found", + addressOrName: "unknown", + expectedError: "decoding bech32 failed: invalid bech32 string length 7", + }, + { + name: "fail: address not found", + addressOrName: "cosmos1cs4hpwrpna6ucsgsa78jfp403l7gdynukrxkrv", + expectedError: `account "cosmos1cs4hpwrpna6ucsgsa78jfp403l7gdynukrxkrv" does not exist`, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := newClient(t, nil) + _, err := c.AccountRegistry.Import(accountName, key, passphrase) + require.NoError(t, err) + + account, err := c.Account(tt.addressOrName) + + if tt.expectedError != "" { + require.EqualError(t, err, tt.expectedError) + return + } + require.NoError(t, err) + require.Equal(t, expectedAccount, account) + }) + } +} + +func TestClientAddress(t *testing.T) { + var ( + accountName = "bob" + passphrase = "passphrase" + ) + r, err := cosmosaccount.NewInMemory() + require.NoError(t, err) + expectedAccount, _, err := r.Create(accountName) + require.NoError(t, err) + // Export created account to we can import it in the Client below. 
+ key, err := r.Export(accountName, passphrase) + require.NoError(t, err) + + tests := []struct { + name string + accountName string + opts []cosmosclient.Option + expectedError string + expectedPrefix string + }{ + { + name: "ok: name exists", + accountName: expectedAccount.Name, + expectedPrefix: "cosmos", + }, + { + name: "ok: name exists with different prefix", + opts: []cosmosclient.Option{ + cosmosclient.WithAddressPrefix("test"), + }, + accountName: expectedAccount.Name, + expectedPrefix: "test", + }, + { + name: "fail: name not found", + accountName: "unknown", + expectedError: `account "unknown" does not exist`, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := newClient(t, nil, tt.opts...) + _, err := c.AccountRegistry.Import(accountName, key, passphrase) + require.NoError(t, err) + + address, err := c.Address(tt.accountName) + + if tt.expectedError != "" { + require.EqualError(t, err, tt.expectedError) + return + } + require.NoError(t, err) + expectedAddr, err := expectedAccount.Address(tt.expectedPrefix) + require.NoError(t, err) + require.Equal(t, expectedAddr, address) + }) + } +} + +func TestClientStatus(t *testing.T) { + var ( + ctx = context.Background() + expectedStatus = &ctypes.ResultStatus{ + NodeInfo: p2p.DefaultNodeInfo{Network: "mychain"}, + } + ) + tests := []struct { + name string + expectedError string + setup func(suite) + }{ + { + name: "ok", + setup: func(s suite) { + s.rpcClient.EXPECT().Status(ctx).Return(expectedStatus, nil).Once() + }, + }, + { + name: "fail", + expectedError: "error while requesting node 'http://localhost:26657': oups", + setup: func(s suite) { + s.rpcClient.EXPECT().Status(ctx).Return(expectedStatus, errors.New("oups")).Once() + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := newClient(t, tt.setup) + + status, err := c.Status(ctx) + + if tt.expectedError != "" { + require.EqualError(t, err, tt.expectedError) + return + } + require.NoError(t, 
err) + assert.Equal(t, expectedStatus, status) + }) + } +} + +func TestClientCreateTx(t *testing.T) { + var ( + ctx = context.Background() + accountName = "bob" + passphrase = "passphrase" + ) + r, err := cosmosaccount.NewInMemory() + require.NoError(t, err) + a, _, err := r.Create(accountName) + require.NoError(t, err) + // Export created account to we can import it in the Client below. + key, err := r.Export(accountName, passphrase) + require.NoError(t, err) + sdkaddr, err := a.Record.GetAddress() + require.NoError(t, err) + + tests := []struct { + name string + opts []cosmosclient.Option + msg sdktypes.Msg + expectedJSONTx string + expectedError string + setup func(s suite) + }{ + { + name: "fail: account doesn't exist", + expectedError: "nope", + setup: func(s suite) { + s.accountRetriever.EXPECT(). + EnsureExists(mock.Anything, sdkaddr).Return(errors.New("nope")) + }, + }, + { + name: "ok: with default values", + msg: &banktypes.MsgSend{ + FromAddress: "from", + ToAddress: "to", + Amount: sdktypes.NewCoins( + sdktypes.NewCoin("token", math.NewIntFromUint64(1)), + ), + }, + expectedJSONTx: `{"body":{"messages":[{"@type":"/cosmos.bank.v1beta1.MsgSend","from_address":"from","to_address":"to","amount":[{"denom":"token","amount":"1"}]}],"memo":"","timeout_height":"0","timeout_timestamp":null,"unordered":false,"extension_options":[],"non_critical_extension_options":[]},"auth_info":{"signer_infos":[],"fee":{"amount":[],"gas_limit":"300000","payer":"","granter":""},"tip":null},"signatures":[]}`, + setup: func(s suite) { + s.expectPrepareFactory(sdkaddr) + }, + }, + { + name: "ok: with faucet enabled, account balance is high enough", + opts: []cosmosclient.Option{ + cosmosclient.WithUseFaucet("localhost:1234", "", 0), + }, + msg: &banktypes.MsgSend{ + FromAddress: "from", + ToAddress: "to", + Amount: sdktypes.NewCoins( + sdktypes.NewCoin("token", math.NewIntFromUint64(1)), + ), + }, + expectedJSONTx: 
`{"body":{"messages":[{"@type":"/cosmos.bank.v1beta1.MsgSend","from_address":"from","to_address":"to","amount":[{"denom":"token","amount":"1"}]}],"memo":"","timeout_height":"0","timeout_timestamp":null,"unordered":false,"extension_options":[],"non_critical_extension_options":[]},"auth_info":{"signer_infos":[],"fee":{"amount":[],"gas_limit":"300000","payer":"","granter":""},"tip":null},"signatures":[]}`, + setup: func(s suite) { + s.expectMakeSureAccountHasToken(sdkaddr.String(), defaultFaucetMinAmount) + + s.expectPrepareFactory(sdkaddr) + }, + }, + { + name: "ok: with faucet enabled, account balance is too low", + opts: []cosmosclient.Option{ + cosmosclient.WithUseFaucet("localhost:1234", "", 0), + }, + msg: &banktypes.MsgSend{ + FromAddress: "from", + ToAddress: "to", + Amount: sdktypes.NewCoins( + sdktypes.NewCoin("token", math.NewIntFromUint64(1)), + ), + }, + expectedJSONTx: `{"body":{"messages":[{"@type":"/cosmos.bank.v1beta1.MsgSend","from_address":"from","to_address":"to","amount":[{"denom":"token","amount":"1"}]}],"memo":"","timeout_height":"0","timeout_timestamp":null,"unordered":false,"extension_options":[],"non_critical_extension_options":[]},"auth_info":{"signer_infos":[],"fee":{"amount":[],"gas_limit":"300000","payer":"","granter":""},"tip":null},"signatures":[]}`, + setup: func(s suite) { + s.expectMakeSureAccountHasToken(sdkaddr.String(), defaultFaucetMinAmount-1) + s.expectPrepareFactory(sdkaddr) + }, + }, + { + name: "ok: with fees", + opts: []cosmosclient.Option{ + cosmosclient.WithFees("10token"), + }, + msg: &banktypes.MsgSend{ + FromAddress: "from", + ToAddress: "to", + Amount: sdktypes.NewCoins( + sdktypes.NewCoin("token", math.NewIntFromUint64(1)), + ), + }, + expectedJSONTx: 
`{"body":{"messages":[{"@type":"/cosmos.bank.v1beta1.MsgSend","from_address":"from","to_address":"to","amount":[{"denom":"token","amount":"1"}]}],"memo":"","timeout_height":"0","timeout_timestamp":null,"unordered":false,"extension_options":[],"non_critical_extension_options":[]},"auth_info":{"signer_infos":[],"fee":{"amount":[{"denom":"token","amount":"10"}],"gas_limit":"300000","payer":"","granter":""},"tip":null},"signatures":[]}`, + setup: func(s suite) { + s.expectPrepareFactory(sdkaddr) + }, + }, + { + name: "ok: with gas price", + opts: []cosmosclient.Option{ + // Should set fees to 3*defaultGasLimit + cosmosclient.WithGasPrices("3token"), + }, + msg: &banktypes.MsgSend{ + FromAddress: "from", + ToAddress: "to", + Amount: sdktypes.NewCoins( + sdktypes.NewCoin("token", math.NewIntFromUint64(1)), + ), + }, + expectedJSONTx: `{"body":{"messages":[{"@type":"/cosmos.bank.v1beta1.MsgSend","from_address":"from","to_address":"to","amount":[{"denom":"token","amount":"1"}]}],"memo":"","timeout_height":"0","timeout_timestamp":null,"unordered":false,"extension_options":[],"non_critical_extension_options":[]},"auth_info":{"signer_infos":[],"fee":{"amount":[{"denom":"token","amount":"900000"}],"gas_limit":"300000","payer":"","granter":""},"tip":null},"signatures":[]}`, + setup: func(s suite) { + s.expectPrepareFactory(sdkaddr) + }, + }, + { + name: "fail: with fees, gas prices and gas adjustment", + opts: []cosmosclient.Option{ + cosmosclient.WithFees("10token"), + cosmosclient.WithGasPrices("3token"), + cosmosclient.WithGasAdjustment(2.1), + }, + msg: &banktypes.MsgSend{ + FromAddress: "from", + ToAddress: "to", + Amount: sdktypes.NewCoins( + sdktypes.NewCoin("token", math.NewIntFromUint64(1)), + ), + }, + expectedError: "cannot provide both fees and gas prices", + setup: func(s suite) { + s.expectPrepareFactory(sdkaddr) + }, + }, + { + name: "ok: without empty gas limit", + opts: []cosmosclient.Option{ + cosmosclient.WithGas(""), + }, + msg: &banktypes.MsgSend{ + 
FromAddress: "from", + ToAddress: "to", + Amount: sdktypes.NewCoins( + sdktypes.NewCoin("token", math.NewIntFromUint64(1)), + ), + }, + expectedJSONTx: `{"body":{"messages":[{"@type":"/cosmos.bank.v1beta1.MsgSend","from_address":"from","to_address":"to","amount":[{"denom":"token","amount":"1"}]}],"memo":"","timeout_height":"0","timeout_timestamp":null,"unordered":false,"extension_options":[],"non_critical_extension_options":[]},"auth_info":{"signer_infos":[],"fee":{"amount":[],"gas_limit":"20042","payer":"","granter":""},"tip":null},"signatures":[]}`, + setup: func(s suite) { + s.expectPrepareFactory(sdkaddr) + s.gasometer.EXPECT(). + CalculateGas(mock.Anything, mock.Anything, mock.Anything). + Return(nil, 42, nil) + }, + }, + { + name: "ok: without auto gas limit", + opts: []cosmosclient.Option{ + cosmosclient.WithGas("auto"), + }, + msg: &banktypes.MsgSend{ + FromAddress: "from", + ToAddress: "to", + Amount: sdktypes.NewCoins( + sdktypes.NewCoin("token", math.NewIntFromUint64(1)), + ), + }, + expectedJSONTx: `{"body":{"messages":[{"@type":"/cosmos.bank.v1beta1.MsgSend","from_address":"from","to_address":"to","amount":[{"denom":"token","amount":"1"}]}],"memo":"","timeout_height":"0","timeout_timestamp":null,"unordered":false,"extension_options":[],"non_critical_extension_options":[]},"auth_info":{"signer_infos":[],"fee":{"amount":[],"gas_limit":"20042","payer":"","granter":""},"tip":null},"signatures":[]}`, + setup: func(s suite) { + s.expectPrepareFactory(sdkaddr) + s.gasometer.EXPECT(). + CalculateGas(mock.Anything, mock.Anything, mock.Anything). 
+ Return(nil, 42, nil) + }, + }, + { + name: "ok: with gas adjustment", + opts: []cosmosclient.Option{ + cosmosclient.WithGasAdjustment(2.4), + }, + msg: &banktypes.MsgSend{ + FromAddress: "from", + ToAddress: "to", + Amount: sdktypes.NewCoins( + sdktypes.NewCoin("token", math.NewIntFromUint64(1)), + ), + }, + expectedJSONTx: `{"body":{"messages":[{"@type":"/cosmos.bank.v1beta1.MsgSend","from_address":"from","to_address":"to","amount":[{"denom":"token","amount":"1"}]}],"memo":"","timeout_height":"0","timeout_timestamp":null,"unordered":false,"extension_options":[],"non_critical_extension_options":[]},"auth_info":{"signer_infos":[],"fee":{"amount":[],"gas_limit":"300000","payer":"","granter":""},"tip":null},"signatures":[]}`, + setup: func(s suite) { + s.expectPrepareFactory(sdkaddr) + }, + }, + { + name: "ok: without gas price and zero gas adjustment", + opts: []cosmosclient.Option{ + cosmosclient.WithGas("auto"), + cosmosclient.WithGasAdjustment(0), + }, + msg: &banktypes.MsgSend{ + FromAddress: "from", + ToAddress: "to", + Amount: sdktypes.NewCoins( + sdktypes.NewCoin("token", math.NewIntFromUint64(1)), + ), + }, + expectedJSONTx: `{"body":{"messages":[{"@type":"/cosmos.bank.v1beta1.MsgSend","from_address":"from","to_address":"to","amount":[{"denom":"token","amount":"1"}]}],"memo":"","timeout_height":"0","timeout_timestamp":null,"unordered":false,"extension_options":[],"non_critical_extension_options":[]},"auth_info":{"signer_infos":[],"fee":{"amount":[],"gas_limit":"20042","payer":"","granter":""},"tip":null},"signatures":[]} +`, + setup: func(s suite) { + s.expectPrepareFactory(sdkaddr) + s.gasometer.EXPECT(). + CalculateGas(mock.Anything, mock.Anything, mock.Anything). + Return(nil, 42, nil) + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := newClient(t, tt.setup, tt.opts...) 
+ account, err := c.AccountRegistry.Import(accountName, key, passphrase) + require.NoError(t, err) + + txs, err := c.CreateTx(ctx, account, tt.msg) + + if tt.expectedError != "" { + require.EqualError(t, err, tt.expectedError) + return + } + require.NoError(t, err) + assert.NotNil(t, txs) + bz, err := txs.EncodeJSON() + require.NoError(t, err) + require.JSONEq(t, tt.expectedJSONTx, string(bz)) + }) + } +} + +func TestGetBlockTXs(t *testing.T) { + m := testutil.NewTendermintClientMock(t) + ctx := context.Background() + + // Mock the Block RPC endpoint + block := createTestBlock(1) + + m.On("Block", ctx, &block.Height).Return(&ctypes.ResultBlock{Block: &block}, nil) + + // Mock the TxSearch RPC endpoint + searchQry := fmt.Sprintf("tx.height=%d", block.Height) + page := 1 + perPage := 30 + rtx := ctypes.ResultTx{} + resSearch := ctypes.ResultTxSearch{ + Txs: []*ctypes.ResultTx{&rtx}, + TotalCount: 1, + } + + m.On("TxSearch", ctx, searchQry, false, &page, &perPage, "asc").Return(&resSearch, nil) + + // Create a cosmos client that uses the RPC mock + client := cosmosclient.Client{RPC: m} + + txs, err := client.GetBlockTXs(ctx, block.Height) + + // Assert + require.NoError(t, err) + require.Equal(t, txs, []cosmosclient.TX{ + { + BlockTime: block.Time, + Raw: &rtx, + }, + }) + + m.AssertNumberOfCalls(t, "Block", 1) + m.AssertNumberOfCalls(t, "TxSearch", 1) +} + +func TestGetBlockTXsWithBlockError(t *testing.T) { + m := testutil.NewTendermintClientMock(t) + + wantErr := errors.New("expected error") + + // Mock the Block RPC endpoint to return an error + m.OnBlock().Return(nil, wantErr) + + // Create a cosmos client that uses the RPC mock + client := cosmosclient.Client{RPC: m} + + txs, err := client.GetBlockTXs(context.Background(), 1) + + // Assert + require.ErrorIs(t, err, wantErr) + require.Nil(t, txs) + + m.AssertNumberOfCalls(t, "Block", 1) + m.AssertNumberOfCalls(t, "TxSearch", 0) +} + +func TestGetBlockTXsPagination(t *testing.T) { + m := 
testutil.NewTendermintClientMock(t) + + // Mock the Block RPC endpoint + block := createTestBlock(1) + + m.OnBlock().Return(&ctypes.ResultBlock{Block: &block}, nil) + + // Mock the TxSearch RPC endpoint and fake the number of + // transactions, so it is called twice to fetch two pages + ctx := context.Background() + searchQry := fmt.Sprintf("tx.height=%d", block.Height) + perPage := 30 + fakeCount := perPage + 1 + first := 1 + second := 2 + firstPage := ctypes.ResultTxSearch{ + Txs: []*ctypes.ResultTx{{}}, + TotalCount: fakeCount, + } + secondPage := ctypes.ResultTxSearch{ + Txs: []*ctypes.ResultTx{{}}, + TotalCount: fakeCount, + } + + m.On("TxSearch", ctx, searchQry, false, &first, &perPage, "asc").Return(&firstPage, nil) + m.On("TxSearch", ctx, searchQry, false, &second, &perPage, "asc").Return(&secondPage, nil) + + // Create a cosmos client that uses the RPC mock + client := cosmosclient.Client{RPC: m} + + txs, err := client.GetBlockTXs(ctx, block.Height) + + // Assert + require.NoError(t, err) + require.Equal(t, txs, []cosmosclient.TX{ + { + BlockTime: block.Time, + Raw: firstPage.Txs[0], + }, + { + BlockTime: block.Time, + Raw: secondPage.Txs[0], + }, + }) + + m.AssertNumberOfCalls(t, "Block", 1) + m.AssertNumberOfCalls(t, "TxSearch", 2) +} + +func TestGetBlockTXsWithSearchError(t *testing.T) { + m := testutil.NewTendermintClientMock(t) + + wantErr := errors.New("expected error") + + // Mock the Block RPC endpoint + block := createTestBlock(1) + + m.OnBlock().Return(&ctypes.ResultBlock{Block: &block}, nil) + + // Mock the TxSearch RPC endpoint to return an error + m.OnTxSearch().Return(nil, wantErr) + + // Create a cosmos client that uses the RPC mock + client := cosmosclient.Client{RPC: m} + + txs, err := client.GetBlockTXs(context.Background(), block.Height) + + // Assert + require.ErrorIs(t, err, wantErr) + require.Nil(t, txs) + + m.AssertNumberOfCalls(t, "Block", 1) + m.AssertNumberOfCalls(t, "TxSearch", 1) +} + +func TestCollectTXs(t *testing.T) { + m := 
testutil.NewTendermintClientMock(t) + ctx := context.Background() + + // Mock the Status RPC endpoint to report that only two blocks exists + status := ctypes.ResultStatus{ + SyncInfo: ctypes.SyncInfo{ + LatestBlockHeight: 2, + }, + } + + m.On("Status", ctx).Return(&status, nil) + + // Mock the Block RPC endpoint to return two blocks + b1 := createTestBlock(1) + b2 := createTestBlock(2) + + m.On("Block", ctx, &b1.Height).Return(&ctypes.ResultBlock{Block: &b1}, nil) + m.On("Block", ctx, &b2.Height).Return(&ctypes.ResultBlock{Block: &b2}, nil) + + // Mock the TxSearch RPC endpoint to return each of the two block. + // Transactions are empty because only the pointer address is required to assert. + page := 1 + perPage := 30 + q1 := "tx.height=1" + r1 := ctypes.ResultTxSearch{ + Txs: []*ctypes.ResultTx{{}}, + TotalCount: 1, + } + q2 := "tx.height=2" + r2 := ctypes.ResultTxSearch{ + Txs: []*ctypes.ResultTx{{}, {}}, + TotalCount: 2, + } + + m.On("TxSearch", ctx, q1, false, &page, &perPage, "asc").Return(&r1, nil) + m.On("TxSearch", ctx, q2, false, &page, &perPage, "asc").Return(&r2, nil) + + // Prepare expected values + wantTXs := []cosmosclient.TX{ + { + BlockTime: b1.Time, + Raw: r1.Txs[0], + }, + { + BlockTime: b2.Time, + Raw: r2.Txs[0], + }, + { + BlockTime: b2.Time, + Raw: r2.Txs[1], + }, + } + + // Create a cosmos client that uses the RPC mock + client := cosmosclient.Client{RPC: m} + + // Create a channel to receive the transactions from the two blocks. + // The channel must be closed after the call to collect. + tc := make(chan []cosmosclient.TX) + + // Collect all transactions + var ( + txs []cosmosclient.TX + open bool + ) + + finished := make(chan struct{}) + go func() { + defer close(finished) + + for t := range tc { + txs = append(txs, t...) 
+ } + }() + + err := client.CollectTXs(ctx, 1, tc) + + select { + case <-time.After(time.Second): + t.Fatal("expected CollectTXs to finish sooner") + case <-finished: + } + + select { + case _, open = <-tc: + default: + } + + // Assert + require.NoError(t, err) + require.Equal(t, wantTXs, txs) + require.False(t, open, "expected transaction channel to be closed") +} + +func TestCollectTXsWithStatusError(t *testing.T) { + m := testutil.NewTendermintClientMock(t) + + wantErr := errors.New("expected error") + + // Mock the Status RPC endpoint to return an error + m.OnStatus().Return(nil, wantErr) + + // Create a cosmos client that uses the RPC mock + client := cosmosclient.Client{RPC: m} + + // Create a channel to receive the transactions from the two blocks. + // The channel must be closed after the call to collect. + tc := make(chan []cosmosclient.TX) + + open := false + ctx := context.Background() + err := client.CollectTXs(ctx, 1, tc) + + select { + case _, open = <-tc: + default: + } + + // Assert + require.ErrorIs(t, err, wantErr) + require.False(t, open, "expected transaction channel to be closed") +} + +func TestCollectTXsWithBlockError(t *testing.T) { + m := testutil.NewTendermintClientMock(t) + + wantErr := errors.New("expected error") + + // Mock the Status RPC endpoint + status := ctypes.ResultStatus{ + SyncInfo: ctypes.SyncInfo{ + LatestBlockHeight: 1, + }, + } + + m.OnStatus().Return(&status, nil) + + // Mock the Block RPC endpoint to return an error + m.OnBlock().Return(nil, wantErr) + + // Create a cosmos client that uses the RPC mock + client := cosmosclient.Client{RPC: m} + + // Create a channel to receive the transactions from the two blocks. + // The channel must be closed after the call to collect. 
+ tc := make(chan []cosmosclient.TX) + + open := false + ctx := context.Background() + err := client.CollectTXs(ctx, 1, tc) + + select { + case _, open = <-tc: + default: + } + + // Assert + require.ErrorIs(t, err, wantErr) + require.False(t, open, "expected transaction channel to be closed") +} + +func TestCollectTXsWithContextDone(t *testing.T) { + m := testutil.NewTendermintClientMock(t) + + // Mock the Status RPC endpoint + status := ctypes.ResultStatus{ + SyncInfo: ctypes.SyncInfo{ + LatestBlockHeight: 1, + }, + } + + m.OnStatus().Return(&status, nil) + + // Mock the Block RPC endpoint + block := createTestBlock(1) + + m.OnBlock().Return(&ctypes.ResultBlock{Block: &block}, nil) + + // Mock the TxSearch RPC endpoint + rs := ctypes.ResultTxSearch{ + Txs: []*ctypes.ResultTx{{}}, + TotalCount: 1, + } + + m.OnTxSearch().Return(&rs, nil) + + // Create a cosmos client that uses the RPC mock + client := cosmosclient.Client{RPC: m} + + // Create a channel to receive the transactions from the two blocks. + // The channel must be closed after the call to collect. 
+ tc := make(chan []cosmosclient.TX)
+
+ // Create a context and cancel it so the collect call finishes because the context is done
+ ctx, cancel := context.WithCancel(context.Background())
+ cancel()
+
+ open := false
+ err := client.CollectTXs(ctx, 1, tc)
+
+ select {
+ case _, open = <-tc:
+ default:
+ }
+
+ // Assert
+ require.ErrorIs(t, err, ctx.Err())
+ require.False(t, open, "expected transaction channel to be closed")
+}
+
+func (s suite) expectMakeSureAccountHasToken(address string, balance int64) {
+ currentBalance := sdktypes.NewInt64Coin(defaultFaucetDenom, balance)
+ s.bankQueryClient.EXPECT().Balance(
+ context.Background(),
+ &banktypes.QueryBalanceRequest{
+ Address: address,
+ Denom: defaultFaucetDenom,
+ },
+ ).Return(
+ &banktypes.QueryBalanceResponse{
+ Balance: &currentBalance,
+ },
+ nil,
+ ).Once()
+ if balance >= defaultFaucetMinAmount {
+ // balance is high enough, faucet won't be called
+ return
+ }
+
+ s.faucetClient.EXPECT().Transfer(context.Background(),
+ cosmosfaucet.TransferRequest{AccountAddress: address},
+ ).Return(
+ cosmosfaucet.TransferResponse{}, nil,
+ )
+
+ newBalance := sdktypes.NewInt64Coin(defaultFaucetDenom, defaultFaucetMinAmount)
+ s.bankQueryClient.EXPECT().Balance(
+ mock.Anything,
+ &banktypes.QueryBalanceRequest{
+ Address: address,
+ Denom: defaultFaucetDenom,
+ },
+ ).Return(
+ &banktypes.QueryBalanceResponse{
+ Balance: &newBalance,
+ },
+ nil,
+ ).Once()
+}
+
+func (s suite) expectPrepareFactory(sdkaddr sdktypes.Address) {
+ s.accountRetriever.EXPECT().
+ EnsureExists(mock.Anything, sdkaddr).
+ Return(nil)
+ s.accountRetriever.EXPECT().
+ GetAccountNumberSequence(mock.Anything, sdkaddr).
+ Return(1, 2, nil) +} + +func createTestBlock(height int64) tmtypes.Block { + return tmtypes.Block{ + Header: tmtypes.Header{ + Height: height, + Time: time.Now(), + }, + } +} diff --git a/ignite/pkg/cosmosclient/gasometer.go b/ignite/pkg/cosmosclient/gasometer.go new file mode 100644 index 0000000..9210641 --- /dev/null +++ b/ignite/pkg/cosmosclient/gasometer.go @@ -0,0 +1,16 @@ +package cosmosclient + +import ( + gogogrpc "github.com/cosmos/gogoproto/grpc" + + "github.com/cosmos/cosmos-sdk/client/tx" + sdktypes "github.com/cosmos/cosmos-sdk/types" + txtypes "github.com/cosmos/cosmos-sdk/types/tx" +) + +// gasometer implements the Gasometer interface. +type gasometer struct{} + +func (gasometer) CalculateGas(clientCtx gogogrpc.ClientConn, txf tx.Factory, msgs ...sdktypes.Msg) (*txtypes.SimulateResponse, uint64, error) { + return tx.CalculateGas(clientCtx, txf, msgs...) +} diff --git a/ignite/pkg/cosmosclient/mocks/account_retriever.go b/ignite/pkg/cosmosclient/mocks/account_retriever.go new file mode 100644 index 0000000..e80eeee --- /dev/null +++ b/ignite/pkg/cosmosclient/mocks/account_retriever.go @@ -0,0 +1,273 @@ +// Code generated by mockery v2.53.3. DO NOT EDIT. 
+ +package mocks + +import ( + client "github.com/cosmos/cosmos-sdk/client" + mock "github.com/stretchr/testify/mock" + + types "github.com/cosmos/cosmos-sdk/types" +) + +// AccountRetriever is an autogenerated mock type for the AccountRetriever type +type AccountRetriever struct { + mock.Mock +} + +type AccountRetriever_Expecter struct { + mock *mock.Mock +} + +func (_m *AccountRetriever) EXPECT() *AccountRetriever_Expecter { + return &AccountRetriever_Expecter{mock: &_m.Mock} +} + +// EnsureExists provides a mock function with given fields: clientCtx, addr +func (_m *AccountRetriever) EnsureExists(clientCtx client.Context, addr types.AccAddress) error { + ret := _m.Called(clientCtx, addr) + + if len(ret) == 0 { + panic("no return value specified for EnsureExists") + } + + var r0 error + if rf, ok := ret.Get(0).(func(client.Context, types.AccAddress) error); ok { + r0 = rf(clientCtx, addr) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// AccountRetriever_EnsureExists_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'EnsureExists' +type AccountRetriever_EnsureExists_Call struct { + *mock.Call +} + +// EnsureExists is a helper method to define mock.On call +// - clientCtx client.Context +// - addr types.AccAddress +func (_e *AccountRetriever_Expecter) EnsureExists(clientCtx interface{}, addr interface{}) *AccountRetriever_EnsureExists_Call { + return &AccountRetriever_EnsureExists_Call{Call: _e.mock.On("EnsureExists", clientCtx, addr)} +} + +func (_c *AccountRetriever_EnsureExists_Call) Run(run func(clientCtx client.Context, addr types.AccAddress)) *AccountRetriever_EnsureExists_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(client.Context), args[1].(types.AccAddress)) + }) + return _c +} + +func (_c *AccountRetriever_EnsureExists_Call) Return(_a0 error) *AccountRetriever_EnsureExists_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *AccountRetriever_EnsureExists_Call) RunAndReturn(run 
func(client.Context, types.AccAddress) error) *AccountRetriever_EnsureExists_Call { + _c.Call.Return(run) + return _c +} + +// GetAccount provides a mock function with given fields: clientCtx, addr +func (_m *AccountRetriever) GetAccount(clientCtx client.Context, addr types.AccAddress) (client.Account, error) { + ret := _m.Called(clientCtx, addr) + + if len(ret) == 0 { + panic("no return value specified for GetAccount") + } + + var r0 client.Account + var r1 error + if rf, ok := ret.Get(0).(func(client.Context, types.AccAddress) (client.Account, error)); ok { + return rf(clientCtx, addr) + } + if rf, ok := ret.Get(0).(func(client.Context, types.AccAddress) client.Account); ok { + r0 = rf(clientCtx, addr) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(client.Account) + } + } + + if rf, ok := ret.Get(1).(func(client.Context, types.AccAddress) error); ok { + r1 = rf(clientCtx, addr) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// AccountRetriever_GetAccount_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAccount' +type AccountRetriever_GetAccount_Call struct { + *mock.Call +} + +// GetAccount is a helper method to define mock.On call +// - clientCtx client.Context +// - addr types.AccAddress +func (_e *AccountRetriever_Expecter) GetAccount(clientCtx interface{}, addr interface{}) *AccountRetriever_GetAccount_Call { + return &AccountRetriever_GetAccount_Call{Call: _e.mock.On("GetAccount", clientCtx, addr)} +} + +func (_c *AccountRetriever_GetAccount_Call) Run(run func(clientCtx client.Context, addr types.AccAddress)) *AccountRetriever_GetAccount_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(client.Context), args[1].(types.AccAddress)) + }) + return _c +} + +func (_c *AccountRetriever_GetAccount_Call) Return(_a0 client.Account, _a1 error) *AccountRetriever_GetAccount_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *AccountRetriever_GetAccount_Call) RunAndReturn(run 
func(client.Context, types.AccAddress) (client.Account, error)) *AccountRetriever_GetAccount_Call { + _c.Call.Return(run) + return _c +} + +// GetAccountNumberSequence provides a mock function with given fields: clientCtx, addr +func (_m *AccountRetriever) GetAccountNumberSequence(clientCtx client.Context, addr types.AccAddress) (uint64, uint64, error) { + ret := _m.Called(clientCtx, addr) + + if len(ret) == 0 { + panic("no return value specified for GetAccountNumberSequence") + } + + var r0 uint64 + var r1 uint64 + var r2 error + if rf, ok := ret.Get(0).(func(client.Context, types.AccAddress) (uint64, uint64, error)); ok { + return rf(clientCtx, addr) + } + if rf, ok := ret.Get(0).(func(client.Context, types.AccAddress) uint64); ok { + r0 = rf(clientCtx, addr) + } else { + r0 = ret.Get(0).(uint64) + } + + if rf, ok := ret.Get(1).(func(client.Context, types.AccAddress) uint64); ok { + r1 = rf(clientCtx, addr) + } else { + r1 = ret.Get(1).(uint64) + } + + if rf, ok := ret.Get(2).(func(client.Context, types.AccAddress) error); ok { + r2 = rf(clientCtx, addr) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// AccountRetriever_GetAccountNumberSequence_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAccountNumberSequence' +type AccountRetriever_GetAccountNumberSequence_Call struct { + *mock.Call +} + +// GetAccountNumberSequence is a helper method to define mock.On call +// - clientCtx client.Context +// - addr types.AccAddress +func (_e *AccountRetriever_Expecter) GetAccountNumberSequence(clientCtx interface{}, addr interface{}) *AccountRetriever_GetAccountNumberSequence_Call { + return &AccountRetriever_GetAccountNumberSequence_Call{Call: _e.mock.On("GetAccountNumberSequence", clientCtx, addr)} +} + +func (_c *AccountRetriever_GetAccountNumberSequence_Call) Run(run func(clientCtx client.Context, addr types.AccAddress)) *AccountRetriever_GetAccountNumberSequence_Call { + _c.Call.Run(func(args mock.Arguments) 
{ + run(args[0].(client.Context), args[1].(types.AccAddress)) + }) + return _c +} + +func (_c *AccountRetriever_GetAccountNumberSequence_Call) Return(accNum uint64, accSeq uint64, err error) *AccountRetriever_GetAccountNumberSequence_Call { + _c.Call.Return(accNum, accSeq, err) + return _c +} + +func (_c *AccountRetriever_GetAccountNumberSequence_Call) RunAndReturn(run func(client.Context, types.AccAddress) (uint64, uint64, error)) *AccountRetriever_GetAccountNumberSequence_Call { + _c.Call.Return(run) + return _c +} + +// GetAccountWithHeight provides a mock function with given fields: clientCtx, addr +func (_m *AccountRetriever) GetAccountWithHeight(clientCtx client.Context, addr types.AccAddress) (client.Account, int64, error) { + ret := _m.Called(clientCtx, addr) + + if len(ret) == 0 { + panic("no return value specified for GetAccountWithHeight") + } + + var r0 client.Account + var r1 int64 + var r2 error + if rf, ok := ret.Get(0).(func(client.Context, types.AccAddress) (client.Account, int64, error)); ok { + return rf(clientCtx, addr) + } + if rf, ok := ret.Get(0).(func(client.Context, types.AccAddress) client.Account); ok { + r0 = rf(clientCtx, addr) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(client.Account) + } + } + + if rf, ok := ret.Get(1).(func(client.Context, types.AccAddress) int64); ok { + r1 = rf(clientCtx, addr) + } else { + r1 = ret.Get(1).(int64) + } + + if rf, ok := ret.Get(2).(func(client.Context, types.AccAddress) error); ok { + r2 = rf(clientCtx, addr) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// AccountRetriever_GetAccountWithHeight_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAccountWithHeight' +type AccountRetriever_GetAccountWithHeight_Call struct { + *mock.Call +} + +// GetAccountWithHeight is a helper method to define mock.On call +// - clientCtx client.Context +// - addr types.AccAddress +func (_e *AccountRetriever_Expecter) GetAccountWithHeight(clientCtx 
interface{}, addr interface{}) *AccountRetriever_GetAccountWithHeight_Call { + return &AccountRetriever_GetAccountWithHeight_Call{Call: _e.mock.On("GetAccountWithHeight", clientCtx, addr)} +} + +func (_c *AccountRetriever_GetAccountWithHeight_Call) Run(run func(clientCtx client.Context, addr types.AccAddress)) *AccountRetriever_GetAccountWithHeight_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(client.Context), args[1].(types.AccAddress)) + }) + return _c +} + +func (_c *AccountRetriever_GetAccountWithHeight_Call) Return(_a0 client.Account, _a1 int64, _a2 error) *AccountRetriever_GetAccountWithHeight_Call { + _c.Call.Return(_a0, _a1, _a2) + return _c +} + +func (_c *AccountRetriever_GetAccountWithHeight_Call) RunAndReturn(run func(client.Context, types.AccAddress) (client.Account, int64, error)) *AccountRetriever_GetAccountWithHeight_Call { + _c.Call.Return(run) + return _c +} + +// NewAccountRetriever creates a new instance of AccountRetriever. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewAccountRetriever(t interface { + mock.TestingT + Cleanup(func()) +}) *AccountRetriever { + mock := &AccountRetriever{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/ignite/pkg/cosmosclient/mocks/bank_query_client.go b/ignite/pkg/cosmosclient/mocks/bank_query_client.go new file mode 100644 index 0000000..c02f3b0 --- /dev/null +++ b/ignite/pkg/cosmosclient/mocks/bank_query_client.go @@ -0,0 +1,1002 @@ +// Code generated by mockery v2.53.3. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + grpc "google.golang.org/grpc" + + mock "github.com/stretchr/testify/mock" + + types "github.com/cosmos/cosmos-sdk/x/bank/types" +) + +// BankQueryClient is an autogenerated mock type for the QueryClient type +type BankQueryClient struct { + mock.Mock +} + +type BankQueryClient_Expecter struct { + mock *mock.Mock +} + +func (_m *BankQueryClient) EXPECT() *BankQueryClient_Expecter { + return &BankQueryClient_Expecter{mock: &_m.Mock} +} + +// AllBalances provides a mock function with given fields: ctx, in, opts +func (_m *BankQueryClient) AllBalances(ctx context.Context, in *types.QueryAllBalancesRequest, opts ...grpc.CallOption) (*types.QueryAllBalancesResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for AllBalances") + } + + var r0 *types.QueryAllBalancesResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryAllBalancesRequest, ...grpc.CallOption) (*types.QueryAllBalancesResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryAllBalancesRequest, ...grpc.CallOption) *types.QueryAllBalancesResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*types.QueryAllBalancesResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *types.QueryAllBalancesRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// BankQueryClient_AllBalances_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AllBalances' +type BankQueryClient_AllBalances_Call struct { + *mock.Call +} + +// AllBalances is a helper method to define mock.On call +// - ctx context.Context +// - in *types.QueryAllBalancesRequest +// - opts ...grpc.CallOption +func (_e *BankQueryClient_Expecter) AllBalances(ctx interface{}, in interface{}, opts ...interface{}) *BankQueryClient_AllBalances_Call { + return &BankQueryClient_AllBalances_Call{Call: _e.mock.On("AllBalances", + append([]interface{}{ctx, in}, opts...)...)} +} + +func (_c *BankQueryClient_AllBalances_Call) Run(run func(ctx context.Context, in *types.QueryAllBalancesRequest, opts ...grpc.CallOption)) *BankQueryClient_AllBalances_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]grpc.CallOption, len(args)-2) + for i, a := range args[2:] { + if a != nil { + variadicArgs[i] = a.(grpc.CallOption) + } + } + run(args[0].(context.Context), args[1].(*types.QueryAllBalancesRequest), variadicArgs...) + }) + return _c +} + +func (_c *BankQueryClient_AllBalances_Call) Return(_a0 *types.QueryAllBalancesResponse, _a1 error) *BankQueryClient_AllBalances_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *BankQueryClient_AllBalances_Call) RunAndReturn(run func(context.Context, *types.QueryAllBalancesRequest, ...grpc.CallOption) (*types.QueryAllBalancesResponse, error)) *BankQueryClient_AllBalances_Call { + _c.Call.Return(run) + return _c +} + +// Balance provides a mock function with given fields: ctx, in, opts +func (_m *BankQueryClient) Balance(ctx context.Context, in *types.QueryBalanceRequest, opts ...grpc.CallOption) (*types.QueryBalanceResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) 
+ ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for Balance") + } + + var r0 *types.QueryBalanceResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryBalanceRequest, ...grpc.CallOption) (*types.QueryBalanceResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryBalanceRequest, ...grpc.CallOption) *types.QueryBalanceResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*types.QueryBalanceResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *types.QueryBalanceRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// BankQueryClient_Balance_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Balance' +type BankQueryClient_Balance_Call struct { + *mock.Call +} + +// Balance is a helper method to define mock.On call +// - ctx context.Context +// - in *types.QueryBalanceRequest +// - opts ...grpc.CallOption +func (_e *BankQueryClient_Expecter) Balance(ctx interface{}, in interface{}, opts ...interface{}) *BankQueryClient_Balance_Call { + return &BankQueryClient_Balance_Call{Call: _e.mock.On("Balance", + append([]interface{}{ctx, in}, opts...)...)} +} + +func (_c *BankQueryClient_Balance_Call) Run(run func(ctx context.Context, in *types.QueryBalanceRequest, opts ...grpc.CallOption)) *BankQueryClient_Balance_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]grpc.CallOption, len(args)-2) + for i, a := range args[2:] { + if a != nil { + variadicArgs[i] = a.(grpc.CallOption) + } + } + run(args[0].(context.Context), args[1].(*types.QueryBalanceRequest), variadicArgs...) 
+ }) + return _c +} + +func (_c *BankQueryClient_Balance_Call) Return(_a0 *types.QueryBalanceResponse, _a1 error) *BankQueryClient_Balance_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *BankQueryClient_Balance_Call) RunAndReturn(run func(context.Context, *types.QueryBalanceRequest, ...grpc.CallOption) (*types.QueryBalanceResponse, error)) *BankQueryClient_Balance_Call { + _c.Call.Return(run) + return _c +} + +// DenomMetadata provides a mock function with given fields: ctx, in, opts +func (_m *BankQueryClient) DenomMetadata(ctx context.Context, in *types.QueryDenomMetadataRequest, opts ...grpc.CallOption) (*types.QueryDenomMetadataResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for DenomMetadata") + } + + var r0 *types.QueryDenomMetadataResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryDenomMetadataRequest, ...grpc.CallOption) (*types.QueryDenomMetadataResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryDenomMetadataRequest, ...grpc.CallOption) *types.QueryDenomMetadataResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*types.QueryDenomMetadataResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *types.QueryDenomMetadataRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// BankQueryClient_DenomMetadata_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DenomMetadata' +type BankQueryClient_DenomMetadata_Call struct { + *mock.Call +} + +// DenomMetadata is a helper method to define mock.On call +// - ctx context.Context +// - in *types.QueryDenomMetadataRequest +// - opts ...grpc.CallOption +func (_e *BankQueryClient_Expecter) DenomMetadata(ctx interface{}, in interface{}, opts ...interface{}) *BankQueryClient_DenomMetadata_Call { + return &BankQueryClient_DenomMetadata_Call{Call: _e.mock.On("DenomMetadata", + append([]interface{}{ctx, in}, opts...)...)} +} + +func (_c *BankQueryClient_DenomMetadata_Call) Run(run func(ctx context.Context, in *types.QueryDenomMetadataRequest, opts ...grpc.CallOption)) *BankQueryClient_DenomMetadata_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]grpc.CallOption, len(args)-2) + for i, a := range args[2:] { + if a != nil { + variadicArgs[i] = a.(grpc.CallOption) + } + } + run(args[0].(context.Context), args[1].(*types.QueryDenomMetadataRequest), variadicArgs...) 
+ }) + return _c +} + +func (_c *BankQueryClient_DenomMetadata_Call) Return(_a0 *types.QueryDenomMetadataResponse, _a1 error) *BankQueryClient_DenomMetadata_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *BankQueryClient_DenomMetadata_Call) RunAndReturn(run func(context.Context, *types.QueryDenomMetadataRequest, ...grpc.CallOption) (*types.QueryDenomMetadataResponse, error)) *BankQueryClient_DenomMetadata_Call { + _c.Call.Return(run) + return _c +} + +// DenomMetadataByQueryString provides a mock function with given fields: ctx, in, opts +func (_m *BankQueryClient) DenomMetadataByQueryString(ctx context.Context, in *types.QueryDenomMetadataByQueryStringRequest, opts ...grpc.CallOption) (*types.QueryDenomMetadataByQueryStringResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for DenomMetadataByQueryString") + } + + var r0 *types.QueryDenomMetadataByQueryStringResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryDenomMetadataByQueryStringRequest, ...grpc.CallOption) (*types.QueryDenomMetadataByQueryStringResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryDenomMetadataByQueryStringRequest, ...grpc.CallOption) *types.QueryDenomMetadataByQueryStringResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*types.QueryDenomMetadataByQueryStringResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *types.QueryDenomMetadataByQueryStringRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// BankQueryClient_DenomMetadataByQueryString_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DenomMetadataByQueryString' +type BankQueryClient_DenomMetadataByQueryString_Call struct { + *mock.Call +} + +// DenomMetadataByQueryString is a helper method to define mock.On call +// - ctx context.Context +// - in *types.QueryDenomMetadataByQueryStringRequest +// - opts ...grpc.CallOption +func (_e *BankQueryClient_Expecter) DenomMetadataByQueryString(ctx interface{}, in interface{}, opts ...interface{}) *BankQueryClient_DenomMetadataByQueryString_Call { + return &BankQueryClient_DenomMetadataByQueryString_Call{Call: _e.mock.On("DenomMetadataByQueryString", + append([]interface{}{ctx, in}, opts...)...)} +} + +func (_c *BankQueryClient_DenomMetadataByQueryString_Call) Run(run func(ctx context.Context, in *types.QueryDenomMetadataByQueryStringRequest, opts ...grpc.CallOption)) *BankQueryClient_DenomMetadataByQueryString_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]grpc.CallOption, len(args)-2) + for i, a := range args[2:] { + if a != nil { + variadicArgs[i] = a.(grpc.CallOption) + } + } + run(args[0].(context.Context), args[1].(*types.QueryDenomMetadataByQueryStringRequest), variadicArgs...) 
+ }) + return _c +} + +func (_c *BankQueryClient_DenomMetadataByQueryString_Call) Return(_a0 *types.QueryDenomMetadataByQueryStringResponse, _a1 error) *BankQueryClient_DenomMetadataByQueryString_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *BankQueryClient_DenomMetadataByQueryString_Call) RunAndReturn(run func(context.Context, *types.QueryDenomMetadataByQueryStringRequest, ...grpc.CallOption) (*types.QueryDenomMetadataByQueryStringResponse, error)) *BankQueryClient_DenomMetadataByQueryString_Call { + _c.Call.Return(run) + return _c +} + +// DenomOwners provides a mock function with given fields: ctx, in, opts +func (_m *BankQueryClient) DenomOwners(ctx context.Context, in *types.QueryDenomOwnersRequest, opts ...grpc.CallOption) (*types.QueryDenomOwnersResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for DenomOwners") + } + + var r0 *types.QueryDenomOwnersResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryDenomOwnersRequest, ...grpc.CallOption) (*types.QueryDenomOwnersResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryDenomOwnersRequest, ...grpc.CallOption) *types.QueryDenomOwnersResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*types.QueryDenomOwnersResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *types.QueryDenomOwnersRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// BankQueryClient_DenomOwners_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DenomOwners' +type BankQueryClient_DenomOwners_Call struct { + *mock.Call +} + +// DenomOwners is a helper method to define mock.On call +// - ctx context.Context +// - in *types.QueryDenomOwnersRequest +// - opts ...grpc.CallOption +func (_e *BankQueryClient_Expecter) DenomOwners(ctx interface{}, in interface{}, opts ...interface{}) *BankQueryClient_DenomOwners_Call { + return &BankQueryClient_DenomOwners_Call{Call: _e.mock.On("DenomOwners", + append([]interface{}{ctx, in}, opts...)...)} +} + +func (_c *BankQueryClient_DenomOwners_Call) Run(run func(ctx context.Context, in *types.QueryDenomOwnersRequest, opts ...grpc.CallOption)) *BankQueryClient_DenomOwners_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]grpc.CallOption, len(args)-2) + for i, a := range args[2:] { + if a != nil { + variadicArgs[i] = a.(grpc.CallOption) + } + } + run(args[0].(context.Context), args[1].(*types.QueryDenomOwnersRequest), variadicArgs...) 
+ }) + return _c +} + +func (_c *BankQueryClient_DenomOwners_Call) Return(_a0 *types.QueryDenomOwnersResponse, _a1 error) *BankQueryClient_DenomOwners_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *BankQueryClient_DenomOwners_Call) RunAndReturn(run func(context.Context, *types.QueryDenomOwnersRequest, ...grpc.CallOption) (*types.QueryDenomOwnersResponse, error)) *BankQueryClient_DenomOwners_Call { + _c.Call.Return(run) + return _c +} + +// DenomOwnersByQuery provides a mock function with given fields: ctx, in, opts +func (_m *BankQueryClient) DenomOwnersByQuery(ctx context.Context, in *types.QueryDenomOwnersByQueryRequest, opts ...grpc.CallOption) (*types.QueryDenomOwnersByQueryResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for DenomOwnersByQuery") + } + + var r0 *types.QueryDenomOwnersByQueryResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryDenomOwnersByQueryRequest, ...grpc.CallOption) (*types.QueryDenomOwnersByQueryResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryDenomOwnersByQueryRequest, ...grpc.CallOption) *types.QueryDenomOwnersByQueryResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*types.QueryDenomOwnersByQueryResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *types.QueryDenomOwnersByQueryRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// BankQueryClient_DenomOwnersByQuery_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DenomOwnersByQuery' +type BankQueryClient_DenomOwnersByQuery_Call struct { + *mock.Call +} + +// DenomOwnersByQuery is a helper method to define mock.On call +// - ctx context.Context +// - in *types.QueryDenomOwnersByQueryRequest +// - opts ...grpc.CallOption +func (_e *BankQueryClient_Expecter) DenomOwnersByQuery(ctx interface{}, in interface{}, opts ...interface{}) *BankQueryClient_DenomOwnersByQuery_Call { + return &BankQueryClient_DenomOwnersByQuery_Call{Call: _e.mock.On("DenomOwnersByQuery", + append([]interface{}{ctx, in}, opts...)...)} +} + +func (_c *BankQueryClient_DenomOwnersByQuery_Call) Run(run func(ctx context.Context, in *types.QueryDenomOwnersByQueryRequest, opts ...grpc.CallOption)) *BankQueryClient_DenomOwnersByQuery_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]grpc.CallOption, len(args)-2) + for i, a := range args[2:] { + if a != nil { + variadicArgs[i] = a.(grpc.CallOption) + } + } + run(args[0].(context.Context), args[1].(*types.QueryDenomOwnersByQueryRequest), variadicArgs...) 
+ }) + return _c +} + +func (_c *BankQueryClient_DenomOwnersByQuery_Call) Return(_a0 *types.QueryDenomOwnersByQueryResponse, _a1 error) *BankQueryClient_DenomOwnersByQuery_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *BankQueryClient_DenomOwnersByQuery_Call) RunAndReturn(run func(context.Context, *types.QueryDenomOwnersByQueryRequest, ...grpc.CallOption) (*types.QueryDenomOwnersByQueryResponse, error)) *BankQueryClient_DenomOwnersByQuery_Call { + _c.Call.Return(run) + return _c +} + +// DenomsMetadata provides a mock function with given fields: ctx, in, opts +func (_m *BankQueryClient) DenomsMetadata(ctx context.Context, in *types.QueryDenomsMetadataRequest, opts ...grpc.CallOption) (*types.QueryDenomsMetadataResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for DenomsMetadata") + } + + var r0 *types.QueryDenomsMetadataResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryDenomsMetadataRequest, ...grpc.CallOption) (*types.QueryDenomsMetadataResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryDenomsMetadataRequest, ...grpc.CallOption) *types.QueryDenomsMetadataResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*types.QueryDenomsMetadataResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *types.QueryDenomsMetadataRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// BankQueryClient_DenomsMetadata_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DenomsMetadata' +type BankQueryClient_DenomsMetadata_Call struct { + *mock.Call +} + +// DenomsMetadata is a helper method to define mock.On call +// - ctx context.Context +// - in *types.QueryDenomsMetadataRequest +// - opts ...grpc.CallOption +func (_e *BankQueryClient_Expecter) DenomsMetadata(ctx interface{}, in interface{}, opts ...interface{}) *BankQueryClient_DenomsMetadata_Call { + return &BankQueryClient_DenomsMetadata_Call{Call: _e.mock.On("DenomsMetadata", + append([]interface{}{ctx, in}, opts...)...)} +} + +func (_c *BankQueryClient_DenomsMetadata_Call) Run(run func(ctx context.Context, in *types.QueryDenomsMetadataRequest, opts ...grpc.CallOption)) *BankQueryClient_DenomsMetadata_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]grpc.CallOption, len(args)-2) + for i, a := range args[2:] { + if a != nil { + variadicArgs[i] = a.(grpc.CallOption) + } + } + run(args[0].(context.Context), args[1].(*types.QueryDenomsMetadataRequest), variadicArgs...) 
+ }) + return _c +} + +func (_c *BankQueryClient_DenomsMetadata_Call) Return(_a0 *types.QueryDenomsMetadataResponse, _a1 error) *BankQueryClient_DenomsMetadata_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *BankQueryClient_DenomsMetadata_Call) RunAndReturn(run func(context.Context, *types.QueryDenomsMetadataRequest, ...grpc.CallOption) (*types.QueryDenomsMetadataResponse, error)) *BankQueryClient_DenomsMetadata_Call { + _c.Call.Return(run) + return _c +} + +// Params provides a mock function with given fields: ctx, in, opts +func (_m *BankQueryClient) Params(ctx context.Context, in *types.QueryParamsRequest, opts ...grpc.CallOption) (*types.QueryParamsResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for Params") + } + + var r0 *types.QueryParamsResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryParamsRequest, ...grpc.CallOption) (*types.QueryParamsResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryParamsRequest, ...grpc.CallOption) *types.QueryParamsResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*types.QueryParamsResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *types.QueryParamsRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// BankQueryClient_Params_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Params' +type BankQueryClient_Params_Call struct { + *mock.Call +} + +// Params is a helper method to define mock.On call +// - ctx context.Context +// - in *types.QueryParamsRequest +// - opts ...grpc.CallOption +func (_e *BankQueryClient_Expecter) Params(ctx interface{}, in interface{}, opts ...interface{}) *BankQueryClient_Params_Call { + return &BankQueryClient_Params_Call{Call: _e.mock.On("Params", + append([]interface{}{ctx, in}, opts...)...)} +} + +func (_c *BankQueryClient_Params_Call) Run(run func(ctx context.Context, in *types.QueryParamsRequest, opts ...grpc.CallOption)) *BankQueryClient_Params_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]grpc.CallOption, len(args)-2) + for i, a := range args[2:] { + if a != nil { + variadicArgs[i] = a.(grpc.CallOption) + } + } + run(args[0].(context.Context), args[1].(*types.QueryParamsRequest), variadicArgs...) + }) + return _c +} + +func (_c *BankQueryClient_Params_Call) Return(_a0 *types.QueryParamsResponse, _a1 error) *BankQueryClient_Params_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *BankQueryClient_Params_Call) RunAndReturn(run func(context.Context, *types.QueryParamsRequest, ...grpc.CallOption) (*types.QueryParamsResponse, error)) *BankQueryClient_Params_Call { + _c.Call.Return(run) + return _c +} + +// SendEnabled provides a mock function with given fields: ctx, in, opts +func (_m *BankQueryClient) SendEnabled(ctx context.Context, in *types.QuerySendEnabledRequest, opts ...grpc.CallOption) (*types.QuerySendEnabledResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + if len(ret) == 0 { + panic("no return value specified for SendEnabled") + } + + var r0 *types.QuerySendEnabledResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *types.QuerySendEnabledRequest, ...grpc.CallOption) (*types.QuerySendEnabledResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *types.QuerySendEnabledRequest, ...grpc.CallOption) *types.QuerySendEnabledResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*types.QuerySendEnabledResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *types.QuerySendEnabledRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// BankQueryClient_SendEnabled_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SendEnabled' +type BankQueryClient_SendEnabled_Call struct { + *mock.Call +} + +// SendEnabled is a helper method to define mock.On call +// - ctx context.Context +// - in *types.QuerySendEnabledRequest +// - opts ...grpc.CallOption +func (_e *BankQueryClient_Expecter) SendEnabled(ctx interface{}, in interface{}, opts ...interface{}) *BankQueryClient_SendEnabled_Call { + return &BankQueryClient_SendEnabled_Call{Call: _e.mock.On("SendEnabled", + append([]interface{}{ctx, in}, opts...)...)} +} + +func (_c *BankQueryClient_SendEnabled_Call) Run(run func(ctx context.Context, in *types.QuerySendEnabledRequest, opts ...grpc.CallOption)) *BankQueryClient_SendEnabled_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]grpc.CallOption, len(args)-2) + for i, a := range args[2:] { + if a != nil { + variadicArgs[i] = a.(grpc.CallOption) + } + } + run(args[0].(context.Context), args[1].(*types.QuerySendEnabledRequest), variadicArgs...) 
+ }) + return _c +} + +func (_c *BankQueryClient_SendEnabled_Call) Return(_a0 *types.QuerySendEnabledResponse, _a1 error) *BankQueryClient_SendEnabled_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *BankQueryClient_SendEnabled_Call) RunAndReturn(run func(context.Context, *types.QuerySendEnabledRequest, ...grpc.CallOption) (*types.QuerySendEnabledResponse, error)) *BankQueryClient_SendEnabled_Call { + _c.Call.Return(run) + return _c +} + +// SpendableBalanceByDenom provides a mock function with given fields: ctx, in, opts +func (_m *BankQueryClient) SpendableBalanceByDenom(ctx context.Context, in *types.QuerySpendableBalanceByDenomRequest, opts ...grpc.CallOption) (*types.QuerySpendableBalanceByDenomResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for SpendableBalanceByDenom") + } + + var r0 *types.QuerySpendableBalanceByDenomResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *types.QuerySpendableBalanceByDenomRequest, ...grpc.CallOption) (*types.QuerySpendableBalanceByDenomResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *types.QuerySpendableBalanceByDenomRequest, ...grpc.CallOption) *types.QuerySpendableBalanceByDenomResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*types.QuerySpendableBalanceByDenomResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *types.QuerySpendableBalanceByDenomRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// BankQueryClient_SpendableBalanceByDenom_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SpendableBalanceByDenom' +type BankQueryClient_SpendableBalanceByDenom_Call struct { + *mock.Call +} + +// SpendableBalanceByDenom is a helper method to define mock.On call +// - ctx context.Context +// - in *types.QuerySpendableBalanceByDenomRequest +// - opts ...grpc.CallOption +func (_e *BankQueryClient_Expecter) SpendableBalanceByDenom(ctx interface{}, in interface{}, opts ...interface{}) *BankQueryClient_SpendableBalanceByDenom_Call { + return &BankQueryClient_SpendableBalanceByDenom_Call{Call: _e.mock.On("SpendableBalanceByDenom", + append([]interface{}{ctx, in}, opts...)...)} +} + +func (_c *BankQueryClient_SpendableBalanceByDenom_Call) Run(run func(ctx context.Context, in *types.QuerySpendableBalanceByDenomRequest, opts ...grpc.CallOption)) *BankQueryClient_SpendableBalanceByDenom_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]grpc.CallOption, len(args)-2) + for i, a := range args[2:] { + if a != nil { + variadicArgs[i] = a.(grpc.CallOption) + } + } + run(args[0].(context.Context), args[1].(*types.QuerySpendableBalanceByDenomRequest), variadicArgs...) 
+ }) + return _c +} + +func (_c *BankQueryClient_SpendableBalanceByDenom_Call) Return(_a0 *types.QuerySpendableBalanceByDenomResponse, _a1 error) *BankQueryClient_SpendableBalanceByDenom_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *BankQueryClient_SpendableBalanceByDenom_Call) RunAndReturn(run func(context.Context, *types.QuerySpendableBalanceByDenomRequest, ...grpc.CallOption) (*types.QuerySpendableBalanceByDenomResponse, error)) *BankQueryClient_SpendableBalanceByDenom_Call { + _c.Call.Return(run) + return _c +} + +// SpendableBalances provides a mock function with given fields: ctx, in, opts +func (_m *BankQueryClient) SpendableBalances(ctx context.Context, in *types.QuerySpendableBalancesRequest, opts ...grpc.CallOption) (*types.QuerySpendableBalancesResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for SpendableBalances") + } + + var r0 *types.QuerySpendableBalancesResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *types.QuerySpendableBalancesRequest, ...grpc.CallOption) (*types.QuerySpendableBalancesResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *types.QuerySpendableBalancesRequest, ...grpc.CallOption) *types.QuerySpendableBalancesResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*types.QuerySpendableBalancesResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *types.QuerySpendableBalancesRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// BankQueryClient_SpendableBalances_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SpendableBalances' +type BankQueryClient_SpendableBalances_Call struct { + *mock.Call +} + +// SpendableBalances is a helper method to define mock.On call +// - ctx context.Context +// - in *types.QuerySpendableBalancesRequest +// - opts ...grpc.CallOption +func (_e *BankQueryClient_Expecter) SpendableBalances(ctx interface{}, in interface{}, opts ...interface{}) *BankQueryClient_SpendableBalances_Call { + return &BankQueryClient_SpendableBalances_Call{Call: _e.mock.On("SpendableBalances", + append([]interface{}{ctx, in}, opts...)...)} +} + +func (_c *BankQueryClient_SpendableBalances_Call) Run(run func(ctx context.Context, in *types.QuerySpendableBalancesRequest, opts ...grpc.CallOption)) *BankQueryClient_SpendableBalances_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]grpc.CallOption, len(args)-2) + for i, a := range args[2:] { + if a != nil { + variadicArgs[i] = a.(grpc.CallOption) + } + } + run(args[0].(context.Context), args[1].(*types.QuerySpendableBalancesRequest), variadicArgs...) 
+ }) + return _c +} + +func (_c *BankQueryClient_SpendableBalances_Call) Return(_a0 *types.QuerySpendableBalancesResponse, _a1 error) *BankQueryClient_SpendableBalances_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *BankQueryClient_SpendableBalances_Call) RunAndReturn(run func(context.Context, *types.QuerySpendableBalancesRequest, ...grpc.CallOption) (*types.QuerySpendableBalancesResponse, error)) *BankQueryClient_SpendableBalances_Call { + _c.Call.Return(run) + return _c +} + +// SupplyOf provides a mock function with given fields: ctx, in, opts +func (_m *BankQueryClient) SupplyOf(ctx context.Context, in *types.QuerySupplyOfRequest, opts ...grpc.CallOption) (*types.QuerySupplyOfResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for SupplyOf") + } + + var r0 *types.QuerySupplyOfResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *types.QuerySupplyOfRequest, ...grpc.CallOption) (*types.QuerySupplyOfResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *types.QuerySupplyOfRequest, ...grpc.CallOption) *types.QuerySupplyOfResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*types.QuerySupplyOfResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *types.QuerySupplyOfRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// BankQueryClient_SupplyOf_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SupplyOf' +type BankQueryClient_SupplyOf_Call struct { + *mock.Call +} + +// SupplyOf is a helper method to define mock.On call +// - ctx context.Context +// - in *types.QuerySupplyOfRequest +// - opts ...grpc.CallOption +func (_e *BankQueryClient_Expecter) SupplyOf(ctx interface{}, in interface{}, opts ...interface{}) *BankQueryClient_SupplyOf_Call { + return &BankQueryClient_SupplyOf_Call{Call: _e.mock.On("SupplyOf", + append([]interface{}{ctx, in}, opts...)...)} +} + +func (_c *BankQueryClient_SupplyOf_Call) Run(run func(ctx context.Context, in *types.QuerySupplyOfRequest, opts ...grpc.CallOption)) *BankQueryClient_SupplyOf_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]grpc.CallOption, len(args)-2) + for i, a := range args[2:] { + if a != nil { + variadicArgs[i] = a.(grpc.CallOption) + } + } + run(args[0].(context.Context), args[1].(*types.QuerySupplyOfRequest), variadicArgs...) + }) + return _c +} + +func (_c *BankQueryClient_SupplyOf_Call) Return(_a0 *types.QuerySupplyOfResponse, _a1 error) *BankQueryClient_SupplyOf_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *BankQueryClient_SupplyOf_Call) RunAndReturn(run func(context.Context, *types.QuerySupplyOfRequest, ...grpc.CallOption) (*types.QuerySupplyOfResponse, error)) *BankQueryClient_SupplyOf_Call { + _c.Call.Return(run) + return _c +} + +// TotalSupply provides a mock function with given fields: ctx, in, opts +func (_m *BankQueryClient) TotalSupply(ctx context.Context, in *types.QueryTotalSupplyRequest, opts ...grpc.CallOption) (*types.QueryTotalSupplyResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + if len(ret) == 0 { + panic("no return value specified for TotalSupply") + } + + var r0 *types.QueryTotalSupplyResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryTotalSupplyRequest, ...grpc.CallOption) (*types.QueryTotalSupplyResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *types.QueryTotalSupplyRequest, ...grpc.CallOption) *types.QueryTotalSupplyResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*types.QueryTotalSupplyResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *types.QueryTotalSupplyRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// BankQueryClient_TotalSupply_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'TotalSupply' +type BankQueryClient_TotalSupply_Call struct { + *mock.Call +} + +// TotalSupply is a helper method to define mock.On call +// - ctx context.Context +// - in *types.QueryTotalSupplyRequest +// - opts ...grpc.CallOption +func (_e *BankQueryClient_Expecter) TotalSupply(ctx interface{}, in interface{}, opts ...interface{}) *BankQueryClient_TotalSupply_Call { + return &BankQueryClient_TotalSupply_Call{Call: _e.mock.On("TotalSupply", + append([]interface{}{ctx, in}, opts...)...)} +} + +func (_c *BankQueryClient_TotalSupply_Call) Run(run func(ctx context.Context, in *types.QueryTotalSupplyRequest, opts ...grpc.CallOption)) *BankQueryClient_TotalSupply_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]grpc.CallOption, len(args)-2) + for i, a := range args[2:] { + if a != nil { + variadicArgs[i] = a.(grpc.CallOption) + } + } + run(args[0].(context.Context), args[1].(*types.QueryTotalSupplyRequest), variadicArgs...) 
+ }) + return _c +} + +func (_c *BankQueryClient_TotalSupply_Call) Return(_a0 *types.QueryTotalSupplyResponse, _a1 error) *BankQueryClient_TotalSupply_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *BankQueryClient_TotalSupply_Call) RunAndReturn(run func(context.Context, *types.QueryTotalSupplyRequest, ...grpc.CallOption) (*types.QueryTotalSupplyResponse, error)) *BankQueryClient_TotalSupply_Call { + _c.Call.Return(run) + return _c +} + +// NewBankQueryClient creates a new instance of BankQueryClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewBankQueryClient(t interface { + mock.TestingT + Cleanup(func()) +}) *BankQueryClient { + mock := &BankQueryClient{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/ignite/pkg/cosmosclient/mocks/faucet_client.go b/ignite/pkg/cosmosclient/mocks/faucet_client.go new file mode 100644 index 0000000..41e82d2 --- /dev/null +++ b/ignite/pkg/cosmosclient/mocks/faucet_client.go @@ -0,0 +1,95 @@ +// Code generated by mockery v2.53.3. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + cosmosfaucet "github.com/ignite/cli/v29/ignite/pkg/cosmosfaucet" + + mock "github.com/stretchr/testify/mock" +) + +// FaucetClient is an autogenerated mock type for the FaucetClient type +type FaucetClient struct { + mock.Mock +} + +type FaucetClient_Expecter struct { + mock *mock.Mock +} + +func (_m *FaucetClient) EXPECT() *FaucetClient_Expecter { + return &FaucetClient_Expecter{mock: &_m.Mock} +} + +// Transfer provides a mock function with given fields: _a0, _a1 +func (_m *FaucetClient) Transfer(_a0 context.Context, _a1 cosmosfaucet.TransferRequest) (cosmosfaucet.TransferResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for Transfer") + } + + var r0 cosmosfaucet.TransferResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, cosmosfaucet.TransferRequest) (cosmosfaucet.TransferResponse, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, cosmosfaucet.TransferRequest) cosmosfaucet.TransferResponse); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Get(0).(cosmosfaucet.TransferResponse) + } + + if rf, ok := ret.Get(1).(func(context.Context, cosmosfaucet.TransferRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FaucetClient_Transfer_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Transfer' +type FaucetClient_Transfer_Call struct { + *mock.Call +} + +// Transfer is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 cosmosfaucet.TransferRequest +func (_e *FaucetClient_Expecter) Transfer(_a0 interface{}, _a1 interface{}) *FaucetClient_Transfer_Call { + return &FaucetClient_Transfer_Call{Call: _e.mock.On("Transfer", _a0, _a1)} +} + +func (_c *FaucetClient_Transfer_Call) Run(run func(_a0 context.Context, _a1 cosmosfaucet.TransferRequest)) *FaucetClient_Transfer_Call { + _c.Call.Run(func(args 
mock.Arguments) { + run(args[0].(context.Context), args[1].(cosmosfaucet.TransferRequest)) + }) + return _c +} + +func (_c *FaucetClient_Transfer_Call) Return(_a0 cosmosfaucet.TransferResponse, _a1 error) *FaucetClient_Transfer_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *FaucetClient_Transfer_Call) RunAndReturn(run func(context.Context, cosmosfaucet.TransferRequest) (cosmosfaucet.TransferResponse, error)) *FaucetClient_Transfer_Call { + _c.Call.Return(run) + return _c +} + +// NewFaucetClient creates a new instance of FaucetClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewFaucetClient(t interface { + mock.TestingT + Cleanup(func()) +}) *FaucetClient { + mock := &FaucetClient{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/ignite/pkg/cosmosclient/mocks/gasometer.go b/ignite/pkg/cosmosclient/mocks/gasometer.go new file mode 100644 index 0000000..ff56e03 --- /dev/null +++ b/ignite/pkg/cosmosclient/mocks/gasometer.go @@ -0,0 +1,118 @@ +// Code generated by mockery v2.36.1. DO NOT EDIT. 
+ +package mocks + +import ( + grpc "github.com/cosmos/gogoproto/grpc" + mock "github.com/stretchr/testify/mock" + + proto "github.com/cosmos/gogoproto/proto" + + tx "github.com/cosmos/cosmos-sdk/client/tx" + + typestx "github.com/cosmos/cosmos-sdk/types/tx" +) + +// Gasometer is an autogenerated mock type for the Gasometer type +type Gasometer struct { + mock.Mock +} + +type Gasometer_Expecter struct { + mock *mock.Mock +} + +func (_m *Gasometer) EXPECT() *Gasometer_Expecter { + return &Gasometer_Expecter{mock: &_m.Mock} +} + +// CalculateGas provides a mock function with given fields: clientCtx, txf, msgs +func (_m *Gasometer) CalculateGas(clientCtx grpc.ClientConn, txf tx.Factory, msgs ...proto.Message) (*typestx.SimulateResponse, uint64, error) { + _va := make([]interface{}, len(msgs)) + for _i := range msgs { + _va[_i] = msgs[_i] + } + var _ca []interface{} + _ca = append(_ca, clientCtx, txf) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *typestx.SimulateResponse + var r1 uint64 + var r2 error + if rf, ok := ret.Get(0).(func(grpc.ClientConn, tx.Factory, ...proto.Message) (*typestx.SimulateResponse, uint64, error)); ok { + return rf(clientCtx, txf, msgs...) + } + if rf, ok := ret.Get(0).(func(grpc.ClientConn, tx.Factory, ...proto.Message) *typestx.SimulateResponse); ok { + r0 = rf(clientCtx, txf, msgs...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*typestx.SimulateResponse) + } + } + + if rf, ok := ret.Get(1).(func(grpc.ClientConn, tx.Factory, ...proto.Message) uint64); ok { + r1 = rf(clientCtx, txf, msgs...) + } else { + r1 = ret.Get(1).(uint64) + } + + if rf, ok := ret.Get(2).(func(grpc.ClientConn, tx.Factory, ...proto.Message) error); ok { + r2 = rf(clientCtx, txf, msgs...) 
+ } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// Gasometer_CalculateGas_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CalculateGas' +type Gasometer_CalculateGas_Call struct { + *mock.Call +} + +// CalculateGas is a helper method to define mock.On call +// - clientCtx grpc.ClientConn +// - txf tx.Factory +// - msgs ...proto.Message +func (_e *Gasometer_Expecter) CalculateGas(clientCtx interface{}, txf interface{}, msgs ...interface{}) *Gasometer_CalculateGas_Call { + return &Gasometer_CalculateGas_Call{Call: _e.mock.On("CalculateGas", + append([]interface{}{clientCtx, txf}, msgs...)...)} +} + +func (_c *Gasometer_CalculateGas_Call) Run(run func(clientCtx grpc.ClientConn, txf tx.Factory, msgs ...proto.Message)) *Gasometer_CalculateGas_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]proto.Message, len(args)-2) + for i, a := range args[2:] { + if a != nil { + variadicArgs[i] = a.(proto.Message) + } + } + run(args[0].(grpc.ClientConn), args[1].(tx.Factory), variadicArgs...) + }) + return _c +} + +func (_c *Gasometer_CalculateGas_Call) Return(_a0 *typestx.SimulateResponse, _a1 uint64, _a2 error) *Gasometer_CalculateGas_Call { + _c.Call.Return(_a0, _a1, _a2) + return _c +} + +func (_c *Gasometer_CalculateGas_Call) RunAndReturn(run func(grpc.ClientConn, tx.Factory, ...proto.Message) (*typestx.SimulateResponse, uint64, error)) *Gasometer_CalculateGas_Call { + _c.Call.Return(run) + return _c +} + +// NewGasometer creates a new instance of Gasometer. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewGasometer(t interface { + mock.TestingT + Cleanup(func()) +}) *Gasometer { + mock := &Gasometer{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/ignite/pkg/cosmosclient/mocks/rpc_client.go b/ignite/pkg/cosmosclient/mocks/rpc_client.go new file mode 100644 index 0000000..43e453c --- /dev/null +++ b/ignite/pkg/cosmosclient/mocks/rpc_client.go @@ -0,0 +1,2359 @@ +// Code generated by mockery v2.53.3. DO NOT EDIT. + +package mocks + +import ( + bytes "github.com/cometbft/cometbft/libs/bytes" + client "github.com/cometbft/cometbft/rpc/client" + + context "context" + + coretypes "github.com/cometbft/cometbft/rpc/core/types" + + log "github.com/cometbft/cometbft/libs/log" + + mock "github.com/stretchr/testify/mock" + + types "github.com/cometbft/cometbft/types" +) + +// RPCClient is an autogenerated mock type for the Client type +type RPCClient struct { + mock.Mock +} + +type RPCClient_Expecter struct { + mock *mock.Mock +} + +func (_m *RPCClient) EXPECT() *RPCClient_Expecter { + return &RPCClient_Expecter{mock: &_m.Mock} +} + +// ABCIInfo provides a mock function with given fields: _a0 +func (_m *RPCClient) ABCIInfo(_a0 context.Context) (*coretypes.ResultABCIInfo, error) { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for ABCIInfo") + } + + var r0 *coretypes.ResultABCIInfo + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (*coretypes.ResultABCIInfo, error)); ok { + return rf(_a0) + } + if rf, ok := ret.Get(0).(func(context.Context) *coretypes.ResultABCIInfo); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultABCIInfo) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_ABCIInfo_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ABCIInfo' +type RPCClient_ABCIInfo_Call 
struct { + *mock.Call +} + +// ABCIInfo is a helper method to define mock.On call +// - _a0 context.Context +func (_e *RPCClient_Expecter) ABCIInfo(_a0 interface{}) *RPCClient_ABCIInfo_Call { + return &RPCClient_ABCIInfo_Call{Call: _e.mock.On("ABCIInfo", _a0)} +} + +func (_c *RPCClient_ABCIInfo_Call) Run(run func(_a0 context.Context)) *RPCClient_ABCIInfo_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *RPCClient_ABCIInfo_Call) Return(_a0 *coretypes.ResultABCIInfo, _a1 error) *RPCClient_ABCIInfo_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_ABCIInfo_Call) RunAndReturn(run func(context.Context) (*coretypes.ResultABCIInfo, error)) *RPCClient_ABCIInfo_Call { + _c.Call.Return(run) + return _c +} + +// ABCIQuery provides a mock function with given fields: ctx, path, data +func (_m *RPCClient) ABCIQuery(ctx context.Context, path string, data bytes.HexBytes) (*coretypes.ResultABCIQuery, error) { + ret := _m.Called(ctx, path, data) + + if len(ret) == 0 { + panic("no return value specified for ABCIQuery") + } + + var r0 *coretypes.ResultABCIQuery + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, bytes.HexBytes) (*coretypes.ResultABCIQuery, error)); ok { + return rf(ctx, path, data) + } + if rf, ok := ret.Get(0).(func(context.Context, string, bytes.HexBytes) *coretypes.ResultABCIQuery); ok { + r0 = rf(ctx, path, data) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultABCIQuery) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, string, bytes.HexBytes) error); ok { + r1 = rf(ctx, path, data) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_ABCIQuery_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ABCIQuery' +type RPCClient_ABCIQuery_Call struct { + *mock.Call +} + +// ABCIQuery is a helper method to define mock.On call +// - ctx context.Context +// - path string +// - 
data bytes.HexBytes +func (_e *RPCClient_Expecter) ABCIQuery(ctx interface{}, path interface{}, data interface{}) *RPCClient_ABCIQuery_Call { + return &RPCClient_ABCIQuery_Call{Call: _e.mock.On("ABCIQuery", ctx, path, data)} +} + +func (_c *RPCClient_ABCIQuery_Call) Run(run func(ctx context.Context, path string, data bytes.HexBytes)) *RPCClient_ABCIQuery_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(bytes.HexBytes)) + }) + return _c +} + +func (_c *RPCClient_ABCIQuery_Call) Return(_a0 *coretypes.ResultABCIQuery, _a1 error) *RPCClient_ABCIQuery_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_ABCIQuery_Call) RunAndReturn(run func(context.Context, string, bytes.HexBytes) (*coretypes.ResultABCIQuery, error)) *RPCClient_ABCIQuery_Call { + _c.Call.Return(run) + return _c +} + +// ABCIQueryWithOptions provides a mock function with given fields: ctx, path, data, opts +func (_m *RPCClient) ABCIQueryWithOptions(ctx context.Context, path string, data bytes.HexBytes, opts client.ABCIQueryOptions) (*coretypes.ResultABCIQuery, error) { + ret := _m.Called(ctx, path, data, opts) + + if len(ret) == 0 { + panic("no return value specified for ABCIQueryWithOptions") + } + + var r0 *coretypes.ResultABCIQuery + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, bytes.HexBytes, client.ABCIQueryOptions) (*coretypes.ResultABCIQuery, error)); ok { + return rf(ctx, path, data, opts) + } + if rf, ok := ret.Get(0).(func(context.Context, string, bytes.HexBytes, client.ABCIQueryOptions) *coretypes.ResultABCIQuery); ok { + r0 = rf(ctx, path, data, opts) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultABCIQuery) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, string, bytes.HexBytes, client.ABCIQueryOptions) error); ok { + r1 = rf(ctx, path, data, opts) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_ABCIQueryWithOptions_Call is a 
*mock.Call that shadows Run/Return methods with type explicit version for method 'ABCIQueryWithOptions' +type RPCClient_ABCIQueryWithOptions_Call struct { + *mock.Call +} + +// ABCIQueryWithOptions is a helper method to define mock.On call +// - ctx context.Context +// - path string +// - data bytes.HexBytes +// - opts client.ABCIQueryOptions +func (_e *RPCClient_Expecter) ABCIQueryWithOptions(ctx interface{}, path interface{}, data interface{}, opts interface{}) *RPCClient_ABCIQueryWithOptions_Call { + return &RPCClient_ABCIQueryWithOptions_Call{Call: _e.mock.On("ABCIQueryWithOptions", ctx, path, data, opts)} +} + +func (_c *RPCClient_ABCIQueryWithOptions_Call) Run(run func(ctx context.Context, path string, data bytes.HexBytes, opts client.ABCIQueryOptions)) *RPCClient_ABCIQueryWithOptions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(bytes.HexBytes), args[3].(client.ABCIQueryOptions)) + }) + return _c +} + +func (_c *RPCClient_ABCIQueryWithOptions_Call) Return(_a0 *coretypes.ResultABCIQuery, _a1 error) *RPCClient_ABCIQueryWithOptions_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_ABCIQueryWithOptions_Call) RunAndReturn(run func(context.Context, string, bytes.HexBytes, client.ABCIQueryOptions) (*coretypes.ResultABCIQuery, error)) *RPCClient_ABCIQueryWithOptions_Call { + _c.Call.Return(run) + return _c +} + +// Block provides a mock function with given fields: ctx, height +func (_m *RPCClient) Block(ctx context.Context, height *int64) (*coretypes.ResultBlock, error) { + ret := _m.Called(ctx, height) + + if len(ret) == 0 { + panic("no return value specified for Block") + } + + var r0 *coretypes.ResultBlock + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *int64) (*coretypes.ResultBlock, error)); ok { + return rf(ctx, height) + } + if rf, ok := ret.Get(0).(func(context.Context, *int64) *coretypes.ResultBlock); ok { + r0 = rf(ctx, height) + } else { + if ret.Get(0) 
!= nil { + r0 = ret.Get(0).(*coretypes.ResultBlock) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *int64) error); ok { + r1 = rf(ctx, height) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_Block_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Block' +type RPCClient_Block_Call struct { + *mock.Call +} + +// Block is a helper method to define mock.On call +// - ctx context.Context +// - height *int64 +func (_e *RPCClient_Expecter) Block(ctx interface{}, height interface{}) *RPCClient_Block_Call { + return &RPCClient_Block_Call{Call: _e.mock.On("Block", ctx, height)} +} + +func (_c *RPCClient_Block_Call) Run(run func(ctx context.Context, height *int64)) *RPCClient_Block_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*int64)) + }) + return _c +} + +func (_c *RPCClient_Block_Call) Return(_a0 *coretypes.ResultBlock, _a1 error) *RPCClient_Block_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_Block_Call) RunAndReturn(run func(context.Context, *int64) (*coretypes.ResultBlock, error)) *RPCClient_Block_Call { + _c.Call.Return(run) + return _c +} + +// BlockByHash provides a mock function with given fields: ctx, hash +func (_m *RPCClient) BlockByHash(ctx context.Context, hash []byte) (*coretypes.ResultBlock, error) { + ret := _m.Called(ctx, hash) + + if len(ret) == 0 { + panic("no return value specified for BlockByHash") + } + + var r0 *coretypes.ResultBlock + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, []byte) (*coretypes.ResultBlock, error)); ok { + return rf(ctx, hash) + } + if rf, ok := ret.Get(0).(func(context.Context, []byte) *coretypes.ResultBlock); ok { + r0 = rf(ctx, hash) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultBlock) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, []byte) error); ok { + r1 = rf(ctx, hash) + } else { + r1 = ret.Error(1) + } + + return r0, r1 
+} + +// RPCClient_BlockByHash_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BlockByHash' +type RPCClient_BlockByHash_Call struct { + *mock.Call +} + +// BlockByHash is a helper method to define mock.On call +// - ctx context.Context +// - hash []byte +func (_e *RPCClient_Expecter) BlockByHash(ctx interface{}, hash interface{}) *RPCClient_BlockByHash_Call { + return &RPCClient_BlockByHash_Call{Call: _e.mock.On("BlockByHash", ctx, hash)} +} + +func (_c *RPCClient_BlockByHash_Call) Run(run func(ctx context.Context, hash []byte)) *RPCClient_BlockByHash_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].([]byte)) + }) + return _c +} + +func (_c *RPCClient_BlockByHash_Call) Return(_a0 *coretypes.ResultBlock, _a1 error) *RPCClient_BlockByHash_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_BlockByHash_Call) RunAndReturn(run func(context.Context, []byte) (*coretypes.ResultBlock, error)) *RPCClient_BlockByHash_Call { + _c.Call.Return(run) + return _c +} + +// BlockResults provides a mock function with given fields: ctx, height +func (_m *RPCClient) BlockResults(ctx context.Context, height *int64) (*coretypes.ResultBlockResults, error) { + ret := _m.Called(ctx, height) + + if len(ret) == 0 { + panic("no return value specified for BlockResults") + } + + var r0 *coretypes.ResultBlockResults + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *int64) (*coretypes.ResultBlockResults, error)); ok { + return rf(ctx, height) + } + if rf, ok := ret.Get(0).(func(context.Context, *int64) *coretypes.ResultBlockResults); ok { + r0 = rf(ctx, height) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultBlockResults) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *int64) error); ok { + r1 = rf(ctx, height) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_BlockResults_Call is a *mock.Call that shadows Run/Return 
methods with type explicit version for method 'BlockResults' +type RPCClient_BlockResults_Call struct { + *mock.Call +} + +// BlockResults is a helper method to define mock.On call +// - ctx context.Context +// - height *int64 +func (_e *RPCClient_Expecter) BlockResults(ctx interface{}, height interface{}) *RPCClient_BlockResults_Call { + return &RPCClient_BlockResults_Call{Call: _e.mock.On("BlockResults", ctx, height)} +} + +func (_c *RPCClient_BlockResults_Call) Run(run func(ctx context.Context, height *int64)) *RPCClient_BlockResults_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*int64)) + }) + return _c +} + +func (_c *RPCClient_BlockResults_Call) Return(_a0 *coretypes.ResultBlockResults, _a1 error) *RPCClient_BlockResults_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_BlockResults_Call) RunAndReturn(run func(context.Context, *int64) (*coretypes.ResultBlockResults, error)) *RPCClient_BlockResults_Call { + _c.Call.Return(run) + return _c +} + +// BlockSearch provides a mock function with given fields: ctx, query, page, perPage, orderBy +func (_m *RPCClient) BlockSearch(ctx context.Context, query string, page *int, perPage *int, orderBy string) (*coretypes.ResultBlockSearch, error) { + ret := _m.Called(ctx, query, page, perPage, orderBy) + + if len(ret) == 0 { + panic("no return value specified for BlockSearch") + } + + var r0 *coretypes.ResultBlockSearch + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, *int, *int, string) (*coretypes.ResultBlockSearch, error)); ok { + return rf(ctx, query, page, perPage, orderBy) + } + if rf, ok := ret.Get(0).(func(context.Context, string, *int, *int, string) *coretypes.ResultBlockSearch); ok { + r0 = rf(ctx, query, page, perPage, orderBy) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultBlockSearch) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, string, *int, *int, string) error); ok { + r1 = rf(ctx, 
query, page, perPage, orderBy) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_BlockSearch_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BlockSearch' +type RPCClient_BlockSearch_Call struct { + *mock.Call +} + +// BlockSearch is a helper method to define mock.On call +// - ctx context.Context +// - query string +// - page *int +// - perPage *int +// - orderBy string +func (_e *RPCClient_Expecter) BlockSearch(ctx interface{}, query interface{}, page interface{}, perPage interface{}, orderBy interface{}) *RPCClient_BlockSearch_Call { + return &RPCClient_BlockSearch_Call{Call: _e.mock.On("BlockSearch", ctx, query, page, perPage, orderBy)} +} + +func (_c *RPCClient_BlockSearch_Call) Run(run func(ctx context.Context, query string, page *int, perPage *int, orderBy string)) *RPCClient_BlockSearch_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(*int), args[3].(*int), args[4].(string)) + }) + return _c +} + +func (_c *RPCClient_BlockSearch_Call) Return(_a0 *coretypes.ResultBlockSearch, _a1 error) *RPCClient_BlockSearch_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_BlockSearch_Call) RunAndReturn(run func(context.Context, string, *int, *int, string) (*coretypes.ResultBlockSearch, error)) *RPCClient_BlockSearch_Call { + _c.Call.Return(run) + return _c +} + +// BlockchainInfo provides a mock function with given fields: ctx, minHeight, maxHeight +func (_m *RPCClient) BlockchainInfo(ctx context.Context, minHeight int64, maxHeight int64) (*coretypes.ResultBlockchainInfo, error) { + ret := _m.Called(ctx, minHeight, maxHeight) + + if len(ret) == 0 { + panic("no return value specified for BlockchainInfo") + } + + var r0 *coretypes.ResultBlockchainInfo + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, int64, int64) (*coretypes.ResultBlockchainInfo, error)); ok { + return rf(ctx, minHeight, maxHeight) + } + if rf, ok 
:= ret.Get(0).(func(context.Context, int64, int64) *coretypes.ResultBlockchainInfo); ok { + r0 = rf(ctx, minHeight, maxHeight) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultBlockchainInfo) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, int64, int64) error); ok { + r1 = rf(ctx, minHeight, maxHeight) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_BlockchainInfo_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BlockchainInfo' +type RPCClient_BlockchainInfo_Call struct { + *mock.Call +} + +// BlockchainInfo is a helper method to define mock.On call +// - ctx context.Context +// - minHeight int64 +// - maxHeight int64 +func (_e *RPCClient_Expecter) BlockchainInfo(ctx interface{}, minHeight interface{}, maxHeight interface{}) *RPCClient_BlockchainInfo_Call { + return &RPCClient_BlockchainInfo_Call{Call: _e.mock.On("BlockchainInfo", ctx, minHeight, maxHeight)} +} + +func (_c *RPCClient_BlockchainInfo_Call) Run(run func(ctx context.Context, minHeight int64, maxHeight int64)) *RPCClient_BlockchainInfo_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64), args[2].(int64)) + }) + return _c +} + +func (_c *RPCClient_BlockchainInfo_Call) Return(_a0 *coretypes.ResultBlockchainInfo, _a1 error) *RPCClient_BlockchainInfo_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_BlockchainInfo_Call) RunAndReturn(run func(context.Context, int64, int64) (*coretypes.ResultBlockchainInfo, error)) *RPCClient_BlockchainInfo_Call { + _c.Call.Return(run) + return _c +} + +// BroadcastEvidence provides a mock function with given fields: _a0, _a1 +func (_m *RPCClient) BroadcastEvidence(_a0 context.Context, _a1 types.Evidence) (*coretypes.ResultBroadcastEvidence, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for BroadcastEvidence") + } + + var r0 *coretypes.ResultBroadcastEvidence + 
var r1 error + if rf, ok := ret.Get(0).(func(context.Context, types.Evidence) (*coretypes.ResultBroadcastEvidence, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, types.Evidence) *coretypes.ResultBroadcastEvidence); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultBroadcastEvidence) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, types.Evidence) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_BroadcastEvidence_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BroadcastEvidence' +type RPCClient_BroadcastEvidence_Call struct { + *mock.Call +} + +// BroadcastEvidence is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 types.Evidence +func (_e *RPCClient_Expecter) BroadcastEvidence(_a0 interface{}, _a1 interface{}) *RPCClient_BroadcastEvidence_Call { + return &RPCClient_BroadcastEvidence_Call{Call: _e.mock.On("BroadcastEvidence", _a0, _a1)} +} + +func (_c *RPCClient_BroadcastEvidence_Call) Run(run func(_a0 context.Context, _a1 types.Evidence)) *RPCClient_BroadcastEvidence_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(types.Evidence)) + }) + return _c +} + +func (_c *RPCClient_BroadcastEvidence_Call) Return(_a0 *coretypes.ResultBroadcastEvidence, _a1 error) *RPCClient_BroadcastEvidence_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_BroadcastEvidence_Call) RunAndReturn(run func(context.Context, types.Evidence) (*coretypes.ResultBroadcastEvidence, error)) *RPCClient_BroadcastEvidence_Call { + _c.Call.Return(run) + return _c +} + +// BroadcastTxAsync provides a mock function with given fields: _a0, _a1 +func (_m *RPCClient) BroadcastTxAsync(_a0 context.Context, _a1 types.Tx) (*coretypes.ResultBroadcastTx, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return 
value specified for BroadcastTxAsync") + } + + var r0 *coretypes.ResultBroadcastTx + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, types.Tx) (*coretypes.ResultBroadcastTx, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, types.Tx) *coretypes.ResultBroadcastTx); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultBroadcastTx) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, types.Tx) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_BroadcastTxAsync_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BroadcastTxAsync' +type RPCClient_BroadcastTxAsync_Call struct { + *mock.Call +} + +// BroadcastTxAsync is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 types.Tx +func (_e *RPCClient_Expecter) BroadcastTxAsync(_a0 interface{}, _a1 interface{}) *RPCClient_BroadcastTxAsync_Call { + return &RPCClient_BroadcastTxAsync_Call{Call: _e.mock.On("BroadcastTxAsync", _a0, _a1)} +} + +func (_c *RPCClient_BroadcastTxAsync_Call) Run(run func(_a0 context.Context, _a1 types.Tx)) *RPCClient_BroadcastTxAsync_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(types.Tx)) + }) + return _c +} + +func (_c *RPCClient_BroadcastTxAsync_Call) Return(_a0 *coretypes.ResultBroadcastTx, _a1 error) *RPCClient_BroadcastTxAsync_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_BroadcastTxAsync_Call) RunAndReturn(run func(context.Context, types.Tx) (*coretypes.ResultBroadcastTx, error)) *RPCClient_BroadcastTxAsync_Call { + _c.Call.Return(run) + return _c +} + +// BroadcastTxCommit provides a mock function with given fields: _a0, _a1 +func (_m *RPCClient) BroadcastTxCommit(_a0 context.Context, _a1 types.Tx) (*coretypes.ResultBroadcastTxCommit, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no 
return value specified for BroadcastTxCommit") + } + + var r0 *coretypes.ResultBroadcastTxCommit + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, types.Tx) (*coretypes.ResultBroadcastTxCommit, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, types.Tx) *coretypes.ResultBroadcastTxCommit); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultBroadcastTxCommit) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, types.Tx) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_BroadcastTxCommit_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BroadcastTxCommit' +type RPCClient_BroadcastTxCommit_Call struct { + *mock.Call +} + +// BroadcastTxCommit is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 types.Tx +func (_e *RPCClient_Expecter) BroadcastTxCommit(_a0 interface{}, _a1 interface{}) *RPCClient_BroadcastTxCommit_Call { + return &RPCClient_BroadcastTxCommit_Call{Call: _e.mock.On("BroadcastTxCommit", _a0, _a1)} +} + +func (_c *RPCClient_BroadcastTxCommit_Call) Run(run func(_a0 context.Context, _a1 types.Tx)) *RPCClient_BroadcastTxCommit_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(types.Tx)) + }) + return _c +} + +func (_c *RPCClient_BroadcastTxCommit_Call) Return(_a0 *coretypes.ResultBroadcastTxCommit, _a1 error) *RPCClient_BroadcastTxCommit_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_BroadcastTxCommit_Call) RunAndReturn(run func(context.Context, types.Tx) (*coretypes.ResultBroadcastTxCommit, error)) *RPCClient_BroadcastTxCommit_Call { + _c.Call.Return(run) + return _c +} + +// BroadcastTxSync provides a mock function with given fields: _a0, _a1 +func (_m *RPCClient) BroadcastTxSync(_a0 context.Context, _a1 types.Tx) (*coretypes.ResultBroadcastTx, error) { + ret := 
_m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for BroadcastTxSync") + } + + var r0 *coretypes.ResultBroadcastTx + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, types.Tx) (*coretypes.ResultBroadcastTx, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, types.Tx) *coretypes.ResultBroadcastTx); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultBroadcastTx) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, types.Tx) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_BroadcastTxSync_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BroadcastTxSync' +type RPCClient_BroadcastTxSync_Call struct { + *mock.Call +} + +// BroadcastTxSync is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 types.Tx +func (_e *RPCClient_Expecter) BroadcastTxSync(_a0 interface{}, _a1 interface{}) *RPCClient_BroadcastTxSync_Call { + return &RPCClient_BroadcastTxSync_Call{Call: _e.mock.On("BroadcastTxSync", _a0, _a1)} +} + +func (_c *RPCClient_BroadcastTxSync_Call) Run(run func(_a0 context.Context, _a1 types.Tx)) *RPCClient_BroadcastTxSync_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(types.Tx)) + }) + return _c +} + +func (_c *RPCClient_BroadcastTxSync_Call) Return(_a0 *coretypes.ResultBroadcastTx, _a1 error) *RPCClient_BroadcastTxSync_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_BroadcastTxSync_Call) RunAndReturn(run func(context.Context, types.Tx) (*coretypes.ResultBroadcastTx, error)) *RPCClient_BroadcastTxSync_Call { + _c.Call.Return(run) + return _c +} + +// CheckTx provides a mock function with given fields: _a0, _a1 +func (_m *RPCClient) CheckTx(_a0 context.Context, _a1 types.Tx) (*coretypes.ResultCheckTx, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) 
== 0 { + panic("no return value specified for CheckTx") + } + + var r0 *coretypes.ResultCheckTx + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, types.Tx) (*coretypes.ResultCheckTx, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, types.Tx) *coretypes.ResultCheckTx); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultCheckTx) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, types.Tx) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_CheckTx_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CheckTx' +type RPCClient_CheckTx_Call struct { + *mock.Call +} + +// CheckTx is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 types.Tx +func (_e *RPCClient_Expecter) CheckTx(_a0 interface{}, _a1 interface{}) *RPCClient_CheckTx_Call { + return &RPCClient_CheckTx_Call{Call: _e.mock.On("CheckTx", _a0, _a1)} +} + +func (_c *RPCClient_CheckTx_Call) Run(run func(_a0 context.Context, _a1 types.Tx)) *RPCClient_CheckTx_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(types.Tx)) + }) + return _c +} + +func (_c *RPCClient_CheckTx_Call) Return(_a0 *coretypes.ResultCheckTx, _a1 error) *RPCClient_CheckTx_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_CheckTx_Call) RunAndReturn(run func(context.Context, types.Tx) (*coretypes.ResultCheckTx, error)) *RPCClient_CheckTx_Call { + _c.Call.Return(run) + return _c +} + +// Commit provides a mock function with given fields: ctx, height +func (_m *RPCClient) Commit(ctx context.Context, height *int64) (*coretypes.ResultCommit, error) { + ret := _m.Called(ctx, height) + + if len(ret) == 0 { + panic("no return value specified for Commit") + } + + var r0 *coretypes.ResultCommit + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *int64) 
(*coretypes.ResultCommit, error)); ok { + return rf(ctx, height) + } + if rf, ok := ret.Get(0).(func(context.Context, *int64) *coretypes.ResultCommit); ok { + r0 = rf(ctx, height) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultCommit) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *int64) error); ok { + r1 = rf(ctx, height) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_Commit_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Commit' +type RPCClient_Commit_Call struct { + *mock.Call +} + +// Commit is a helper method to define mock.On call +// - ctx context.Context +// - height *int64 +func (_e *RPCClient_Expecter) Commit(ctx interface{}, height interface{}) *RPCClient_Commit_Call { + return &RPCClient_Commit_Call{Call: _e.mock.On("Commit", ctx, height)} +} + +func (_c *RPCClient_Commit_Call) Run(run func(ctx context.Context, height *int64)) *RPCClient_Commit_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*int64)) + }) + return _c +} + +func (_c *RPCClient_Commit_Call) Return(_a0 *coretypes.ResultCommit, _a1 error) *RPCClient_Commit_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_Commit_Call) RunAndReturn(run func(context.Context, *int64) (*coretypes.ResultCommit, error)) *RPCClient_Commit_Call { + _c.Call.Return(run) + return _c +} + +// ConsensusParams provides a mock function with given fields: ctx, height +func (_m *RPCClient) ConsensusParams(ctx context.Context, height *int64) (*coretypes.ResultConsensusParams, error) { + ret := _m.Called(ctx, height) + + if len(ret) == 0 { + panic("no return value specified for ConsensusParams") + } + + var r0 *coretypes.ResultConsensusParams + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *int64) (*coretypes.ResultConsensusParams, error)); ok { + return rf(ctx, height) + } + if rf, ok := ret.Get(0).(func(context.Context, *int64) 
*coretypes.ResultConsensusParams); ok { + r0 = rf(ctx, height) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultConsensusParams) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *int64) error); ok { + r1 = rf(ctx, height) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_ConsensusParams_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ConsensusParams' +type RPCClient_ConsensusParams_Call struct { + *mock.Call +} + +// ConsensusParams is a helper method to define mock.On call +// - ctx context.Context +// - height *int64 +func (_e *RPCClient_Expecter) ConsensusParams(ctx interface{}, height interface{}) *RPCClient_ConsensusParams_Call { + return &RPCClient_ConsensusParams_Call{Call: _e.mock.On("ConsensusParams", ctx, height)} +} + +func (_c *RPCClient_ConsensusParams_Call) Run(run func(ctx context.Context, height *int64)) *RPCClient_ConsensusParams_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*int64)) + }) + return _c +} + +func (_c *RPCClient_ConsensusParams_Call) Return(_a0 *coretypes.ResultConsensusParams, _a1 error) *RPCClient_ConsensusParams_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_ConsensusParams_Call) RunAndReturn(run func(context.Context, *int64) (*coretypes.ResultConsensusParams, error)) *RPCClient_ConsensusParams_Call { + _c.Call.Return(run) + return _c +} + +// ConsensusState provides a mock function with given fields: _a0 +func (_m *RPCClient) ConsensusState(_a0 context.Context) (*coretypes.ResultConsensusState, error) { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for ConsensusState") + } + + var r0 *coretypes.ResultConsensusState + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (*coretypes.ResultConsensusState, error)); ok { + return rf(_a0) + } + if rf, ok := ret.Get(0).(func(context.Context) *coretypes.ResultConsensusState); ok 
{ + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultConsensusState) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_ConsensusState_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ConsensusState' +type RPCClient_ConsensusState_Call struct { + *mock.Call +} + +// ConsensusState is a helper method to define mock.On call +// - _a0 context.Context +func (_e *RPCClient_Expecter) ConsensusState(_a0 interface{}) *RPCClient_ConsensusState_Call { + return &RPCClient_ConsensusState_Call{Call: _e.mock.On("ConsensusState", _a0)} +} + +func (_c *RPCClient_ConsensusState_Call) Run(run func(_a0 context.Context)) *RPCClient_ConsensusState_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *RPCClient_ConsensusState_Call) Return(_a0 *coretypes.ResultConsensusState, _a1 error) *RPCClient_ConsensusState_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_ConsensusState_Call) RunAndReturn(run func(context.Context) (*coretypes.ResultConsensusState, error)) *RPCClient_ConsensusState_Call { + _c.Call.Return(run) + return _c +} + +// DumpConsensusState provides a mock function with given fields: _a0 +func (_m *RPCClient) DumpConsensusState(_a0 context.Context) (*coretypes.ResultDumpConsensusState, error) { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for DumpConsensusState") + } + + var r0 *coretypes.ResultDumpConsensusState + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (*coretypes.ResultDumpConsensusState, error)); ok { + return rf(_a0) + } + if rf, ok := ret.Get(0).(func(context.Context) *coretypes.ResultDumpConsensusState); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultDumpConsensusState) + } + } + + if rf, ok := 
ret.Get(1).(func(context.Context) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_DumpConsensusState_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DumpConsensusState' +type RPCClient_DumpConsensusState_Call struct { + *mock.Call +} + +// DumpConsensusState is a helper method to define mock.On call +// - _a0 context.Context +func (_e *RPCClient_Expecter) DumpConsensusState(_a0 interface{}) *RPCClient_DumpConsensusState_Call { + return &RPCClient_DumpConsensusState_Call{Call: _e.mock.On("DumpConsensusState", _a0)} +} + +func (_c *RPCClient_DumpConsensusState_Call) Run(run func(_a0 context.Context)) *RPCClient_DumpConsensusState_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *RPCClient_DumpConsensusState_Call) Return(_a0 *coretypes.ResultDumpConsensusState, _a1 error) *RPCClient_DumpConsensusState_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_DumpConsensusState_Call) RunAndReturn(run func(context.Context) (*coretypes.ResultDumpConsensusState, error)) *RPCClient_DumpConsensusState_Call { + _c.Call.Return(run) + return _c +} + +// Genesis provides a mock function with given fields: _a0 +func (_m *RPCClient) Genesis(_a0 context.Context) (*coretypes.ResultGenesis, error) { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for Genesis") + } + + var r0 *coretypes.ResultGenesis + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (*coretypes.ResultGenesis, error)); ok { + return rf(_a0) + } + if rf, ok := ret.Get(0).(func(context.Context) *coretypes.ResultGenesis); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultGenesis) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_Genesis_Call is a 
*mock.Call that shadows Run/Return methods with type explicit version for method 'Genesis' +type RPCClient_Genesis_Call struct { + *mock.Call +} + +// Genesis is a helper method to define mock.On call +// - _a0 context.Context +func (_e *RPCClient_Expecter) Genesis(_a0 interface{}) *RPCClient_Genesis_Call { + return &RPCClient_Genesis_Call{Call: _e.mock.On("Genesis", _a0)} +} + +func (_c *RPCClient_Genesis_Call) Run(run func(_a0 context.Context)) *RPCClient_Genesis_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *RPCClient_Genesis_Call) Return(_a0 *coretypes.ResultGenesis, _a1 error) *RPCClient_Genesis_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_Genesis_Call) RunAndReturn(run func(context.Context) (*coretypes.ResultGenesis, error)) *RPCClient_Genesis_Call { + _c.Call.Return(run) + return _c +} + +// GenesisChunked provides a mock function with given fields: _a0, _a1 +func (_m *RPCClient) GenesisChunked(_a0 context.Context, _a1 uint) (*coretypes.ResultGenesisChunk, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for GenesisChunked") + } + + var r0 *coretypes.ResultGenesisChunk + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, uint) (*coretypes.ResultGenesisChunk, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, uint) *coretypes.ResultGenesisChunk); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultGenesisChunk) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, uint) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_GenesisChunked_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GenesisChunked' +type RPCClient_GenesisChunked_Call struct { + *mock.Call +} + +// GenesisChunked is a helper method to define mock.On call +// - 
_a0 context.Context +// - _a1 uint +func (_e *RPCClient_Expecter) GenesisChunked(_a0 interface{}, _a1 interface{}) *RPCClient_GenesisChunked_Call { + return &RPCClient_GenesisChunked_Call{Call: _e.mock.On("GenesisChunked", _a0, _a1)} +} + +func (_c *RPCClient_GenesisChunked_Call) Run(run func(_a0 context.Context, _a1 uint)) *RPCClient_GenesisChunked_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(uint)) + }) + return _c +} + +func (_c *RPCClient_GenesisChunked_Call) Return(_a0 *coretypes.ResultGenesisChunk, _a1 error) *RPCClient_GenesisChunked_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_GenesisChunked_Call) RunAndReturn(run func(context.Context, uint) (*coretypes.ResultGenesisChunk, error)) *RPCClient_GenesisChunked_Call { + _c.Call.Return(run) + return _c +} + +// Header provides a mock function with given fields: ctx, height +func (_m *RPCClient) Header(ctx context.Context, height *int64) (*coretypes.ResultHeader, error) { + ret := _m.Called(ctx, height) + + if len(ret) == 0 { + panic("no return value specified for Header") + } + + var r0 *coretypes.ResultHeader + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *int64) (*coretypes.ResultHeader, error)); ok { + return rf(ctx, height) + } + if rf, ok := ret.Get(0).(func(context.Context, *int64) *coretypes.ResultHeader); ok { + r0 = rf(ctx, height) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultHeader) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *int64) error); ok { + r1 = rf(ctx, height) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_Header_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Header' +type RPCClient_Header_Call struct { + *mock.Call +} + +// Header is a helper method to define mock.On call +// - ctx context.Context +// - height *int64 +func (_e *RPCClient_Expecter) Header(ctx interface{}, height interface{}) 
*RPCClient_Header_Call { + return &RPCClient_Header_Call{Call: _e.mock.On("Header", ctx, height)} +} + +func (_c *RPCClient_Header_Call) Run(run func(ctx context.Context, height *int64)) *RPCClient_Header_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*int64)) + }) + return _c +} + +func (_c *RPCClient_Header_Call) Return(_a0 *coretypes.ResultHeader, _a1 error) *RPCClient_Header_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_Header_Call) RunAndReturn(run func(context.Context, *int64) (*coretypes.ResultHeader, error)) *RPCClient_Header_Call { + _c.Call.Return(run) + return _c +} + +// HeaderByHash provides a mock function with given fields: ctx, hash +func (_m *RPCClient) HeaderByHash(ctx context.Context, hash bytes.HexBytes) (*coretypes.ResultHeader, error) { + ret := _m.Called(ctx, hash) + + if len(ret) == 0 { + panic("no return value specified for HeaderByHash") + } + + var r0 *coretypes.ResultHeader + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, bytes.HexBytes) (*coretypes.ResultHeader, error)); ok { + return rf(ctx, hash) + } + if rf, ok := ret.Get(0).(func(context.Context, bytes.HexBytes) *coretypes.ResultHeader); ok { + r0 = rf(ctx, hash) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultHeader) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, bytes.HexBytes) error); ok { + r1 = rf(ctx, hash) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_HeaderByHash_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'HeaderByHash' +type RPCClient_HeaderByHash_Call struct { + *mock.Call +} + +// HeaderByHash is a helper method to define mock.On call +// - ctx context.Context +// - hash bytes.HexBytes +func (_e *RPCClient_Expecter) HeaderByHash(ctx interface{}, hash interface{}) *RPCClient_HeaderByHash_Call { + return &RPCClient_HeaderByHash_Call{Call: _e.mock.On("HeaderByHash", ctx, hash)} +} + 
+func (_c *RPCClient_HeaderByHash_Call) Run(run func(ctx context.Context, hash bytes.HexBytes)) *RPCClient_HeaderByHash_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(bytes.HexBytes)) + }) + return _c +} + +func (_c *RPCClient_HeaderByHash_Call) Return(_a0 *coretypes.ResultHeader, _a1 error) *RPCClient_HeaderByHash_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_HeaderByHash_Call) RunAndReturn(run func(context.Context, bytes.HexBytes) (*coretypes.ResultHeader, error)) *RPCClient_HeaderByHash_Call { + _c.Call.Return(run) + return _c +} + +// Health provides a mock function with given fields: _a0 +func (_m *RPCClient) Health(_a0 context.Context) (*coretypes.ResultHealth, error) { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for Health") + } + + var r0 *coretypes.ResultHealth + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (*coretypes.ResultHealth, error)); ok { + return rf(_a0) + } + if rf, ok := ret.Get(0).(func(context.Context) *coretypes.ResultHealth); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultHealth) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_Health_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Health' +type RPCClient_Health_Call struct { + *mock.Call +} + +// Health is a helper method to define mock.On call +// - _a0 context.Context +func (_e *RPCClient_Expecter) Health(_a0 interface{}) *RPCClient_Health_Call { + return &RPCClient_Health_Call{Call: _e.mock.On("Health", _a0)} +} + +func (_c *RPCClient_Health_Call) Run(run func(_a0 context.Context)) *RPCClient_Health_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *RPCClient_Health_Call) Return(_a0 *coretypes.ResultHealth, 
_a1 error) *RPCClient_Health_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_Health_Call) RunAndReturn(run func(context.Context) (*coretypes.ResultHealth, error)) *RPCClient_Health_Call { + _c.Call.Return(run) + return _c +} + +// IsRunning provides a mock function with no fields +func (_m *RPCClient) IsRunning() bool { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for IsRunning") + } + + var r0 bool + if rf, ok := ret.Get(0).(func() bool); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(bool) + } + + return r0 +} + +// RPCClient_IsRunning_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'IsRunning' +type RPCClient_IsRunning_Call struct { + *mock.Call +} + +// IsRunning is a helper method to define mock.On call +func (_e *RPCClient_Expecter) IsRunning() *RPCClient_IsRunning_Call { + return &RPCClient_IsRunning_Call{Call: _e.mock.On("IsRunning")} +} + +func (_c *RPCClient_IsRunning_Call) Run(run func()) *RPCClient_IsRunning_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *RPCClient_IsRunning_Call) Return(_a0 bool) *RPCClient_IsRunning_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *RPCClient_IsRunning_Call) RunAndReturn(run func() bool) *RPCClient_IsRunning_Call { + _c.Call.Return(run) + return _c +} + +// NetInfo provides a mock function with given fields: _a0 +func (_m *RPCClient) NetInfo(_a0 context.Context) (*coretypes.ResultNetInfo, error) { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for NetInfo") + } + + var r0 *coretypes.ResultNetInfo + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (*coretypes.ResultNetInfo, error)); ok { + return rf(_a0) + } + if rf, ok := ret.Get(0).(func(context.Context) *coretypes.ResultNetInfo); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultNetInfo) + } + } + + if rf, ok := 
ret.Get(1).(func(context.Context) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_NetInfo_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'NetInfo' +type RPCClient_NetInfo_Call struct { + *mock.Call +} + +// NetInfo is a helper method to define mock.On call +// - _a0 context.Context +func (_e *RPCClient_Expecter) NetInfo(_a0 interface{}) *RPCClient_NetInfo_Call { + return &RPCClient_NetInfo_Call{Call: _e.mock.On("NetInfo", _a0)} +} + +func (_c *RPCClient_NetInfo_Call) Run(run func(_a0 context.Context)) *RPCClient_NetInfo_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *RPCClient_NetInfo_Call) Return(_a0 *coretypes.ResultNetInfo, _a1 error) *RPCClient_NetInfo_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_NetInfo_Call) RunAndReturn(run func(context.Context) (*coretypes.ResultNetInfo, error)) *RPCClient_NetInfo_Call { + _c.Call.Return(run) + return _c +} + +// NumUnconfirmedTxs provides a mock function with given fields: _a0 +func (_m *RPCClient) NumUnconfirmedTxs(_a0 context.Context) (*coretypes.ResultUnconfirmedTxs, error) { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for NumUnconfirmedTxs") + } + + var r0 *coretypes.ResultUnconfirmedTxs + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (*coretypes.ResultUnconfirmedTxs, error)); ok { + return rf(_a0) + } + if rf, ok := ret.Get(0).(func(context.Context) *coretypes.ResultUnconfirmedTxs); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultUnconfirmedTxs) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_NumUnconfirmedTxs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'NumUnconfirmedTxs' +type 
RPCClient_NumUnconfirmedTxs_Call struct { + *mock.Call +} + +// NumUnconfirmedTxs is a helper method to define mock.On call +// - _a0 context.Context +func (_e *RPCClient_Expecter) NumUnconfirmedTxs(_a0 interface{}) *RPCClient_NumUnconfirmedTxs_Call { + return &RPCClient_NumUnconfirmedTxs_Call{Call: _e.mock.On("NumUnconfirmedTxs", _a0)} +} + +func (_c *RPCClient_NumUnconfirmedTxs_Call) Run(run func(_a0 context.Context)) *RPCClient_NumUnconfirmedTxs_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *RPCClient_NumUnconfirmedTxs_Call) Return(_a0 *coretypes.ResultUnconfirmedTxs, _a1 error) *RPCClient_NumUnconfirmedTxs_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_NumUnconfirmedTxs_Call) RunAndReturn(run func(context.Context) (*coretypes.ResultUnconfirmedTxs, error)) *RPCClient_NumUnconfirmedTxs_Call { + _c.Call.Return(run) + return _c +} + +// OnReset provides a mock function with no fields +func (_m *RPCClient) OnReset() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for OnReset") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// RPCClient_OnReset_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'OnReset' +type RPCClient_OnReset_Call struct { + *mock.Call +} + +// OnReset is a helper method to define mock.On call +func (_e *RPCClient_Expecter) OnReset() *RPCClient_OnReset_Call { + return &RPCClient_OnReset_Call{Call: _e.mock.On("OnReset")} +} + +func (_c *RPCClient_OnReset_Call) Run(run func()) *RPCClient_OnReset_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *RPCClient_OnReset_Call) Return(_a0 error) *RPCClient_OnReset_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *RPCClient_OnReset_Call) RunAndReturn(run func() error) *RPCClient_OnReset_Call { + 
_c.Call.Return(run) + return _c +} + +// OnStart provides a mock function with no fields +func (_m *RPCClient) OnStart() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for OnStart") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// RPCClient_OnStart_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'OnStart' +type RPCClient_OnStart_Call struct { + *mock.Call +} + +// OnStart is a helper method to define mock.On call +func (_e *RPCClient_Expecter) OnStart() *RPCClient_OnStart_Call { + return &RPCClient_OnStart_Call{Call: _e.mock.On("OnStart")} +} + +func (_c *RPCClient_OnStart_Call) Run(run func()) *RPCClient_OnStart_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *RPCClient_OnStart_Call) Return(_a0 error) *RPCClient_OnStart_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *RPCClient_OnStart_Call) RunAndReturn(run func() error) *RPCClient_OnStart_Call { + _c.Call.Return(run) + return _c +} + +// OnStop provides a mock function with no fields +func (_m *RPCClient) OnStop() { + _m.Called() +} + +// RPCClient_OnStop_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'OnStop' +type RPCClient_OnStop_Call struct { + *mock.Call +} + +// OnStop is a helper method to define mock.On call +func (_e *RPCClient_Expecter) OnStop() *RPCClient_OnStop_Call { + return &RPCClient_OnStop_Call{Call: _e.mock.On("OnStop")} +} + +func (_c *RPCClient_OnStop_Call) Run(run func()) *RPCClient_OnStop_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *RPCClient_OnStop_Call) Return() *RPCClient_OnStop_Call { + _c.Call.Return() + return _c +} + +func (_c *RPCClient_OnStop_Call) RunAndReturn(run func()) *RPCClient_OnStop_Call { + _c.Run(run) + return _c +} + +// Quit provides a mock function with no fields 
+func (_m *RPCClient) Quit() <-chan struct{} { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Quit") + } + + var r0 <-chan struct{} + if rf, ok := ret.Get(0).(func() <-chan struct{}); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(<-chan struct{}) + } + } + + return r0 +} + +// RPCClient_Quit_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Quit' +type RPCClient_Quit_Call struct { + *mock.Call +} + +// Quit is a helper method to define mock.On call +func (_e *RPCClient_Expecter) Quit() *RPCClient_Quit_Call { + return &RPCClient_Quit_Call{Call: _e.mock.On("Quit")} +} + +func (_c *RPCClient_Quit_Call) Run(run func()) *RPCClient_Quit_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *RPCClient_Quit_Call) Return(_a0 <-chan struct{}) *RPCClient_Quit_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *RPCClient_Quit_Call) RunAndReturn(run func() <-chan struct{}) *RPCClient_Quit_Call { + _c.Call.Return(run) + return _c +} + +// Reset provides a mock function with no fields +func (_m *RPCClient) Reset() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Reset") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// RPCClient_Reset_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Reset' +type RPCClient_Reset_Call struct { + *mock.Call +} + +// Reset is a helper method to define mock.On call +func (_e *RPCClient_Expecter) Reset() *RPCClient_Reset_Call { + return &RPCClient_Reset_Call{Call: _e.mock.On("Reset")} +} + +func (_c *RPCClient_Reset_Call) Run(run func()) *RPCClient_Reset_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *RPCClient_Reset_Call) Return(_a0 error) *RPCClient_Reset_Call { + _c.Call.Return(_a0) + return _c +} 
+ +func (_c *RPCClient_Reset_Call) RunAndReturn(run func() error) *RPCClient_Reset_Call { + _c.Call.Return(run) + return _c +} + +// SetLogger provides a mock function with given fields: _a0 +func (_m *RPCClient) SetLogger(_a0 log.Logger) { + _m.Called(_a0) +} + +// RPCClient_SetLogger_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetLogger' +type RPCClient_SetLogger_Call struct { + *mock.Call +} + +// SetLogger is a helper method to define mock.On call +// - _a0 log.Logger +func (_e *RPCClient_Expecter) SetLogger(_a0 interface{}) *RPCClient_SetLogger_Call { + return &RPCClient_SetLogger_Call{Call: _e.mock.On("SetLogger", _a0)} +} + +func (_c *RPCClient_SetLogger_Call) Run(run func(_a0 log.Logger)) *RPCClient_SetLogger_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(log.Logger)) + }) + return _c +} + +func (_c *RPCClient_SetLogger_Call) Return() *RPCClient_SetLogger_Call { + _c.Call.Return() + return _c +} + +func (_c *RPCClient_SetLogger_Call) RunAndReturn(run func(log.Logger)) *RPCClient_SetLogger_Call { + _c.Run(run) + return _c +} + +// Start provides a mock function with no fields +func (_m *RPCClient) Start() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Start") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// RPCClient_Start_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Start' +type RPCClient_Start_Call struct { + *mock.Call +} + +// Start is a helper method to define mock.On call +func (_e *RPCClient_Expecter) Start() *RPCClient_Start_Call { + return &RPCClient_Start_Call{Call: _e.mock.On("Start")} +} + +func (_c *RPCClient_Start_Call) Run(run func()) *RPCClient_Start_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *RPCClient_Start_Call) Return(_a0 error) *RPCClient_Start_Call { 
+ _c.Call.Return(_a0) + return _c +} + +func (_c *RPCClient_Start_Call) RunAndReturn(run func() error) *RPCClient_Start_Call { + _c.Call.Return(run) + return _c +} + +// Status provides a mock function with given fields: _a0 +func (_m *RPCClient) Status(_a0 context.Context) (*coretypes.ResultStatus, error) { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for Status") + } + + var r0 *coretypes.ResultStatus + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (*coretypes.ResultStatus, error)); ok { + return rf(_a0) + } + if rf, ok := ret.Get(0).(func(context.Context) *coretypes.ResultStatus); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultStatus) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_Status_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Status' +type RPCClient_Status_Call struct { + *mock.Call +} + +// Status is a helper method to define mock.On call +// - _a0 context.Context +func (_e *RPCClient_Expecter) Status(_a0 interface{}) *RPCClient_Status_Call { + return &RPCClient_Status_Call{Call: _e.mock.On("Status", _a0)} +} + +func (_c *RPCClient_Status_Call) Run(run func(_a0 context.Context)) *RPCClient_Status_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *RPCClient_Status_Call) Return(_a0 *coretypes.ResultStatus, _a1 error) *RPCClient_Status_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_Status_Call) RunAndReturn(run func(context.Context) (*coretypes.ResultStatus, error)) *RPCClient_Status_Call { + _c.Call.Return(run) + return _c +} + +// Stop provides a mock function with no fields +func (_m *RPCClient) Stop() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Stop") + } + + var r0 
error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// RPCClient_Stop_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Stop' +type RPCClient_Stop_Call struct { + *mock.Call +} + +// Stop is a helper method to define mock.On call +func (_e *RPCClient_Expecter) Stop() *RPCClient_Stop_Call { + return &RPCClient_Stop_Call{Call: _e.mock.On("Stop")} +} + +func (_c *RPCClient_Stop_Call) Run(run func()) *RPCClient_Stop_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *RPCClient_Stop_Call) Return(_a0 error) *RPCClient_Stop_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *RPCClient_Stop_Call) RunAndReturn(run func() error) *RPCClient_Stop_Call { + _c.Call.Return(run) + return _c +} + +// String provides a mock function with no fields +func (_m *RPCClient) String() string { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for String") + } + + var r0 string + if rf, ok := ret.Get(0).(func() string); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// RPCClient_String_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'String' +type RPCClient_String_Call struct { + *mock.Call +} + +// String is a helper method to define mock.On call +func (_e *RPCClient_Expecter) String() *RPCClient_String_Call { + return &RPCClient_String_Call{Call: _e.mock.On("String")} +} + +func (_c *RPCClient_String_Call) Run(run func()) *RPCClient_String_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *RPCClient_String_Call) Return(_a0 string) *RPCClient_String_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *RPCClient_String_Call) RunAndReturn(run func() string) *RPCClient_String_Call { + _c.Call.Return(run) + return _c +} + +// Subscribe provides a mock function with given fields: ctx, subscriber, 
query, outCapacity +func (_m *RPCClient) Subscribe(ctx context.Context, subscriber string, query string, outCapacity ...int) (<-chan coretypes.ResultEvent, error) { + _va := make([]interface{}, len(outCapacity)) + for _i := range outCapacity { + _va[_i] = outCapacity[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, subscriber, query) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for Subscribe") + } + + var r0 <-chan coretypes.ResultEvent + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, string, ...int) (<-chan coretypes.ResultEvent, error)); ok { + return rf(ctx, subscriber, query, outCapacity...) + } + if rf, ok := ret.Get(0).(func(context.Context, string, string, ...int) <-chan coretypes.ResultEvent); ok { + r0 = rf(ctx, subscriber, query, outCapacity...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(<-chan coretypes.ResultEvent) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, string, string, ...int) error); ok { + r1 = rf(ctx, subscriber, query, outCapacity...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_Subscribe_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Subscribe' +type RPCClient_Subscribe_Call struct { + *mock.Call +} + +// Subscribe is a helper method to define mock.On call +// - ctx context.Context +// - subscriber string +// - query string +// - outCapacity ...int +func (_e *RPCClient_Expecter) Subscribe(ctx interface{}, subscriber interface{}, query interface{}, outCapacity ...interface{}) *RPCClient_Subscribe_Call { + return &RPCClient_Subscribe_Call{Call: _e.mock.On("Subscribe", + append([]interface{}{ctx, subscriber, query}, outCapacity...)...)} +} + +func (_c *RPCClient_Subscribe_Call) Run(run func(ctx context.Context, subscriber string, query string, outCapacity ...int)) *RPCClient_Subscribe_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]int, len(args)-3) + for i, a := range args[3:] { + if a != nil { + variadicArgs[i] = a.(int) + } + } + run(args[0].(context.Context), args[1].(string), args[2].(string), variadicArgs...) 
+ }) + return _c +} + +func (_c *RPCClient_Subscribe_Call) Return(out <-chan coretypes.ResultEvent, err error) *RPCClient_Subscribe_Call { + _c.Call.Return(out, err) + return _c +} + +func (_c *RPCClient_Subscribe_Call) RunAndReturn(run func(context.Context, string, string, ...int) (<-chan coretypes.ResultEvent, error)) *RPCClient_Subscribe_Call { + _c.Call.Return(run) + return _c +} + +// Tx provides a mock function with given fields: ctx, hash, prove +func (_m *RPCClient) Tx(ctx context.Context, hash []byte, prove bool) (*coretypes.ResultTx, error) { + ret := _m.Called(ctx, hash, prove) + + if len(ret) == 0 { + panic("no return value specified for Tx") + } + + var r0 *coretypes.ResultTx + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, []byte, bool) (*coretypes.ResultTx, error)); ok { + return rf(ctx, hash, prove) + } + if rf, ok := ret.Get(0).(func(context.Context, []byte, bool) *coretypes.ResultTx); ok { + r0 = rf(ctx, hash, prove) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultTx) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, []byte, bool) error); ok { + r1 = rf(ctx, hash, prove) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_Tx_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Tx' +type RPCClient_Tx_Call struct { + *mock.Call +} + +// Tx is a helper method to define mock.On call +// - ctx context.Context +// - hash []byte +// - prove bool +func (_e *RPCClient_Expecter) Tx(ctx interface{}, hash interface{}, prove interface{}) *RPCClient_Tx_Call { + return &RPCClient_Tx_Call{Call: _e.mock.On("Tx", ctx, hash, prove)} +} + +func (_c *RPCClient_Tx_Call) Run(run func(ctx context.Context, hash []byte, prove bool)) *RPCClient_Tx_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].([]byte), args[2].(bool)) + }) + return _c +} + +func (_c *RPCClient_Tx_Call) Return(_a0 *coretypes.ResultTx, _a1 error) 
*RPCClient_Tx_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_Tx_Call) RunAndReturn(run func(context.Context, []byte, bool) (*coretypes.ResultTx, error)) *RPCClient_Tx_Call { + _c.Call.Return(run) + return _c +} + +// TxSearch provides a mock function with given fields: ctx, query, prove, page, perPage, orderBy +func (_m *RPCClient) TxSearch(ctx context.Context, query string, prove bool, page *int, perPage *int, orderBy string) (*coretypes.ResultTxSearch, error) { + ret := _m.Called(ctx, query, prove, page, perPage, orderBy) + + if len(ret) == 0 { + panic("no return value specified for TxSearch") + } + + var r0 *coretypes.ResultTxSearch + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, bool, *int, *int, string) (*coretypes.ResultTxSearch, error)); ok { + return rf(ctx, query, prove, page, perPage, orderBy) + } + if rf, ok := ret.Get(0).(func(context.Context, string, bool, *int, *int, string) *coretypes.ResultTxSearch); ok { + r0 = rf(ctx, query, prove, page, perPage, orderBy) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultTxSearch) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, string, bool, *int, *int, string) error); ok { + r1 = rf(ctx, query, prove, page, perPage, orderBy) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_TxSearch_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'TxSearch' +type RPCClient_TxSearch_Call struct { + *mock.Call +} + +// TxSearch is a helper method to define mock.On call +// - ctx context.Context +// - query string +// - prove bool +// - page *int +// - perPage *int +// - orderBy string +func (_e *RPCClient_Expecter) TxSearch(ctx interface{}, query interface{}, prove interface{}, page interface{}, perPage interface{}, orderBy interface{}) *RPCClient_TxSearch_Call { + return &RPCClient_TxSearch_Call{Call: _e.mock.On("TxSearch", ctx, query, prove, page, perPage, orderBy)} +} + +func (_c 
*RPCClient_TxSearch_Call) Run(run func(ctx context.Context, query string, prove bool, page *int, perPage *int, orderBy string)) *RPCClient_TxSearch_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(bool), args[3].(*int), args[4].(*int), args[5].(string)) + }) + return _c +} + +func (_c *RPCClient_TxSearch_Call) Return(_a0 *coretypes.ResultTxSearch, _a1 error) *RPCClient_TxSearch_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_TxSearch_Call) RunAndReturn(run func(context.Context, string, bool, *int, *int, string) (*coretypes.ResultTxSearch, error)) *RPCClient_TxSearch_Call { + _c.Call.Return(run) + return _c +} + +// UnconfirmedTxs provides a mock function with given fields: ctx, limit +func (_m *RPCClient) UnconfirmedTxs(ctx context.Context, limit *int) (*coretypes.ResultUnconfirmedTxs, error) { + ret := _m.Called(ctx, limit) + + if len(ret) == 0 { + panic("no return value specified for UnconfirmedTxs") + } + + var r0 *coretypes.ResultUnconfirmedTxs + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *int) (*coretypes.ResultUnconfirmedTxs, error)); ok { + return rf(ctx, limit) + } + if rf, ok := ret.Get(0).(func(context.Context, *int) *coretypes.ResultUnconfirmedTxs); ok { + r0 = rf(ctx, limit) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultUnconfirmedTxs) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *int) error); ok { + r1 = rf(ctx, limit) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_UnconfirmedTxs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UnconfirmedTxs' +type RPCClient_UnconfirmedTxs_Call struct { + *mock.Call +} + +// UnconfirmedTxs is a helper method to define mock.On call +// - ctx context.Context +// - limit *int +func (_e *RPCClient_Expecter) UnconfirmedTxs(ctx interface{}, limit interface{}) *RPCClient_UnconfirmedTxs_Call { + return 
&RPCClient_UnconfirmedTxs_Call{Call: _e.mock.On("UnconfirmedTxs", ctx, limit)} +} + +func (_c *RPCClient_UnconfirmedTxs_Call) Run(run func(ctx context.Context, limit *int)) *RPCClient_UnconfirmedTxs_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*int)) + }) + return _c +} + +func (_c *RPCClient_UnconfirmedTxs_Call) Return(_a0 *coretypes.ResultUnconfirmedTxs, _a1 error) *RPCClient_UnconfirmedTxs_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_UnconfirmedTxs_Call) RunAndReturn(run func(context.Context, *int) (*coretypes.ResultUnconfirmedTxs, error)) *RPCClient_UnconfirmedTxs_Call { + _c.Call.Return(run) + return _c +} + +// Unsubscribe provides a mock function with given fields: ctx, subscriber, query +func (_m *RPCClient) Unsubscribe(ctx context.Context, subscriber string, query string) error { + ret := _m.Called(ctx, subscriber, query) + + if len(ret) == 0 { + panic("no return value specified for Unsubscribe") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok { + r0 = rf(ctx, subscriber, query) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// RPCClient_Unsubscribe_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Unsubscribe' +type RPCClient_Unsubscribe_Call struct { + *mock.Call +} + +// Unsubscribe is a helper method to define mock.On call +// - ctx context.Context +// - subscriber string +// - query string +func (_e *RPCClient_Expecter) Unsubscribe(ctx interface{}, subscriber interface{}, query interface{}) *RPCClient_Unsubscribe_Call { + return &RPCClient_Unsubscribe_Call{Call: _e.mock.On("Unsubscribe", ctx, subscriber, query)} +} + +func (_c *RPCClient_Unsubscribe_Call) Run(run func(ctx context.Context, subscriber string, query string)) *RPCClient_Unsubscribe_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(string)) + }) + return _c +} + 
+func (_c *RPCClient_Unsubscribe_Call) Return(_a0 error) *RPCClient_Unsubscribe_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *RPCClient_Unsubscribe_Call) RunAndReturn(run func(context.Context, string, string) error) *RPCClient_Unsubscribe_Call { + _c.Call.Return(run) + return _c +} + +// UnsubscribeAll provides a mock function with given fields: ctx, subscriber +func (_m *RPCClient) UnsubscribeAll(ctx context.Context, subscriber string) error { + ret := _m.Called(ctx, subscriber) + + if len(ret) == 0 { + panic("no return value specified for UnsubscribeAll") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { + r0 = rf(ctx, subscriber) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// RPCClient_UnsubscribeAll_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UnsubscribeAll' +type RPCClient_UnsubscribeAll_Call struct { + *mock.Call +} + +// UnsubscribeAll is a helper method to define mock.On call +// - ctx context.Context +// - subscriber string +func (_e *RPCClient_Expecter) UnsubscribeAll(ctx interface{}, subscriber interface{}) *RPCClient_UnsubscribeAll_Call { + return &RPCClient_UnsubscribeAll_Call{Call: _e.mock.On("UnsubscribeAll", ctx, subscriber)} +} + +func (_c *RPCClient_UnsubscribeAll_Call) Run(run func(ctx context.Context, subscriber string)) *RPCClient_UnsubscribeAll_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *RPCClient_UnsubscribeAll_Call) Return(_a0 error) *RPCClient_UnsubscribeAll_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *RPCClient_UnsubscribeAll_Call) RunAndReturn(run func(context.Context, string) error) *RPCClient_UnsubscribeAll_Call { + _c.Call.Return(run) + return _c +} + +// Validators provides a mock function with given fields: ctx, height, page, perPage +func (_m *RPCClient) Validators(ctx context.Context, height *int64, page *int, perPage *int) 
(*coretypes.ResultValidators, error) { + ret := _m.Called(ctx, height, page, perPage) + + if len(ret) == 0 { + panic("no return value specified for Validators") + } + + var r0 *coretypes.ResultValidators + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *int64, *int, *int) (*coretypes.ResultValidators, error)); ok { + return rf(ctx, height, page, perPage) + } + if rf, ok := ret.Get(0).(func(context.Context, *int64, *int, *int) *coretypes.ResultValidators); ok { + r0 = rf(ctx, height, page, perPage) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coretypes.ResultValidators) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *int64, *int, *int) error); ok { + r1 = rf(ctx, height, page, perPage) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RPCClient_Validators_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Validators' +type RPCClient_Validators_Call struct { + *mock.Call +} + +// Validators is a helper method to define mock.On call +// - ctx context.Context +// - height *int64 +// - page *int +// - perPage *int +func (_e *RPCClient_Expecter) Validators(ctx interface{}, height interface{}, page interface{}, perPage interface{}) *RPCClient_Validators_Call { + return &RPCClient_Validators_Call{Call: _e.mock.On("Validators", ctx, height, page, perPage)} +} + +func (_c *RPCClient_Validators_Call) Run(run func(ctx context.Context, height *int64, page *int, perPage *int)) *RPCClient_Validators_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*int64), args[2].(*int), args[3].(*int)) + }) + return _c +} + +func (_c *RPCClient_Validators_Call) Return(_a0 *coretypes.ResultValidators, _a1 error) *RPCClient_Validators_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RPCClient_Validators_Call) RunAndReturn(run func(context.Context, *int64, *int, *int) (*coretypes.ResultValidators, error)) *RPCClient_Validators_Call { + 
_c.Call.Return(run) + return _c +} + +// NewRPCClient creates a new instance of RPCClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewRPCClient(t interface { + mock.TestingT + Cleanup(func()) +}) *RPCClient { + mock := &RPCClient{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/ignite/pkg/cosmosclient/mocks/signer.go b/ignite/pkg/cosmosclient/mocks/signer.go new file mode 100644 index 0000000..91b2543 --- /dev/null +++ b/ignite/pkg/cosmosclient/mocks/signer.go @@ -0,0 +1,90 @@ +// Code generated by mockery v2.53.3. DO NOT EDIT. + +package mocks + +import ( + context "context" + + client "github.com/cosmos/cosmos-sdk/client" + + mock "github.com/stretchr/testify/mock" + + tx "github.com/cosmos/cosmos-sdk/client/tx" +) + +// Signer is an autogenerated mock type for the Signer type +type Signer struct { + mock.Mock +} + +type Signer_Expecter struct { + mock *mock.Mock +} + +func (_m *Signer) EXPECT() *Signer_Expecter { + return &Signer_Expecter{mock: &_m.Mock} +} + +// Sign provides a mock function with given fields: ctx, txf, name, txBuilder, overwriteSig +func (_m *Signer) Sign(ctx context.Context, txf tx.Factory, name string, txBuilder client.TxBuilder, overwriteSig bool) error { + ret := _m.Called(ctx, txf, name, txBuilder, overwriteSig) + + if len(ret) == 0 { + panic("no return value specified for Sign") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, tx.Factory, string, client.TxBuilder, bool) error); ok { + r0 = rf(ctx, txf, name, txBuilder, overwriteSig) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Signer_Sign_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Sign' +type Signer_Sign_Call struct { + *mock.Call +} + +// Sign is a helper method to define mock.On call +// - ctx context.Context +// - txf 
tx.Factory +// - name string +// - txBuilder client.TxBuilder +// - overwriteSig bool +func (_e *Signer_Expecter) Sign(ctx interface{}, txf interface{}, name interface{}, txBuilder interface{}, overwriteSig interface{}) *Signer_Sign_Call { + return &Signer_Sign_Call{Call: _e.mock.On("Sign", ctx, txf, name, txBuilder, overwriteSig)} +} + +func (_c *Signer_Sign_Call) Run(run func(ctx context.Context, txf tx.Factory, name string, txBuilder client.TxBuilder, overwriteSig bool)) *Signer_Sign_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(tx.Factory), args[2].(string), args[3].(client.TxBuilder), args[4].(bool)) + }) + return _c +} + +func (_c *Signer_Sign_Call) Return(_a0 error) *Signer_Sign_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Signer_Sign_Call) RunAndReturn(run func(context.Context, tx.Factory, string, client.TxBuilder, bool) error) *Signer_Sign_Call { + _c.Call.Return(run) + return _c +} + +// NewSigner creates a new instance of Signer. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewSigner(t interface { + mock.TestingT + Cleanup(func()) +}) *Signer { + mock := &Signer{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/ignite/pkg/cosmosclient/rpc.go b/ignite/pkg/cosmosclient/rpc.go new file mode 100644 index 0000000..69101b6 --- /dev/null +++ b/ignite/pkg/cosmosclient/rpc.go @@ -0,0 +1,155 @@ +package cosmosclient + +import ( + "context" + + "github.com/cometbft/cometbft/libs/bytes" + rpcclient "github.com/cometbft/cometbft/rpc/client" + ctypes "github.com/cometbft/cometbft/rpc/core/types" + "github.com/cometbft/cometbft/types" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// rpcWrapper is a rpclient.Client but with more contextualized errors. 
+// Useful because the original implementation may return JSON errors when the +// requested node is busy, which is confusing for the user. With rpcWrapper, +// the error is prefixed with 'error while requesting node xxx: JSON error'. +type rpcWrapper struct { + rpcclient.Client + nodeAddress string +} + +func rpcError(node string, err error) error { + return errors.Wrapf(err, "error while requesting node '%s'", node) +} + +func (rpc rpcWrapper) ABCIInfo(ctx context.Context) (*ctypes.ResultABCIInfo, error) { + res, err := rpc.Client.ABCIInfo(ctx) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) ABCIQuery(ctx context.Context, path string, data bytes.HexBytes) (*ctypes.ResultABCIQuery, error) { + res, err := rpc.Client.ABCIQuery(ctx, path, data) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) ABCIQueryWithOptions(ctx context.Context, path string, data bytes.HexBytes, opts rpcclient.ABCIQueryOptions) (*ctypes.ResultABCIQuery, error) { + res, err := rpc.Client.ABCIQueryWithOptions(ctx, path, data, opts) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) BroadcastTxCommit(ctx context.Context, tx types.Tx) (*ctypes.ResultBroadcastTxCommit, error) { + res, err := rpc.Client.BroadcastTxCommit(ctx, tx) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) BroadcastTxAsync(ctx context.Context, tx types.Tx) (*ctypes.ResultBroadcastTx, error) { + res, err := rpc.Client.BroadcastTxAsync(ctx, tx) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) BroadcastTxSync(ctx context.Context, tx types.Tx) (*ctypes.ResultBroadcastTx, error) { + res, err := rpc.Client.BroadcastTxSync(ctx, tx) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) GenesisChunked(ctx context.Context, n uint) (*ctypes.ResultGenesisChunk, error) { + res, err := rpc.Client.GenesisChunked(ctx, n) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) 
BlockchainInfo(ctx context.Context, minHeight int64, maxHeight int64) (*ctypes.ResultBlockchainInfo, error) { + res, err := rpc.Client.BlockchainInfo(ctx, minHeight, maxHeight) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) NetInfo(ctx context.Context) (*ctypes.ResultNetInfo, error) { + res, err := rpc.Client.NetInfo(ctx) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) DumpConsensusState(ctx context.Context) (*ctypes.ResultDumpConsensusState, error) { + res, err := rpc.Client.DumpConsensusState(ctx) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) ConsensusState(ctx context.Context) (*ctypes.ResultConsensusState, error) { + res, err := rpc.Client.ConsensusState(ctx) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) ConsensusParams(ctx context.Context, height *int64) (*ctypes.ResultConsensusParams, error) { + res, err := rpc.Client.ConsensusParams(ctx, height) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) Health(ctx context.Context) (*ctypes.ResultHealth, error) { + res, err := rpc.Client.Health(ctx) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) Block(ctx context.Context, height *int64) (*ctypes.ResultBlock, error) { + res, err := rpc.Client.Block(ctx, height) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) BlockByHash(ctx context.Context, hash []byte) (*ctypes.ResultBlock, error) { + res, err := rpc.Client.BlockByHash(ctx, hash) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) BlockResults(ctx context.Context, height *int64) (*ctypes.ResultBlockResults, error) { + res, err := rpc.Client.BlockResults(ctx, height) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) Commit(ctx context.Context, height *int64) (*ctypes.ResultCommit, error) { + res, err := rpc.Client.Commit(ctx, height) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc 
rpcWrapper) Validators(ctx context.Context, height *int64, page *int, perPage *int) (*ctypes.ResultValidators, error) { + res, err := rpc.Client.Validators(ctx, height, page, perPage) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) Tx(ctx context.Context, hash []byte, prove bool) (*ctypes.ResultTx, error) { + res, err := rpc.Client.Tx(ctx, hash, prove) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) TxSearch(ctx context.Context, query string, prove bool, page *int, perPage *int, orderBy string) (*ctypes.ResultTxSearch, error) { + res, err := rpc.Client.TxSearch(ctx, query, prove, page, perPage, orderBy) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) BlockSearch(ctx context.Context, query string, page *int, perPage *int, orderBy string) (*ctypes.ResultBlockSearch, error) { + res, err := rpc.Client.BlockSearch(ctx, query, page, perPage, orderBy) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) Status(ctx context.Context) (*ctypes.ResultStatus, error) { + res, err := rpc.Client.Status(ctx) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) BroadcastEvidence(ctx context.Context, e types.Evidence) (*ctypes.ResultBroadcastEvidence, error) { + res, err := rpc.Client.BroadcastEvidence(ctx, e) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) UnconfirmedTxs(ctx context.Context, limit *int) (*ctypes.ResultUnconfirmedTxs, error) { + res, err := rpc.Client.UnconfirmedTxs(ctx, limit) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) NumUnconfirmedTxs(ctx context.Context) (*ctypes.ResultUnconfirmedTxs, error) { + res, err := rpc.Client.NumUnconfirmedTxs(ctx) + return res, rpcError(rpc.nodeAddress, err) +} + +func (rpc rpcWrapper) CheckTx(ctx context.Context, tx types.Tx) (*ctypes.ResultCheckTx, error) { + res, err := rpc.Client.CheckTx(ctx, tx) + return res, rpcError(rpc.nodeAddress, err) +} diff --git 
a/ignite/pkg/cosmosclient/signer.go b/ignite/pkg/cosmosclient/signer.go new file mode 100644 index 0000000..2ec54c0 --- /dev/null +++ b/ignite/pkg/cosmosclient/signer.go @@ -0,0 +1,17 @@ +package cosmosclient + +import ( + "context" + + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/tx" +) + +var _ Signer = signer{} + +// signer implements the Signer interface. +type signer struct{} + +func (signer) Sign(ctx context.Context, txf tx.Factory, name string, txBuilder client.TxBuilder, overwriteSig bool) error { + return tx.Sign(ctx, txf, name, txBuilder, overwriteSig) +} diff --git a/ignite/pkg/cosmosclient/testutil/mocks.go b/ignite/pkg/cosmosclient/testutil/mocks.go new file mode 100644 index 0000000..24dcbb5 --- /dev/null +++ b/ignite/pkg/cosmosclient/testutil/mocks.go @@ -0,0 +1,53 @@ +package testutil + +import ( + "testing" + + "github.com/stretchr/testify/mock" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosclient/mocks" +) + +//go:generate mockery --srcpkg github.com/cometbft/cometbft/rpc/client --name Client --structname RPCClient --filename rpc_client.go --output ../mocks --with-expecter +//go:generate mockery --srcpkg github.com/cosmos/cosmos-sdk/client --name AccountRetriever --filename account_retriever.go --output ../mocks --with-expecter +//go:generate mockery --srcpkg github.com/cosmos/cosmos-sdk/x/bank/types --name QueryClient --structname BankQueryClient --filename bank_query_client.go --output ../mocks --with-expecter + +// NewTendermintClientMock creates a new Tendermint RPC client mock. +func NewTendermintClientMock(t *testing.T) *TendermintClientMock { + t.Helper() + m := TendermintClientMock{} + m.Test(t) + + return &m +} + +// TendermintClientMock mocks Tendermint's RPC client. +type TendermintClientMock struct { + mocks.RPCClient +} + +// OnStatus starts a generic call mock on the Status RPC method. 
+func (m *TendermintClientMock) OnStatus() *mock.Call { + return m.On("Status", mock.Anything) +} + +// OnBlock starts a generic call mock on the Block RPC method. +func (m *TendermintClientMock) OnBlock() *mock.Call { + return m.On("Block", RepeatMockArgs(2)...) +} + +// OnTxSearch starts a generic call mock on the TxSearch RPC method. +func (m *TendermintClientMock) OnTxSearch() *mock.Call { + return m.On("TxSearch", RepeatMockArgs(6)...) +} + +// RepeatMockArgs returns a slice with an N number of mock.Anything arguments. +// This function can be useful to define a number of generic consecutive arguments +// for mocked method calls. +func RepeatMockArgs(n int) (args []interface{}) { + for i := 0; i < n; i++ { + args = append(args, mock.Anything) + } + + return args +} diff --git a/ignite/pkg/cosmosclient/testutil/mocks_test.go b/ignite/pkg/cosmosclient/testutil/mocks_test.go new file mode 100644 index 0000000..8f6416a --- /dev/null +++ b/ignite/pkg/cosmosclient/testutil/mocks_test.go @@ -0,0 +1,23 @@ +package testutil + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestNewTendermintClientMock(t *testing.T) { + m := NewTendermintClientMock(t) + require.NotNil(t, m) + require.NotNil(t, m.OnStatus()) + require.NotNil(t, m.OnBlock()) + require.NotNil(t, m.OnTxSearch()) +} + +func TestRepeatMockArgs(t *testing.T) { + args := RepeatMockArgs(3) + require.Len(t, args, 3) + for _, arg := range args { + require.NotNil(t, arg) + } +} diff --git a/ignite/pkg/cosmosclient/tx.go b/ignite/pkg/cosmosclient/tx.go new file mode 100644 index 0000000..2d95cbe --- /dev/null +++ b/ignite/pkg/cosmosclient/tx.go @@ -0,0 +1,66 @@ +package cosmosclient + +import ( + "encoding/json" + "time" + + ctypes "github.com/cometbft/cometbft/rpc/core/types" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// TX defines a block transaction. +type TX struct { + // BlockTime returns the time of the block that contains the transaction. 
+ BlockTime time.Time + + // Raw contains the transaction as returned by the Tendermint API. + Raw *ctypes.ResultTx +} + +// GetEvents returns the transaction events. +func (t TX) GetEvents() (events []TXEvent, err error) { + for _, e := range t.Raw.TxResult.Events { + evt := TXEvent{Type: e.Type} + + for _, a := range e.Attributes { + // Make sure that the attribute value is a valid JSON encoded string. + // Tendermint event attribute values contain JSON encoded values without quotes + // so string values need to be encoded to be quoted and saved as valid JSONB. + v, err := formatAttributeValue([]byte(a.Value)) + if err != nil { + return nil, errors.Errorf("error encoding event attr '%s.%s': %w", e.Type, a.Key, err) + } + + evt.Attributes = append(evt.Attributes, TXEventAttribute{ + Key: a.Key, + Value: v, + }) + } + + events = append(events, evt) + } + + return events, nil +} + +// TXEvent defines a transaction event. +type TXEvent struct { + Type string `json:"type"` + Attributes []TXEventAttribute `json:"attributes"` +} + +// TXEventAttribute defines a transaction event attribute. +type TXEventAttribute struct { + Key string `json:"key"` + Value []byte `json:"value"` +} + +func formatAttributeValue(v []byte) ([]byte, error) { + if json.Valid(v) { + return v, nil + } + + // Encode all string or invalid values + return json.Marshal(string(v)) +} diff --git a/ignite/pkg/cosmosclient/tx_options.go b/ignite/pkg/cosmosclient/tx_options.go new file mode 100644 index 0000000..e892fd0 --- /dev/null +++ b/ignite/pkg/cosmosclient/tx_options.go @@ -0,0 +1,15 @@ +package cosmosclient + +// TxOptions contains options for creating a transaction. +// It is used by the CreateTxWithOptions method. +type TxOptions struct { + // Memo is the memo to be used for the transaction. + Memo string + + // GasLimit is the gas limit to be used for the transaction. + // If GasLimit is set to 0, the gas limit will be automatically calculated. 
+ GasLimit uint64 + + // Fees is the fees to be used for the transaction. + Fees string +} diff --git a/ignite/pkg/cosmosclient/txservice.go b/ignite/pkg/cosmosclient/txservice.go new file mode 100644 index 0000000..506da40 --- /dev/null +++ b/ignite/pkg/cosmosclient/txservice.go @@ -0,0 +1,142 @@ +package cosmosclient + +import ( + "context" + + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/tx" + sdktypes "github.com/cosmos/cosmos-sdk/types" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// BroadcastOption configures broadcast behavior. +type BroadcastOption func(*broadcastConfig) + +// broadcastConfig holds configuration for broadcast operations. +type broadcastConfig struct { + sequence uint64 + unordered bool +} + +// WithSequence overrides the account sequence for the transaction. +func WithSequence(sequence uint64) BroadcastOption { + return func(cfg *broadcastConfig) { + cfg.sequence = sequence + } +} + +// WithUnordered sets the unordered flag for the transaction. +// NOTE: This is only supported for Cosmos SDK versions >= v0.53. +func WithUnordered(unordered bool) BroadcastOption { + return func(cfg *broadcastConfig) { + cfg.unordered = unordered + } +} + +type TxService struct { + client Client + clientContext client.Context + txBuilder client.TxBuilder + txFactory tx.Factory +} + +// Gas is gas decided to use for this tx. +// either calculated or configured by the caller. +func (s TxService) Gas() uint64 { + return s.txBuilder.GetTx().GetGas() +} + +// broadcast signs and broadcasts the transaction returning the initial broadcast response. +func (s TxService) broadcast(ctx context.Context, opts ...BroadcastOption) (*sdktypes.TxResponse, error) { + defer s.client.lockBech32Prefix()() + + // validate msgs. 
+ for _, msg := range s.txBuilder.GetTx().GetMsgs() { + msg, ok := msg.(sdktypes.HasValidateBasic) + if !ok { + continue + } + if err := msg.ValidateBasic(); err != nil { + return nil, errors.WithStack(err) + } + } + + // Apply broadcast options + cfg := &broadcastConfig{} + for _, opt := range opts { + opt(cfg) + } + + // Override sequence if specified + if cfg.sequence != 0 { + s.txFactory = s.txFactory.WithSequence(cfg.sequence) + } + s.txFactory = s.txFactory.WithUnordered(cfg.unordered) + + accountName := s.clientContext.FromName + if err := s.client.signer.Sign(ctx, s.txFactory, accountName, s.txBuilder, true); err != nil { + return nil, errors.WithStack(err) + } + + txBytes, err := s.clientContext.TxConfig.TxEncoder()(s.txBuilder.GetTx()) + if err != nil { + return nil, errors.WithStack(err) + } + + resp, err := s.clientContext.BroadcastTx(txBytes) + if err := handleBroadcastResult(resp, err); err != nil { + return nil, err + } + + return resp, nil +} + +// Broadcast signs and broadcasts this tx. +// If faucet is enabled and if the "from" account doesn't have enough funds, is +// it automatically filled with the default amount, and the tx is broadcasted +// again. Note that this may still end with the same error if the amount is +// greater than the amount dumped by the faucet. +func (s TxService) Broadcast(ctx context.Context, opts ...BroadcastOption) (Response, error) { + resp, err := s.broadcast(ctx, opts...) + if err != nil { + return Response{}, err + } + + res, err := s.client.WaitForTx(ctx, resp.TxHash) + if err != nil { + return Response{}, err + } + // NOTE(tb) second and third parameters are omitted: + // - second parameter represents the tx and should be of type sdktypes.Any, + // but it is very ugly to decode, not sure if it's worth it (see sdk code + // x/auth/query.go method makeTxResult) + // - third parameter represents the timestamp of the tx, which must be + // fetched from the block itself. 
So it requires another API call to + // fetch the block from res.Height, not sure if it's worth it too. + resp = sdktypes.NewResponseResultTx(res, nil, "") + + return Response{ + Codec: s.clientContext.Codec, + TxResponse: resp, + }, handleBroadcastResult(resp, err) +} + +// BroadcastAsync signs and broadcasts this tx. +// It is similar to Broadcast but it does not wait for the transaction to be included in a block. +func (s TxService) BroadcastAsync(ctx context.Context, opts ...BroadcastOption) (Response, error) { + resp, err := s.broadcast(ctx, opts...) + if err != nil { + return Response{}, err + } + + return Response{ + Codec: s.clientContext.Codec, + TxResponse: resp, + }, handleBroadcastResult(resp, err) +} + +// EncodeJSON encodes the transaction as a json string. +func (s TxService) EncodeJSON() ([]byte, error) { + return s.client.context.TxConfig.TxJSONEncoder()(s.txBuilder.GetTx()) +} diff --git a/ignite/pkg/cosmosclient/txservice_test.go b/ignite/pkg/cosmosclient/txservice_test.go new file mode 100644 index 0000000..f4d1697 --- /dev/null +++ b/ignite/pkg/cosmosclient/txservice_test.go @@ -0,0 +1,309 @@ +package cosmosclient_test + +import ( + "encoding/hex" + "testing" + + "cosmossdk.io/math" + abci "github.com/cometbft/cometbft/abci/types" + ctypes "github.com/cometbft/cometbft/rpc/core/types" + sdktypes "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosaccount" + "github.com/ignite/cli/v29/ignite/pkg/cosmosclient" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func TestTxServiceBroadcast(t *testing.T) { + var ( + accountName = "bob" + passphrase = "passphrase" + txHash = []byte{1, 2, 3} + txHashStr = hex.EncodeToString(txHash) + ) + r, err := cosmosaccount.NewInMemory() + require.NoError(t, err) + a, _, err := 
r.Create(accountName) + require.NoError(t, err) + // Export created account to we can import it in the Client below. + key, err := r.Export(accountName, passphrase) + require.NoError(t, err) + sdkaddr, err := a.Record.GetAddress() + require.NoError(t, err) + msg := &banktypes.MsgSend{ + FromAddress: sdkaddr.String(), + ToAddress: "cosmos1k8e50d2d8xkdfw9c4et3m45llh69e7xzw6uzga", + Amount: sdktypes.NewCoins( + sdktypes.NewCoin("token", math.NewIntFromUint64(1)), + ), + } + tests := []struct { + name string + msg sdktypes.Msg + opts []cosmosclient.Option + expectedResponse *sdktypes.TxResponse + expectedError string + setup func(suite) + }{ + { + name: "fail: error not found", + msg: msg, + expectedError: "make sure that your account has enough balance", + + setup: func(s suite) { + s.expectPrepareFactory(sdkaddr) + s.signer.EXPECT(). + Sign(mock.Anything, mock.Anything, "bob", mock.Anything, true). + Return(nil) + s.rpcClient.EXPECT(). + BroadcastTxSync(mock.Anything, mock.Anything). + Return(nil, sdkerrors.ErrNotFound) + }, + }, + { + name: "fail: response code > 0", + msg: msg, + expectedError: "error code: '42' msg: 'oups'", + + setup: func(s suite) { + s.expectPrepareFactory(sdkaddr) + s.signer.EXPECT(). + Sign(mock.Anything, mock.Anything, "bob", mock.Anything, true). + Return(nil) + s.rpcClient.EXPECT(). + BroadcastTxSync(mock.Anything, mock.Anything). + Return(&ctypes.ResultBroadcastTx{ + Code: 42, + Log: "oups", + }, nil) + }, + }, + { + name: "ok: tx confirmed immediately", + msg: msg, + expectedResponse: &sdktypes.TxResponse{ + TxHash: txHashStr, + RawLog: "log", + }, + + setup: func(s suite) { + s.expectPrepareFactory(sdkaddr) + s.signer.EXPECT(). + Sign(mock.Anything, mock.Anything, "bob", mock.Anything, true). + Return(nil) + s.rpcClient.EXPECT(). + BroadcastTxSync(mock.Anything, mock.Anything). 
+ Return(&ctypes.ResultBroadcastTx{ + Hash: txHash, + }, nil) + + // Tx is broadcasted, now check for confirmation + s.rpcClient.EXPECT().Tx(mock.Anything, txHash, false). + Return(&ctypes.ResultTx{ + Hash: txHash, + TxResult: abci.ExecTxResult{ + Log: "log", + }, + }, nil) + }, + }, + { + name: "fail: tx confirmed with error code", + msg: msg, + expectedError: "error code: '42' msg: 'oups'", + + setup: func(s suite) { + s.expectPrepareFactory(sdkaddr) + s.signer.EXPECT(). + Sign(mock.Anything, mock.Anything, "bob", mock.Anything, true). + Return(nil) + s.rpcClient.EXPECT(). + BroadcastTxSync(mock.Anything, mock.Anything). + Return(&ctypes.ResultBroadcastTx{ + Hash: txHash, + }, nil) + + // Tx is broadcasted, now check for confirmation + s.rpcClient.EXPECT().Tx(mock.Anything, txHash, false). + Return(&ctypes.ResultTx{ + Hash: txHash, + TxResult: abci.ExecTxResult{ + Code: 42, + Log: "oups", + }, + }, nil) + }, + }, + { + name: "ok: tx confirmed after a while", + msg: msg, + expectedResponse: &sdktypes.TxResponse{ + TxHash: txHashStr, + }, + + setup: func(s suite) { + s.expectPrepareFactory(sdkaddr) + s.signer.EXPECT(). + Sign(mock.Anything, mock.Anything, "bob", mock.Anything, true). + Return(nil) + s.rpcClient.EXPECT(). + BroadcastTxSync(mock.Anything, mock.Anything). + Return(&ctypes.ResultBroadcastTx{ + Hash: txHash, + }, nil) + + // Tx is broadcasted, now check for confirmation + // First time the tx is not found (not confirmed yet) + s.rpcClient.EXPECT().Tx(mock.Anything, txHash, false). + Return(nil, errors.New("not found")).Once() + // Wait for 1 block + s.rpcClient.EXPECT().Status(mock.Anything). + Return(&ctypes.ResultStatus{ + SyncInfo: ctypes.SyncInfo{LatestBlockHeight: 1}, + }, nil).Once() + s.rpcClient.EXPECT().Status(mock.Anything). + Return(&ctypes.ResultStatus{ + SyncInfo: ctypes.SyncInfo{LatestBlockHeight: 2}, + }, nil).Once() + // Then try gain to fetch the tx, this time it is confirmed + s.rpcClient.EXPECT().Tx(mock.Anything, txHash, false). 
+ Return(&ctypes.ResultTx{ + Hash: txHash, + }, nil) + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := newClient(t, tt.setup, tt.opts...) + account, err := c.AccountRegistry.Import(accountName, key, passphrase) + require.NoError(t, err) + ctx := c.Context(). + WithFromName(accountName). + WithFromAddress(sdkaddr) + txService, err := c.CreateTx(ctx.CmdContext, account, tt.msg) + require.NoError(t, err) + + res, err := txService.Broadcast(ctx.CmdContext) + if tt.expectedError != "" { + require.EqualError(t, err, tt.expectedError) + return + } + require.NoError(t, err) + require.Equal(t, ctx.Codec, res.Codec) + require.Equal(t, tt.expectedResponse, res.TxResponse) + }) + } +} + +func TestTxServiceBroadcastAsync(t *testing.T) { + var ( + accountName = "bob" + passphrase = "passphrase" + txHash = []byte{1, 2, 3} + txHashStr = hex.EncodeToString(txHash) + ) + r, err := cosmosaccount.NewInMemory() + require.NoError(t, err) + a, _, err := r.Create(accountName) + require.NoError(t, err) + // Export created account to we can import it in the Client below. + key, err := r.Export(accountName, passphrase) + require.NoError(t, err) + sdkaddr, err := a.Record.GetAddress() + require.NoError(t, err) + msg := &banktypes.MsgSend{ + FromAddress: sdkaddr.String(), + ToAddress: "cosmos1k8e50d2d8xkdfw9c4et3m45llh69e7xzw6uzga", + Amount: sdktypes.NewCoins( + sdktypes.NewCoin("token", math.NewIntFromUint64(1)), + ), + } + tests := []struct { + name string + msg sdktypes.Msg + opts []cosmosclient.Option + expectedResponse *sdktypes.TxResponse + expectedError string + setup func(suite) + }{ + { + name: "fail: error not found", + msg: msg, + expectedError: "make sure that your account has enough balance", + + setup: func(s suite) { + s.expectPrepareFactory(sdkaddr) + s.signer.EXPECT(). + Sign(mock.Anything, mock.Anything, "bob", mock.Anything, true). + Return(nil) + s.rpcClient.EXPECT(). + BroadcastTxSync(mock.Anything, mock.Anything). 
+ Return(nil, sdkerrors.ErrNotFound) + }, + }, + { + name: "fail: response code > 0", + msg: msg, + expectedError: "error code: '42' msg: 'oups'", + + setup: func(s suite) { + s.expectPrepareFactory(sdkaddr) + s.signer.EXPECT(). + Sign(mock.Anything, mock.Anything, "bob", mock.Anything, true). + Return(nil) + s.rpcClient.EXPECT(). + BroadcastTxSync(mock.Anything, mock.Anything). + Return(&ctypes.ResultBroadcastTx{ + Code: 42, + Log: "oups", + }, nil) + }, + }, + { + name: "ok: tx broadcasted successfully", + msg: msg, + expectedResponse: &sdktypes.TxResponse{ + TxHash: txHashStr, + RawLog: "", + }, + + setup: func(s suite) { + s.expectPrepareFactory(sdkaddr) + s.signer.EXPECT(). + Sign(mock.Anything, mock.Anything, "bob", mock.Anything, true). + Return(nil) + s.rpcClient.EXPECT(). + BroadcastTxSync(mock.Anything, mock.Anything). + Return(&ctypes.ResultBroadcastTx{ + Hash: txHash, + }, nil) + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := newClient(t, tt.setup, tt.opts...) + account, err := c.AccountRegistry.Import(accountName, key, passphrase) + require.NoError(t, err) + ctx := c.Context(). + WithFromName(accountName). 
+ WithFromAddress(sdkaddr) + txService, err := c.CreateTx(ctx.CmdContext, account, tt.msg) + require.NoError(t, err) + + res, err := txService.BroadcastAsync(ctx.CmdContext) + if tt.expectedError != "" { + require.EqualError(t, err, tt.expectedError) + return + } + require.NoError(t, err) + require.Equal(t, ctx.Codec, res.Codec) + require.Equal(t, tt.expectedResponse, res.TxResponse) + }) + } +} diff --git a/ignite/pkg/cosmosfaucet/client_http.go b/ignite/pkg/cosmosfaucet/client_http.go new file mode 100644 index 0000000..46964a5 --- /dev/null +++ b/ignite/pkg/cosmosfaucet/client_http.go @@ -0,0 +1,86 @@ +package cosmosfaucet + +import ( + "bytes" + "context" + "encoding/json" + "io" + "net/http" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// ErrTransferRequest is an error that occurs when a transfer request fails. +type ErrTransferRequest struct { + Body string + StatusCode int +} + +// Error implements error. +func (err ErrTransferRequest) Error() string { + return http.StatusText(err.StatusCode) +} + +// HTTPClient is a faucet client. +type HTTPClient struct { + addr string +} + +// NewClient returns a new faucet client. +func NewClient(addr string) HTTPClient { + return HTTPClient{addr} +} + +// Transfer requests tokens from the faucet with req. 
+func (c HTTPClient) Transfer(ctx context.Context, req TransferRequest) (TransferResponse, error) { + data, err := json.Marshal(req) + if err != nil { + return TransferResponse{}, err + } + + hreq, err := http.NewRequestWithContext(ctx, http.MethodPost, c.addr, bytes.NewReader(data)) + if err != nil { + return TransferResponse{}, err + } + + hres, err := http.DefaultClient.Do(hreq) + if err != nil { + return TransferResponse{}, err + } + defer hres.Body.Close() + + if hres.StatusCode != http.StatusOK { + bodyBytes, _ := io.ReadAll(hres.Body) + return TransferResponse{}, ErrTransferRequest{Body: string(bodyBytes), StatusCode: hres.StatusCode} + } + + var res TransferResponse + if err = json.NewDecoder(hres.Body).Decode(&res); err != nil { + return TransferResponse{}, err + } + + return res, nil +} + +// FaucetInfo fetch the faucet info for clients to determine if this is a real faucet and +// what is the chain id of the chain that faucet is operating for. +func (c HTTPClient) FaucetInfo(ctx context.Context) (FaucetInfoResponse, error) { + hreq, err := http.NewRequestWithContext(ctx, http.MethodGet, c.addr+"/info", nil) + if err != nil { + return FaucetInfoResponse{}, err + } + + hres, err := http.DefaultClient.Do(hreq) + if err != nil { + return FaucetInfoResponse{}, err + } + defer hres.Body.Close() + + if hres.StatusCode != http.StatusOK { + return FaucetInfoResponse{}, errors.New(http.StatusText(hres.StatusCode)) + } + + var res FaucetInfoResponse + err = json.NewDecoder(hres.Body).Decode(&res) + return res, err +} diff --git a/ignite/pkg/cosmosfaucet/cosmosfaucet.go b/ignite/pkg/cosmosfaucet/cosmosfaucet.go new file mode 100644 index 0000000..69634c1 --- /dev/null +++ b/ignite/pkg/cosmosfaucet/cosmosfaucet.go @@ -0,0 +1,201 @@ +// Package cosmosfaucet is a faucet to request tokens for sdk accounts. 
+package cosmosfaucet + +import ( + "context" + "time" + + sdkmath "cosmossdk.io/math" + + sdk "github.com/cosmos/cosmos-sdk/types" + + chaincmdrunner "github.com/ignite/cli/v29/ignite/pkg/chaincmd/runner" + "github.com/ignite/cli/v29/ignite/pkg/cosmosver" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +const ( + // DefaultAccountName is the default account to transfer tokens from. + DefaultAccountName = "faucet" + + // DefaultDenom is the default denomination to distribute. + DefaultDenom = "uatom" + + // DefaultAmount specifies the default amount to transfer to an account + // on each request. + DefaultAmount = 10000000 + + // DefaultMaxAmount specifies the maximum amount that can be transferred to an + // account in all times. + DefaultMaxAmount = 100000000 + + // DefaultRefreshWindow specifies the time after which the max amount limit + // is refreshed for an account [1 year]. + DefaultRefreshWindow = time.Hour * 24 * 365 +) + +// Faucet represents a faucet. +type Faucet struct { + // runner used to interact with blockchain's binary to transfer tokens. + runner chaincmdrunner.Runner + + // chainID is the chain id of the chain that faucet is operating for. + chainID string + + // accountName to transfer tokens from. + accountName string + + // accountMnemonic is the mnemonic of the account. + accountMnemonic string + + // coinType registered coin type number for HD derivation (BIP-0044). + coinType string + + // accountNumber registered account number for HD derivation (BIP-0044). + accountNumber string + + // addressIndex registered address index for HD derivation (BIP-0044). + addressIndex string + + // coins keeps a list of coins that can be distributed by the faucet. + coins sdk.Coins + + // coinsMax is a denom-max pair. + // it holds the maximum amounts of coins that can be sent to a single account. 
+ coinsMax map[string]sdkmath.Int + + // fee to pay along with the transaction + feeAmount sdk.Coin + + limitRefreshWindow time.Duration + + // openAPIData holds template data customizations for serving OpenAPI page & spec. + openAPIData openAPIData + + // version holds the cosmos-sdk version. + version cosmosver.Version + + // indexerDisabled tells whether the indexing is disabled on the node. + indexerDisabled bool +} + +// Option configures the faucetOptions. +type Option func(*Faucet) + +// Account provides the account information to transfer tokens from. +// when mnemonic isn't provided, account assumed to be exists in the keyring. +func Account(name, mnemonic, coinType, accountNumber, addressIndex string) Option { + return func(f *Faucet) { + f.accountName = name + f.accountMnemonic = mnemonic + f.coinType = coinType + f.accountNumber = accountNumber + f.addressIndex = addressIndex + } +} + +// Coin adds a new coin to coins list to distribute by the faucet. +// the first coin added to the list considered as the default coin during transfer requests. +// +// amount is the amount of the coin can be distributed per request. +// maxAmount is the maximum amount of the coin that can be sent to a single account. +// denom is denomination of the coin to be distributed by the faucet. +func Coin(amount, maxAmount sdkmath.Int, denom string) Option { + return func(f *Faucet) { + f.coins = append(f.coins, sdk.NewCoin(denom, amount)) + f.coinsMax[denom] = maxAmount + } +} + +// RefreshWindow adds the duration to refresh the transfer limit to the faucet. +func RefreshWindow(refreshWindow time.Duration) Option { + return func(f *Faucet) { + f.limitRefreshWindow = refreshWindow + } +} + +// ChainID adds chain id to faucet. faucet will automatically fetch when it isn't provided. +func ChainID(id string) Option { + return func(f *Faucet) { + f.chainID = id + } +} + +// FeeAmount sets a fee that it will be paid during the transaction. 
+func FeeAmount(amount sdkmath.Int, denom string) Option { + return func(f *Faucet) { + f.feeAmount = sdk.NewCoin(denom, amount) + } +} + +// OpenAPI configures how to serve Open API page and spec. +func OpenAPI(apiAddress string) Option { + return func(f *Faucet) { + f.openAPIData.APIAddress = apiAddress + } +} + +// Version configures the cosmos-sdk version. +func Version(version cosmosver.Version) Option { + return func(f *Faucet) { + f.version = version + } +} + +// IndexerDisabled tells whether the indexing is disabled on the node. +// Without indexing, the faucet won't be able to check the limits for each account, nor verify the transaction status. +func IndexerDisabled() Option { + return func(f *Faucet) { + f.indexerDisabled = true + } +} + +// New creates a new faucet with ccr (to access and use blockchain's CLI) and given options. +func New(ctx context.Context, ccr chaincmdrunner.Runner, options ...Option) (Faucet, error) { + f := Faucet{ + runner: ccr, + accountName: DefaultAccountName, + coinsMax: make(map[string]sdkmath.Int), + openAPIData: openAPIData{"Blockchain", "http://localhost:1317"}, + } + + for _, apply := range options { + apply(&f) + } + + if len(f.coins) == 0 { + Coin(sdkmath.NewInt(DefaultAmount), sdkmath.NewInt(DefaultMaxAmount), DefaultDenom)(&f) + } + f.coins = f.coins.Sort() + + if f.limitRefreshWindow == 0 { + RefreshWindow(DefaultRefreshWindow)(&f) + } + + // import the account if mnemonic is provided. 
+ if f.accountMnemonic != "" { + _, err := f.runner.AddAccount( + ctx, + f.accountName, + f.accountMnemonic, + f.coinType, + f.accountNumber, + f.addressIndex, + ) + if err != nil && !errors.Is(err, chaincmdrunner.ErrAccountAlreadyExists) { + return Faucet{}, err + } + } + + if f.chainID == "" { + status, err := f.runner.Status(ctx) + if err != nil { + return Faucet{}, err + } + + f.chainID = status.ChainID + f.openAPIData.ChainID = status.ChainID + } + + return f, nil +} diff --git a/ignite/pkg/cosmosfaucet/http.go b/ignite/pkg/cosmosfaucet/http.go new file mode 100644 index 0000000..bcb3026 --- /dev/null +++ b/ignite/pkg/cosmosfaucet/http.go @@ -0,0 +1,44 @@ +package cosmosfaucet + +import ( + "net/http" + + "github.com/rs/cors" + + "github.com/ignite/cli/v29/ignite/pkg/openapiconsole" +) + +// ServeHTTP implements http.Handler to expose the functionality of Faucet.Transfer() via HTTP. +// request/response payloads are compatible with the previous implementation at allinbits/cosmos-faucet. 
+func (f Faucet) ServeHTTP(w http.ResponseWriter, r *http.Request) { + mux := http.NewServeMux() + + mux.Handle("/", cors.Default().Handler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch { + case r.Method == http.MethodPost || r.Method == http.MethodOptions: + f.faucetHandler(w, r) + case r.Method == http.MethodGet: + openapiconsole.Handler("Faucet", "openapi.yml")(w, r) + default: + http.NotFound(w, r) + } + }))) + + mux.Handle("/info", cors.Default().Handler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method == http.MethodGet || r.Method == http.MethodOptions { + f.faucetInfoHandler(w, r) + } else { + http.NotFound(w, r) + } + }))) + + mux.HandleFunc("/openapi.yml", func(w http.ResponseWriter, r *http.Request) { + if r.Method == http.MethodGet { + f.openAPISpecHandler(w, r) + } else { + http.NotFound(w, r) + } + }) + + mux.ServeHTTP(w, r) +} diff --git a/ignite/pkg/cosmosfaucet/http_faucet.go b/ignite/pkg/cosmosfaucet/http_faucet.go new file mode 100644 index 0000000..4f0871d --- /dev/null +++ b/ignite/pkg/cosmosfaucet/http_faucet.go @@ -0,0 +1,108 @@ +package cosmosfaucet + +import ( + "context" + "encoding/json" + "net/http" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xhttp" +) + +type TransferRequest struct { + // AccountAddress to request for coins. + AccountAddress string `json:"address"` + + // Coins that are requested. + // default ones used when this one isn't provided. 
+ Coins []string `json:"coins,omitempty"` +} + +func NewTransferRequest(accountAddress string, coins []string) TransferRequest { + return TransferRequest{ + AccountAddress: accountAddress, + Coins: coins, + } +} + +type TransferResponse struct { + Hash string `json:"hash,omitempty"` + Error string `json:"error,omitempty"` +} + +func (f Faucet) faucetHandler(w http.ResponseWriter, r *http.Request) { + var req TransferRequest + + // decode request into req. + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + responseError(w, http.StatusBadRequest, err) + return + } + + // determine coins to transfer. + coins, err := f.coinsFromRequest(req) + if err != nil { + responseError(w, http.StatusBadRequest, err) + return + } + + // try performing the transfer + hash, err := f.Transfer(r.Context(), req.AccountAddress, coins) + if err != nil { + if errors.Is(err, context.Canceled) { + return + } + responseError(w, http.StatusInternalServerError, err) + return + } + responseSuccess(w, hash) +} + +// FaucetInfoResponse is the faucet info payload. +type FaucetInfoResponse struct { + // IsAFaucet indicates that this is a faucet endpoint. + // useful for auto discoveries. + IsAFaucet bool `json:"is_a_faucet"` + + // ChainID is chain id of the chain that faucet is running for. + ChainID string `json:"chain_id"` +} + +func (f Faucet) faucetInfoHandler(w http.ResponseWriter, _ *http.Request) { + _ = xhttp.ResponseJSON(w, http.StatusOK, FaucetInfoResponse{ + IsAFaucet: true, + ChainID: f.chainID, + }) +} + +// coinsFromRequest determines tokens to transfer from transfer request. 
+func (f Faucet) coinsFromRequest(req TransferRequest) (sdk.Coins, error) { + if len(req.Coins) == 0 { + return f.coins.Sort(), nil + } + + coins := sdk.NewCoins() + for _, c := range req.Coins { + coin, err := sdk.ParseCoinNormalized(c) + if err != nil { + return nil, err + } + coins = coins.Add(coin) + } + + return coins, nil +} + +func responseSuccess(w http.ResponseWriter, hash string) { + _ = xhttp.ResponseJSON(w, http.StatusOK, TransferResponse{ + Hash: hash, + }) +} + +func responseError(w http.ResponseWriter, code int, err error) { + _ = xhttp.ResponseJSON(w, code, TransferResponse{ + Error: err.Error(), + }) +} diff --git a/ignite/pkg/cosmosfaucet/http_openapi.go b/ignite/pkg/cosmosfaucet/http_openapi.go new file mode 100644 index 0000000..ebddd5b --- /dev/null +++ b/ignite/pkg/cosmosfaucet/http_openapi.go @@ -0,0 +1,25 @@ +package cosmosfaucet + +import ( + _ "embed" // used for embedding openapi assets. + "html/template" + "net/http" +) + +const ( + fileNameOpenAPISpec = "openapi/openapi.yml.tmpl" +) + +//go:embed openapi/openapi.yml.tmpl +var bytesOpenAPISpec []byte + +var tmplOpenAPISpec = template.Must(template.New(fileNameOpenAPISpec).Parse(string(bytesOpenAPISpec))) + +type openAPIData struct { + ChainID string + APIAddress string +} + +func (f Faucet) openAPISpecHandler(w http.ResponseWriter, _ *http.Request) { + _ = tmplOpenAPISpec.Execute(w, f.openAPIData) +} diff --git a/ignite/pkg/cosmosfaucet/http_test.go b/ignite/pkg/cosmosfaucet/http_test.go new file mode 100644 index 0000000..c28ef85 --- /dev/null +++ b/ignite/pkg/cosmosfaucet/http_test.go @@ -0,0 +1,47 @@ +package cosmosfaucet_test + +import ( + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosfaucet" +) + +func TestServeHTTPCORS(t *testing.T) { + f := cosmosfaucet.Faucet{} + cases := []struct { + name, method, path string + }{ + { + name: "root endpoint", + method: "POST", + path: "/", + }, + { + name: 
"info endpoint", + method: "GET", + path: "/info", + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + // Arrange + res := httptest.NewRecorder() + req, _ := http.NewRequest("OPTIONS", tt.path, nil) + req.Header.Set("Access-Control-Request-Method", tt.method) + + // Act + f.ServeHTTP(res, req) + + // Assert + result := res.Result() + defer result.Body.Close() // Ensure the response body is closed + + require.Equal(t, http.StatusNoContent, result.StatusCode) + }) + } +} diff --git a/ignite/pkg/cosmosfaucet/openapi/openapi.yml.tmpl b/ignite/pkg/cosmosfaucet/openapi/openapi.yml.tmpl new file mode 100644 index 0000000..556d174 --- /dev/null +++ b/ignite/pkg/cosmosfaucet/openapi/openapi.yml.tmpl @@ -0,0 +1,61 @@ +swagger: "2.0" + +info: + description: "Faucet API doc and explorer.\n\nSend coins from the faucet account configured in `config.yml` to the receiver account." + version: "1.0.0" + title: "Faucet for {{ .ChainID }}" + +servers: + - url: / + +paths: + /: + post: + summary: "Send tokens to receiver account" + consumes: + - "application/json" + produces: + - "application/json" + parameters: + - in: "body" + name: "body" + description: "Send coins request object\n\nAfter making a sample execution by the 'Try it out' button in the right corner, visit the following link to see the difference in sample account's balance: {{ .APIAddress }}/cosmos/bank/v1beta1/balances/cosmos1uzv4v9g9xln2qx2vtqhz99yxum33calja5vruz" + required: true + schema: + $ref: "#/definitions/SendRequest" + responses: + "400": + description: "Bad request" + "500": + description: "Internal error" + "200": + description: "All coins are successfully sent\n\nAfter making a sample execution, visit the following link to see the difference in sample account's balance: {{ .APIAddress }}/cosmos/bank/v1beta1/balances/cosmos1uzv4v9g9xln2qx2vtqhz99yxum33calja5vruz" + schema: + $ref: "#/definitions/SendResponse" + +definitions: + SendRequest: + type: "object" + required: + - address 
+ properties: + address: + type: "string" + default: "cosmos1uzv4v9g9xln2qx2vtqhz99yxum33calja5vruz" + coins: + type: "array" + default: + - 10token + items: + type: "string" + + SendResponse: + type: "object" + properties: + error: + type: "string" + + +externalDocs: + description: "Find out more about Ignite CLI" + url: "https://github.com/ignite/cli/tree/main/docs" diff --git a/ignite/pkg/cosmosfaucet/transfer.go b/ignite/pkg/cosmosfaucet/transfer.go new file mode 100644 index 0000000..00029cc --- /dev/null +++ b/ignite/pkg/cosmosfaucet/transfer.go @@ -0,0 +1,123 @@ +package cosmosfaucet + +import ( + "context" + "sync" + "time" + + sdkmath "cosmossdk.io/math" + + sdk "github.com/cosmos/cosmos-sdk/types" + + "github.com/ignite/cli/v29/ignite/pkg/chaincmd" + chaincmdrunner "github.com/ignite/cli/v29/ignite/pkg/chaincmd/runner" + "github.com/ignite/cli/v29/ignite/pkg/cosmosver" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// transferMutex is a mutex used for keeping transfer requests in a queue so checking account balance and sending tokens is atomic. +var transferMutex = &sync.Mutex{} + +// TotalTransferredAmount returns the total transferred amount from faucet account to toAccountAddress. +func (f Faucet) TotalTransferredAmount(ctx context.Context, toAccountAddress, denom string) (totalAmount sdkmath.Int, err error) { + fromAccount, err := f.runner.ShowAccount(ctx, f.accountName) + if err != nil { + return sdkmath.NewInt(0), err + } + + opts := []chaincmdrunner.EventSelector{ + chaincmdrunner.NewEventSelector("message", "sender", fromAccount.Address), + chaincmdrunner.NewEventSelector("transfer", "recipient", toAccountAddress), + } + + var events []chaincmdrunner.Event + if f.version.GTE(cosmosver.StargateFiftyVersion) { + events, err = f.runner.QueryTxByQuery(ctx, opts...) + if err != nil { + return sdkmath.NewInt(0), err + } + } else { + events, err = f.runner.QueryTxByEvents(ctx, opts...) 
+ if err != nil { + return sdkmath.NewInt(0), err + } + } + + totalAmount = sdkmath.NewInt(0) + for _, event := range events { + if event.Type == "transfer" { + for _, attr := range event.Attributes { + if attr.Key == "amount" { + coins, err := sdk.ParseCoinsNormalized(attr.Value) + if err != nil { + return sdkmath.NewInt(0), err + } + + amount := coins.AmountOf(denom) + if amount.GT(sdkmath.NewInt(0)) && time.Since(event.Time) < f.limitRefreshWindow { + totalAmount = totalAmount.Add(amount) + } + } + } + } + } + + return totalAmount, nil +} + +// Transfer transfers amount of tokens from the faucet account to toAccountAddress. +func (f *Faucet) Transfer(ctx context.Context, toAccountAddress string, coins sdk.Coins) (string, error) { + transferMutex.Lock() + defer transferMutex.Unlock() + + transfer := sdk.NewCoins() + // check for each coin, the max transferred amount hasn't been reached + for _, c := range coins { + if f.indexerDisabled { // we cannot check the transfer history if indexer is disabled + transfer = transfer.Add(c) + continue + } + + totalSent, err := f.TotalTransferredAmount(ctx, toAccountAddress, c.Denom) + if err != nil { + return "", err + } + coinMax, found := f.coinsMax[c.Denom] + if found && !coinMax.IsNil() && !coinMax.Equal(sdkmath.NewInt(0)) { + if totalSent.GTE(coinMax) { + return "", errors.Errorf( + "account has reached to the max. allowed amount (%d) for %q denom", + coinMax, + c.Denom, + ) + } + + if (totalSent.Add(c.Amount)).GT(coinMax) { + return "", errors.Errorf( + `ask less amount for %q denom. 
account is reaching to the limit (%d) that faucet can tolerate`, + c.Denom, + coinMax, + ) + } + } + + transfer = transfer.Add(c) + } + + // perform transfer for all coins + fromAccount, err := f.runner.ShowAccount(ctx, f.accountName) + if err != nil { + return "", err + } + txHash, err := f.runner.BankSend(ctx, fromAccount.Address, toAccountAddress, transfer.String(), chaincmd.BankSendWithFees(f.feeAmount)) + if err != nil { + return "", err + } + + if f.indexerDisabled { + return txHash, nil // we cannot check the tx status if indexer is disabled + } + + // wait for send tx to be confirmed + return txHash, f.runner.WaitTx(ctx, txHash, time.Second, 30) +} diff --git a/ignite/pkg/cosmosfaucet/try_retrieve.go b/ignite/pkg/cosmosfaucet/try_retrieve.go new file mode 100644 index 0000000..4855a12 --- /dev/null +++ b/ignite/pkg/cosmosfaucet/try_retrieve.go @@ -0,0 +1,145 @@ +package cosmosfaucet + +import ( + "context" + "fmt" + "net" + "net/url" + "strings" + "time" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// faucetTimeout used to set a timeout while transferring coins from a faucet. +const faucetTimeout = time.Second * 20 + +// TryRetrieve tries to retrieve tokens from a faucet. faucet address is used when it's provided. +// otherwise, it'll try to guess the faucet address from the rpc address of the chain. +// a non-nil error is returned if faucet's address cannot be determined or when coin retrieval is unsuccessful. +func TryRetrieve( + ctx context.Context, + chainID, + rpcAddress, + faucetAddress, + accountAddress string, +) (string, error) { + var faucetURL *url.URL + var err error + + if faucetAddress != "" { + // use if there is a user given faucet address. + faucetURL, err = url.Parse(faucetAddress) + } else { + // find faucet url. can be the user given, otherwise it is the guessed one. 
+ faucetURL, err = discoverFaucetURL(ctx, chainID, rpcAddress) + } + if err != nil { + return "", err + } + + ctx, cancel := context.WithTimeout(ctx, faucetTimeout) + defer cancel() + + fc := NewClient(faucetURL.String()) + + resp, err := fc.Transfer(ctx, TransferRequest{ + AccountAddress: accountAddress, + }) + if err != nil { + return "", errors.Wrap(err, "faucet is not operational") + } + if resp.Error != "" { + return "", errors.Errorf("faucet is not operational: %s", resp.Error) + } + + return resp.Hash, nil +} + +func discoverFaucetURL(ctx context.Context, chainID, rpcAddress string) (*url.URL, error) { + // guess faucet address otherwise. + guessedURLs, err := guessFaucetURLs(rpcAddress) + if err != nil { + return nil, err + } + + for _, u := range guessedURLs { + // check if the potential faucet server accepts connections. + address := u.Host + if u.Scheme == "https" { + address += ":443" + } + if _, err := net.DialTimeout("tcp", address, time.Second); err != nil { + continue + } + + // ensure that this is a real faucet server. + info, err := NewClient(u.String()).FaucetInfo(ctx) + if err != nil || info.ChainID != chainID || !info.IsAFaucet { + continue + } + + return u, nil + } + + return nil, errors.New("no faucet available, please send coins to the address") +} + +// guess tries to guess all possible faucet addresses. +func guessFaucetURLs(rpcAddress string) ([]*url.URL, error) { + u, err := url.Parse(rpcAddress) + if err != nil { + return nil, err + } + + var guessedURLs []*url.URL + + possibilities := []struct { + port string + subname string + nameSeparator string + }{ + {"4500", "", "."}, + {"", "faucet", "."}, + {"", "4500", "-"}, + } + + // creating guesses addresses by basing RPC address. + for _, poss := range possibilities { + guess, _ := url.Parse(u.String()) // copy the original url. + for _, scheme := range []string{"http", "https"} { // do for both schemes. + guess, _ := url.Parse(guess.String()) // copy guess. 
+ guess.Scheme = scheme + + // try with port numbers. + if poss.port != "" { + guess.Host = fmt.Sprintf("%s:%s", u.Hostname(), "4500") + guessedURLs = append(guessedURLs, guess) + continue + } + + // try with subnames. + if poss.subname != "" { + bases := []string{ + // try with appending subname to the default name. + // e.g.: faucet.my.domain. + u.Hostname(), + } + + // try with replacing the subname for 1 level. + // e.g.: faucet.domain. + sp := strings.SplitN(u.Hostname(), poss.nameSeparator, 2) + if len(sp) == 2 { + bases = append(bases, sp[1]) + } + for _, basename := range bases { + guess, _ := url.Parse(guess.String()) // copy guess. + guess.Host = fmt.Sprintf("%s%s%s", poss.subname, poss.nameSeparator, basename) + guessedURLs = append(guessedURLs, guess) + } + } + } + } + + return guessedURLs, nil +} diff --git a/ignite/pkg/cosmosgen/cosmosgen.go b/ignite/pkg/cosmosgen/cosmosgen.go new file mode 100644 index 0000000..ef06747 --- /dev/null +++ b/ignite/pkg/cosmosgen/cosmosgen.go @@ -0,0 +1,216 @@ +package cosmosgen + +import ( + "context" + "os" + "path/filepath" + "strings" + + "github.com/iancoleman/strcase" + gomodule "golang.org/x/mod/module" + + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis/module" + "github.com/ignite/cli/v29/ignite/pkg/cosmosbuf" + "github.com/ignite/cli/v29/ignite/pkg/events" +) + +// generateOptions used to configure code generation. +type generateOptions struct { + useCache bool + updateBufModule bool + ev events.Bus + + generateProtobuf bool + + jsOut func(module.Module) string + tsClientRootPath string + + composablesOut func(module.Module) string + composablesRootPath string + + openAPISpecOut string + openAPIExcludeList []string +} + +// ModulePathFunc defines a function type that returns a path based on a Cosmos SDK module. +type ModulePathFunc func(module.Module) string + +// Option configures code generation. 
+type Option func(*generateOptions) + +// WithTSClientGeneration adds Typescript Client code generation. +// The tsClientRootPath is used to determine the root path of generated Typescript classes. +func WithTSClientGeneration(out ModulePathFunc, tsClientRootPath string, useCache bool) Option { + return func(o *generateOptions) { + o.jsOut = out + o.tsClientRootPath = tsClientRootPath + o.useCache = useCache + } +} + +func WithComposablesGeneration(out ModulePathFunc, composablesRootPath string) Option { + return func(o *generateOptions) { + o.composablesOut = out + o.composablesRootPath = composablesRootPath + } +} + +// WithGoGeneration adds protobuf (gogoproto) code generation. +func WithGoGeneration() Option { + return func(o *generateOptions) { + o.generateProtobuf = true + } +} + +// WithOpenAPIGeneration adds OpenAPI spec generation. +func WithOpenAPIGeneration(out string, excludeList []string) Option { + return func(o *generateOptions) { + o.openAPISpecOut = out + o.openAPIExcludeList = excludeList + } +} + +// UpdateBufModule enables Buf config proto dependencies update. +// This option updates app's Buf config when proto packages or +// Buf modules are found within the Go dependencies. +func UpdateBufModule() Option { + return func(o *generateOptions) { + o.updateBufModule = true + } +} + +// CollectEvents sets an event bus for sending generation feedback events. +func CollectEvents(ev events.Bus) Option { + return func(c *generateOptions) { + c.ev = ev + } +} + +// generator generates code for sdk and sdk apps. 
+type generator struct { + buf cosmosbuf.Buf + cacheStorage cache.Storage + appPath string + protoDir string + goModPath string + frontendPath string + opts *generateOptions + sdkImport string + sdkDir string + deps []gomodule.Version + appModules []module.Module + appIncludes protoIncludes + thirdModules map[string][]module.Module + thirdModuleIncludes map[string]protoIncludes + tmpDirs []string + + // caches to avoid repeated operations + bufPathCache map[string]string + bufExportCache map[string]string + bufConfigCache map[string]struct{ Name string } +} + +func (g *generator) cleanup() { + // Remove temporary directories created during generation + for _, path := range g.tmpDirs { + _ = os.RemoveAll(path) + } +} + +// Generate generates code from protoDir of an SDK app residing at appPath with given options. +// protoDir must be relative to the projectPath. +func Generate(ctx context.Context, cacheStorage cache.Storage, appPath, protoDir, goModPath string, frontendPath string, options ...Option) error { + buf, err := cosmosbuf.New(cacheStorage, goModPath) + if err != nil { + return err + } + + g := &generator{ + buf: buf, + appPath: appPath, + protoDir: protoDir, + goModPath: goModPath, + frontendPath: frontendPath, + opts: &generateOptions{}, + thirdModules: make(map[string][]module.Module), + thirdModuleIncludes: make(map[string]protoIncludes), + cacheStorage: cacheStorage, + bufPathCache: make(map[string]string), + bufExportCache: make(map[string]string), + bufConfigCache: make(map[string]struct{ Name string }), + } + + defer g.cleanup() + + for _, apply := range options { + apply(g.opts) + } + + if err := g.setup(ctx); err != nil { + return err + } + + // Update app's Buf config for third party discovered proto modules. + // Go dependency packages might contain proto files which could also + // optionally be using Buf, so for those cases the discovered proto + // files should be available before code generation. 
+ if g.opts.updateBufModule { + if err := g.updateBufModule(ctx); err != nil { + return err + } + } + + // Go generation must run first so the types are created before other + // generated code that requires sdk.Msg implementations to be defined + if g.opts.generateProtobuf { + if err := g.generateGoGo(ctx); err != nil { + return err + } + } + + if g.opts.openAPISpecOut != "" { + if err := g.generateOpenAPISpec(ctx, g.opts.openAPIExcludeList...); err != nil { + return err + } + } + + if g.opts.jsOut != nil { + if err := g.generateTS(ctx); err != nil { + return err + } + } + + if g.opts.composablesRootPath != "" { + if err := g.generateComposables(); err != nil { + return err + } + + // Update Vue app dependencies when Vue composables are generated. + // This update is required to link the "ts-client" folder so the + // package is available during development before publishing it. + if err := g.updateComposableDependencies(); err != nil { + return err + } + } + + return nil +} + +// TypescriptModulePath generates TS module paths for Cosmos SDK modules. +// The root path is used as prefix for the generated paths. +func TypescriptModulePath(rootPath string) ModulePathFunc { + return func(m module.Module) string { + return filepath.Join(rootPath, m.Pkg.Name) + } +} + +// ComposableModulePath generates useQuery hook/composable module paths for Cosmos SDK modules. +// The root path is used as prefix for the generated paths. 
+func ComposableModulePath(rootPath string) ModulePathFunc { + return func(m module.Module) string { + replacer := strings.NewReplacer("-", "_", ".", "_") + modPath := strcase.ToCamel(replacer.Replace(m.Pkg.Name)) + return filepath.Join(rootPath, "use"+modPath) + } +} diff --git a/ignite/pkg/cosmosgen/cosmosgen_test.go b/ignite/pkg/cosmosgen/cosmosgen_test.go new file mode 100644 index 0000000..1752d47 --- /dev/null +++ b/ignite/pkg/cosmosgen/cosmosgen_test.go @@ -0,0 +1,59 @@ +package cosmosgen + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis/module" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis" +) + +func TestTypescriptModulePath(t *testing.T) { + modulePath := TypescriptModulePath("prefix") + + cases := []struct { + name string + goModulePath string + protoPkgName string + want string + }{ + { + name: "github uri", + goModulePath: "github.com/owner/app", + protoPkgName: "owner.app.module", + want: "prefix/owner.app.module", + }, + { + name: "short uri", + goModulePath: "domain.com/app", + protoPkgName: "app.module", + want: "prefix/app.module", + }, + { + name: "path", + goModulePath: "owner/app", + protoPkgName: "owner.app.module", + want: "prefix/owner.app.module", + }, + { + name: "name", + goModulePath: "app", + protoPkgName: "app.module", + want: "prefix/app.module", + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + m := module.Module{ + GoModulePath: tt.goModulePath, + Pkg: protoanalysis.Package{ + Name: tt.protoPkgName, + }, + } + + require.Equal(t, tt.want, modulePath(m)) + }) + } +} diff --git a/ignite/pkg/cosmosgen/generate.go b/ignite/pkg/cosmosgen/generate.go new file mode 100644 index 0000000..2bbf07c --- /dev/null +++ b/ignite/pkg/cosmosgen/generate.go @@ -0,0 +1,737 @@ +package cosmosgen + +import ( + "bytes" + "context" + "fmt" + "io/fs" + "log" + "os" + "path/filepath" + "slices" + "strings" + + "gopkg.in/yaml.v3" + + 
"github.com/ignite/cli/v29/ignite/config/chain/defaults" + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis/module" + "github.com/ignite/cli/v29/ignite/pkg/cosmosbuf" + "github.com/ignite/cli/v29/ignite/pkg/cosmosver" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/events" + "github.com/ignite/cli/v29/ignite/pkg/gomodule" + "github.com/ignite/cli/v29/ignite/pkg/xfilepath" + "github.com/ignite/cli/v29/ignite/pkg/xos" +) + +const ( + moduleCacheNamespace = "generate.setup.module" + sdkModuleCacheNamespace = "generate.setup.sdk_module" + includeProtoCacheNamespace = "generate.includes.proto" + bufYamlFilename = "buf.yaml" +) + +var ( + ErrBufConfig = errors.New("invalid Buf config") + ErrMissingSDKDep = errors.New("cosmos-sdk dependency not found") + + protocGlobalInclude = xfilepath.List( + xfilepath.JoinFromHome(xfilepath.Path("local/include")), + xfilepath.JoinFromHome(xfilepath.Path(".local/include")), + ) +) + +// protoIncludes contains proto include paths for a package. +type protoIncludes struct { + // Paths is a list of proto include paths. + Paths []string + + // BufPath is the path to the Buf config file when it exists. + BufPath string + + // ProtoPath contains the path to the package's proto directory. + ProtoPath string +} + +// protoAnalysis contains proto module analysis data for a Go package dependency. +type protoAnalysis struct { + // Path is the full path to the Go dependency + Path string + + // Modules contains the proto modules analysis data. + // The list is empty when the Go package has no proto files. + Modules []module.Module + + // Includes contain proto include paths. + // These paths should be used when generating code. 
+ Includes protoIncludes + + // Cacheable indicates whether this analysis can be safely cached. + // Set to false when includes contain temporary directories. + Cacheable bool +} + +func newBufConfigError(path string, cause error) error { + return errors.Errorf("%w: %s: %w", ErrBufConfig, path, cause) +} + +// Cosmos SDK hosts proto files of own x/ modules and some third party ones needed by itself and +// blockchain apps. Generate should be aware of these and make them available to the blockchain +// app that wants to generate code for its own proto. +// +// blockchain apps may use different versions of the SDK. following code first makes sure that +// app's dependencies are download by 'go mod' and cached under the local filesystem. +// and then, it determines which version of the SDK is used by the app and what is the absolute path +// of its source code. +func (g *generator) setup(ctx context.Context) (err error) { + // Download dependencies once + if err := g.downloadDependencies(ctx); err != nil { + return err + } + + // Parse and resolve dependencies + if err := g.resolveDependencies(ctx); err != nil { + return err + } + + // Discover app modules and includes in parallel + if err := g.discoverAppModules(ctx); err != nil { + return err + } + + // Process third-party modules efficiently + return g.processThirdPartyModules(ctx) +} + +func (g *generator) downloadDependencies(ctx context.Context) error { + var errb bytes.Buffer + return cmdrunner. 
+ New( + cmdrunner.DefaultStderr(&errb), + cmdrunner.DefaultWorkdir(g.appPath), + ).Run(ctx, step.New(step.Exec("go", "mod", "download"))) +} + +func (g *generator) resolveDependencies(ctx context.Context) error { + modFile, err := gomodule.ParseAt(g.appPath) + if err != nil { + return err + } + + // Read the dependencies defined in the `go.mod` file + g.deps, err = gomodule.ResolveDependencies(modFile, false) + if err != nil { + return err + } + + // Find and set SDK directory + dep, found := filterCosmosSDKModule(g.deps) + if !found { + return ErrMissingSDKDep + } + + g.sdkImport = dep.Path + g.sdkDir, err = gomodule.LocatePath(ctx, g.cacheStorage, g.appPath, dep) + return err +} + +func (g *generator) discoverAppModules(ctx context.Context) error { + // Discover app modules + var err error + g.appModules, err = module.Discover( + ctx, + g.appPath, + g.appPath, + module.WithProtoDir(g.protoDir), + module.WithSDKDir(g.sdkDir), + ) + if err != nil { + return err + } + + // Resolve app includes + g.appIncludes, _, err = g.resolveIncludes(ctx, g.appPath, g.protoDir) + return err +} + +func (g *generator) processThirdPartyModules(ctx context.Context) error { + moduleCache := cache.New[protoAnalysis](g.cacheStorage, moduleCacheNamespace) + sdkModuleCache := cache.New[protoAnalysis](g.cacheStorage, sdkModuleCacheNamespace) + + // Process dependencies in parallel for better performance + type depResult struct { + path string + analysis protoAnalysis + err error + } + + results := make(chan depResult, len(g.deps)) + semaphore := make(chan struct{}, 5) // Limit concurrent operations + + for _, dep := range g.deps { + go func(ctx context.Context, dep gomodule.Version) { + // check for cancellation first + if err := ctx.Err(); err != nil { + results <- depResult{path: "", analysis: protoAnalysis{}, err: err} + return + } + + select { + case semaphore <- struct{}{}: // Acquire + case <-ctx.Done(): + results <- depResult{path: "", analysis: protoAnalysis{}, err: ctx.Err()} + 
return + } + defer func() { <-semaphore }() // Release + + var depInfo protoAnalysis + var err error + + // Check if this is a Cosmos SDK module + // Optimization: All SDK modules share the same proto files from the SDK's proto directory: + // - cosmossdk.io/* (newer modular SDK packages like cosmossdk.io/math, cosmossdk.io/x/*) + // - github.com/cosmos/cosmos-sdk/* (traditional monolithic SDK packages) + // Instead of processing the same proto files multiple times for each SDK module + // dependency, we use a shared cache key based on the SDK import path. This eliminates: + // - Module discovery operations + // - Proto include resolution + // - Buf export operations + // - File system operations + // This can reduce processing time by 70-90% for projects with many SDK modules. + if module.IsCosmosSDKPackage(dep.Path) || strings.HasPrefix(dep.Path, "cosmossdk.io/") { + // Use a shared cache key for all SDK modules since they reference the same proto dir + sdkCacheKey := cache.Key("cosmos-sdk", g.sdkImport) + depInfo, err = sdkModuleCache.Get(sdkCacheKey) + + if errors.Is(err, cache.ErrorNotFound) { + // check for cancellation before expensive operation + if err := ctx.Err(); err != nil { + results <- depResult{path: "", analysis: protoAnalysis{}, err: err} + return + } + + depInfo, err = g.processNewDependency(ctx, dep) + if err == nil && len(depInfo.Modules) > 0 && depInfo.Cacheable { + // Cache using the shared SDK key for all SDK modules + _ = sdkModuleCache.Put(sdkCacheKey, depInfo) + } + } + } else { + // Regular module processing with individual cache keys + cacheKey := cache.Key(dep.Path, dep.Version) + depInfo, err = moduleCache.Get(cacheKey) + + if errors.Is(err, cache.ErrorNotFound) { + // check for cancellation before expensive operation + if err := ctx.Err(); err != nil { + results <- depResult{path: "", analysis: protoAnalysis{}, err: err} + return + } + + depInfo, err = g.processNewDependency(ctx, dep) + if err == nil && len(depInfo.Modules) > 0 && 
depInfo.Cacheable { + // Cache the result only if it's safe to do + _ = moduleCache.Put(cacheKey, depInfo) + } + } + } + + results <- depResult{path: depInfo.Path, analysis: depInfo, err: err} + }(ctx, dep) + } + + // Collect results + for i := 0; i < len(g.deps); i++ { + select { + case result := <-results: + if result.err != nil && !errors.Is(result.err, cache.ErrorNotFound) { + return result.err + } + + if result.analysis.Path != "" { + g.thirdModules[result.path] = result.analysis.Modules + g.thirdModuleIncludes[result.path] = result.analysis.Includes + } + case <-ctx.Done(): + return ctx.Err() + } + } + + return nil +} + +func (g *generator) processNewDependency(ctx context.Context, dep gomodule.Version) (protoAnalysis, error) { + // Get the absolute path to the package's directory + path, err := gomodule.LocatePath(ctx, g.cacheStorage, g.appPath, dep) + if err != nil { + return protoAnalysis{}, err + } + + // Discover modules + modules, err := module.Discover(ctx, g.appPath, path, module.WithSDKDir(g.sdkDir)) + if err != nil { + return protoAnalysis{}, err + } + + // Only resolve includes if modules exist + var includes protoIncludes + var cacheable bool + if len(modules) > 0 { + includes, cacheable, err = g.resolveIncludes(ctx, path, defaults.ProtoDir) + if err != nil { + return protoAnalysis{}, err + } + } else { + cacheable = true // No includes needed, safe to cache + } + + return protoAnalysis{ + Path: path, + Modules: modules, + Includes: includes, + Cacheable: cacheable, + }, nil +} + +func (g *generator) getProtoIncludeFolders(modPath string) []string { + return []string{filepath.Join(modPath, g.protoDir)} +} + +func (g *generator) findBufPath(modpath string) (string, error) { + // check cache first + if cached, exists := g.bufPathCache[modpath]; exists { + return cached, nil + } + + var bufPath string + // More efficient: check common locations first before walking entire tree + commonPaths := []string{ + filepath.Join(modpath, bufYamlFilename), + 
filepath.Join(modpath, "buf.yml"), + filepath.Join(modpath, "proto", bufYamlFilename), + filepath.Join(modpath, "proto", "buf.yml"), + } + + for _, path := range commonPaths { + if _, err := os.Stat(path); err == nil { + bufPath = path + break + } + } + + // If not found in common locations, walk the directory tree + if bufPath == "" { + err := filepath.WalkDir(modpath, func(path string, _ fs.DirEntry, err error) error { + if err != nil { + return err + } + base := filepath.Base(path) + if base == bufYamlFilename || base == "buf.yml" { + bufPath = path + return filepath.SkipAll + } + // Skip deep nested directories that are unlikely to contain buf configs + if strings.Count(path, string(os.PathSeparator)) > strings.Count(modpath, string(os.PathSeparator))+3 { + return filepath.SkipDir + } + return nil + }) + if err != nil { + return "", err + } + } + + // cache the result + g.bufPathCache[modpath] = bufPath + return bufPath, nil +} + +func (g *generator) generateBufIncludeFolder(ctx context.Context, modpath string) (string, error) { + // check cache first to avoid repeated export operations + // this is particularly important since multiple dependencies may reference + // the same proto path, causing redundant buf.Export calls and temp directory creation + if cached, exists := g.bufExportCache[modpath]; exists { + // verify the cached path still exists + if _, err := os.Stat(cached); err == nil { + return cached, nil + } + // remove invalid cache entry + delete(g.bufExportCache, modpath) + } + + protoPath, err := os.MkdirTemp("", "includeFolder") + if err != nil { + return "", err + } + + g.tmpDirs = append(g.tmpDirs, protoPath) + + err = g.buf.Export(ctx, modpath, protoPath) + if err != nil { + return "", err + } + + // cache the result for future use + g.bufExportCache[modpath] = protoPath + return protoPath, nil +} + +func (g *generator) resolveIncludes(ctx context.Context, path, protoDir string) (protoIncludes, bool, error) { + // Use a cache key that includes 
both path and protoDir for better cache hits + cacheKey := path + ":" + protoDir + includeCache := cache.New[protoIncludes](g.cacheStorage, includeProtoCacheNamespace) + + if cached, err := includeCache.Get(cacheKey); err == nil { + return cached, true, nil + } + + // Get global includes once and reuse + paths, err := protocGlobalInclude() + if err != nil { + return protoIncludes{}, false, err + } + + includes := protoIncludes{Paths: paths} + + // Determine proto path based on package type + var protoPath string + if module.IsCosmosSDKPackage(path) { + protoPath = filepath.Join(g.sdkDir, "proto") + } else { + protoPath = filepath.Join(path, protoDir) + if fi, err := os.Stat(protoPath); os.IsNotExist(err) { + protoPath, err = findInnerProtoFolder(path) + if err != nil { + // if proto directory does not exist, we just skip it + log.Print(err.Error()) + return protoIncludes{}, false, nil + } + } else if err != nil { + return protoIncludes{}, false, err + } else if !fi.IsDir() { + return includes, true, nil + } + } + + // Add proto path and find buf config + includes.Paths = append(includes.Paths, protoPath) + includes.ProtoPath = protoPath + + // Efficient buf path discovery + includes.BufPath, err = g.findBufPath(protoPath) + if err != nil { + return includes, false, err + } + + // Try project root if not found in proto path + if includes.BufPath == "" { + includes.BufPath, err = g.findBufPath(path) + if err != nil { + return includes, false, err + } + } + + // Handle buf config processing + cacheable := true + if includes.BufPath != "" { + bufProtoPath, err := g.generateBufIncludeFolder(ctx, protoPath) + if err != nil && !errors.Is(err, cosmosbuf.ErrProtoFilesNotFound) { + return protoIncludes{}, false, err + } + if bufProtoPath != "" { + includes.Paths = append(includes.Paths, bufProtoPath) + cacheable = false // Don't cache when temp directories are involved + } + } else { + // Legacy behavior: add configured directories + includes.Paths = append(includes.Paths, 
g.getProtoIncludeFolders(path)...) + } + + // Cache the result if appropriate + if cacheable { + _ = includeCache.Put(cacheKey, includes) + } + + return includes, cacheable, nil +} + +func (g generator) updateBufModule(ctx context.Context) error { + // Process in batch to reduce individual file operations + var bufDeps []string + var vendorOps []struct{ pkgName, protoPath string } + + for pkgPath, includes := range g.thirdModuleIncludes { + // Skip third party dependencies without proto files + if includes.ProtoPath == "" { + continue + } + + // Resolve the Go package and use the module name as the proto vendor directory name + modFile, err := gomodule.ParseAt(pkgPath) + if err != nil { + return err + } + + pkgName := modFile.Module.Mod.Path + + // Batch buf dependencies and vendor operations + if includes.BufPath != "" { + depName, err := g.getBufDependencyName(includes.BufPath) + if err != nil { + return err + } + if depName != "" { + bufDeps = append(bufDeps, depName) + } else { + vendorOps = append(vendorOps, struct{ pkgName, protoPath string }{pkgName, filepath.Dir(includes.BufPath)}) + } + } else { + vendorOps = append(vendorOps, struct{ pkgName, protoPath string }{pkgName, includes.ProtoPath}) + } + } + + // Process buf dependencies in batch + if len(bufDeps) > 0 { + if err := g.addBufDependencies(bufDeps); err != nil { + return err + } + } + + // Process vendor operations + for _, op := range vendorOps { + if err := g.vendorProtoPackage(op.pkgName, op.protoPath); err != nil { + return err + } + } + + // Update buf once at the end + if err := g.buf.Update( + ctx, + filepath.Dir(g.appIncludes.BufPath), + ); err != nil && !errors.Is(err, cosmosbuf.ErrProtoFilesNotFound) { + return err + } + return nil +} + +func (g generator) getBufDependencyName(bufPath string) (string, error) { + // check cache first + if cached, exists := g.bufConfigCache[bufPath]; exists { + return cached.Name, nil + } + + // Open and parse buf config + f, err := os.Open(bufPath) + if err 
!= nil { + return "", err + } + defer f.Close() + + cfg := struct { + Name string `yaml:"name"` + }{} + + if err := yaml.NewDecoder(f).Decode(&cfg); err != nil { + return "", newBufConfigError(bufPath, err) + } + + // cache the result + g.bufConfigCache[bufPath] = struct{ Name string }{cfg.Name} + return cfg.Name, nil +} + +func (g generator) addBufDependencies(depNames []string) error { + if len(depNames) == 0 { + return nil + } + + // Read app's Buf config once + path := g.appIncludes.BufPath + bz, err := os.ReadFile(path) + if err != nil { + return err + } + + // Parse existing dependencies + cfg := struct { + Deps []string `yaml:"deps"` + }{} + if err := yaml.Unmarshal(bz, &cfg); err != nil { + return newBufConfigError(path, err) + } + + // Filter out already existing dependencies + var newDeps []string + for _, depName := range depNames { + if !slices.Contains(cfg.Deps, depName) { + newDeps = append(newDeps, depName) + } + } + + if len(newDeps) == 0 { + return nil // No new dependencies to add + } + + // Add new dependencies and update config + f, err := os.OpenFile(path, os.O_WRONLY|os.O_TRUNC, 0o644) + if err != nil { + return err + } + defer f.Close() + + var rawCfg map[string]interface{} + if err := yaml.Unmarshal(bz, &rawCfg); err != nil { + return newBufConfigError(path, err) + } + + rawCfg["deps"] = append(cfg.Deps, newDeps...) 
+ + enc := yaml.NewEncoder(f) + defer enc.Close() + + if err := enc.Encode(rawCfg); err != nil { + return err + } + + // Send notifications for all new dependencies + for _, depName := range newDeps { + g.opts.ev.Send( + fmt.Sprintf("New Buf dependency added: %s", colors.Name(depName)), + events.Icon(icons.OK), + ) + } + + return nil +} + +func (g generator) vendorProtoPackage(pkgName, protoPath string) (err error) { + // Check that the dependency vendor directory doesn't exist + vendorRelPath := filepath.Join("proto_vendor", pkgName) + vendorPath := filepath.Join(g.appPath, vendorRelPath) + _, err = os.Stat(vendorPath) + if err != nil && !os.IsNotExist(err) { + return err + } + + // Skip vendoring when the dependency is already vendored + if !os.IsNotExist(err) { + return nil + } + + if err = os.MkdirAll(vendorPath, 0o777); err != nil { + return err + } + + // Make sure that the vendor folder is removed on error + defer func() { + if err != nil { + _ = os.RemoveAll(vendorPath) + } + }() + + if err = xos.CopyFolder(protoPath, vendorPath); err != nil { + return err + } + + path := filepath.Join(g.appPath, bufYamlFilename) + bz, err := os.ReadFile(path) + if err != nil { + return errors.Errorf("error reading Buf workspace file: %s: %w", path, err) + } + + ws := struct { + Version string `yaml:"version"` + Modules []struct { + Path string `yaml:"path"` + } `yaml:"modules"` + Deps []string `yaml:"deps"` + Lint any `yaml:"lint"` + Breaking any `yaml:"breaking"` + }{} + if err := yaml.Unmarshal(bz, &ws); err != nil { + return err + } + + ws.Modules = append(ws.Modules, struct { + Path string `yaml:"path"` + }{ + Path: vendorRelPath, + }) + + f, err := os.OpenFile(path, os.O_WRONLY|os.O_TRUNC, 0o644) + if err != nil { + return err + } + defer f.Close() + + enc := yaml.NewEncoder(f) + defer enc.Close() + if err = enc.Encode(ws); err != nil { + return err + } + + g.opts.ev.Send( + fmt.Sprintf("New Buf vendored dependency added: %s", colors.Name(vendorRelPath)), + 
events.Icon(icons.OK), + ) + + return nil +} + +func filterCosmosSDKModule(versions []gomodule.Version) (gomodule.Version, bool) { + for _, v := range versions { + if cosmosver.CosmosSDKModulePathPattern.MatchString(v.Path) { + return v, true + } + } + return gomodule.Version{}, false +} + +// findInnerProtoFolder attempts to find the proto directory in a module. +// it should be used as a fallback when the proto directory is not found in the expected location. +func findInnerProtoFolder(path string) (string, error) { + // attempt to find proto directory in the module + protoFiles, err := xos.FindFiles(path, xos.WithExtension(xos.ProtoFile)) + if err != nil { + return "", err + } + if len(protoFiles) == 0 { + return "", errors.Errorf("no proto folders found in %s", path) + } + + var protoDirs []string + for _, p := range protoFiles { + dir := filepath.Dir(p) + for { + if filepath.Base(dir) == "proto" { + protoDirs = append(protoDirs, dir) + break + } + parent := filepath.Dir(dir) + if parent == dir { // reached root + break + } + dir = parent + } + } + + if len(protoDirs) == 0 { + // Fallback to the parent of the first proto file found. 
+ return filepath.Dir(protoFiles[0]), nil + } + + // Find the highest level proto directory (shortest path) + highest := protoDirs[0] + for _, d := range protoDirs[1:] { + if len(d) < len(highest) { + highest = d + } + } + + return highest, nil +} diff --git a/ignite/pkg/cosmosgen/generate_composables.go b/ignite/pkg/cosmosgen/generate_composables.go new file mode 100644 index 0000000..6e02a44 --- /dev/null +++ b/ignite/pkg/cosmosgen/generate_composables.go @@ -0,0 +1,162 @@ +package cosmosgen + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" + "strings" + + "dario.cat/mergo" + "golang.org/x/sync/errgroup" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis/module" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gomodulepath" +) + +func (g *generator) checkVueExists() error { + _, err := os.Stat(filepath.Join(g.appPath, g.frontendPath)) + if errors.Is(err, os.ErrNotExist) { + return errors.New("frontend does not exist, please run `ignite scaffold vue` first") + } + + return err +} + +func (g *generator) updateComposableDependencies() error { + if err := g.checkVueExists(); err != nil { + return err + } + + // Init the path to the appropriate frontend folder inside the app + frontendPath := filepath.Join(g.appPath, g.frontendPath) + packagesPath := filepath.Join(g.appPath, g.frontendPath, "package.json") + + b, err := os.ReadFile(packagesPath) + if err != nil { + return err + } + + var pkg map[string]any + if err := json.Unmarshal(b, &pkg); err != nil { + return errors.Errorf("error parsing %s: %w", packagesPath, err) + } + + chainPath, _, err := gomodulepath.Find(g.appPath) + if err != nil { + return err + } + + // Make sure the TS client path is absolute + tsClientPath, err := filepath.Abs(g.opts.tsClientRootPath) + if err != nil { + return errors.Errorf("failed to read the absolute typescript client path: %w", err) + } + + // Add the link to the ts-client to the VUE app dependencies + appModulePath := 
gomodulepath.ExtractAppPath(chainPath.RawPath) + tsClientNS := strings.ReplaceAll(appModulePath, "/", "-") + tsClientName := fmt.Sprintf("%s-client-ts", tsClientNS) + tsClientRelPath, err := filepath.Rel(frontendPath, tsClientPath) + if err != nil { + return err + } + + err = mergo.Merge(&pkg, map[string]interface{}{ + "dependencies": map[string]interface{}{ + tsClientName: fmt.Sprintf("file:%s", tsClientRelPath), + }, + }) + if err != nil { + return errors.Errorf("failed to link ts-client dependency in the frontend app: %w", err) + } + + // Save the modified package.json with the new dependencies + file, err := os.OpenFile(packagesPath, os.O_RDWR|os.O_TRUNC, 0o644) + if err != nil { + return err + } + + defer file.Close() + + enc := json.NewEncoder(file) + enc.SetIndent("", " ") + if err := enc.Encode(pkg); err != nil { + return errors.Errorf("error updating %s: %w", packagesPath, err) + } + + return nil +} + +func (g *generator) generateComposables() error { + if err := g.checkVueExists(); err != nil { + return err + } + + chainPath, _, err := gomodulepath.Find(g.appPath) + if err != nil { + return err + } + + appModulePath := gomodulepath.ExtractAppPath(chainPath.RawPath) + data := generatePayload{ + Modules: g.appModules, + PackageNS: strings.ReplaceAll(appModulePath, "/", "-"), + } + + for _, modules := range g.thirdModules { + data.Modules = append(data.Modules, modules...) 
+ } + + vsg := newComposablesGenerator(g) + if err := vsg.generateComposableTemplates(data); err != nil { + return err + } + + return vsg.generateRootTemplates(data) +} + +type composablesGenerator struct { + g *generator +} + +func newComposablesGenerator(g *generator) *composablesGenerator { + return &composablesGenerator{g} +} + +func (g *composablesGenerator) generateComposableTemplates(p generatePayload) error { + gg := &errgroup.Group{} + + for _, m := range p.Modules { + gg.Go(func() error { + return g.generateComposableTemplate(m, p) + }) + } + + return gg.Wait() +} + +func (g *composablesGenerator) generateComposableTemplate(m module.Module, p generatePayload) error { + outDir := g.g.opts.composablesOut(m) + if err := os.MkdirAll(outDir, 0o766); err != nil { + return err + } + + return templateTSClientComposable.Write(outDir, "", struct { + Module module.Module + PackageNS string + }{ + Module: m, + PackageNS: p.PackageNS, + }) +} + +func (g *composablesGenerator) generateRootTemplates(p generatePayload) error { + outDir := g.g.opts.composablesRootPath + if err := os.MkdirAll(outDir, 0o766); err != nil { + return err + } + + return templateTSClientComposableRoot.Write(outDir, "", p) +} diff --git a/ignite/pkg/cosmosgen/generate_go.go b/ignite/pkg/cosmosgen/generate_go.go new file mode 100644 index 0000000..3c9c5e1 --- /dev/null +++ b/ignite/pkg/cosmosgen/generate_go.go @@ -0,0 +1,53 @@ +package cosmosgen + +import ( + "context" + "os" + "path/filepath" + + "github.com/otiai10/copy" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func (g *generator) gogoTemplate() string { + return filepath.Join(g.appPath, g.protoDir, "buf.gen.gogo.yaml") +} + +func (g *generator) protoPath() string { + return filepath.Join(g.appPath, g.protoDir) +} + +func (g *generator) generateGoGo(ctx context.Context) error { + // create a temporary dir to locate generated code under which later only some of them will be moved to the + // app's source code. 
this also prevents having leftover files in the app's source code or its parent dir - when + // command executed directly there - in case of an interrupt. + tmp, err := os.MkdirTemp("", "") + if err != nil { + return err + } + defer os.RemoveAll(tmp) + + // code generate for each module. + if err := g.buf.Generate( + ctx, + g.protoPath(), + tmp, + g.gogoTemplate(), + ); err != nil { + return err + } + + // move generated code for the app under the relative locations in its source code. + path := filepath.Join(tmp, g.goModPath) + if _, err := os.Stat(path); err == nil { + err = copy.Copy(path, g.appPath) + if err != nil { + return errors.Wrap(err, "cannot copy path") + } + } else if !os.IsNotExist(err) { + return err + } + + return nil +} diff --git a/ignite/pkg/cosmosgen/generate_openapi.go b/ignite/pkg/cosmosgen/generate_openapi.go new file mode 100644 index 0000000..c8994cf --- /dev/null +++ b/ignite/pkg/cosmosgen/generate_openapi.go @@ -0,0 +1,209 @@ +package cosmosgen + +import ( + "context" + "fmt" + "log" + "os" + "path/filepath" + "strings" + + "github.com/blang/semver/v4" + "github.com/iancoleman/strcase" + + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/cosmosbuf" + "github.com/ignite/cli/v29/ignite/pkg/dirchange" + "github.com/ignite/cli/v29/ignite/pkg/errors" + swaggercombine "github.com/ignite/cli/v29/ignite/pkg/swagger-combine" + "github.com/ignite/cli/v29/ignite/pkg/xos" +) + +const ( + specCacheNamespace = "generate.openapi.spec" + specFilename = "swagger.config.json" +) + +func (g *generator) openAPITemplate() string { + return filepath.Join(g.appPath, g.protoDir, "buf.gen.swagger.yaml") +} + +func (g *generator) generateOpenAPISpec(ctx context.Context, excludeList ...string) error { + var ( + specDirs = make([]string, 0) + conf = swaggercombine.New("HTTP API Console", g.goModPath) + ) + defer func() { + for _, dir := range specDirs { + _ = os.RemoveAll(dir) + } + }() + + specCache := 
cache.New[[]byte](g.cacheStorage, specCacheNamespace) + + var hasAnySpecChanged bool + + // gen generates a spec for a module where it's source code resides at src. + // and adds needed swaggercombine configure for it. + gen := func(appPath, protoDir, name string) error { + name = strcase.ToCamel(name) + protoPath := filepath.Join(appPath, protoDir) + + // check if directory exists + if _, err := os.Stat(protoPath); os.IsNotExist(err) { + var err error + protoPath, err = findInnerProtoFolder(appPath) + if err != nil { + // if proto directory does not exist, we just skip it + log.Print(err.Error()) + return nil + } + } + + dir, err := os.MkdirTemp("", "gen-openapi-module-spec") + if err != nil { + return err + } + + specDirs = append(specDirs, dir) + + var noChecksum bool + checksum, err := dirchange.ChecksumFromPaths(appPath, protoDir) + if errors.Is(err, dirchange.ErrNoFile) { + noChecksum = true + } else if err != nil { + return err + } + + cacheKey := fmt.Sprintf("%x", checksum) + if !noChecksum { + existingSpec, err := specCache.Get(cacheKey) + if err != nil && !errors.Is(err, cache.ErrorNotFound) { + return err + } + + if !errors.Is(err, cache.ErrorNotFound) { + specPath := filepath.Join(dir, specFilename) + if err := os.WriteFile(specPath, existingSpec, 0o600); err != nil { + return err + } + return conf.AddSpec(name, specPath, true) + } + } + + hasAnySpecChanged = true + if err = g.buf.Generate( + ctx, + protoPath, + dir, + g.openAPITemplate(), + cosmosbuf.ExcludeFiles( + append(excludeList, []string{ + "*/strangelove_ventures/poa/*", + "*/osmosis-labs/fee-abstraction/*", + "*/module.proto", + "*/testutil/*", + "*/testdata/*", + "*/cosmos/orm/*", + "*/cosmos/reflection/*", + "*/cosmos/app/v1alpha1/*", + "*/cosmos/tx/config/v1/config.proto", + "*/cosmos/msg/textual/v1/textual.proto", + "*/cosmos/vesting/v1beta1/vesting.proto", + }...)..., + ), + cosmosbuf.FileByFile(), + ); err != nil { + return errors.Wrapf(err, "failed to generate openapi spec %s, probably 
you need to exclude some proto files", protoPath) + } + + specs, err := xos.FindFiles(dir, xos.WithExtension(xos.JSONFile)) + if err != nil { + return err + } + + for _, spec := range specs { + f, err := os.ReadFile(spec) + if err != nil { + return err + } + + // if no checksum, the cacheKey is wrong, so we do not save it + if !noChecksum { + if err := specCache.Put(cacheKey, f); err != nil { + return err + } + } + + if err := conf.AddSpec(name, spec, true); err != nil { + return err + } + } + + return nil + } + + // generate specs for each module and persist them in the file system + // after add their path and config to swaggercombine.Config so we can combine them + // into a single spec. + + // protoc openapi generator acts weird on concurrent run, so do not use goroutines here. + if err := gen(g.appPath, g.protoDir, g.goModPath); err != nil { + return err + } + + doneMods := make(map[string]struct{}) + for _, modules := range g.thirdModules { + for _, m := range modules { + path := extractRootModulePath(m.Pkg.Path) + + if _, ok := doneMods[path]; ok { + continue + } + doneMods[path] = struct{}{} + + if err := gen(path, "proto", m.Name); err != nil { + return err + } + } + } + + out := g.opts.openAPISpecOut + + if !hasAnySpecChanged { + // In case the generated output has been changed + changed, err := dirchange.HasDirChecksumChanged(specCache, out, g.appPath, out) + if err != nil { + return err + } + + if !changed { + return nil + } + } + + // combine specs into one and save to out. 
+ if err := conf.Combine(out); err != nil { + return err + } + + return dirchange.SaveDirChecksum(specCache, out, g.appPath, out) +} + +func extractRootModulePath(fullPath string) string { + var ( + segments = strings.Split(fullPath, "/") + modulePath = "/" + ) + + for _, segment := range segments { + modulePath = filepath.Join(modulePath, segment) + segmentName := strings.Split(segment, "@") + if len(segmentName) > 1 { + if _, err := semver.ParseTolerant(segmentName[1]); err == nil { + return modulePath + } + } + } + return fullPath +} diff --git a/ignite/pkg/cosmosgen/generate_openapi_test.go b/ignite/pkg/cosmosgen/generate_openapi_test.go new file mode 100644 index 0000000..cc41de2 --- /dev/null +++ b/ignite/pkg/cosmosgen/generate_openapi_test.go @@ -0,0 +1,111 @@ +package cosmosgen + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis/module" + "github.com/ignite/cli/v29/ignite/pkg/cosmosbuf" + "github.com/ignite/cli/v29/ignite/pkg/dirchange" + "github.com/ignite/cli/v29/ignite/pkg/env" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func Test_extractRootModulePath(t *testing.T) { + tests := []struct { + name string + path string + want string + }{ + { + name: "test cosmos-sdk path", + path: "/Users/ignite/Desktop/go/pkg/mod/github.com/cosmos/cosmos-sdk@v0.50.6/proto/cosmos/distribution/v1beta1", + want: "/Users/ignite/Desktop/go/pkg/mod/github.com/cosmos/cosmos-sdk@v0.50.6", + }, + { + name: "test cosmos-sdk module proto path", + path: "/Users/ignite/Desktop/go/pkg/mod/github.com/cosmos/cosmos-sdk@v0.50.6/x/bank", + want: "/Users/ignite/Desktop/go/pkg/mod/github.com/cosmos/cosmos-sdk@v0.50.6", + }, + { + name: "test ibc path", + path: "/Users/ignite/Desktop/go/pkg/mod/github.com/cosmos/ibc-go/v8@v8.2.0/proto/ibc/applications/interchain_accounts/controller/v1", + want: 
"/Users/ignite/Desktop/go/pkg/mod/github.com/cosmos/ibc-go/v8@v8.2.0", + }, + { + name: "test chain path", + path: "/Users/ignite/Desktop/go/src/github.com/ignite/venus", + want: "/Users/ignite/Desktop/go/src/github.com/ignite/venus", + }, + { + name: "test module path without version", + path: "/Users/ignite/Desktop/go/pkg/mod/github.com/grpc-ecosystem/grpc-gateway/proto/applications", + want: "/Users/ignite/Desktop/go/pkg/mod/github.com/grpc-ecosystem/grpc-gateway/proto/applications", + }, + { + name: "test module path with broken version", + path: "/Users/ignite/Desktop/go/pkg/mod/github.com/grpc-ecosystem/grpc-gateway@v1.$/controller", + want: "/Users/ignite/Desktop/go/pkg/mod/github.com/grpc-ecosystem/grpc-gateway@v1.$/controller", + }, + { + name: "test module path with v2 version", + path: "/Users/ignite/Desktop/go/pkg/mod/github.com/grpc-ecosystem/grpc-gateway/v2@v2.19.1/proto/files", + want: "/Users/ignite/Desktop/go/pkg/mod/github.com/grpc-ecosystem/grpc-gateway/v2@v2.19.1", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := extractRootModulePath(tt.path) + require.Equal(t, tt.want, got) + }) + } +} + +func TestGenerateOpenAPI(t *testing.T) { + r := require.New(t) + t.Setenv(env.ConfigDirEnvVar, t.TempDir()) + testdataDir := "testdata" + appDir := filepath.Join(testdataDir, "testchain") + openAPIFile := filepath.Join(appDir, "docs", "static", "openapi.json") + + cacheStorage, err := cache.NewStorage(filepath.Join(t.TempDir(), "cache.db")) + r.NoError(err) + + buf, err := cosmosbuf.New(cacheStorage, t.Name()) + r.NoError(err) + + // Use module discovery to collect test module proto. 
+ m, err := module.Discover(t.Context(), appDir, appDir, module.WithProtoDir("proto")) + r.NoError(err, "failed to discover module") + r.Len(m, 1, "expected exactly one module to be discovered") + + g := &generator{ + appPath: appDir, + protoDir: "proto", + goModPath: "go.mod", + cacheStorage: cacheStorage, + buf: buf, + appModules: m, + opts: &generateOptions{ + openAPISpecOut: openAPIFile, + }, + } + + err = g.generateOpenAPISpec(t.Context()) + if err != nil && !errors.Is(err, dirchange.ErrNoFile) { + r.NoError(err, "failed to generate OpenAPI spec") + } + + // compare generated OpenAPI spec with golden files + goldenFile := filepath.Join(testdataDir, "expected_files", "openapi", "openapi.json") + gold, err := os.ReadFile(goldenFile) + r.NoError(err, "failed to read golden file: %s", goldenFile) + + gotBytes, err := os.ReadFile(openAPIFile) + r.NoError(err, "failed to read generated file: %s", openAPIFile) + r.Equal(string(gold), string(gotBytes), "generated OpenAPI spec does not match golden file") +} diff --git a/ignite/pkg/cosmosgen/generate_test.go b/ignite/pkg/cosmosgen/generate_test.go new file mode 100644 index 0000000..20c73f0 --- /dev/null +++ b/ignite/pkg/cosmosgen/generate_test.go @@ -0,0 +1,112 @@ +package cosmosgen + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestFindInnerProtoFolder(t *testing.T) { + tmpDir, err := os.MkdirTemp("", "proto-test") + require.NoError(t, err) + defer os.RemoveAll(tmpDir) + + // create dummy files + create := func(path string) { + dir := filepath.Dir(path) + err := os.MkdirAll(dir, 0o755) + require.NoError(t, err) + _, err = os.Create(path) + require.NoError(t, err) + } + + tests := []struct { + name string + setup func(root string) + expectedPath string + expectError bool + }{ + { + name: "no proto files", + setup: func(root string) { + // No files created + }, + expectError: true, + }, + { + name: "single proto file in root", + setup: func(root string) { + 
create(filepath.Join(root, "a.proto")) + }, + expectedPath: ".", + }, + { + name: "single proto file in proto dir", + setup: func(root string) { + create(filepath.Join(root, "proto", "a.proto")) + }, + expectedPath: "proto", + }, + { + name: "multiple proto files in same proto dir", + setup: func(root string) { + create(filepath.Join(root, "proto", "a.proto")) + create(filepath.Join(root, "proto", "b.proto")) + }, + expectedPath: "proto", + }, + { + name: "nested proto directories", + setup: func(root string) { + create(filepath.Join(root, "proto", "a.proto")) + create(filepath.Join(root, "proto", "api", "v1", "b.proto")) + }, + expectedPath: "proto", + }, + { + name: "highest level proto directory", + setup: func(root string) { + create(filepath.Join(root, "proto", "a.proto")) + create(filepath.Join(root, "foo", "proto", "b.proto")) + }, + expectedPath: "proto", + }, + { + name: "no dir named proto", + setup: func(root string) { + create(filepath.Join(root, "api", "a.proto")) + }, + expectedPath: "api", + }, + { + name: "deeply nested with no proto dir name", + setup: func(root string) { + create(filepath.Join(root, "foo", "bar", "a.proto")) + }, + expectedPath: "foo/bar", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + caseRoot := filepath.Join(tmpDir, tt.name) + err := os.MkdirAll(caseRoot, 0o755) + require.NoError(t, err) + + tt.setup(caseRoot) + + result, err := findInnerProtoFolder(caseRoot) + + if tt.expectError { + require.Error(t, err) + return + } + require.NoError(t, err) + + expected := filepath.Join(caseRoot, tt.expectedPath) + require.Equal(t, expected, result) + }) + } +} diff --git a/ignite/pkg/cosmosgen/generate_typescript.go b/ignite/pkg/cosmosgen/generate_typescript.go new file mode 100644 index 0000000..870453b --- /dev/null +++ b/ignite/pkg/cosmosgen/generate_typescript.go @@ -0,0 +1,275 @@ +package cosmosgen + +import ( + "context" + "log" + "os" + "os/exec" + "path/filepath" + "sort" + "strings" + + 
"golang.org/x/sync/errgroup" + + "github.com/ignite/cli/v29/ignite/internal/buf" + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis/module" + "github.com/ignite/cli/v29/ignite/pkg/cosmosbuf" + "github.com/ignite/cli/v29/ignite/pkg/dirchange" + "github.com/ignite/cli/v29/ignite/pkg/gomodulepath" +) + +var ( + bufTokenEnvName = "BUF_TOKEN" + + dirchangeCacheNamespace = "generate.typescript.dirchange" + + protocGenTSProtoBin = "protoc-gen-ts_proto" + + msgBufAuth = "Note: Buf is limits remote plugin requests from unauthenticated users on 'buf.build'. Intensively using this function will get you rate limited. Authenticate with 'buf registry login' to avoid this (https://buf.build/docs/generate/auth-required)." +) + +const localTSProtoTmpl = `version: v1 +plugins: + - plugin: ts_proto + out: . + opt: + - logtostderr=true + - allow_merge=true + - json_names_for_fields=false + - ts_proto_opt=snakeToCamel=true + - ts_proto_opt=esModuleInterop=true + - ts_proto_out=. +` + +type tsGenerator struct { + g *generator + tsTemplateFile string + isLocalProto bool + + // hasLocalBufToken indicates whether the user had already a local Buf token. + hasLocalBufToken bool +} + +type generatePayload struct { + Modules []module.Module + PackageNS string +} + +func newTSGenerator(g *generator) *tsGenerator { + tsg := &tsGenerator{g: g} + if _, err := exec.LookPath(protocGenTSProtoBin); err == nil { + tsg.isLocalProto = true + } + + if !tsg.isLocalProto { + if os.Getenv(bufTokenEnvName) == "" { + token, err := buf.FetchToken() + if err != nil { + log.Printf("No '%s' binary found in PATH, using remote buf plugin for Typescript generation. 
%s\n", protocGenTSProtoBin, msgBufAuth)
			} else {
				os.Setenv(bufTokenEnvName, token)
			}
		} else {
			tsg.hasLocalBufToken = true
		}
	}

	return tsg
}

// tsTemplate returns the path of the buf generation template to use for
// TypeScript codegen. When the local protoc-gen-ts_proto binary is not
// available it falls back to the app's remote-plugin template; otherwise it
// lazily materializes localTSProtoTmpl into a temp file (created once and
// reused, removed later by cleanup).
func (g *tsGenerator) tsTemplate() (string, error) {
	if !g.isLocalProto {
		return g.g.tsTemplate(), nil
	}
	if g.tsTemplateFile != "" {
		return g.tsTemplateFile, nil
	}
	f, err := os.CreateTemp("", "buf-gen-ts-*.yaml")
	if err != nil {
		return "", err
	}
	defer f.Close()
	if _, err := f.WriteString(localTSProtoTmpl); err != nil {
		// Remove the temp file on write failure: g.tsTemplateFile is not
		// set yet, so cleanup() would otherwise never delete it.
		os.Remove(f.Name())
		return "", err
	}
	g.tsTemplateFile = f.Name()
	return g.tsTemplateFile, nil
}

// cleanup removes the materialized template file (if any) and unsets the
// buf token that this generator exported into the environment.
func (g *tsGenerator) cleanup() {
	if g.tsTemplateFile != "" {
		os.Remove(g.tsTemplateFile)
	}

	// Unset the ignite-fetched buf token only when the user did not
	// already have one set in the environment.
	if !g.hasLocalBufToken {
		os.Unsetenv(bufTokenEnvName)
	}
}

// tsTemplate returns the path of the app's buf TypeScript generation
// template inside the proto directory.
func (g *generator) tsTemplate() string {
	return filepath.Join(g.appPath, g.protoDir, "buf.gen.ts.yaml")
}

// generateTS generates the TypeScript client: per-module templates first,
// then the root templates that register app and third-party modules.
func (g *generator) generateTS(ctx context.Context) error {
	chainPath, _, err := gomodulepath.Find(g.appPath)
	if err != nil {
		return err
	}

	appModulePath := gomodulepath.ExtractAppPath(chainPath.RawPath)
	data := generatePayload{
		Modules:   g.appModules,
		PackageNS: strings.ReplaceAll(appModulePath, "/", "-"),
	}

	// Make sure the modules are always sorted to keep the import
	// and module registration order consistent so the generated
	// files are not changed.
	sort.SliceStable(data.Modules, func(i, j int) bool {
		return data.Modules[i].Pkg.Name < data.Modules[j].Pkg.Name
	})

	tsg := newTSGenerator(g)
	defer tsg.cleanup()
	if err := tsg.generateModuleTemplates(ctx); err != nil {
		return err
	}

	// Add third party modules for the root template.
	for _, modules := range g.thirdModules {
		data.Modules = append(data.Modules, modules...)
+ } + + return tsg.generateRootTemplates(data) +} + +func (g *tsGenerator) generateModuleTemplates(ctx context.Context) error { + dirCache := cache.New[[]byte](g.g.cacheStorage, dirchangeCacheNamespace) + add := func(sourcePath string, m module.Module) error { + cacheKey := m.Pkg.Path + paths := []string{m.Pkg.Path, g.g.opts.jsOut(m)} + + // Always generate module templates by default unless cache is enabled, in which + // case the module template is generated when one or more files were changed in + // the module since the last generation. + if g.g.opts.useCache { + changed, err := dirchange.HasDirChecksumChanged(dirCache, cacheKey, sourcePath, paths...) + if err != nil { + return err + } + + if !changed { + return nil + } + } + + if err := g.generateModuleTemplate(ctx, sourcePath, m); err != nil { + return err + } + + return dirchange.SaveDirChecksum(dirCache, cacheKey, sourcePath, paths...) + } + + gg := &errgroup.Group{} + for _, m := range g.g.appModules { + gg.Go(func() error { + return add(g.g.appPath, m) + }) + } + + // Always generate third party modules; This is required because not generating them might + // lead to issues with the module registration in the root template. The root template must + // always be generated with 3rd party modules which means that if a new 3rd party module + // is available and not generated it would lead to the registration of a new not generated + // 3rd party module. 
+ for sourcePath, modules := range g.g.thirdModules { + for _, m := range modules { + gg.Go(func() error { + return add(sourcePath, m) + }) + } + } + + return gg.Wait() +} + +func (g *tsGenerator) generateModuleTemplate( + ctx context.Context, + appPath string, + m module.Module, +) error { + var ( + out = g.g.opts.jsOut(m) + typesOut = filepath.Join(out, "types") + ) + + if err := os.MkdirAll(typesOut, 0o766); err != nil { + return err + } + if err := generateRouteNameFile(typesOut); err != nil { + return err + } + + // All "cosmossdk.io" module packages must use SDK's + // proto path which is where the proto files are stored. + protoPath := filepath.Join(appPath, g.g.protoDir) // use module app path + + if module.IsCosmosSDKPackage(appPath) { + protoPath = filepath.Join(g.g.sdkDir, "proto") + } + + // check if directory exists + if _, err := os.Stat(protoPath); os.IsNotExist(err) { + var err error + protoPath, err = findInnerProtoFolder(appPath) + if err != nil { + // if proto directory does not exist, we just skip it + log.Print(err.Error()) + return nil + } + } + + tsTemplate, err := g.tsTemplate() + if err != nil { + return err + } + + // code generate for each module. 
+ if err := g.g.buf.Generate( + ctx, + protoPath, + typesOut, + tsTemplate, + cosmosbuf.IncludeWKT(), + cosmosbuf.WithModuleName(m.Pkg.Name), + ); err != nil { + return err + } + + // Generate the module template + if err := templateTSClientModule.Write(out, protoPath, struct { + Module module.Module + }{ + Module: m, + }); err != nil { + return err + } + + // Generate the rest API template (using axios) + return templateTSClientRest.Write(out, protoPath, struct { + module.Module + }{ + Module: m, + }) +} + +func (g *tsGenerator) generateRootTemplates(p generatePayload) error { + outDir := g.g.opts.tsClientRootPath + if err := os.MkdirAll(outDir, 0o766); err != nil { + return err + } + + return templateTSClientRoot.Write(outDir, "", p) +} diff --git a/ignite/pkg/cosmosgen/generate_typescript_test.go b/ignite/pkg/cosmosgen/generate_typescript_test.go new file mode 100644 index 0000000..b2fd4da --- /dev/null +++ b/ignite/pkg/cosmosgen/generate_typescript_test.go @@ -0,0 +1,83 @@ +package cosmosgen + +import ( + "fmt" + "os" + "os/exec" + "path/filepath" + "strings" + "testing" + + "github.com/ettle/strcase" + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis/module" + "github.com/ignite/cli/v29/ignite/pkg/cosmosbuf" + "github.com/ignite/cli/v29/ignite/pkg/env" +) + +func TestGenerateTypeScript(t *testing.T) { + require := require.New(t) + t.Setenv(env.ConfigDirEnvVar, t.TempDir()) + if _, err := exec.LookPath(protocGenTSProtoBin); err != nil { + t.Skipf("%s not found in PATH", protocGenTSProtoBin) + } + testdataDir := "testdata" + appDir := filepath.Join(testdataDir, "testchain") + tsClientDir := filepath.Join(appDir, "ts-client") + + cacheStorage, err := cache.NewStorage(filepath.Join(t.TempDir(), "cache.db")) + require.NoError(err) + + buf, err := cosmosbuf.New(cacheStorage, t.Name()) + require.NoError(err) + + // Use module discovery to collect test module proto. 
+ m, err := module.Discover(t.Context(), appDir, appDir, module.WithProtoDir("proto")) + require.NoError(err, "failed to discover module") + require.Len(m, 1, "expected exactly one module to be discovered") + + g := newTSGenerator(&generator{ + appPath: appDir, + protoDir: "proto", + goModPath: "go.mod", + cacheStorage: cacheStorage, + buf: buf, + appModules: m, + opts: &generateOptions{ + tsClientRootPath: tsClientDir, + useCache: false, + jsOut: func(m module.Module) string { + return filepath.Join(tsClientDir, fmt.Sprintf("%s.%s.%s", "ignite", "planet", strcase.ToKebab(m.Name))) + }, + }, + }) + + err = g.generateModuleTemplate(t.Context(), appDir, m[0]) + require.NoError(err, "failed to generate TypeScript files") + + err = g.generateRootTemplates(generatePayload{ + Modules: m, + PackageNS: strings.ReplaceAll(appDir, "/", "-"), + }) + require.NoError(err) + + // compare all generated files to golden files + goldenDir := filepath.Join(testdataDir, "expected_files", "ts-client") + _ = filepath.Walk(goldenDir, func(path string, info os.FileInfo, err error) error { + if err != nil || info.IsDir() { + return err + } + rel, _ := filepath.Rel(goldenDir, path) + got := filepath.Join(tsClientDir, rel) + gold, err := os.ReadFile(path) + require.NoError(err, "failed to read golden file: %s", path) + + gotBytes, err := os.ReadFile(got) + require.NoError(err, "failed to read generated file: %s", got) + require.Equal(string(gold), string(gotBytes), "file %s does not match golden file", rel) + + return nil + }) +} diff --git a/ignite/pkg/cosmosgen/install.go b/ignite/pkg/cosmosgen/install.go new file mode 100644 index 0000000..87a4a15 --- /dev/null +++ b/ignite/pkg/cosmosgen/install.go @@ -0,0 +1,68 @@ +package cosmosgen + +import ( + "golang.org/x/mod/modfile" +) + +// DepTools necessary tools to build and run the chain. +func DepTools() []string { + return []string{ + // buf build code generation. 
		"github.com/bufbuild/buf/cmd/buf",
		"github.com/cosmos/gogoproto/protoc-gen-gocosmos",
		"github.com/cosmos/gogoproto/protoc-gen-gogo",
		"github.com/cosmos/cosmos-proto/cmd/protoc-gen-go-pulsar",

		// Go code generation plugin.
		"google.golang.org/grpc/cmd/protoc-gen-go-grpc",
		"google.golang.org/protobuf/cmd/protoc-gen-go",

		// grpc-gateway plugins.
		"github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway",
		"github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2",

		// code style
		"golang.org/x/tools/cmd/goimports",
		"github.com/golangci/golangci-lint/cmd/golangci-lint",
	}
}

// MissingTools finds the DepTools entries that are not yet declared as
// tool directives in the given go.mod file.
func MissingTools(f *modfile.File) (missingTools []string) {
	// Index the go.mod tool directives by path for O(1) lookups.
	imports := make(map[string]struct{})
	for _, imp := range f.Tool {
		imports[imp.Path] = struct{}{}
	}

	for _, tool := range DepTools() {
		if _, ok := imports[tool]; !ok {
			missingTools = append(missingTools, tool)
		}
	}

	return missingTools
}

// UnusedTools finds obsolete tool directives in the given go.mod file
// that are known to be no longer needed.
func UnusedTools(f *modfile.File) (unusedTools []string) {
	unused := []string{
		// regen protoc plugin
		"github.com/regen-network/cosmos-proto/protoc-gen-gocosmos",

		// old ignite repo.
+ "github.com/ignite-hq/cli/ignite/pkg/cmdrunner", + "github.com/ignite-hq/cli/ignite/pkg/cmdrunner/step", + } + + imports := make(map[string]struct{}) + for _, imp := range f.Tool { + imports[imp.Path] = struct{}{} + } + + for _, tool := range unused { + if _, ok := imports[tool]; ok { + unusedTools = append(unusedTools, tool) + } + } + return +} diff --git a/ignite/pkg/cosmosgen/install_test.go b/ignite/pkg/cosmosgen/install_test.go new file mode 100644 index 0000000..df39fee --- /dev/null +++ b/ignite/pkg/cosmosgen/install_test.go @@ -0,0 +1,110 @@ +package cosmosgen_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + "golang.org/x/mod/modfile" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosgen" +) + +func TestMissingTools(t *testing.T) { + var ( + tools = cosmosgen.DepTools() + someTools = tools[:2] + missingTools = tools[2:] + ) + tests := []struct { + name string + modFile *modfile.File + want []string + }{ + { + name: "no missing tools", + modFile: createModFileWithTools(t, tools...), + want: nil, + }, + { + name: "some missing tools", + modFile: createModFileWithTools(t, someTools...), + want: missingTools, + }, + { + name: "all tools missing", + modFile: createModFileWithTools(t), + want: tools, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := cosmosgen.MissingTools(tt.modFile) + require.EqualValues(t, tt.want, got) + }) + } +} + +func TestUnusedTools(t *testing.T) { + tests := []struct { + name string + modFile *modfile.File + want []string + }{ + { + name: "all unused tools", + modFile: createModFileWithTools(t, + "github.com/regen-network/cosmos-proto/protoc-gen-gocosmos", + "github.com/ignite-hq/cli/ignite/pkg/cmdrunner", + "github.com/ignite-hq/cli/ignite/pkg/cmdrunner/step", + ), + want: []string{ + "github.com/regen-network/cosmos-proto/protoc-gen-gocosmos", + "github.com/ignite-hq/cli/ignite/pkg/cmdrunner", + "github.com/ignite-hq/cli/ignite/pkg/cmdrunner/step", + }, + }, + { + name: "some 
unused tools", + modFile: createModFileWithTools(t, + "github.com/ignite-hq/cli/ignite/pkg/cmdrunner", + ), + want: []string{"github.com/ignite-hq/cli/ignite/pkg/cmdrunner"}, + }, + { + name: "no tools unused", + modFile: createModFileWithTools(t, ""), + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := cosmosgen.UnusedTools(tt.modFile) + require.EqualValues(t, tt.want, got) + }) + } +} + +// createModFileWithTools helper function to create a modfile.File with given tool paths. +// This simulates the Tool entries in a go.mod file. +func createModFileWithTools(t *testing.T, toolPaths ...string) *modfile.File { + // create a minimal go.mod content + content := "module test\n\ngo 1.24\n\n" + + // parse the basic module + f, err := modfile.Parse("go.mod", []byte(content), nil) + if err != nil { + t.Logf("failed to parse test go.mod content: %v", err) + t.FailNow() + } + + // add the tools + for _, path := range toolPaths { + if err := f.AddTool(path); err != nil { + t.Logf("failed to add tool %s to go.mod: %v", path, err) + t.FailNow() + } + } + + return f +} diff --git a/ignite/pkg/cosmosgen/sta.go b/ignite/pkg/cosmosgen/sta.go new file mode 100644 index 0000000..50421d2 --- /dev/null +++ b/ignite/pkg/cosmosgen/sta.go @@ -0,0 +1,58 @@ +package cosmosgen + +import ( + "os" + "path/filepath" +) + +const routeNameTemplate = `<% +const { routeInfo, utils } = it; +const { + operationId, + method, + route, + moduleName, + responsesTypes, + description, + tags, + summary, + pathArgs, +} = routeInfo; +const { _, fmtToJSDocLine, require } = utils; + +const methodAliases = { + get: (pathName, hasPathInserts) => + _.camelCase(` + "`" + `${pathName}_${hasPathInserts ? 
"detail" : "list"}` + "`" + `), + post: (pathName, hasPathInserts) => _.camelCase(` + "`" + `${pathName}_create` + "`" + `), + put: (pathName, hasPathInserts) => _.camelCase(` + "`" + `${pathName}_update` + "`" + `), + patch: (pathName, hasPathInserts) => _.camelCase(` + "`" + `${pathName}_partial_update` + "`" + `), + delete: (pathName, hasPathInserts) => _.camelCase(` + "`" + `${pathName}_delete` + "`" + `), +}; + +const createCustomOperationId = (method, route, moduleName) => { + const hasPathInserts = /\{(\w){1,}\}/g.test(route); + const splitedRouteBySlash = _.compact(_.replace(route, /\{(\w){1,}\}/g, "").split("/")); + const routeParts = (splitedRouteBySlash.length > 1 + ? splitedRouteBySlash.splice(1) + : splitedRouteBySlash + ).join("_"); + return routeParts.length > 3 && methodAliases[method] + ? methodAliases[method](routeParts, hasPathInserts) + : _.camelCase(_.lowerCase(method) + "_" + [moduleName].join("_")) || "index"; +}; + +if (operationId) { + let routeName = operationId.replace('_',''); + return routeName[0].toLowerCase() + routeName.slice(1); +} +if (route === "/") + return _.camelCase(` + "`" + `${_.lowerCase(method)}Root` + "`" + `); + +return createCustomOperationId(method, route, moduleName); +%>` + +// generateRouteNameFile generates the `route-name.eta` file. 
// generateRouteNameFile writes the `route-name.eta` template (used by
// swagger-typescript-api to derive operation names) into outPath.
func generateRouteNameFile(outPath string) error {
	outTemplate := filepath.Join(outPath, "route-name.eta")
	return os.WriteFile(outTemplate, []byte(routeNameTemplate), 0o600)
}
diff --git a/ignite/pkg/cosmosgen/template.go b/ignite/pkg/cosmosgen/template.go
new file mode 100644
index 0000000..c471205
--- /dev/null
+++ b/ignite/pkg/cosmosgen/template.go
@@ -0,0 +1,134 @@
package cosmosgen

import (
	"embed"
	"os"
	"path/filepath"
	"strings"
	"text/template"

	"github.com/iancoleman/strcase"
	"golang.org/x/text/cases"
	"golang.org/x/text/language"

	"github.com/ignite/cli/v29/ignite/pkg/xstrcase"
)

var (
	//go:embed templates/*
	templates embed.FS

	// Writers for each embedded TS client template directory.
	templateTSClientRoot           = newTemplateWriter("root")
	templateTSClientModule         = newTemplateWriter("module")
	templateTSClientRest           = newTemplateWriter("rest")
	templateTSClientComposable     = newTemplateWriter("composable")
	templateTSClientComposableRoot = newTemplateWriter("composable-root")
)

// templateWriter renders all templates found under one embedded
// templates/<templateDir> directory.
type templateWriter struct {
	templateDir string
}

// newTemplateWriter returns a templateWriter for the embedded template
// directory templates/<templateDir>.
func newTemplateWriter(templateDir string) templateWriter {
	return templateWriter{
		templateDir,
	}
}

// Write renders every template in the writer's directory into destDir,
// passing data to the templates; protoPath is exposed to template funcs.
func (t templateWriter) Write(destDir, protoPath string, data interface{}) error {
	base := filepath.Join("templates", t.templateDir)

	// find out templates inside the dir.
+ files, err := templates.ReadDir(base) + if err != nil { + return err + } + + var paths []string + for _, file := range files { + paths = append(paths, filepath.Join(base, file.Name())) + } + + funcs := template.FuncMap{ + "camelCase": strcase.ToLowerCamel, + "capitalCase": func(word string) string { + replacer := strings.NewReplacer("-", "_", ".", "_") + word = xstrcase.UpperCamel(replacer.Replace(word)) + + return cases.Title(language.English).String(word) + }, + "camelCaseLowerSta": func(word string) string { + replacer := strings.NewReplacer("-", "_", ".", "_") + + return strcase.ToLowerCamel(replacer.Replace(word)) + }, + "camelCaseUpperSta": func(word string) string { + replacer := strings.NewReplacer("-", "_", ".", "_") + + return xstrcase.UpperCamel(replacer.Replace(word)) + }, + "resolveFile": func(fullPath string) string { + _ = protoPath // eventually, we should use the proto folder name of this, for the application (but not for the other modules) + + res := strings.Split(fullPath, "proto/") + rel := res[len(res)-1] // get path after proto/ + rel = strings.TrimSuffix(rel, ".proto") + + return "./types/" + rel + }, + "transformPath": func(path string) string { + // transformPath converts a endpoint path to a valid JS substring path. + // e.g. /cosmos/bank/v1beta1/spendable_balances/{address}/by_denom -> /cosmos/bank/v1beta1/spendable_balances/${address}/by_denom + path = strings.ReplaceAll(path, "{", "${") + path = strings.ReplaceAll(path, "=**}", "}") + return path + }, + "transformParamsToUnion": func(params []string) string { + if len(params) == 0 { + return `""` + } + + var quotedParams []string + for _, param := range params { + quotedParams = append(quotedParams, `"`+param+`"`) + } + + return strings.Join(quotedParams, " | ") + }, + "inc": func(i int) int { + return i + 1 + }, + "replace": strings.ReplaceAll, + } + + // render and write the template. + write := func(path string) error { + tpl := template. + Must( + template. 
+ New(filepath.Base(path)). + Funcs(funcs). + ParseFS(templates, paths...), + ) + + out := filepath.Join(destDir, strings.TrimSuffix(filepath.Base(path), ".tpl")) + + f, err := os.OpenFile(out, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0o766) + if err != nil { + return err + } + defer f.Close() + + return tpl.Execute(f, data) + } + + for _, path := range paths { + if err := write(path); err != nil { + return err + } + } + + return nil +} diff --git a/ignite/pkg/cosmosgen/templates/composable-root/useClient.ts.tpl b/ignite/pkg/cosmosgen/templates/composable-root/useClient.ts.tpl new file mode 100644 index 0000000..463e07d --- /dev/null +++ b/ignite/pkg/cosmosgen/templates/composable-root/useClient.ts.tpl @@ -0,0 +1,15 @@ +import { Client } from '{{ .PackageNS }}-client-ts' +import { env } from '../env'; + +const useClientInstance = () => { + const client = new Client(env); + return client; +}; +let clientInstance: ReturnType<typeof useClientInstance>; + +export const useClient = () => { + if (!clientInstance) { + clientInstance = useClientInstance(); + } + return clientInstance; +}; \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/templates/composable/index.ts.tpl b/ignite/pkg/cosmosgen/templates/composable/index.ts.tpl new file mode 100644 index 0000000..3f27379 --- /dev/null +++ b/ignite/pkg/cosmosgen/templates/composable/index.ts.tpl @@ -0,0 +1,129 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { useQuery, type UseQueryOptions, useInfiniteQuery, type UseInfiniteQueryOptions, type InfiniteData } from "@tanstack/vue-query"; +import { useClient } from '../useClient'; + +export default function use{{ camelCaseUpperSta $.Module.Pkg.Name }}() { + const client = useClient(); + + {{- range .Module.HTTPQueries -}} + {{- $FullName := .FullName -}} + {{- $Name := .Name -}} + {{- if .Paginated -}} + {{- range $i,$rule := .Rules -}} + {{- $n := "" -}} + {{- if (gt $i 0) -}} + {{- $n = inc $i -}} + {{- end }} + type {{ $FullName }}{{ $n }}Method = 
typeof client.{{ camelCaseUpperSta $.Module.Pkg.Name }}.query.{{ camelCase $FullName -}}; + type {{ $FullName }}{{ $n }}Data = Awaited<ReturnType<{{ $FullName }}{{ $n }}Method>>["data"] & { pageParam: number }; + const {{ $FullName }}{{ $n }} = ( + {{- if $rule.Params -}} + {{- range $j,$a :=$rule.Params -}} + {{- if (gt $j 0) -}}, {{ end -}} + {{- $a -}}: string + {{- end -}} + , {{ end -}} + {{- if $rule.HasQuery -}} + query: NonNullable<Parameters<{{ $FullName }}{{ $n }}Method>[{{- len $rule.Params -}}]>, + {{- end }} options: Partial<UseInfiniteQueryOptions<{{ $FullName }}{{ $n }}Data, unknown, InfiniteData<{{ $FullName }}{{ $n }}Data,number>, Array<string | unknown>, number>> , perPage: number) => { + const key = { type: '{{ $FullName }}{{ $n }}', {{ range $j,$a :=$rule.Params -}} + {{- if (gt $j 0) -}}, {{ end }} {{ $a -}} + {{- end -}} {{- if $rule.HasQuery -}} + {{- if $rule.Params -}}, {{ end -}} + query + {{- end }} }; + return useInfiniteQuery<{{ $FullName }}{{ $n }}Data, unknown, InfiniteData<{{ $FullName }}{{ $n }}Data,number>, Array<string | unknown>, number>({ queryKey: [key], queryFn: async (context: {pageParam?: number}) => { + const { pageParam=1 } = context; + {{- if or $rule.HasQuery $rule.Params}} + const { {{- if $rule.Params -}}{{- range $j,$a :=$rule.Params -}} + {{- if (gt $j 0) -}}, {{ end }} {{ $a -}} + {{- end -}}{{- if $rule.HasQuery -}}, {{- end -}}{{- end -}}{{- if $rule.HasQuery -}}query{{- end }} } = key{{ end }} + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + const res = await client.{{ camelCaseUpperSta $.Module.Pkg.Name }}.query.{{ camelCase $FullName -}} + {{- $n -}}({{- range $j,$a :=$rule.Params -}} + {{- if (gt $j 0) -}}, {{ end }} {{- $a -}} + {{- end -}} + {{- if $rule.HasQuery -}} + {{- if $rule.Params -}}, {{ end -}} + query ?? 
undefined + {{- end -}} + {{- if $rule.HasBody -}} + {{- if or $rule.HasQuery $rule.Params}},{{ end -}} + {...key} + {{- end -}} + ); + return { ...res.data, pageParam }; + }, ...options, + initialPageParam: 1, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + {{ end -}} + {{- else -}} + {{- range $i,$rule := .Rules -}} + {{- $n := "" -}} + {{- if (gt $i 0) -}} + {{- $n = inc $i -}} + {{- end }} + + type {{ $FullName }}{{ $n }}Method = typeof client.{{ camelCaseUpperSta $.Module.Pkg.Name }}.query.{{ camelCase $FullName -}}; + type {{ $FullName }}{{ $n }}Data = Awaited<ReturnType<{{ $FullName }}{{ $n }}Method>>["data"]; + const {{ $FullName }}{{ $n }} = ( + {{- if $rule.Params -}} + {{- range $j,$a :=$rule.Params -}} + {{- if (gt $j 0) -}}, {{ end -}} + {{- $a -}}: string + {{- end -}} + , {{ end -}} + {{- if $rule.HasQuery -}} + query: NonNullable<Parameters<{{ $FullName }}{{ $n }}Method>[{{- len $rule.Params -}}]>, + {{- end }} options: Partial<UseQueryOptions<{{ $FullName }}{{ $n }}Data>>) => { + const key = { type: '{{ $FullName }}{{ $n }}', {{ range $j,$a :=$rule.Params -}} + {{- if (gt $j 0) -}}, {{ end }} {{ $a -}} + {{- end -}} {{- if $rule.HasQuery -}} + {{- if $rule.Params -}}, {{ end -}} + query + {{- end }} }; + return useQuery<{{ $FullName }}{{ $n }}Data>({ queryKey: [key], queryFn: async () => { + {{- if or $rule.HasQuery $rule.Params}} + const { {{- if $rule.Params -}}{{- range $j,$a :=$rule.Params -}} + {{- if (gt $j 0) -}}, {{ end }} {{ $a -}} + {{- end -}}{{- if $rule.HasQuery -}}, {{- end -}}{{- end -}}{{- if $rule.HasQuery -}}query{{- end }} } = key{{ end }} + const res = await client.{{ camelCaseUpperSta $.Module.Pkg.Name }}.query.{{ camelCase $FullName -}} + {{- $n 
-}}({{- range $j,$a :=$rule.Params -}} + {{- if (gt $j 0) -}}, {{ end }} {{- $a -}} + {{- end -}} + {{- if $rule.HasQuery -}} + {{- if $rule.Params -}}, {{ end -}} + query ?? undefined + {{- end -}} + {{- if $rule.HasBody -}} + {{- if or $rule.HasQuery $rule.Params}},{{ end -}} + {...key} + {{- end -}} + ); + return res.data; + }, ...options}); + } + {{ end -}} + {{- end -}} + {{- end }} + return { + {{- range .Module.HTTPQueries -}} + {{- $FullName := .FullName -}} + {{- $Name := .Name -}} + {{- range $i,$rule := .Rules -}} + {{- $n := "" -}} + {{- if (gt $i 0) -}} + {{- $n = inc $i -}} + {{- end -}} + {{ $FullName }}{{ $n }}, + {{- end -}} + {{- end }} + } +} diff --git a/ignite/pkg/cosmosgen/templates/module/index.ts.tpl b/ignite/pkg/cosmosgen/templates/module/index.ts.tpl new file mode 100644 index 0000000..4705917 --- /dev/null +++ b/ignite/pkg/cosmosgen/templates/module/index.ts.tpl @@ -0,0 +1,6 @@ +import IgntModule from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { IgntModule, msgTypes, txClient, queryClient, registry }; diff --git a/ignite/pkg/cosmosgen/templates/module/module.ts.tpl b/ignite/pkg/cosmosgen/templates/module/module.ts.tpl new file mode 100644 index 0000000..3c55a31 --- /dev/null +++ b/ignite/pkg/cosmosgen/templates/module/module.ts.tpl @@ -0,0 +1,130 @@ +// Generated by Ignite ignite.com/cli + +import { SigningStargateClient, DeliverTxResponse, StdFee } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; +{{ range .Module.Msgs }}import { {{ .Name }} } from "{{ resolveFile .FilePath }}"; +{{ end }} +{{ range .Module.Types }}import { {{ .Name }} as type{{- .Name -}} } from "./types" +{{ end }} +export { {{ range 
$i,$type:=.Module.Msgs }}{{ if (gt $i 0) }}, {{ end }}{{ $type.Name }}{{ end }} }; +{{ range .Module.Msgs }} +type send{{ .Name }}Params = { + value: {{ .Name }}, + fee?: StdFee, + memo?: string +}; +{{ end }} +{{ range .Module.Msgs }} +type {{ camelCase .Name }}Params = { + value: {{ .Name }}, +}; +{{ end }} + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + {{ range .Module.Msgs }} + async send{{ .Name }}({ value, fee, memo }: send{{ .Name }}Params): Promise<DeliverTxResponse> { + if (!signer) { + throw new Error('TxClient:send{{ .Name }}: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry}); + let msg = this.{{ camelCase .Name }}({ value: {{ .Name }}.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? 
fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:send{{ .Name }}: Could not broadcast Tx: '+ e.message) + } + }, + {{ end }} + {{ range .Module.Msgs }} + {{ camelCase .Name }}({ value }: {{ camelCase .Name }}Params): EncodeObject { + try { + return { typeUrl: "/{{ .URI }}", value: {{ .Name }}.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:{{ .Name }}: Could not create message: ' + e.message) + } + }, + {{ end }} + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType<typeof queryClient>; + public tx: ReturnType<typeof txClient>; + public structure: Record<string,unknown>; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + {{ range .Module.Types }}{{ .Name }}: getStructure(type{{ .Name }}.fromPartial({})), + {{ end }} + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? 
"cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const IgntModule = (test: IgniteClient) => { + return { + module: { + {{ camelCaseUpperSta .Module.Pkg.Name }}: new SDKModule(test) + }, + registry: msgTypes + } +} +export default IgntModule; \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/templates/module/registry.ts.tpl b/ignite/pkg/cosmosgen/templates/module/registry.ts.tpl new file mode 100644 index 0000000..1679b57 --- /dev/null +++ b/ignite/pkg/cosmosgen/templates/module/registry.ts.tpl @@ -0,0 +1,9 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; +{{ range .Module.Msgs }}import { {{ .Name }} } from "{{ resolveFile .FilePath }}"; +{{ end }} +const msgTypes: Array<[string, GeneratedType]> = [ + {{ range .Module.Msgs }}["/{{ .URI }}", {{ .Name }}], + {{ end }} +]; + +export { msgTypes } \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/templates/module/types.ts.tpl b/ignite/pkg/cosmosgen/templates/module/types.ts.tpl new file mode 100644 index 0000000..c60dd2f --- /dev/null +++ b/ignite/pkg/cosmosgen/templates/module/types.ts.tpl @@ -0,0 +1,7 @@ +{{ range .Module.Types }}import { {{ .Name }} } from "{{ resolveFile .FilePath }}" +{{ end }} + +export { + {{ range .Module.Types }}{{ .Name }}, + {{ end }} + } \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/templates/rest/rest.ts.tpl b/ignite/pkg/cosmosgen/templates/rest/rest.ts.tpl new file mode 100644 index 0000000..b25ff0d --- /dev/null +++ b/ignite/pkg/cosmosgen/templates/rest/rest.ts.tpl @@ -0,0 +1,221 @@ +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; +{{ range .HTTPQueries }}import { {{ .ResponseType }} } from "{{ resolveFile .FilePath }}"; +{{ end }} +{{ range .HTTPQueries }}import { {{ .RequestType }} } from "{{ resolveFile .FilePath }}"; +{{ end }} + +import type {SnakeCasedPropertiesDeep} from 'type-fest'; + +export type QueryParamsType = 
Record<string | number, any>; + +export type FlattenObject<TValue> = CollapseEntries<CreateObjectEntries<TValue, TValue>>; + +type Entry = { key: string; value: unknown }; +type EmptyEntry<TValue> = { key: ''; value: TValue }; +type ExcludedTypes = Date | Set<unknown> | Map<unknown, unknown>; +type ArrayEncoder = `[${bigint}]`; + +type EscapeArrayKey<TKey extends string> = TKey extends `${infer TKeyBefore}.${ArrayEncoder}${infer TKeyAfter}` + ? EscapeArrayKey<`${TKeyBefore}${ArrayEncoder}${TKeyAfter}`> + : TKey; + +// Transforms entries to one flattened type +type CollapseEntries<TEntry extends Entry> = { + [E in TEntry as EscapeArrayKey<E['key']>]: E['value']; +}; + +// Transforms array type to object +type CreateArrayEntry<TValue, TValueInitial> = OmitItself< + TValue extends unknown[] ? { [k: ArrayEncoder]: TValue[number] } : TValue, + TValueInitial +>; + +// Omit the type that references itself +type OmitItself<TValue, TValueInitial> = TValue extends TValueInitial + ? EmptyEntry<TValue> + : OmitExcludedTypes<TValue, TValueInitial>; + +// Omit the type that is listed in ExcludedTypes union +type OmitExcludedTypes<TValue, TValueInitial> = TValue extends ExcludedTypes + ? EmptyEntry<TValue> + : CreateObjectEntries<TValue, TValueInitial>; + +type CreateObjectEntries<TValue, TValueInitial> = TValue extends object + ? { + // Checks that Key is of type string + [TKey in keyof TValue]-?: TKey extends string + ? // Nested key can be an object, run recursively to the bottom + CreateArrayEntry<TValue[TKey], TValueInitial> extends infer TNestedValue + ? TNestedValue extends Entry + ? TNestedValue['key'] extends '' + ? 
{ + key: TKey; + value: TNestedValue['value']; + } + : + | { + key: `${TKey}.${TNestedValue['key']}`; + value: TNestedValue['value']; + } + | { + key: TKey; + value: TValue[TKey]; + } + : never + : never + : never; + }[keyof TValue] // Builds entry for each key + : EmptyEntry<TValue>; + +export type ChangeProtoToJSPrimitives<T extends object> = { + [key in keyof T]: T[key] extends Uint8Array | Date ? string : T[key] extends object ? ChangeProtoToJSPrimitives<T[key]>: T[key]; + // ^^^^ This line is used to convert Uint8Array to string, if you want to keep Uint8Array as is, you can remove this line +} + +export interface FullRequestParams extends Omit<AxiosRequestConfig, "data" | "params" | "url" | "responseType"> { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit<FullRequestParams, "body" | "method" | "query" | "path">; + +export interface ApiConfig<SecurityDataType = unknown> extends Omit<AxiosRequestConfig, "data" | "cancelToken"> { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise<AxiosRequestConfig | void> | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient<SecurityDataType = unknown> { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig<SecurityDataType>["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: 
ApiConfig<SecurityDataType> = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers ), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + } as AxiosRequestConfig; + } + + private createFormData(input: Record<string, unknown>): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async <T = any>({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise<AxiosResponse<T>> => { + const secureParams = + ((typeof secure === "boolean" ? secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record<string, unknown>); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? 
{ "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title {{ .Pkg.Name }} + */ +export class Api<SecurityDataType extends unknown> extends HttpClient<SecurityDataType> { + {{- range .HTTPQueries }} + /** + * {{ .FullName }} + * + * @tags Query + * @name {{ camelCase .FullName }} + * @request GET:{{ (index .Rules 0).Endpoint }} + */ + {{ camelCase .FullName }} = ( + {{- if (index .Rules 0).Params }} + {{- range $i, $param := (index .Rules 0).Params }}{{ if $i }}, {{ end }}{{ $param }}: string{{- end }}, + {{- end }}{{- if gt (len (index .Rules 0).QueryFields) 0 }} + query?: Omit<FlattenObject<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<{{ .RequestType }}>>>,{{ transformParamsToUnion (index .Rules 0).Params }}>, +{{- else}} + query?: Record<string, any>, +{{- end}} + params: RequestParams = {}, + ) => + this.request<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<{{ .ResponseType }}>>>({ + path: `{{ transformPath (index .Rules 0).Endpoint }}`, + method: "GET", + query: query, + format: "json", + ...params, + }); + {{ end }} +} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/templates/root/client.ts.tpl b/ignite/pkg/cosmosgen/templates/root/client.ts.tpl new file mode 100644 index 0000000..3c30aae --- /dev/null +++ b/ignite/pkg/cosmosgen/templates/root/client.ts.tpl @@ -0,0 +1,164 @@ +/// <reference path="./types.d.ts" /> +import { + GeneratedType, + OfflineSigner, + EncodeObject, + Registry, +} from "@cosmjs/proto-signing"; +import { SigningStargateClient, StdFee } from "@cosmjs/stargate"; +import { Env } from "./env"; +import { UnionToIntersection, Return, Constructor } from "./helpers"; +import { IgntModule } from "./modules"; +import { EventEmitter } from "events"; +import { ChainInfo } from "@keplr-wallet/types"; + +const defaultFee = { + amount: [], + gas: "200000", +}; + +export class IgniteClient extends 
EventEmitter { + static plugins: IgntModule[] = []; + env: Env; + signer?: OfflineSigner; + registry: Array<[string, GeneratedType]> = []; + static plugin<T extends IgntModule | IgntModule[]>(plugin: T) { + const currentPlugins = this.plugins; + + class AugmentedClient extends this { + static plugins = currentPlugins.concat(plugin); + } + + if (Array.isArray(plugin)) { + type Extension = UnionToIntersection<Return<T>['module']> + return AugmentedClient as typeof IgniteClient & Constructor<Extension>; + } + + type Extension = Return<T>['module'] + return AugmentedClient as typeof IgniteClient & Constructor<Extension>; + } + + async signAndBroadcast(msgs: EncodeObject[], fee: StdFee, memo: string) { + if (this.signer) { + const { address } = (await this.signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(this.env.rpcURL, this.signer, { registry: new Registry(this.registry) }); + return await signingClient.signAndBroadcast(address, msgs, fee ? fee : defaultFee, memo) + } else { + throw new Error(" Signer is not present."); + } + } + + constructor(env: Env, signer?: OfflineSigner) { + super(); + this.env = env; + this.setMaxListeners(0); + this.signer = signer; + const classConstructor = this.constructor as typeof IgniteClient; + classConstructor.plugins.forEach(plugin => { + const pluginInstance = plugin(this); + Object.assign(this, pluginInstance.module) + if (this.registry) { + this.registry = this.registry.concat(pluginInstance.registry) + } + }); + } + useSigner(signer: OfflineSigner) { + this.signer = signer; + this.emit("signer-changed", this.signer); + } + removeSigner() { + this.signer = undefined; + this.emit("signer-changed", this.signer); + } + async useKeplr(keplrChainInfo: Partial<ChainInfo> = {}) { + // Using queryClients directly because BaseClient has no knowledge of the modules at this stage + try { + const queryClient = ( + await import("./cosmos.base.tendermint.v1beta1/module") + ).queryClient; + const 
bankQueryClient = (await import("./cosmos.bank.v1beta1/module")) + .queryClient; + const stakingQueryClient = (await import("./cosmos.staking.v1beta1/module")).queryClient; + const stakingqc = stakingQueryClient({ addr: this.env.apiURL }); + const staking = await (await stakingqc.queryParams()).data; + const qc = queryClient({ addr: this.env.apiURL }); + const node_info = await (await qc.serviceGetNodeInfo()).data; + const chainId = node_info.default_node_info?.network ?? ""; + const chainName = chainId?.toUpperCase() + " Network"; + const bankqc = bankQueryClient({ addr: this.env.apiURL }); + const tokens = await (await bankqc.queryTotalSupply()).data; + const addrPrefix = this.env.prefix ?? "cosmos"; + const rpc = this.env.rpcURL; + const rest = this.env.apiURL; + + let bip44 = { + coinType: 118, + }; + + let bech32Config = { + bech32PrefixAccAddr: addrPrefix, + bech32PrefixAccPub: addrPrefix + "pub", + bech32PrefixValAddr: addrPrefix + "valoper", + bech32PrefixValPub: addrPrefix + "valoperpub", + bech32PrefixConsAddr: addrPrefix + "valcons", + bech32PrefixConsPub: addrPrefix + "valconspub", + }; + + let currencies = + tokens.supply?.map((x) => { + const y = { + coinDenom: x.denom?.toUpperCase() ?? "", + coinMinimalDenom: x.denom ?? "", + coinDecimals: 0, + }; + return y; + }) ?? []; + + let stakeCurrency = { + coinDenom: staking.params?.bond_denom?.toUpperCase() ?? "", + coinMinimalDenom: staking.params?.bond_denom ?? "", + coinDecimals: 0, + }; + + let feeCurrencies = + tokens.supply?.map((x) => { + const y = { + coinDenom: x.denom?.toUpperCase() ?? "", + coinMinimalDenom: x.denom ?? "", + coinDecimals: 0, + }; + return y; + }) ?? 
[]; + + if (chainId) { + const suggestOptions: ChainInfo = { + chainId, + chainName, + rpc, + rest, + stakeCurrency, + bip44, + bech32Config, + currencies, + feeCurrencies, + ...keplrChainInfo, + }; + await window.keplr.experimentalSuggestChain(suggestOptions); + + window.keplr.defaultOptions = { + sign: { + preferNoSetFee: true, + preferNoSetMemo: true, + }, + }; + } + await window.keplr.enable(chainId); + this.signer = window.keplr.getOfflineSigner(chainId); + this.emit("signer-changed", this.signer); + } catch (e) { + throw new Error( + "Could not load tendermint, staking and bank modules. Please ensure your client loads them to use useKeplr()" + ); + } + } +} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/templates/root/env.ts.tpl b/ignite/pkg/cosmosgen/templates/root/env.ts.tpl new file mode 100644 index 0000000..dbd876a --- /dev/null +++ b/ignite/pkg/cosmosgen/templates/root/env.ts.tpl @@ -0,0 +1,7 @@ +import { OfflineSigner } from "@cosmjs/proto-signing"; + +export interface Env { + apiURL: string + rpcURL: string + prefix?: string +} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/templates/root/helpers.ts.tpl b/ignite/pkg/cosmosgen/templates/root/helpers.ts.tpl new file mode 100644 index 0000000..80e1ecf --- /dev/null +++ b/ignite/pkg/cosmosgen/templates/root/helpers.ts.tpl @@ -0,0 +1,32 @@ +export type Constructor<T> = new (...args: any[]) => T; + +export type AnyFunction = (...args: any) => any; + +export type UnionToIntersection<Union> = + (Union extends any + ? (argument: Union) => void + : never + ) extends (argument: infer Intersection) => void + ? Intersection + : never; + +export type Return<T> = + T extends AnyFunction + ? ReturnType<T> + : T extends AnyFunction[] + ? 
UnionToIntersection<ReturnType<T[number]>> + : never + + +export const MissingWalletError = new Error("wallet is required"); + +export function getStructure(template) { + let structure = { fields: [] as Array<unknown>} + for (const [key, value] of Object.entries(template)) { + let field: any = {} + field.name = key + field.type = typeof value + structure.fields.push(field) + } + return structure +} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/templates/root/index.ts.tpl b/ignite/pkg/cosmosgen/templates/root/index.ts.tpl new file mode 100644 index 0000000..721b762 --- /dev/null +++ b/ignite/pkg/cosmosgen/templates/root/index.ts.tpl @@ -0,0 +1,21 @@ +// Generated by Ignite ignite.com/cli +import { Registry } from '@cosmjs/proto-signing' +import { IgniteClient } from "./client"; +import { MissingWalletError } from "./helpers"; +{{ range .Modules }}import { IgntModule as {{ camelCaseUpperSta .Pkg.Name }}, msgTypes as {{ camelCaseUpperSta .Pkg.Name }}MsgTypes } from './{{ .Pkg.Name }}' +{{ end }} + +const Client = IgniteClient.plugin([ + {{ range $i,$module :=.Modules }}{{ if (gt $i 0) }}, {{ end }}{{ camelCaseUpperSta $module.Pkg.Name }}{{ end }} +]); + +const registry = new Registry([ + {{ range .Modules }}...{{ camelCaseUpperSta .Pkg.Name }}MsgTypes, + {{ end }} +]) + +export { + Client, + registry, + MissingWalletError +} diff --git a/ignite/pkg/cosmosgen/templates/root/modules.ts.tpl b/ignite/pkg/cosmosgen/templates/root/modules.ts.tpl new file mode 100644 index 0000000..49d2c8b --- /dev/null +++ b/ignite/pkg/cosmosgen/templates/root/modules.ts.tpl @@ -0,0 +1,5 @@ +import { IgniteClient } from "./client"; +import { GeneratedType } from "@cosmjs/proto-signing"; + +export type IgntModuleInterface = { [key: string]: any } +export type IgntModule = (instance: IgniteClient) => { module: IgntModuleInterface, registry: [string, GeneratedType][] } diff --git a/ignite/pkg/cosmosgen/templates/root/package.json.tpl 
b/ignite/pkg/cosmosgen/templates/root/package.json.tpl new file mode 100644 index 0000000..c238d51 --- /dev/null +++ b/ignite/pkg/cosmosgen/templates/root/package.json.tpl @@ -0,0 +1,39 @@ +{ + "name": "{{ .PackageNS }}-client-ts", + "version": "0.0.1", + "description": "Autogenerated Typescript Client", + "author": "Ignite Codegen <hello@ignite.com>", + "license": "Apache-2.0", + "licenses": [ + { + "type": "Apache-2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0" + } + ], + "main": "lib/index.js", + "publishConfig": { + "access": "public" + }, + "scripts": { + "build": "NODE_OPTIONS='--max-old-space-size=16384' tsc" + }, + "dependencies": { + "@cosmjs/proto-signing": "0.33.1", + "@cosmjs/stargate": "0.33.1", + "@keplr-wallet/types": "^0.12.234", + "axios": "1.9.0", + "buffer": "^6.0.3", + "events": "^3.3.0" + }, + "peerDependencies": { + "@cosmjs/proto-signing": "0.33.1", + "@cosmjs/stargate": "0.33.1" + }, + "devDependencies": { + "@bufbuild/protobuf": "^2.4.0", + "@types/events": "^3.0.3", + "qs": "^6.14.0", + "type-fest": "^4.41.0", + "typescript": "^5.8.3" + } +} diff --git a/ignite/pkg/cosmosgen/templates/root/tsconfig.json.tpl b/ignite/pkg/cosmosgen/templates/root/tsconfig.json.tpl new file mode 100644 index 0000000..6d679bb --- /dev/null +++ b/ignite/pkg/cosmosgen/templates/root/tsconfig.json.tpl @@ -0,0 +1,13 @@ +{ + "compilerOptions": { + "target": "ES2020", + "module": "ES2020", + "moduleResolution": "node", + "outDir": "./lib", + "declaration": true, + "allowSyntheticDefaultImports": true, + "esModuleInterop": false, + "strict": false, + "skipLibCheck": true + } + } \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/templates/root/types.d.ts.tpl b/ignite/pkg/cosmosgen/templates/root/types.d.ts.tpl new file mode 100644 index 0000000..b839c08 --- /dev/null +++ b/ignite/pkg/cosmosgen/templates/root/types.d.ts.tpl @@ -0,0 +1,21 @@ +import { Keplr, Window as KeplrWindow } from '@keplr-wallet/types'; + +declare global { + interface 
KeplrIntereactionOptions { + readonly sign?: KeplrSignOptions; + } + + export interface KeplrSignOptions { + readonly preferNoSetFee?: boolean; + readonly preferNoSetMemo?: boolean; + readonly disableBalanceCheck?: boolean; + } + interface CustomKeplr extends Keplr { + enable(chainId: string | string[]): Promise<void>; + + defaultOptions: KeplrIntereactionOptions; + } + interface Window extends KeplrWindow { + keplr: CustomKeplr; + } +} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/openapi/openapi.json b/ignite/pkg/cosmosgen/testdata/expected_files/openapi/openapi.json new file mode 100644 index 0000000..a866102 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/openapi/openapi.json @@ -0,0 +1 @@ +{"id":"go.mod","consumes":["application/json"],"produces":["application/json"],"swagger":"2.0","info":{"description":"Chain go.mod REST API","title":"HTTP API Console","contact":{"name":"go.mod"},"version":"version not set"},"paths":{"/ignite/mars/query_simple":{"get":{"tags":["Query"],"operationId":"GoModQuery_QuerySimple","responses":{"200":{"description":"A successful response.","schema":{"$ref":"#/definitions/ignite.planet.mars.QuerySimpleResponse"}},"default":{"description":"An unexpected error response.","schema":{"$ref":"#/definitions/google.rpc.Status"}}}}},"/ignite/mars/query_simple/{mytypefield}":{"get":{"tags":["Query"],"operationId":"GoModQuery_QuerySimpleParams","parameters":[{"type":"string","name":"mytypefield","in":"path","required":true}],"responses":{"200":{"description":"A successful response.","schema":{"$ref":"#/definitions/ignite.planet.mars.QuerySimpleParamsResponse"}},"default":{"description":"An unexpected error 
response.","schema":{"$ref":"#/definitions/google.rpc.Status"}}}}},"/ignite/mars/query_with_params/{mytypefield}":{"get":{"tags":["Query"],"operationId":"GoModQuery_QueryParamsWithPagination","parameters":[{"type":"string","name":"mytypefield","in":"path","required":true},{"type":"string","format":"byte","description":"key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. Only one of offset or key\nshould be set.","name":"pagination.key","in":"query"},{"type":"string","format":"uint64","description":"offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. Only one of offset or key should\nbe set.","name":"pagination.offset","in":"query"},{"type":"string","format":"uint64","description":"limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app.","name":"pagination.limit","in":"query"},{"type":"boolean","description":"count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. 
It is ignored when key\nis set.","name":"pagination.count_total","in":"query"},{"type":"boolean","description":"reverse is set to true if results are to be returned in the descending order.","name":"pagination.reverse","in":"query"}],"responses":{"200":{"description":"A successful response.","schema":{"$ref":"#/definitions/ignite.planet.mars.QueryWithPaginationResponse"}},"default":{"description":"An unexpected error response.","schema":{"$ref":"#/definitions/google.rpc.Status"}}}}},"/ignite/mars/query_with_query_params/{mytypefield}":{"get":{"tags":["Query"],"operationId":"GoModQuery_QueryWithQueryParamsWithPagination","parameters":[{"type":"string","name":"mytypefield","in":"path","required":true},{"type":"string","name":"query_param","in":"query"},{"type":"string","format":"byte","description":"key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. Only one of offset or key\nshould be set.","name":"pagination.key","in":"query"},{"type":"string","format":"uint64","description":"offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. Only one of offset or key should\nbe set.","name":"pagination.offset","in":"query"},{"type":"string","format":"uint64","description":"limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app.","name":"pagination.limit","in":"query"},{"type":"boolean","description":"count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. 
It is ignored when key\nis set.","name":"pagination.count_total","in":"query"},{"type":"boolean","description":"reverse is set to true if results are to be returned in the descending order.","name":"pagination.reverse","in":"query"}],"responses":{"200":{"description":"A successful response.","schema":{"$ref":"#/definitions/ignite.planet.mars.QueryWithQueryParamsWithPaginationResponse"}},"default":{"description":"An unexpected error response.","schema":{"$ref":"#/definitions/google.rpc.Status"}}}}},"/ignite/mars/query_with_query_params/{mytypefield}/{mybool}":{"get":{"tags":["Query"],"operationId":"GoModQuery_QueryWithQueryParams","parameters":[{"type":"string","name":"mytypefield","in":"path","required":true},{"type":"boolean","name":"mybool","in":"path","required":true},{"type":"string","name":"query_param","in":"query"},{"type":"array","items":{"type":"boolean"},"collectionFormat":"multi","name":"myrepeatedbool","in":"query"}],"responses":{"200":{"description":"A successful response.","schema":{"$ref":"#/definitions/ignite.planet.mars.QueryWithQueryParamsResponse"}},"default":{"description":"An unexpected error response.","schema":{"$ref":"#/definitions/google.rpc.Status"}}}}}},"definitions":{"cosmos.base.query.v1beta1.PageRequest":{"description":"message SomeRequest {\n Foo some_parameter = 1;\n PageRequest pagination = 2;\n }","type":"object","title":"PageRequest is to be embedded in gRPC request messages for efficient\npagination. Ex:","properties":{"count_total":{"description":"count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. It is ignored when key\nis set.","type":"boolean"},"key":{"description":"key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. 
Only one of offset or key\nshould be set.","type":"string","format":"byte"},"limit":{"description":"limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app.","type":"string","format":"uint64"},"offset":{"description":"offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. Only one of offset or key should\nbe set.","type":"string","format":"uint64"},"reverse":{"description":"reverse is set to true if results are to be returned in the descending order.","type":"boolean"}}},"cosmos.base.query.v1beta1.PageResponse":{"description":"PageResponse is to be embedded in gRPC response messages where the\ncorresponding request message has used PageRequest.\n\n message SomeResponse {\n repeated Bar results = 1;\n PageResponse page = 2;\n }","type":"object","properties":{"next_key":{"description":"next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently. 
It will be empty if\nthere are no more results.","type":"string","format":"byte"},"total":{"type":"string","format":"uint64","title":"total is total number of results available if PageRequest.count_total\nwas set, its value is undefined otherwise"}}},"google.protobuf.Any":{"type":"object","properties":{"@type":{"type":"string"}},"additionalProperties":{}},"google.rpc.Status":{"type":"object","properties":{"code":{"type":"integer","format":"int32"},"details":{"type":"array","items":{"type":"object","$ref":"#/definitions/google.protobuf.Any"}},"message":{"type":"string"}}},"ignite.planet.mars.MsgBarResponse":{"type":"object","properties":{"mytypefield":{"type":"string"}}},"ignite.planet.mars.MsgMyMessageResponse":{"type":"object","properties":{"mytypefield":{"type":"string"}}},"ignite.planet.mars.QuerySimpleParamsResponse":{"type":"object","properties":{"bar":{"type":"string"}}},"ignite.planet.mars.QuerySimpleResponse":{"type":"object","properties":{"bar":{"type":"string"}}},"ignite.planet.mars.QueryWithPaginationResponse":{"type":"object","properties":{"pagination":{"$ref":"#/definitions/cosmos.base.query.v1beta1.PageResponse"}}},"ignite.planet.mars.QueryWithQueryParamsResponse":{"type":"object","properties":{"bar":{"type":"string"}}},"ignite.planet.mars.QueryWithQueryParamsWithPaginationResponse":{"type":"object","properties":{"bar":{"type":"string"},"pagination":{"$ref":"#/definitions/cosmos.base.query.v1beta1.PageResponse"}}}},"tags":[{"name":"Msg"},{"name":"Query"}]} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/client.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/client.ts new file mode 100755 index 0000000..3c30aae --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/client.ts @@ -0,0 +1,164 @@ +/// <reference path="./types.d.ts" /> +import { + GeneratedType, + OfflineSigner, + EncodeObject, + Registry, +} from "@cosmjs/proto-signing"; +import { SigningStargateClient, StdFee 
} from "@cosmjs/stargate"; +import { Env } from "./env"; +import { UnionToIntersection, Return, Constructor } from "./helpers"; +import { IgntModule } from "./modules"; +import { EventEmitter } from "events"; +import { ChainInfo } from "@keplr-wallet/types"; + +const defaultFee = { + amount: [], + gas: "200000", +}; + +export class IgniteClient extends EventEmitter { + static plugins: IgntModule[] = []; + env: Env; + signer?: OfflineSigner; + registry: Array<[string, GeneratedType]> = []; + static plugin<T extends IgntModule | IgntModule[]>(plugin: T) { + const currentPlugins = this.plugins; + + class AugmentedClient extends this { + static plugins = currentPlugins.concat(plugin); + } + + if (Array.isArray(plugin)) { + type Extension = UnionToIntersection<Return<T>['module']> + return AugmentedClient as typeof IgniteClient & Constructor<Extension>; + } + + type Extension = Return<T>['module'] + return AugmentedClient as typeof IgniteClient & Constructor<Extension>; + } + + async signAndBroadcast(msgs: EncodeObject[], fee: StdFee, memo: string) { + if (this.signer) { + const { address } = (await this.signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(this.env.rpcURL, this.signer, { registry: new Registry(this.registry) }); + return await signingClient.signAndBroadcast(address, msgs, fee ? 
fee : defaultFee, memo) + } else { + throw new Error(" Signer is not present."); + } + } + + constructor(env: Env, signer?: OfflineSigner) { + super(); + this.env = env; + this.setMaxListeners(0); + this.signer = signer; + const classConstructor = this.constructor as typeof IgniteClient; + classConstructor.plugins.forEach(plugin => { + const pluginInstance = plugin(this); + Object.assign(this, pluginInstance.module) + if (this.registry) { + this.registry = this.registry.concat(pluginInstance.registry) + } + }); + } + useSigner(signer: OfflineSigner) { + this.signer = signer; + this.emit("signer-changed", this.signer); + } + removeSigner() { + this.signer = undefined; + this.emit("signer-changed", this.signer); + } + async useKeplr(keplrChainInfo: Partial<ChainInfo> = {}) { + // Using queryClients directly because BaseClient has no knowledge of the modules at this stage + try { + const queryClient = ( + await import("./cosmos.base.tendermint.v1beta1/module") + ).queryClient; + const bankQueryClient = (await import("./cosmos.bank.v1beta1/module")) + .queryClient; + const stakingQueryClient = (await import("./cosmos.staking.v1beta1/module")).queryClient; + const stakingqc = stakingQueryClient({ addr: this.env.apiURL }); + const staking = await (await stakingqc.queryParams()).data; + const qc = queryClient({ addr: this.env.apiURL }); + const node_info = await (await qc.serviceGetNodeInfo()).data; + const chainId = node_info.default_node_info?.network ?? ""; + const chainName = chainId?.toUpperCase() + " Network"; + const bankqc = bankQueryClient({ addr: this.env.apiURL }); + const tokens = await (await bankqc.queryTotalSupply()).data; + const addrPrefix = this.env.prefix ?? 
"cosmos"; + const rpc = this.env.rpcURL; + const rest = this.env.apiURL; + + let bip44 = { + coinType: 118, + }; + + let bech32Config = { + bech32PrefixAccAddr: addrPrefix, + bech32PrefixAccPub: addrPrefix + "pub", + bech32PrefixValAddr: addrPrefix + "valoper", + bech32PrefixValPub: addrPrefix + "valoperpub", + bech32PrefixConsAddr: addrPrefix + "valcons", + bech32PrefixConsPub: addrPrefix + "valconspub", + }; + + let currencies = + tokens.supply?.map((x) => { + const y = { + coinDenom: x.denom?.toUpperCase() ?? "", + coinMinimalDenom: x.denom ?? "", + coinDecimals: 0, + }; + return y; + }) ?? []; + + let stakeCurrency = { + coinDenom: staking.params?.bond_denom?.toUpperCase() ?? "", + coinMinimalDenom: staking.params?.bond_denom ?? "", + coinDecimals: 0, + }; + + let feeCurrencies = + tokens.supply?.map((x) => { + const y = { + coinDenom: x.denom?.toUpperCase() ?? "", + coinMinimalDenom: x.denom ?? "", + coinDecimals: 0, + }; + return y; + }) ?? []; + + if (chainId) { + const suggestOptions: ChainInfo = { + chainId, + chainName, + rpc, + rest, + stakeCurrency, + bip44, + bech32Config, + currencies, + feeCurrencies, + ...keplrChainInfo, + }; + await window.keplr.experimentalSuggestChain(suggestOptions); + + window.keplr.defaultOptions = { + sign: { + preferNoSetFee: true, + preferNoSetMemo: true, + }, + }; + } + await window.keplr.enable(chainId); + this.signer = window.keplr.getOfflineSigner(chainId); + this.emit("signer-changed", this.signer); + } catch (e) { + throw new Error( + "Could not load tendermint, staking and bank modules. 
Please ensure your client loads them to use useKeplr()" + ); + } + } +} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/env.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/env.ts new file mode 100755 index 0000000..dbd876a --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/env.ts @@ -0,0 +1,7 @@ +import { OfflineSigner } from "@cosmjs/proto-signing"; + +export interface Env { + apiURL: string + rpcURL: string + prefix?: string +} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/helpers.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/helpers.ts new file mode 100755 index 0000000..80e1ecf --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/helpers.ts @@ -0,0 +1,32 @@ +export type Constructor<T> = new (...args: any[]) => T; + +export type AnyFunction = (...args: any) => any; + +export type UnionToIntersection<Union> = + (Union extends any + ? (argument: Union) => void + : never + ) extends (argument: infer Intersection) => void + ? Intersection + : never; + +export type Return<T> = + T extends AnyFunction + ? ReturnType<T> + : T extends AnyFunction[] + ? 
UnionToIntersection<ReturnType<T[number]>> + : never + + +export const MissingWalletError = new Error("wallet is required"); + +export function getStructure(template) { + let structure = { fields: [] as Array<unknown>} + for (const [key, value] of Object.entries(template)) { + let field: any = {} + field.name = key + field.type = typeof value + structure.fields.push(field) + } + return structure +} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/index.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/index.ts new file mode 100755 index 0000000..4705917 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/index.ts @@ -0,0 +1,6 @@ +import IgntModule from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { IgntModule, msgTypes, txClient, queryClient, registry }; diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/module.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/module.ts new file mode 100755 index 0000000..87bf764 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/module.ts @@ -0,0 +1,163 @@ +// Generated by Ignite ignite.com/cli + +import { SigningStargateClient, DeliverTxResponse, StdFee } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; +import { MsgMyMessageRequest } from "./types/ignite/planet/mars/mars"; +import { MsgBarRequest } from "./types/ignite/planet/mars/mars"; + +import { AnotherType as typeAnotherType} from "./types" + +export { MsgMyMessageRequest, MsgBarRequest }; + +type 
sendMsgMyMessageRequestParams = { + value: MsgMyMessageRequest, + fee?: StdFee, + memo?: string +}; + +type sendMsgBarRequestParams = { + value: MsgBarRequest, + fee?: StdFee, + memo?: string +}; + + +type msgMyMessageRequestParams = { + value: MsgMyMessageRequest, +}; + +type msgBarRequestParams = { + value: MsgBarRequest, +}; + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + async sendMsgMyMessageRequest({ value, fee, memo }: sendMsgMyMessageRequestParams): Promise<DeliverTxResponse> { + if (!signer) { + throw new Error('TxClient:sendMsgMyMessageRequest: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry}); + let msg = this.msgMyMessageRequest({ value: MsgMyMessageRequest.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgMyMessageRequest: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgBarRequest({ value, fee, memo }: sendMsgBarRequestParams): Promise<DeliverTxResponse> { + if (!signer) { + throw new Error('TxClient:sendMsgBarRequest: Unable to sign Tx. 
Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry}); + let msg = this.msgBarRequest({ value: MsgBarRequest.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgBarRequest: Could not broadcast Tx: '+ e.message) + } + }, + + + msgMyMessageRequest({ value }: msgMyMessageRequestParams): EncodeObject { + try { + return { typeUrl: "/ignite.planet.mars.MsgMyMessageRequest", value: MsgMyMessageRequest.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgMyMessageRequest: Could not create message: ' + e.message) + } + }, + + msgBarRequest({ value }: msgBarRequestParams): EncodeObject { + try { + return { typeUrl: "/ignite.planet.mars.MsgBarRequest", value: MsgBarRequest.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgBarRequest: Could not create message: ' + e.message) + } + }, + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType<typeof queryClient>; + public tx: ReturnType<typeof txClient>; + public structure: Record<string,unknown>; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + AnotherType: getStructure(typeAnotherType.fromPartial({})), + + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? 
"cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const IgntModule = (test: IgniteClient) => { + return { + module: { + IgnitePlanetMars: new SDKModule(test) + }, + registry: msgTypes + } +} +export default IgntModule; \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/registry.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/registry.ts new file mode 100755 index 0000000..32e45de --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/registry.ts @@ -0,0 +1,11 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; +import { MsgMyMessageRequest } from "./types/ignite/planet/mars/mars"; +import { MsgBarRequest } from "./types/ignite/planet/mars/mars"; + +const msgTypes: Array<[string, GeneratedType]> = [ + ["/ignite.planet.mars.MsgMyMessageRequest", MsgMyMessageRequest], + ["/ignite.planet.mars.MsgBarRequest", MsgBarRequest], + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/rest.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/rest.ts new file mode 100755 index 0000000..da5286d --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/rest.ts @@ -0,0 +1,298 @@ +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; +import { QuerySimpleResponse } from "./types/ignite/planet/mars/mars"; +import { QuerySimpleParamsResponse } from "./types/ignite/planet/mars/mars"; +import { QueryWithPaginationResponse } from "./types/ignite/planet/mars/mars"; +import { QueryWithQueryParamsResponse } from "./types/ignite/planet/mars/mars"; +import { QueryWithQueryParamsWithPaginationResponse } from "./types/ignite/planet/mars/mars"; + +import { QuerySimpleRequest } from 
"./types/ignite/planet/mars/mars"; +import { QuerySimpleParamsRequest } from "./types/ignite/planet/mars/mars"; +import { QueryWithPaginationRequest } from "./types/ignite/planet/mars/mars"; +import { QueryWithQueryParamsRequest } from "./types/ignite/planet/mars/mars"; +import { QueryWithQueryParamsWithPaginationRequest } from "./types/ignite/planet/mars/mars"; + + +import type {SnakeCasedPropertiesDeep} from 'type-fest'; + +export type QueryParamsType = Record<string | number, any>; + +export type FlattenObject<TValue> = CollapseEntries<CreateObjectEntries<TValue, TValue>>; + +type Entry = { key: string; value: unknown }; +type EmptyEntry<TValue> = { key: ''; value: TValue }; +type ExcludedTypes = Date | Set<unknown> | Map<unknown, unknown>; +type ArrayEncoder = `[${bigint}]`; + +type EscapeArrayKey<TKey extends string> = TKey extends `${infer TKeyBefore}.${ArrayEncoder}${infer TKeyAfter}` + ? EscapeArrayKey<`${TKeyBefore}${ArrayEncoder}${TKeyAfter}`> + : TKey; + +// Transforms entries to one flattened type +type CollapseEntries<TEntry extends Entry> = { + [E in TEntry as EscapeArrayKey<E['key']>]: E['value']; +}; + +// Transforms array type to object +type CreateArrayEntry<TValue, TValueInitial> = OmitItself< + TValue extends unknown[] ? { [k: ArrayEncoder]: TValue[number] } : TValue, + TValueInitial +>; + +// Omit the type that references itself +type OmitItself<TValue, TValueInitial> = TValue extends TValueInitial + ? EmptyEntry<TValue> + : OmitExcludedTypes<TValue, TValueInitial>; + +// Omit the type that is listed in ExcludedTypes union +type OmitExcludedTypes<TValue, TValueInitial> = TValue extends ExcludedTypes + ? EmptyEntry<TValue> + : CreateObjectEntries<TValue, TValueInitial>; + +type CreateObjectEntries<TValue, TValueInitial> = TValue extends object + ? { + // Checks that Key is of type string + [TKey in keyof TValue]-?: TKey extends string + ? 
// Nested key can be an object, run recursively to the bottom + CreateArrayEntry<TValue[TKey], TValueInitial> extends infer TNestedValue + ? TNestedValue extends Entry + ? TNestedValue['key'] extends '' + ? { + key: TKey; + value: TNestedValue['value']; + } + : + | { + key: `${TKey}.${TNestedValue['key']}`; + value: TNestedValue['value']; + } + | { + key: TKey; + value: TValue[TKey]; + } + : never + : never + : never; + }[keyof TValue] // Builds entry for each key + : EmptyEntry<TValue>; + +export type ChangeProtoToJSPrimitives<T extends object> = { + [key in keyof T]: T[key] extends Uint8Array | Date ? string : T[key] extends object ? ChangeProtoToJSPrimitives<T[key]>: T[key]; + // ^^^^ This line is used to convert Uint8Array to string, if you want to keep Uint8Array as is, you can remove this line +} + +export interface FullRequestParams extends Omit<AxiosRequestConfig, "data" | "params" | "url" | "responseType"> { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. 
response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit<FullRequestParams, "body" | "method" | "query" | "path">; + +export interface ApiConfig<SecurityDataType = unknown> extends Omit<AxiosRequestConfig, "data" | "cancelToken"> { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise<AxiosRequestConfig | void> | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient<SecurityDataType = unknown> { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig<SecurityDataType>["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig<SecurityDataType> = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers ), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + } as AxiosRequestConfig; + } + + private createFormData(input: Record<string, unknown>): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? 
JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async <T = any>({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise<AxiosResponse<T>> => { + const secureParams = + ((typeof secure === "boolean" ? secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record<string, unknown>); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? { "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title ignite.planet.mars + */ +export class Api<SecurityDataType extends unknown> extends HttpClient<SecurityDataType> { + /** + * QueryQuerySimple + * + * @tags Query + * @name queryQuerySimple + * @request GET:/ignite/mars/query_simple + */ + queryQuerySimple = ( + query?: Record<string, any>, + params: RequestParams = {}, + ) => + this.request<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<QuerySimpleResponse>>>({ + path: `/ignite/mars/query_simple`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * QueryQuerySimpleParams + * + * @tags Query + * @name queryQuerySimpleParams + * @request GET:/ignite/mars/query_simple/{mytypefield} + */ + queryQuerySimpleParams = (mytypefield: string, + query?: Record<string, any>, + params: RequestParams = {}, + ) => + 
this.request<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<QuerySimpleParamsResponse>>>({ + path: `/ignite/mars/query_simple/${mytypefield}`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * QueryQueryParamsWithPagination + * + * @tags Query + * @name queryQueryParamsWithPagination + * @request GET:/ignite/mars/query_with_params/{mytypefield} + */ + queryQueryParamsWithPagination = (mytypefield: string, + query?: Omit<FlattenObject<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<QueryWithPaginationRequest>>>,"mytypefield">, + params: RequestParams = {}, + ) => + this.request<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<QueryWithPaginationResponse>>>({ + path: `/ignite/mars/query_with_params/${mytypefield}`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * QueryQueryWithQueryParams + * + * @tags Query + * @name queryQueryWithQueryParams + * @request GET:/ignite/mars/query_with_query_params/{mytypefield}/{mybool} + */ + queryQueryWithQueryParams = (mytypefield: string, mybool: string, + query?: Omit<FlattenObject<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<QueryWithQueryParamsRequest>>>,"mytypefield" | "mybool">, + params: RequestParams = {}, + ) => + this.request<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<QueryWithQueryParamsResponse>>>({ + path: `/ignite/mars/query_with_query_params/${mytypefield}/${mybool}`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * QueryQueryWithQueryParamsWithPagination + * + * @tags Query + * @name queryQueryWithQueryParamsWithPagination + * @request GET:/ignite/mars/query_with_query_params/{mytypefield} + */ + queryQueryWithQueryParamsWithPagination = (mytypefield: string, + query?: Omit<FlattenObject<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<QueryWithQueryParamsWithPaginationRequest>>>,"mytypefield">, + params: RequestParams = {}, + ) => + 
this.request<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<QueryWithQueryParamsWithPaginationResponse>>>({ + path: `/ignite/mars/query_with_query_params/${mytypefield}`, + method: "GET", + query: query, + format: "json", + ...params, + }); + +} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types.ts new file mode 100755 index 0000000..d4c7883 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types.ts @@ -0,0 +1,7 @@ +import { AnotherType } from "./types/ignite/planet/mars/mars" + + +export { + AnotherType, + + } \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/cosmos/base/query/v1beta1/pagination.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/cosmos/base/query/v1beta1/pagination.ts new file mode 100644 index 0000000..9766c4e --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/cosmos/base/query/v1beta1/pagination.ts @@ -0,0 +1,340 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.11.2 +// protoc unknown +// source: cosmos/base/query/v1beta1/pagination.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "cosmos.base.query.v1beta1"; + +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. 
+ */ + key: Uint8Array; + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + */ + offset: number; + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + */ + limit: number; + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + countTotal: boolean; + /** reverse is set to true if results are to be returned in the descending order. */ + reverse: boolean; +} + +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. 
+ */ + nextKey: Uint8Array; + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + */ + total: number; +} + +function createBasePageRequest(): PageRequest { + return { key: new Uint8Array(0), offset: 0, limit: 0, countTotal: false, reverse: false }; +} + +export const PageRequest: MessageFns<PageRequest> = { + encode(message: PageRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + if (message.offset !== 0) { + writer.uint32(16).uint64(message.offset); + } + if (message.limit !== 0) { + writer.uint32(24).uint64(message.limit); + } + if (message.countTotal !== false) { + writer.uint32(32).bool(message.countTotal); + } + if (message.reverse !== false) { + writer.uint32(40).bool(message.reverse); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PageRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.bytes(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.offset = longToNumber(reader.uint64()); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.limit = longToNumber(reader.uint64()); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.countTotal = reader.bool(); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.reverse = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): PageRequest { + return { + key: isSet(object.key) ? 
bytesFromBase64(object.key) : new Uint8Array(0), + offset: isSet(object.offset) ? globalThis.Number(object.offset) : 0, + limit: isSet(object.limit) ? globalThis.Number(object.limit) : 0, + countTotal: isSet(object.countTotal) + ? globalThis.Boolean(object.countTotal) + : isSet(object.count_total) + ? globalThis.Boolean(object.count_total) + : false, + reverse: isSet(object.reverse) ? globalThis.Boolean(object.reverse) : false, + }; + }, + + toJSON(message: PageRequest): unknown { + const obj: any = {}; + if (message.key.length !== 0) { + obj.key = base64FromBytes(message.key); + } + if (message.offset !== 0) { + obj.offset = Math.round(message.offset); + } + if (message.limit !== 0) { + obj.limit = Math.round(message.limit); + } + if (message.countTotal !== false) { + obj.countTotal = message.countTotal; + } + if (message.reverse !== false) { + obj.reverse = message.reverse; + } + return obj; + }, + + create<I extends Exact<DeepPartial<PageRequest>, I>>(base?: I): PageRequest { + return PageRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<PageRequest>, I>>(object: I): PageRequest { + const message = createBasePageRequest(); + message.key = object.key ?? new Uint8Array(0); + message.offset = object.offset ?? 0; + message.limit = object.limit ?? 0; + message.countTotal = object.countTotal ?? false; + message.reverse = object.reverse ?? false; + return message; + }, +}; + +function createBasePageResponse(): PageResponse { + return { nextKey: new Uint8Array(0), total: 0 }; +} + +export const PageResponse: MessageFns<PageResponse> = { + encode(message: PageResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.nextKey.length !== 0) { + writer.uint32(10).bytes(message.nextKey); + } + if (message.total !== 0) { + writer.uint32(16).uint64(message.total); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PageResponse { + const reader = input instanceof BinaryReader ? 
input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.nextKey = reader.bytes(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.total = longToNumber(reader.uint64()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): PageResponse { + return { + nextKey: isSet(object.nextKey) + ? bytesFromBase64(object.nextKey) + : isSet(object.next_key) + ? bytesFromBase64(object.next_key) + : new Uint8Array(0), + total: isSet(object.total) ? globalThis.Number(object.total) : 0, + }; + }, + + toJSON(message: PageResponse): unknown { + const obj: any = {}; + if (message.nextKey.length !== 0) { + obj.nextKey = base64FromBytes(message.nextKey); + } + if (message.total !== 0) { + obj.total = Math.round(message.total); + } + return obj; + }, + + create<I extends Exact<DeepPartial<PageResponse>, I>>(base?: I): PageResponse { + return PageResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<PageResponse>, I>>(object: I): PageResponse { + const message = createBasePageResponse(); + message.nextKey = object.nextKey ?? new Uint8Array(0); + message.total = object.total ?? 
0; + return message; + }, +}; + +function bytesFromBase64(b64: string): Uint8Array { + if ((globalThis as any).Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if ((globalThis as any).Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(globalThis.String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial<T> = T extends Builtin ? T + : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>> + : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>> + : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> } + : Partial<T>; + +type KeysOfUnion<T> = T extends T ? keyof T : never; +export type Exact<P, I extends P> = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never }; + +function longToNumber(int64: { toString(): string }): number { + const num = globalThis.Number(int64.toString()); + if (num > globalThis.Number.MAX_SAFE_INTEGER) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + if (num < globalThis.Number.MIN_SAFE_INTEGER) { + throw new globalThis.Error("Value is smaller than Number.MIN_SAFE_INTEGER"); + } + return num; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns<T> { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create<I extends Exact<DeepPartial<T>, I>>(base?: I): T; + fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T; +} diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/cosmos_proto/cosmos.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/cosmos_proto/cosmos.ts new file mode 100644 index 0000000..3d799ec --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/cosmos_proto/cosmos.ts @@ -0,0 +1,309 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.11.2 +// protoc unknown +// source: cosmos_proto/cosmos.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. 
+ * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. + */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor: MessageFns<InterfaceDescriptor> = { + encode(message: InterfaceDescriptor, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.description = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + description: isSet(object.description) ? globalThis.String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.description !== "") { + obj.description = message.description; + } + return obj; + }, + + create<I extends Exact<DeepPartial<InterfaceDescriptor>, I>>(base?: I): InterfaceDescriptor { + return InterfaceDescriptor.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<InterfaceDescriptor>, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? 
""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor: MessageFns<ScalarDescriptor> = { + encode(message: ScalarDescriptor, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.join(); + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.description = reader.string(); + continue; + } + case 3: { + if (tag === 24) { + message.fieldType.push(reader.int32() as any); + + continue; + } + + if (tag === 26) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + + continue; + } + + break; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + description: isSet(object.description) ? globalThis.String(object.description) : "", + fieldType: globalThis.Array.isArray(object?.fieldType) + ? object.fieldType.map((e: any) => scalarTypeFromJSON(e)) + : globalThis.Array.isArray(object?.field_type) + ? 
object.field_type.map((e: any) => scalarTypeFromJSON(e)) + : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.description !== "") { + obj.description = message.description; + } + if (message.fieldType?.length) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<ScalarDescriptor>, I>>(base?: I): ScalarDescriptor { + return ScalarDescriptor.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<ScalarDescriptor>, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial<T> = T extends Builtin ? T + : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>> + : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>> + : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> } + : Partial<T>; + +type KeysOfUnion<T> = T extends T ? keyof T : never; +export type Exact<P, I extends P> = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns<T> { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create<I extends Exact<DeepPartial<T>, I>>(base?: I): T; + fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T; +} diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/google/api/annotations.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/google/api/annotations.ts new file mode 100644 index 0000000..a205c7c --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/google/api/annotations.ts @@ -0,0 +1,9 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.11.2 +// protoc unknown +// source: google/api/annotations.proto + +/* eslint-disable */ + +export const protobufPackage = "google.api"; diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/google/api/http.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/google/api/http.ts new file mode 100644 index 0000000..2c3ad9d --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/google/api/http.ts @@ -0,0 +1,778 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.11.2 +// protoc unknown +// source: google/api/http.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. 
It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. + */ + rules: HttpRule[]; + /** + * When set to true, URL path parameters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. + */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * gRPC Transcoding + * + * gRPC Transcoding is a feature for mapping between a gRPC method and one or + * more HTTP REST endpoints. It allows developers to build a single API service + * that supports both gRPC APIs and REST APIs. Many systems, including [Google + * APIs](https://github.com/googleapis/googleapis), + * [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC + * Gateway](https://github.com/grpc-ecosystem/grpc-gateway), + * and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature + * and use it for large scale production services. + * + * `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies + * how different portions of the gRPC request message are mapped to the URL + * path, URL query parameters, and HTTP request body. It also controls how the + * gRPC response message is mapped to the HTTP response body. `HttpRule` is + * typically specified as an `google.api.http` annotation on the gRPC method. + * + * Each mapping specifies a URL path template and an HTTP method. The path + * template may refer to one or more fields in the gRPC request message, as long + * as each field is a non-repeated field with a primitive (non-message) type. 
+ * The path template controls how fields of the request message are mapped to + * the URL path. + * + * Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/{name=messages/*}" + * }; + * } + * } + * message GetMessageRequest { + * string name = 1; // Mapped to URL path. + * } + * message Message { + * string text = 1; // The resource content. + * } + * + * This enables an HTTP REST to gRPC mapping as below: + * + * - HTTP: `GET /v1/messages/123456` + * - gRPC: `GetMessage(name: "messages/123456")` + * + * Any fields in the request message which are not bound by the path template + * automatically become HTTP query parameters if there is no HTTP request body. + * For example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get:"/v1/messages/{message_id}" + * }; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // Mapped to URL path. + * int64 revision = 2; // Mapped to URL query parameter `revision`. + * SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * - HTTP: `GET /v1/messages/123456?revision=2&sub.subfield=foo` + * - gRPC: `GetMessage(message_id: "123456" revision: 2 sub: + * SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to URL query parameters must have a + * primitive type or a repeated primitive type or a non-repeated message type. + * In the case of a repeated type, the parameter can be repeated in the URL + * as `...?param=A¶m=B`. In the case of a message type, each field of the + * message is mapped to a separate parameter, such as + * `...?foo.a=A&foo.b=B&foo.c=C`. + * + * For HTTP methods that allow a request body, the `body` field + * specifies the mapping. 
Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * - HTTP: `PATCH /v1/messages/123456 { "text": "Hi!" }` + * - gRPC: `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * - HTTP: `PATCH /v1/messages/123456 { "text": "Hi!" }` + * - gRPC: `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice when + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. 
Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC mappings: + * + * - HTTP: `GET /v1/messages/123456` + * - gRPC: `GetMessage(message_id: "123456")` + * + * - HTTP: `GET /v1/users/me/messages/123456` + * - gRPC: `GetMessage(user_id: "me" message_id: "123456")` + * + * Rules for HTTP mapping + * + * 1. Leaf request fields (recursive expansion nested messages in the request + * message) are classified into three categories: + * - Fields referred by the path template. They are passed via the URL path. + * - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They + * are passed via the HTTP + * request body. + * - All other fields are passed via the URL query parameters, and the + * parameter name is the field path in the request message. A repeated + * field can be represented as multiple query parameters under the same + * name. + * 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL + * query parameter, all fields + * are passed via URL path and HTTP request body. + * 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP + * request body, all + * fields are passed via URL path and URL query parameters. + * + * Path template syntax + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single URL path segment. 
The syntax `**` matches + * zero or more URL path segments, which must be the last part of the URL path + * except the `Verb`. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` + * contains any reserved character, such characters should be percent-encoded + * before the matching. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path on the client + * side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The + * server side does the reverse decoding. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{var}`. + * + * If a variable contains multiple path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path on the + * client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. + * The server side does the reverse decoding, except "%2F" and "%2f" are left + * unchanged. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{+var}`. + * + * Using gRPC API Service Configuration + * + * gRPC API Service Configuration (service config) is a configuration language + * for configuring a gRPC service to become a user-facing product. The + * service config is simply the YAML representation of the `google.api.Service` + * proto message. + * + * As an alternative to annotating your proto file, you can configure gRPC + * transcoding in your service config YAML files. 
You do this by specifying a + * `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same + * effect as the proto annotation. This can be particularly useful if you + * have a proto that is reused in multiple services. Note that any transcoding + * specified in the service config will override any matching transcoding + * configuration in the proto. + * + * The following example selects a gRPC method and applies an `HttpRule` to it: + * + * http: + * rules: + * - selector: example.v1.Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * Special notes + * + * When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the + * proto to JSON conversion must follow the [proto3 + * specification](https://developers.google.com/protocol-buffers/docs/proto3#json). + * + * While the single segment variable follows the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String + * Expansion, the multi segment variable **does not** follow RFC 6570 Section + * 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. As the result, gRPC Transcoding uses a custom encoding + * for multi segment variables. + * + * The path variables **must not** refer to any repeated or mapped field, + * because client libraries are not capable of handling such variable expansion. + * + * The path variables **must not** capture the leading "/" character. The reason + * is that the most common use case "{var}" does not capture the leading "/" + * character. For consistency, all path variables must share the same behavior. + * + * Repeated message fields must not be mapped to URL query parameters, because + * no client library can support such complicated mapping. + * + * If an API needs to use a JSON array for request or response body, it can map + * the request or response body to a repeated field. 
However, some gRPC + * Transcoding implementations may not support this feature. + */ +export interface HttpRule { + /** + * Selects a method to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax + * details. + */ + selector: string; + /** + * Maps to HTTP GET. Used for listing and getting information about + * resources. + */ + get?: + | string + | undefined; + /** Maps to HTTP PUT. Used for replacing a resource. */ + put?: + | string + | undefined; + /** Maps to HTTP POST. Used for creating a resource or performing an action. */ + post?: + | string + | undefined; + /** Maps to HTTP DELETE. Used for deleting a resource. */ + delete?: + | string + | undefined; + /** Maps to HTTP PATCH. Used for updating a resource. */ + patch?: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom?: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP request + * body, or `*` for mapping all request fields not captured by the path + * pattern to the HTTP body, or omitted for not having any HTTP request body. + * + * NOTE: the referred field must be present at the top-level of the request + * message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * response body. When omitted, the entire response message will be used + * as the HTTP response body. + * + * NOTE: The referred field must be present at the top-level of the response + * message type. + */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. 
Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http: MessageFns<Http> = { + encode(message: Http, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).join(); + } + if (message.fullyDecodeReservedExpansion !== false) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Http { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.rules.push(HttpRule.decode(reader, reader.uint32())); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.fullyDecodeReservedExpansion = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: globalThis.Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? globalThis.Boolean(object.fullyDecodeReservedExpansion) + : isSet(object.fully_decode_reserved_expansion) + ? 
globalThis.Boolean(object.fully_decode_reserved_expansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules?.length) { + obj.rules = message.rules.map((e) => HttpRule.toJSON(e)); + } + if (message.fullyDecodeReservedExpansion !== false) { + obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion; + } + return obj; + }, + + create<I extends Exact<DeepPartial<Http>, I>>(base?: I): Http { + return Http.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<Http>, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule: MessageFns<HttpRule> = { + encode(message: HttpRule, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).join(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of 
message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.selector = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.get = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.put = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.post = reader.string(); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.delete = reader.string(); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.patch = reader.string(); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.body = reader.string(); + continue; + } + case 12: { + if (tag !== 98) { + break; + } + + message.responseBody = reader.string(); + continue; + } + case 11: { + if (tag !== 90) { + break; + } + + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? globalThis.String(object.selector) : "", + get: isSet(object.get) ? globalThis.String(object.get) : undefined, + put: isSet(object.put) ? globalThis.String(object.put) : undefined, + post: isSet(object.post) ? 
globalThis.String(object.post) : undefined, + delete: isSet(object.delete) ? globalThis.String(object.delete) : undefined, + patch: isSet(object.patch) ? globalThis.String(object.patch) : undefined, + custom: isSet(object.custom) ? CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? globalThis.String(object.body) : "", + responseBody: isSet(object.responseBody) + ? globalThis.String(object.responseBody) + : isSet(object.response_body) + ? globalThis.String(object.response_body) + : "", + additionalBindings: globalThis.Array.isArray(object?.additionalBindings) + ? object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : globalThis.Array.isArray(object?.additional_bindings) + ? object.additional_bindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + if (message.selector !== "") { + obj.selector = message.selector; + } + if (message.get !== undefined) { + obj.get = message.get; + } + if (message.put !== undefined) { + obj.put = message.put; + } + if (message.post !== undefined) { + obj.post = message.post; + } + if (message.delete !== undefined) { + obj.delete = message.delete; + } + if (message.patch !== undefined) { + obj.patch = message.patch; + } + if (message.custom !== undefined) { + obj.custom = CustomHttpPattern.toJSON(message.custom); + } + if (message.body !== "") { + obj.body = message.body; + } + if (message.responseBody !== "") { + obj.responseBody = message.responseBody; + } + if (message.additionalBindings?.length) { + obj.additionalBindings = message.additionalBindings.map((e) => HttpRule.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<HttpRule>, I>>(base?: I): HttpRule { + return HttpRule.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<HttpRule>, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? 
""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? ""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern: MessageFns<CustomHttpPattern> = { + encode(message: CustomHttpPattern, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.kind = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.path = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { + kind: isSet(object.kind) ? globalThis.String(object.kind) : "", + path: isSet(object.path) ? 
globalThis.String(object.path) : "", + }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + if (message.kind !== "") { + obj.kind = message.kind; + } + if (message.path !== "") { + obj.path = message.path; + } + return obj; + }, + + create<I extends Exact<DeepPartial<CustomHttpPattern>, I>>(base?: I): CustomHttpPattern { + return CustomHttpPattern.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<CustomHttpPattern>, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial<T> = T extends Builtin ? T + : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>> + : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>> + : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> } + : Partial<T>; + +type KeysOfUnion<T> = T extends T ? keyof T : never; +export type Exact<P, I extends P> = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns<T> { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create<I extends Exact<DeepPartial<T>, I>>(base?: I): T; + fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T; +} diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/google/protobuf/descriptor.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/google/protobuf/descriptor.ts new file mode 100644 index 0000000..5e7370b --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/google/protobuf/descriptor.ts @@ -0,0 +1,7277 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.11.2 +// protoc unknown +// source: google/protobuf/descriptor.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "google.protobuf"; + +/** The full set of known editions. */ +export enum Edition { + /** EDITION_UNKNOWN - A placeholder for an unknown edition value. */ + EDITION_UNKNOWN = 0, + /** + * EDITION_LEGACY - A placeholder edition for specifying default behaviors *before* a feature + * was first introduced. This is effectively an "infinite past". + */ + EDITION_LEGACY = 900, + /** + * EDITION_PROTO2 - Legacy syntax "editions". These pre-date editions, but behave much like + * distinct editions. These can't be used to specify the edition of proto + * files, but feature definitions must supply proto2/proto3 defaults for + * backwards compatibility. 
+ */ + EDITION_PROTO2 = 998, + EDITION_PROTO3 = 999, + /** + * EDITION_2023 - Editions that have been released. The specific values are arbitrary and + * should not be depended on, but they will always be time-ordered for easy + * comparison. + */ + EDITION_2023 = 1000, + EDITION_2024 = 1001, + /** + * EDITION_1_TEST_ONLY - Placeholder editions for testing feature resolution. These should not be + * used or relied on outside of tests. + */ + EDITION_1_TEST_ONLY = 1, + EDITION_2_TEST_ONLY = 2, + EDITION_99997_TEST_ONLY = 99997, + EDITION_99998_TEST_ONLY = 99998, + EDITION_99999_TEST_ONLY = 99999, + /** + * EDITION_MAX - Placeholder for specifying unbounded edition support. This should only + * ever be used by plugins that can expect to never require any changes to + * support a new edition. + */ + EDITION_MAX = 2147483647, + UNRECOGNIZED = -1, +} + +export function editionFromJSON(object: any): Edition { + switch (object) { + case 0: + case "EDITION_UNKNOWN": + return Edition.EDITION_UNKNOWN; + case 900: + case "EDITION_LEGACY": + return Edition.EDITION_LEGACY; + case 998: + case "EDITION_PROTO2": + return Edition.EDITION_PROTO2; + case 999: + case "EDITION_PROTO3": + return Edition.EDITION_PROTO3; + case 1000: + case "EDITION_2023": + return Edition.EDITION_2023; + case 1001: + case "EDITION_2024": + return Edition.EDITION_2024; + case 1: + case "EDITION_1_TEST_ONLY": + return Edition.EDITION_1_TEST_ONLY; + case 2: + case "EDITION_2_TEST_ONLY": + return Edition.EDITION_2_TEST_ONLY; + case 99997: + case "EDITION_99997_TEST_ONLY": + return Edition.EDITION_99997_TEST_ONLY; + case 99998: + case "EDITION_99998_TEST_ONLY": + return Edition.EDITION_99998_TEST_ONLY; + case 99999: + case "EDITION_99999_TEST_ONLY": + return Edition.EDITION_99999_TEST_ONLY; + case 2147483647: + case "EDITION_MAX": + return Edition.EDITION_MAX; + case -1: + case "UNRECOGNIZED": + default: + return Edition.UNRECOGNIZED; + } +} + +export function editionToJSON(object: Edition): string { + switch 
(object) { + case Edition.EDITION_UNKNOWN: + return "EDITION_UNKNOWN"; + case Edition.EDITION_LEGACY: + return "EDITION_LEGACY"; + case Edition.EDITION_PROTO2: + return "EDITION_PROTO2"; + case Edition.EDITION_PROTO3: + return "EDITION_PROTO3"; + case Edition.EDITION_2023: + return "EDITION_2023"; + case Edition.EDITION_2024: + return "EDITION_2024"; + case Edition.EDITION_1_TEST_ONLY: + return "EDITION_1_TEST_ONLY"; + case Edition.EDITION_2_TEST_ONLY: + return "EDITION_2_TEST_ONLY"; + case Edition.EDITION_99997_TEST_ONLY: + return "EDITION_99997_TEST_ONLY"; + case Edition.EDITION_99998_TEST_ONLY: + return "EDITION_99998_TEST_ONLY"; + case Edition.EDITION_99999_TEST_ONLY: + return "EDITION_99999_TEST_ONLY"; + case Edition.EDITION_MAX: + return "EDITION_MAX"; + case Edition.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * Describes the 'visibility' of a symbol with respect to the proto import + * system. Symbols can only be imported when the visibility rules do not prevent + * it (ex: local symbols cannot be imported). Visibility modifiers can only set + * on `message` and `enum` as they are the only types available to be referenced + * from other files. 
+ */ +export enum SymbolVisibility { + VISIBILITY_UNSET = 0, + VISIBILITY_LOCAL = 1, + VISIBILITY_EXPORT = 2, + UNRECOGNIZED = -1, +} + +export function symbolVisibilityFromJSON(object: any): SymbolVisibility { + switch (object) { + case 0: + case "VISIBILITY_UNSET": + return SymbolVisibility.VISIBILITY_UNSET; + case 1: + case "VISIBILITY_LOCAL": + return SymbolVisibility.VISIBILITY_LOCAL; + case 2: + case "VISIBILITY_EXPORT": + return SymbolVisibility.VISIBILITY_EXPORT; + case -1: + case "UNRECOGNIZED": + default: + return SymbolVisibility.UNRECOGNIZED; + } +} + +export function symbolVisibilityToJSON(object: SymbolVisibility): string { + switch (object) { + case SymbolVisibility.VISIBILITY_UNSET: + return "VISIBILITY_UNSET"; + case SymbolVisibility.VISIBILITY_LOCAL: + return "VISIBILITY_LOCAL"; + case SymbolVisibility.VISIBILITY_EXPORT: + return "VISIBILITY_EXPORT"; + case SymbolVisibility.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name?: + | string + | undefined; + /** e.g. "foo", "foo.bar", etc. */ + package?: + | string + | undefined; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** + * Names of files imported by this file purely for the purpose of providing + * option extensions. These are excluded from the dependency list above. + */ + optionDependency: string[]; + /** All top-level definitions in this file. 
*/ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options?: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo?: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. + * The supported values are "proto2", "proto3", and "editions". + * + * If `edition` is present, this value must be "editions". + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + syntax?: + | string + | undefined; + /** + * The edition of the proto file. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + edition?: Edition | undefined; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name?: string | undefined; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options?: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; + /** Support for `export` and `local` keywords on enums. */ + visibility?: SymbolVisibility | undefined; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. 
*/ + start?: + | number + | undefined; + /** Exclusive. */ + end?: number | undefined; + options?: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start?: + | number + | undefined; + /** Exclusive. */ + end?: number | undefined; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; + /** + * For external users: DO NOT USE. We are in the process of open sourcing + * extension declaration and executing internal cleanups before it can be + * used externally. + */ + declaration: ExtensionRangeOptions_Declaration[]; + /** Any features defined in the specific edition. */ + features?: + | FeatureSet + | undefined; + /** + * The verification state of the range. + * TODO: flip the default to DECLARATION once all empty ranges + * are marked as UNVERIFIED. + */ + verification?: ExtensionRangeOptions_VerificationState | undefined; +} + +/** The verification state of the extension range. */ +export enum ExtensionRangeOptions_VerificationState { + /** DECLARATION - All the extensions of the range must be declared. 
*/ + DECLARATION = 0, + UNVERIFIED = 1, + UNRECOGNIZED = -1, +} + +export function extensionRangeOptions_VerificationStateFromJSON(object: any): ExtensionRangeOptions_VerificationState { + switch (object) { + case 0: + case "DECLARATION": + return ExtensionRangeOptions_VerificationState.DECLARATION; + case 1: + case "UNVERIFIED": + return ExtensionRangeOptions_VerificationState.UNVERIFIED; + case -1: + case "UNRECOGNIZED": + default: + return ExtensionRangeOptions_VerificationState.UNRECOGNIZED; + } +} + +export function extensionRangeOptions_VerificationStateToJSON(object: ExtensionRangeOptions_VerificationState): string { + switch (object) { + case ExtensionRangeOptions_VerificationState.DECLARATION: + return "DECLARATION"; + case ExtensionRangeOptions_VerificationState.UNVERIFIED: + return "UNVERIFIED"; + case ExtensionRangeOptions_VerificationState.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface ExtensionRangeOptions_Declaration { + /** The extension number declared within the extension range. */ + number?: + | number + | undefined; + /** + * The fully-qualified name of the extension field. There must be a leading + * dot in front of the full name. + */ + fullName?: + | string + | undefined; + /** + * The fully-qualified type name of the extension field. Unlike + * Metadata.type, Declaration.type must have a leading dot for messages + * and enums. + */ + type?: + | string + | undefined; + /** + * If true, indicates that the number is reserved in the extension range, + * and any extension field with the number will fail to compile. Set this + * when a declared extension field is deleted. + */ + reserved?: + | boolean + | undefined; + /** + * If true, indicates that the extension must be defined as repeated. + * Otherwise the extension must be defined as optional. + */ + repeated?: boolean | undefined; +} + +/** Describes a field within a message. 
*/ +export interface FieldDescriptorProto { + name?: string | undefined; + number?: number | undefined; + label?: + | FieldDescriptorProto_Label + | undefined; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type?: + | FieldDescriptorProto_Type + | undefined; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName?: + | string + | undefined; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee?: + | string + | undefined; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + */ + defaultValue?: + | string + | undefined; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex?: + | number + | undefined; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName?: string | undefined; + options?: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. 
+ * + * When proto3_optional is true, this field must belong to a oneof to signal + * to old proto3 clients that presence is tracked for this field. This oneof + * is known as a "synthetic" oneof, and this field must be its sole member + * (each proto3 optional field gets its own synthetic oneof). Synthetic oneofs + * exist in the descriptor only, and do not generate any API. Synthetic oneofs + * must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional?: boolean | undefined; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported after google.protobuf. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. 
In Editions, the group wire format + * can be enabled via the `message_encoding` feature. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. */ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + 
return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REPEATED = 3, + /** + * LABEL_REQUIRED - The required label is only allowed in google.protobuf. In proto3 and Editions + * it's explicitly prohibited. In Editions, the `field_presence` feature + * can be used to get this behavior. 
+ */ + LABEL_REQUIRED = 2, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name?: string | undefined; + options?: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name?: string | undefined; + value: EnumValueDescriptorProto[]; + options?: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; + /** Support for `export` and `local` keywords on enums. */ + visibility?: SymbolVisibility | undefined; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. 
+ * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start?: + | number + | undefined; + /** Inclusive. */ + end?: number | undefined; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name?: string | undefined; + number?: number | undefined; + options?: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name?: string | undefined; + method: MethodDescriptorProto[]; + options?: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name?: + | string + | undefined; + /** + * Input and output type names. These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType?: string | undefined; + outputType?: string | undefined; + options?: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming?: + | boolean + | undefined; + /** Identifies if server streams multiple server messages */ + serverStreaming?: boolean | undefined; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage?: + | string + | undefined; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. 
+ * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname?: + | string + | undefined; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles?: + | boolean + | undefined; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash?: + | boolean + | undefined; + /** + * A proto2 file can set this to true to opt in to UTF-8 checking for Java, + * which will throw an exception if invalid UTF-8 is parsed from the wire or + * assigned to a string field. + * + * TODO: clarify exactly what kinds of field types this option + * applies to, and update these docs accordingly. + * + * Proto3 files already perform these checks. Setting the option explicitly to + * false has no effect: it cannot be used to opt proto3 files out of UTF-8 + * checks. + */ + javaStringCheckUtf8?: boolean | undefined; + optimizeFor?: + | FileOptions_OptimizeMode + | undefined; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage?: + | string + | undefined; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. 
They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices?: boolean | undefined; + javaGenericServices?: boolean | undefined; + pyGenericServices?: + | boolean + | undefined; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated?: + | boolean + | undefined; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas?: + | boolean + | undefined; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix?: + | string + | undefined; + /** Namespace for generated classes; defaults to the package. */ + csharpNamespace?: + | string + | undefined; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix?: + | string + | undefined; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix?: + | string + | undefined; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. 
When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace?: + | string + | undefined; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace?: + | string + | undefined; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage?: + | string + | undefined; + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat?: + | boolean + | undefined; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor?: + | boolean + | undefined; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated?: + | boolean + | undefined; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map<KeyType, ValueType> map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry?: + | boolean + | undefined; + /** + * Enable the legacy handling of JSON field name conflicts. This lowercases + * and strips underscored from the fields before comparison in proto3 only. + * The new behavior takes `json_name` into account and applies to proto2 as + * well. + * + * This should only be used as a temporary measure against broken builds due + * to the change in behavior for JSON field name conflicts. + * + * TODO This is legacy behavior we plan to remove once downstream + * teams have had time to migrate. + * + * @deprecated + */ + deprecatedLegacyJsonFieldConflicts?: + | boolean + | undefined; + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. 
Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * NOTE: ctype is deprecated. Use `features.(pb.cpp).string_type` instead. + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is only implemented to support use of + * [ctype=CORD] and [ctype=STRING] (the default) on non-repeated fields of + * type "bytes" in the open source release. + * TODO: make ctype actually deprecated. + */ + ctype?: + | FieldOptions_CType + | undefined; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. This option is prohibited in + * Editions, but the `repeated_field_encoding` feature can be used to control + * the behavior. + */ + packed?: + | boolean + | undefined; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. 
+ * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype?: + | FieldOptions_JSType + | undefined; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that lazy message fields are still eagerly verified to check + * ill-formed wireformat or missing required fields. Calling IsInitialized() + * on the outer message would fail if the inner message has missing required + * fields. Failed verification would result in parsing failure (except when + * uninitialized messages are acceptable). + */ + lazy?: + | boolean + | undefined; + /** + * unverified_lazy does no correctness checks on the byte stream. This should + * only be used where lazy with verification is prohibitive for performance + * reasons. + */ + unverifiedLazy?: + | boolean + | undefined; + /** + * Is this field deprecated? 
+ * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated?: + | boolean + | undefined; + /** For Google-internal migration only. Do not use. */ + weak?: + | boolean + | undefined; + /** + * Indicate that the field value should not be printed out when using debug + * formats, e.g. when the field contains sensitive credentials. + */ + debugRedact?: boolean | undefined; + retention?: FieldOptions_OptionRetention | undefined; + targets: FieldOptions_OptionTargetType[]; + editionDefaults: FieldOptions_EditionDefault[]; + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: FeatureSet | undefined; + featureSupport?: + | FieldOptions_FeatureSupport + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + /** + * CORD - The option [ctype=CORD] may be applied to a non-repeated field of type + * "bytes". It indicates that in C++, the data should be stored in a Cord + * instead of a string. For very large strings, this may reduce memory + * fragmentation. It may also allow better performance when parsing from a + * Cord, or when parsing with aliasing enabled, as the parsed Cord may then + * alias the original buffer. 
+ */ + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. */ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** If set to RETENTION_SOURCE, the option will be omitted from the binary. 
*/ +export enum FieldOptions_OptionRetention { + RETENTION_UNKNOWN = 0, + RETENTION_RUNTIME = 1, + RETENTION_SOURCE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_OptionRetentionFromJSON(object: any): FieldOptions_OptionRetention { + switch (object) { + case 0: + case "RETENTION_UNKNOWN": + return FieldOptions_OptionRetention.RETENTION_UNKNOWN; + case 1: + case "RETENTION_RUNTIME": + return FieldOptions_OptionRetention.RETENTION_RUNTIME; + case 2: + case "RETENTION_SOURCE": + return FieldOptions_OptionRetention.RETENTION_SOURCE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_OptionRetention.UNRECOGNIZED; + } +} + +export function fieldOptions_OptionRetentionToJSON(object: FieldOptions_OptionRetention): string { + switch (object) { + case FieldOptions_OptionRetention.RETENTION_UNKNOWN: + return "RETENTION_UNKNOWN"; + case FieldOptions_OptionRetention.RETENTION_RUNTIME: + return "RETENTION_RUNTIME"; + case FieldOptions_OptionRetention.RETENTION_SOURCE: + return "RETENTION_SOURCE"; + case FieldOptions_OptionRetention.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * This indicates the types of entities that the field may apply to when used + * as an option. If it is unset, then the field may be freely used as an + * option on any kind of entity. 
+ */ +export enum FieldOptions_OptionTargetType { + TARGET_TYPE_UNKNOWN = 0, + TARGET_TYPE_FILE = 1, + TARGET_TYPE_EXTENSION_RANGE = 2, + TARGET_TYPE_MESSAGE = 3, + TARGET_TYPE_FIELD = 4, + TARGET_TYPE_ONEOF = 5, + TARGET_TYPE_ENUM = 6, + TARGET_TYPE_ENUM_ENTRY = 7, + TARGET_TYPE_SERVICE = 8, + TARGET_TYPE_METHOD = 9, + UNRECOGNIZED = -1, +} + +export function fieldOptions_OptionTargetTypeFromJSON(object: any): FieldOptions_OptionTargetType { + switch (object) { + case 0: + case "TARGET_TYPE_UNKNOWN": + return FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN; + case 1: + case "TARGET_TYPE_FILE": + return FieldOptions_OptionTargetType.TARGET_TYPE_FILE; + case 2: + case "TARGET_TYPE_EXTENSION_RANGE": + return FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE; + case 3: + case "TARGET_TYPE_MESSAGE": + return FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE; + case 4: + case "TARGET_TYPE_FIELD": + return FieldOptions_OptionTargetType.TARGET_TYPE_FIELD; + case 5: + case "TARGET_TYPE_ONEOF": + return FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF; + case 6: + case "TARGET_TYPE_ENUM": + return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM; + case 7: + case "TARGET_TYPE_ENUM_ENTRY": + return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY; + case 8: + case "TARGET_TYPE_SERVICE": + return FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE; + case 9: + case "TARGET_TYPE_METHOD": + return FieldOptions_OptionTargetType.TARGET_TYPE_METHOD; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_OptionTargetType.UNRECOGNIZED; + } +} + +export function fieldOptions_OptionTargetTypeToJSON(object: FieldOptions_OptionTargetType): string { + switch (object) { + case FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN: + return "TARGET_TYPE_UNKNOWN"; + case FieldOptions_OptionTargetType.TARGET_TYPE_FILE: + return "TARGET_TYPE_FILE"; + case FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE: + return "TARGET_TYPE_EXTENSION_RANGE"; + case 
FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE: + return "TARGET_TYPE_MESSAGE"; + case FieldOptions_OptionTargetType.TARGET_TYPE_FIELD: + return "TARGET_TYPE_FIELD"; + case FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF: + return "TARGET_TYPE_ONEOF"; + case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM: + return "TARGET_TYPE_ENUM"; + case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY: + return "TARGET_TYPE_ENUM_ENTRY"; + case FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE: + return "TARGET_TYPE_SERVICE"; + case FieldOptions_OptionTargetType.TARGET_TYPE_METHOD: + return "TARGET_TYPE_METHOD"; + case FieldOptions_OptionTargetType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface FieldOptions_EditionDefault { + edition?: + | Edition + | undefined; + /** Textproto value. */ + value?: string | undefined; +} + +/** Information about the support window of a feature. */ +export interface FieldOptions_FeatureSupport { + /** + * The edition that this feature was first available in. In editions + * earlier than this one, the default assigned to EDITION_LEGACY will be + * used, and proto files will not be able to override it. + */ + editionIntroduced?: + | Edition + | undefined; + /** + * The edition this feature becomes deprecated in. Using this after this + * edition may trigger warnings. + */ + editionDeprecated?: + | Edition + | undefined; + /** + * The deprecation warning text if this feature is used after the edition it + * was marked deprecated in. + */ + deprecationWarning?: + | string + | undefined; + /** + * The edition this feature is no longer available in. In editions after + * this one, the last default assigned will be used, and proto files will + * not be able to override it. + */ + editionRemoved?: Edition | undefined; +} + +export interface OneofOptions { + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. 
Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias?: + | boolean + | undefined; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated?: + | boolean + | undefined; + /** + * Enable the legacy handling of JSON field name conflicts. This lowercases + * and strips underscored from the fields before comparison in proto3 only. + * The new behavior takes `json_name` into account and applies to proto2 as + * well. + * TODO Remove this legacy behavior once downstream teams have + * had time to migrate. + * + * @deprecated + */ + deprecatedLegacyJsonFieldConflicts?: + | boolean + | undefined; + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated?: + | boolean + | undefined; + /** + * Any features defined in the specific edition. 
+ * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** + * Indicate that fields annotated with this enum value should not be printed + * out when using debug formats, e.g. when the field contains sensitive + * credentials. + */ + debugRedact?: + | boolean + | undefined; + /** Information about the support window of a feature value. */ + featureSupport?: + | FieldOptions_FeatureSupport + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated?: + | boolean + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated?: boolean | undefined; + idempotencyLevel?: + | MethodOptions_IdempotencyLevel + | undefined; + /** + * Any features defined in the specific edition. 
+ * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. + */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. 
This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. + */ + identifierValue?: string | undefined; + positiveIntValue?: number | undefined; + negativeIntValue?: number | undefined; + doubleValue?: number | undefined; + stringValue?: Uint8Array | undefined; + aggregateValue?: string | undefined; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents + * "foo.(bar.baz).moo". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * TODO Enums in C++ gencode (and potentially other languages) are + * not well scoped. This means that each of the feature enums below can clash + * with each other. The short names we've chosen maximize call-site + * readability, but leave us very open to this scenario. A future feature will + * be designed and implemented to handle this, hopefully before we ever hit a + * conflict here. 
+ */ +export interface FeatureSet { + fieldPresence?: FeatureSet_FieldPresence | undefined; + enumType?: FeatureSet_EnumType | undefined; + repeatedFieldEncoding?: FeatureSet_RepeatedFieldEncoding | undefined; + utf8Validation?: FeatureSet_Utf8Validation | undefined; + messageEncoding?: FeatureSet_MessageEncoding | undefined; + jsonFormat?: FeatureSet_JsonFormat | undefined; + enforceNamingStyle?: FeatureSet_EnforceNamingStyle | undefined; + defaultSymbolVisibility?: FeatureSet_VisibilityFeature_DefaultSymbolVisibility | undefined; +} + +export enum FeatureSet_FieldPresence { + FIELD_PRESENCE_UNKNOWN = 0, + EXPLICIT = 1, + IMPLICIT = 2, + LEGACY_REQUIRED = 3, + UNRECOGNIZED = -1, +} + +export function featureSet_FieldPresenceFromJSON(object: any): FeatureSet_FieldPresence { + switch (object) { + case 0: + case "FIELD_PRESENCE_UNKNOWN": + return FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN; + case 1: + case "EXPLICIT": + return FeatureSet_FieldPresence.EXPLICIT; + case 2: + case "IMPLICIT": + return FeatureSet_FieldPresence.IMPLICIT; + case 3: + case "LEGACY_REQUIRED": + return FeatureSet_FieldPresence.LEGACY_REQUIRED; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_FieldPresence.UNRECOGNIZED; + } +} + +export function featureSet_FieldPresenceToJSON(object: FeatureSet_FieldPresence): string { + switch (object) { + case FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN: + return "FIELD_PRESENCE_UNKNOWN"; + case FeatureSet_FieldPresence.EXPLICIT: + return "EXPLICIT"; + case FeatureSet_FieldPresence.IMPLICIT: + return "IMPLICIT"; + case FeatureSet_FieldPresence.LEGACY_REQUIRED: + return "LEGACY_REQUIRED"; + case FeatureSet_FieldPresence.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_EnumType { + ENUM_TYPE_UNKNOWN = 0, + OPEN = 1, + CLOSED = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_EnumTypeFromJSON(object: any): FeatureSet_EnumType { + switch (object) { + case 0: + case "ENUM_TYPE_UNKNOWN": + return 
FeatureSet_EnumType.ENUM_TYPE_UNKNOWN; + case 1: + case "OPEN": + return FeatureSet_EnumType.OPEN; + case 2: + case "CLOSED": + return FeatureSet_EnumType.CLOSED; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_EnumType.UNRECOGNIZED; + } +} + +export function featureSet_EnumTypeToJSON(object: FeatureSet_EnumType): string { + switch (object) { + case FeatureSet_EnumType.ENUM_TYPE_UNKNOWN: + return "ENUM_TYPE_UNKNOWN"; + case FeatureSet_EnumType.OPEN: + return "OPEN"; + case FeatureSet_EnumType.CLOSED: + return "CLOSED"; + case FeatureSet_EnumType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_RepeatedFieldEncoding { + REPEATED_FIELD_ENCODING_UNKNOWN = 0, + PACKED = 1, + EXPANDED = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_RepeatedFieldEncodingFromJSON(object: any): FeatureSet_RepeatedFieldEncoding { + switch (object) { + case 0: + case "REPEATED_FIELD_ENCODING_UNKNOWN": + return FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN; + case 1: + case "PACKED": + return FeatureSet_RepeatedFieldEncoding.PACKED; + case 2: + case "EXPANDED": + return FeatureSet_RepeatedFieldEncoding.EXPANDED; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_RepeatedFieldEncoding.UNRECOGNIZED; + } +} + +export function featureSet_RepeatedFieldEncodingToJSON(object: FeatureSet_RepeatedFieldEncoding): string { + switch (object) { + case FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN: + return "REPEATED_FIELD_ENCODING_UNKNOWN"; + case FeatureSet_RepeatedFieldEncoding.PACKED: + return "PACKED"; + case FeatureSet_RepeatedFieldEncoding.EXPANDED: + return "EXPANDED"; + case FeatureSet_RepeatedFieldEncoding.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_Utf8Validation { + UTF8_VALIDATION_UNKNOWN = 0, + VERIFY = 2, + NONE = 3, + UNRECOGNIZED = -1, +} + +export function featureSet_Utf8ValidationFromJSON(object: any): FeatureSet_Utf8Validation { + 
switch (object) { + case 0: + case "UTF8_VALIDATION_UNKNOWN": + return FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN; + case 2: + case "VERIFY": + return FeatureSet_Utf8Validation.VERIFY; + case 3: + case "NONE": + return FeatureSet_Utf8Validation.NONE; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_Utf8Validation.UNRECOGNIZED; + } +} + +export function featureSet_Utf8ValidationToJSON(object: FeatureSet_Utf8Validation): string { + switch (object) { + case FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN: + return "UTF8_VALIDATION_UNKNOWN"; + case FeatureSet_Utf8Validation.VERIFY: + return "VERIFY"; + case FeatureSet_Utf8Validation.NONE: + return "NONE"; + case FeatureSet_Utf8Validation.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_MessageEncoding { + MESSAGE_ENCODING_UNKNOWN = 0, + LENGTH_PREFIXED = 1, + DELIMITED = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_MessageEncodingFromJSON(object: any): FeatureSet_MessageEncoding { + switch (object) { + case 0: + case "MESSAGE_ENCODING_UNKNOWN": + return FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN; + case 1: + case "LENGTH_PREFIXED": + return FeatureSet_MessageEncoding.LENGTH_PREFIXED; + case 2: + case "DELIMITED": + return FeatureSet_MessageEncoding.DELIMITED; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_MessageEncoding.UNRECOGNIZED; + } +} + +export function featureSet_MessageEncodingToJSON(object: FeatureSet_MessageEncoding): string { + switch (object) { + case FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN: + return "MESSAGE_ENCODING_UNKNOWN"; + case FeatureSet_MessageEncoding.LENGTH_PREFIXED: + return "LENGTH_PREFIXED"; + case FeatureSet_MessageEncoding.DELIMITED: + return "DELIMITED"; + case FeatureSet_MessageEncoding.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_JsonFormat { + JSON_FORMAT_UNKNOWN = 0, + ALLOW = 1, + LEGACY_BEST_EFFORT = 2, + UNRECOGNIZED = -1, +} + +export 
function featureSet_JsonFormatFromJSON(object: any): FeatureSet_JsonFormat { + switch (object) { + case 0: + case "JSON_FORMAT_UNKNOWN": + return FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN; + case 1: + case "ALLOW": + return FeatureSet_JsonFormat.ALLOW; + case 2: + case "LEGACY_BEST_EFFORT": + return FeatureSet_JsonFormat.LEGACY_BEST_EFFORT; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_JsonFormat.UNRECOGNIZED; + } +} + +export function featureSet_JsonFormatToJSON(object: FeatureSet_JsonFormat): string { + switch (object) { + case FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN: + return "JSON_FORMAT_UNKNOWN"; + case FeatureSet_JsonFormat.ALLOW: + return "ALLOW"; + case FeatureSet_JsonFormat.LEGACY_BEST_EFFORT: + return "LEGACY_BEST_EFFORT"; + case FeatureSet_JsonFormat.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_EnforceNamingStyle { + ENFORCE_NAMING_STYLE_UNKNOWN = 0, + STYLE2024 = 1, + STYLE_LEGACY = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_EnforceNamingStyleFromJSON(object: any): FeatureSet_EnforceNamingStyle { + switch (object) { + case 0: + case "ENFORCE_NAMING_STYLE_UNKNOWN": + return FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN; + case 1: + case "STYLE2024": + return FeatureSet_EnforceNamingStyle.STYLE2024; + case 2: + case "STYLE_LEGACY": + return FeatureSet_EnforceNamingStyle.STYLE_LEGACY; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_EnforceNamingStyle.UNRECOGNIZED; + } +} + +export function featureSet_EnforceNamingStyleToJSON(object: FeatureSet_EnforceNamingStyle): string { + switch (object) { + case FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN: + return "ENFORCE_NAMING_STYLE_UNKNOWN"; + case FeatureSet_EnforceNamingStyle.STYLE2024: + return "STYLE2024"; + case FeatureSet_EnforceNamingStyle.STYLE_LEGACY: + return "STYLE_LEGACY"; + case FeatureSet_EnforceNamingStyle.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface 
FeatureSet_VisibilityFeature { +} + +export enum FeatureSet_VisibilityFeature_DefaultSymbolVisibility { + DEFAULT_SYMBOL_VISIBILITY_UNKNOWN = 0, + /** EXPORT_ALL - Default pre-EDITION_2024, all UNSET visibility are export. */ + EXPORT_ALL = 1, + /** EXPORT_TOP_LEVEL - All top-level symbols default to export, nested default to local. */ + EXPORT_TOP_LEVEL = 2, + /** LOCAL_ALL - All symbols default to local. */ + LOCAL_ALL = 3, + /** + * STRICT - All symbols local by default. Nested types cannot be exported. + * With special case caveat for message { enum {} reserved 1 to max; } + * This is the recommended setting for new protos. + */ + STRICT = 4, + UNRECOGNIZED = -1, +} + +export function featureSet_VisibilityFeature_DefaultSymbolVisibilityFromJSON( + object: any, +): FeatureSet_VisibilityFeature_DefaultSymbolVisibility { + switch (object) { + case 0: + case "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN": + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.DEFAULT_SYMBOL_VISIBILITY_UNKNOWN; + case 1: + case "EXPORT_ALL": + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.EXPORT_ALL; + case 2: + case "EXPORT_TOP_LEVEL": + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.EXPORT_TOP_LEVEL; + case 3: + case "LOCAL_ALL": + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.LOCAL_ALL; + case 4: + case "STRICT": + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.STRICT; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.UNRECOGNIZED; + } +} + +export function featureSet_VisibilityFeature_DefaultSymbolVisibilityToJSON( + object: FeatureSet_VisibilityFeature_DefaultSymbolVisibility, +): string { + switch (object) { + case FeatureSet_VisibilityFeature_DefaultSymbolVisibility.DEFAULT_SYMBOL_VISIBILITY_UNKNOWN: + return "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN"; + case FeatureSet_VisibilityFeature_DefaultSymbolVisibility.EXPORT_ALL: + return "EXPORT_ALL"; + case 
FeatureSet_VisibilityFeature_DefaultSymbolVisibility.EXPORT_TOP_LEVEL: + return "EXPORT_TOP_LEVEL"; + case FeatureSet_VisibilityFeature_DefaultSymbolVisibility.LOCAL_ALL: + return "LOCAL_ALL"; + case FeatureSet_VisibilityFeature_DefaultSymbolVisibility.STRICT: + return "STRICT"; + case FeatureSet_VisibilityFeature_DefaultSymbolVisibility.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A compiled specification for the defaults of a set of features. These + * messages are generated from FeatureSet extensions and can be used to seed + * feature resolution. The resolution with this object becomes a simple search + * for the closest matching edition, followed by proto merges. + */ +export interface FeatureSetDefaults { + defaults: FeatureSetDefaults_FeatureSetEditionDefault[]; + /** + * The minimum supported edition (inclusive) when this was constructed. + * Editions before this will not have defaults. + */ + minimumEdition?: + | Edition + | undefined; + /** + * The maximum known edition (inclusive) when this was constructed. Editions + * after this will not have reliable defaults. + */ + maximumEdition?: Edition | undefined; +} + +/** + * A map from every known edition with a unique set of defaults to its + * defaults. Not all editions may be contained here. For a given edition, + * the defaults at the closest matching edition ordered at or before it should + * be used. This field must be in strict ascending order by edition. + */ +export interface FeatureSetDefaults_FeatureSetEditionDefault { + edition?: + | Edition + | undefined; + /** Defaults of features that can be overridden in this edition. */ + overridableFeatures?: + | FeatureSet + | undefined; + /** Defaults of features that can't be overridden in this edition. */ + fixedFeatures?: FeatureSet | undefined; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. 
+ */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. 
Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition appears. + * For example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). + */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. 
+ * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to moo. + * // + * // Another line attached to moo. + * optional double moo = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to moo or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. * / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments?: string | undefined; + trailingComments?: string | undefined; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. 
+ */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile?: + | string + | undefined; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin?: + | number + | undefined; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified object. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end?: number | undefined; + semantic?: GeneratedCodeInfo_Annotation_Semantic | undefined; +} + +/** + * Represents the identified object's effect on the element in the original + * .proto file. + */ +export enum GeneratedCodeInfo_Annotation_Semantic { + /** NONE - There is no effect or the effect is indescribable. */ + NONE = 0, + /** SET - The element is set or otherwise mutated. */ + SET = 1, + /** ALIAS - An alias to the element is returned. 
*/ + ALIAS = 2, + UNRECOGNIZED = -1, +} + +export function generatedCodeInfo_Annotation_SemanticFromJSON(object: any): GeneratedCodeInfo_Annotation_Semantic { + switch (object) { + case 0: + case "NONE": + return GeneratedCodeInfo_Annotation_Semantic.NONE; + case 1: + case "SET": + return GeneratedCodeInfo_Annotation_Semantic.SET; + case 2: + case "ALIAS": + return GeneratedCodeInfo_Annotation_Semantic.ALIAS; + case -1: + case "UNRECOGNIZED": + default: + return GeneratedCodeInfo_Annotation_Semantic.UNRECOGNIZED; + } +} + +export function generatedCodeInfo_Annotation_SemanticToJSON(object: GeneratedCodeInfo_Annotation_Semantic): string { + switch (object) { + case GeneratedCodeInfo_Annotation_Semantic.NONE: + return "NONE"; + case GeneratedCodeInfo_Annotation_Semantic.SET: + return "SET"; + case GeneratedCodeInfo_Annotation_Semantic.ALIAS: + return "ALIAS"; + case GeneratedCodeInfo_Annotation_Semantic.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet: MessageFns<FileDescriptorSet> = { + encode(message: FileDescriptorSet, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { + file: globalThis.Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [], + }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file?.length) { + obj.file = message.file.map((e) => FileDescriptorProto.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<FileDescriptorSet>, I>>(base?: I): FileDescriptorSet { + return FileDescriptorSet.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FileDescriptorSet>, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + optionDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + edition: 0, + }; +} + +export const FileDescriptorProto: MessageFns<FileDescriptorProto> = { + encode(message: FileDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== undefined && message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + 
writer.uint32(26).string(v!); + } + for (const v of message.publicDependency) { + writer.uint32(80).int32(v!); + } + for (const v of message.weakDependency) { + writer.uint32(88).int32(v!); + } + for (const v of message.optionDependency) { + writer.uint32(122).string(v!); + } + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).join(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).join(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).join(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).join(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).join(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).join(); + } + if (message.syntax !== undefined && message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + if (message.edition !== undefined && message.edition !== 0) { + writer.uint32(112).int32(message.edition); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.package = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.dependency.push(reader.string()); + continue; + } + case 10: { + if (tag === 80) { + message.publicDependency.push(reader.int32()); + + continue; + } + + if (tag === 82) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + + continue; + } + + break; + } + case 11: { + if (tag === 88) { + message.weakDependency.push(reader.int32()); + + continue; + } + + if (tag === 90) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + + continue; + } + + break; + } + case 15: { + if (tag !== 122) { + break; + } + + message.optionDependency.push(reader.string()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.options = FileOptions.decode(reader, reader.uint32()); + continue; + } + case 9: { + if (tag !== 74) { + break; + } + + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + continue; + } + case 12: { + if 
(tag !== 98) { + break; + } + + message.syntax = reader.string(); + continue; + } + case 14: { + if (tag !== 112) { + break; + } + + message.edition = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + package: isSet(object.package) ? globalThis.String(object.package) : "", + dependency: globalThis.Array.isArray(object?.dependency) + ? object.dependency.map((e: any) => globalThis.String(e)) + : [], + publicDependency: globalThis.Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => globalThis.Number(e)) + : globalThis.Array.isArray(object?.public_dependency) + ? object.public_dependency.map((e: any) => globalThis.Number(e)) + : [], + weakDependency: globalThis.Array.isArray(object?.weakDependency) + ? object.weakDependency.map((e: any) => globalThis.Number(e)) + : globalThis.Array.isArray(object?.weak_dependency) + ? object.weak_dependency.map((e: any) => globalThis.Number(e)) + : [], + optionDependency: globalThis.Array.isArray(object?.optionDependency) + ? object.optionDependency.map((e: any) => globalThis.String(e)) + : globalThis.Array.isArray(object?.option_dependency) + ? object.option_dependency.map((e: any) => globalThis.String(e)) + : [], + messageType: globalThis.Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : globalThis.Array.isArray(object?.message_type) + ? object.message_type.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: globalThis.Array.isArray(object?.enumType) + ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) + : globalThis.Array.isArray(object?.enum_type) + ? object.enum_type.map((e: any) => EnumDescriptorProto.fromJSON(e)) + : [], + service: globalThis.Array.isArray(object?.service) + ? 
object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) + : [], + extension: globalThis.Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) + ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) + : isSet(object.source_code_info) + ? SourceCodeInfo.fromJSON(object.source_code_info) + : undefined, + syntax: isSet(object.syntax) ? globalThis.String(object.syntax) : "", + edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0, + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.package !== undefined && message.package !== "") { + obj.package = message.package; + } + if (message.dependency?.length) { + obj.dependency = message.dependency; + } + if (message.publicDependency?.length) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } + if (message.weakDependency?.length) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } + if (message.optionDependency?.length) { + obj.optionDependency = message.optionDependency; + } + if (message.messageType?.length) { + obj.messageType = message.messageType.map((e) => DescriptorProto.toJSON(e)); + } + if (message.enumType?.length) { + obj.enumType = message.enumType.map((e) => EnumDescriptorProto.toJSON(e)); + } + if (message.service?.length) { + obj.service = message.service.map((e) => ServiceDescriptorProto.toJSON(e)); + } + if (message.extension?.length) { + obj.extension = message.extension.map((e) => FieldDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = FileOptions.toJSON(message.options); + } + if (message.sourceCodeInfo !== undefined) { + obj.sourceCodeInfo = 
SourceCodeInfo.toJSON(message.sourceCodeInfo); + } + if (message.syntax !== undefined && message.syntax !== "") { + obj.syntax = message.syntax; + } + if (message.edition !== undefined && message.edition !== 0) { + obj.edition = editionToJSON(message.edition); + } + return obj; + }, + + create<I extends Exact<DeepPartial<FileDescriptorProto>, I>>(base?: I): FileDescriptorProto { + return FileDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FileDescriptorProto>, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? ""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.optionDependency = object.optionDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + message.edition = object.edition ?? 
0; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + visibility: 0, + }; +} + +export const DescriptorProto: MessageFns<DescriptorProto> = { + encode(message: DescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).join(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).join(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).join(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).join(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).join(); + } + for (const v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).join(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).join(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).join(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + if (message.visibility !== undefined && message.visibility !== 0) { + writer.uint32(88).int32(message.visibility); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.options = MessageOptions.decode(reader, reader.uint32()); + continue; + } + case 9: { + if (tag !== 74) { + break; + } + + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + continue; + } + case 10: { + if (tag !== 82) { + break; + } + + message.reservedName.push(reader.string()); + continue; + } + case 11: { + if (tag !== 88) { + break; + } + + message.visibility = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + field: globalThis.Array.isArray(object?.field) + ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + extension: globalThis.Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: globalThis.Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : globalThis.Array.isArray(object?.nested_type) + ? object.nested_type.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: globalThis.Array.isArray(object?.enumType) + ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) + : globalThis.Array.isArray(object?.enum_type) + ? object.enum_type.map((e: any) => EnumDescriptorProto.fromJSON(e)) + : [], + extensionRange: globalThis.Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : globalThis.Array.isArray(object?.extension_range) + ? object.extension_range.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: globalThis.Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : globalThis.Array.isArray(object?.oneof_decl) + ? object.oneof_decl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: globalThis.Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : globalThis.Array.isArray(object?.reserved_range) + ? object.reserved_range.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: globalThis.Array.isArray(object?.reservedName) + ? object.reservedName.map((e: any) => globalThis.String(e)) + : globalThis.Array.isArray(object?.reserved_name) + ? object.reserved_name.map((e: any) => globalThis.String(e)) + : [], + visibility: isSet(object.visibility) ? 
symbolVisibilityFromJSON(object.visibility) : 0, + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.field?.length) { + obj.field = message.field.map((e) => FieldDescriptorProto.toJSON(e)); + } + if (message.extension?.length) { + obj.extension = message.extension.map((e) => FieldDescriptorProto.toJSON(e)); + } + if (message.nestedType?.length) { + obj.nestedType = message.nestedType.map((e) => DescriptorProto.toJSON(e)); + } + if (message.enumType?.length) { + obj.enumType = message.enumType.map((e) => EnumDescriptorProto.toJSON(e)); + } + if (message.extensionRange?.length) { + obj.extensionRange = message.extensionRange.map((e) => DescriptorProto_ExtensionRange.toJSON(e)); + } + if (message.oneofDecl?.length) { + obj.oneofDecl = message.oneofDecl.map((e) => OneofDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = MessageOptions.toJSON(message.options); + } + if (message.reservedRange?.length) { + obj.reservedRange = message.reservedRange.map((e) => DescriptorProto_ReservedRange.toJSON(e)); + } + if (message.reservedName?.length) { + obj.reservedName = message.reservedName; + } + if (message.visibility !== undefined && message.visibility !== 0) { + obj.visibility = symbolVisibilityToJSON(message.visibility); + } + return obj; + }, + + create<I extends Exact<DeepPartial<DescriptorProto>, I>>(base?: I): DescriptorProto { + return DescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<DescriptorProto>, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? 
""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + message.visibility = object.visibility ?? 0; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange: MessageFns<DescriptorProto_ExtensionRange> = { + encode(message: DescriptorProto_ExtensionRange, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.start !== undefined && message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== undefined && message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.start = reader.int32(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.end = reader.int32(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? globalThis.Number(object.start) : 0, + end: isSet(object.end) ? globalThis.Number(object.end) : 0, + options: isSet(object.options) ? ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + if (message.start !== undefined && message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== undefined && message.end !== 0) { + obj.end = Math.round(message.end); + } + if (message.options !== undefined) { + obj.options = ExtensionRangeOptions.toJSON(message.options); + } + return obj; + }, + + create<I extends Exact<DeepPartial<DescriptorProto_ExtensionRange>, I>>(base?: I): DescriptorProto_ExtensionRange { + return DescriptorProto_ExtensionRange.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<DescriptorProto_ExtensionRange>, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? 
ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange: MessageFns<DescriptorProto_ReservedRange> = { + encode(message: DescriptorProto_ReservedRange, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.start !== undefined && message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== undefined && message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.start = reader.int32(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.end = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { + start: isSet(object.start) ? globalThis.Number(object.start) : 0, + end: isSet(object.end) ? 
globalThis.Number(object.end) : 0, + }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + if (message.start !== undefined && message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== undefined && message.end !== 0) { + obj.end = Math.round(message.end); + } + return obj; + }, + + create<I extends Exact<DeepPartial<DescriptorProto_ReservedRange>, I>>(base?: I): DescriptorProto_ReservedRange { + return DescriptorProto_ReservedRange.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<DescriptorProto_ReservedRange>, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [], declaration: [], features: undefined, verification: 1 }; +} + +export const ExtensionRangeOptions: MessageFns<ExtensionRangeOptions> = { + encode(message: ExtensionRangeOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + for (const v of message.declaration) { + ExtensionRangeOptions_Declaration.encode(v!, writer.uint32(18).fork()).join(); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(402).fork()).join(); + } + if (message.verification !== undefined && message.verification !== 1) { + writer.uint32(24).int32(message.verification); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.declaration.push(ExtensionRangeOptions_Declaration.decode(reader, reader.uint32())); + continue; + } + case 50: { + if (tag !== 402) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.verification = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + declaration: globalThis.Array.isArray(object?.declaration) + ? object.declaration.map((e: any) => ExtensionRangeOptions_Declaration.fromJSON(e)) + : [], + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + verification: isSet(object.verification) + ? 
extensionRangeOptions_VerificationStateFromJSON(object.verification) + : 1, + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + if (message.declaration?.length) { + obj.declaration = message.declaration.map((e) => ExtensionRangeOptions_Declaration.toJSON(e)); + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.verification !== undefined && message.verification !== 1) { + obj.verification = extensionRangeOptions_VerificationStateToJSON(message.verification); + } + return obj; + }, + + create<I extends Exact<DeepPartial<ExtensionRangeOptions>, I>>(base?: I): ExtensionRangeOptions { + return ExtensionRangeOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<ExtensionRangeOptions>, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + message.declaration = object.declaration?.map((e) => ExtensionRangeOptions_Declaration.fromPartial(e)) || []; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.verification = object.verification ?? 
1; + return message; + }, +}; + +function createBaseExtensionRangeOptions_Declaration(): ExtensionRangeOptions_Declaration { + return { number: 0, fullName: "", type: "", reserved: false, repeated: false }; +} + +export const ExtensionRangeOptions_Declaration: MessageFns<ExtensionRangeOptions_Declaration> = { + encode(message: ExtensionRangeOptions_Declaration, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.number !== undefined && message.number !== 0) { + writer.uint32(8).int32(message.number); + } + if (message.fullName !== undefined && message.fullName !== "") { + writer.uint32(18).string(message.fullName); + } + if (message.type !== undefined && message.type !== "") { + writer.uint32(26).string(message.type); + } + if (message.reserved !== undefined && message.reserved !== false) { + writer.uint32(40).bool(message.reserved); + } + if (message.repeated !== undefined && message.repeated !== false) { + writer.uint32(48).bool(message.repeated); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ExtensionRangeOptions_Declaration { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions_Declaration(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.number = reader.int32(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.fullName = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.type = reader.string(); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.reserved = reader.bool(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.repeated = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions_Declaration { + return { + number: isSet(object.number) ? globalThis.Number(object.number) : 0, + fullName: isSet(object.fullName) + ? globalThis.String(object.fullName) + : isSet(object.full_name) + ? globalThis.String(object.full_name) + : "", + type: isSet(object.type) ? globalThis.String(object.type) : "", + reserved: isSet(object.reserved) ? globalThis.Boolean(object.reserved) : false, + repeated: isSet(object.repeated) ? 
globalThis.Boolean(object.repeated) : false, + }; + }, + + toJSON(message: ExtensionRangeOptions_Declaration): unknown { + const obj: any = {}; + if (message.number !== undefined && message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.fullName !== undefined && message.fullName !== "") { + obj.fullName = message.fullName; + } + if (message.type !== undefined && message.type !== "") { + obj.type = message.type; + } + if (message.reserved !== undefined && message.reserved !== false) { + obj.reserved = message.reserved; + } + if (message.repeated !== undefined && message.repeated !== false) { + obj.repeated = message.repeated; + } + return obj; + }, + + create<I extends Exact<DeepPartial<ExtensionRangeOptions_Declaration>, I>>( + base?: I, + ): ExtensionRangeOptions_Declaration { + return ExtensionRangeOptions_Declaration.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<ExtensionRangeOptions_Declaration>, I>>( + object: I, + ): ExtensionRangeOptions_Declaration { + const message = createBaseExtensionRangeOptions_Declaration(); + message.number = object.number ?? 0; + message.fullName = object.fullName ?? ""; + message.type = object.type ?? ""; + message.reserved = object.reserved ?? false; + message.repeated = object.repeated ?? 
false; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto: MessageFns<FieldDescriptorProto> = { + encode(message: FieldDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== undefined && message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== undefined && message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== undefined && message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== undefined && message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== undefined && message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== undefined && message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== undefined && message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== undefined && message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).join(); + } + if (message.proto3Optional !== undefined && message.proto3Optional !== false) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.number = reader.int32(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.label = reader.int32() as any; + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.type = reader.int32() as any; + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.typeName = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.extendee = reader.string(); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.defaultValue = reader.string(); + continue; + } + case 9: { + if (tag !== 72) { + break; + } + + message.oneofIndex = reader.int32(); + continue; + } + case 10: { + if (tag !== 82) { + break; + } + + message.jsonName = reader.string(); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.options = FieldOptions.decode(reader, reader.uint32()); + continue; + } + case 17: { + if (tag !== 136) { + break; + } + + message.proto3Optional = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + number: isSet(object.number) ? globalThis.Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) + ? globalThis.String(object.typeName) + : isSet(object.type_name) + ? globalThis.String(object.type_name) + : "", + extendee: isSet(object.extendee) ? 
globalThis.String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) + ? globalThis.String(object.defaultValue) + : isSet(object.default_value) + ? globalThis.String(object.default_value) + : "", + oneofIndex: isSet(object.oneofIndex) + ? globalThis.Number(object.oneofIndex) + : isSet(object.oneof_index) + ? globalThis.Number(object.oneof_index) + : 0, + jsonName: isSet(object.jsonName) + ? globalThis.String(object.jsonName) + : isSet(object.json_name) + ? globalThis.String(object.json_name) + : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) + ? globalThis.Boolean(object.proto3Optional) + : isSet(object.proto3_optional) + ? globalThis.Boolean(object.proto3_optional) + : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.number !== undefined && message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.label !== undefined && message.label !== 1) { + obj.label = fieldDescriptorProto_LabelToJSON(message.label); + } + if (message.type !== undefined && message.type !== 1) { + obj.type = fieldDescriptorProto_TypeToJSON(message.type); + } + if (message.typeName !== undefined && message.typeName !== "") { + obj.typeName = message.typeName; + } + if (message.extendee !== undefined && message.extendee !== "") { + obj.extendee = message.extendee; + } + if (message.defaultValue !== undefined && message.defaultValue !== "") { + obj.defaultValue = message.defaultValue; + } + if (message.oneofIndex !== undefined && message.oneofIndex !== 0) { + obj.oneofIndex = Math.round(message.oneofIndex); + } + if (message.jsonName !== undefined && message.jsonName !== "") { + obj.jsonName = message.jsonName; + } + if (message.options !== undefined) { + obj.options = FieldOptions.toJSON(message.options); + } + if 
(message.proto3Optional !== undefined && message.proto3Optional !== false) { + obj.proto3Optional = message.proto3Optional; + } + return obj; + }, + + create<I extends Exact<DeepPartial<FieldDescriptorProto>, I>>(base?: I): FieldDescriptorProto { + return FieldDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FieldDescriptorProto>, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto: MessageFns<OneofDescriptorProto> = { + encode(message: OneofDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.options = OneofOptions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.options !== undefined) { + obj.options = OneofOptions.toJSON(message.options); + } + return obj; + }, + + create<I extends Exact<DeepPartial<OneofDescriptorProto>, I>>(base?: I): OneofDescriptorProto { + return OneofDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<OneofDescriptorProto>, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [], visibility: 0 }; +} + +export const EnumDescriptorProto: MessageFns<EnumDescriptorProto> = { + encode(message: EnumDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).join(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).join(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).join(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + if (message.visibility !== undefined && message.visibility !== 0) { + writer.uint32(48).int32(message.visibility); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.options = EnumOptions.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.reservedName.push(reader.string()); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.visibility = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + value: globalThis.Array.isArray(object?.value) + ? object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: globalThis.Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : globalThis.Array.isArray(object?.reserved_range) + ? object.reserved_range.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: globalThis.Array.isArray(object?.reservedName) + ? object.reservedName.map((e: any) => globalThis.String(e)) + : globalThis.Array.isArray(object?.reserved_name) + ? object.reserved_name.map((e: any) => globalThis.String(e)) + : [], + visibility: isSet(object.visibility) ? 
symbolVisibilityFromJSON(object.visibility) : 0, + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.value?.length) { + obj.value = message.value.map((e) => EnumValueDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = EnumOptions.toJSON(message.options); + } + if (message.reservedRange?.length) { + obj.reservedRange = message.reservedRange.map((e) => EnumDescriptorProto_EnumReservedRange.toJSON(e)); + } + if (message.reservedName?.length) { + obj.reservedName = message.reservedName; + } + if (message.visibility !== undefined && message.visibility !== 0) { + obj.visibility = symbolVisibilityToJSON(message.visibility); + } + return obj; + }, + + create<I extends Exact<DeepPartial<EnumDescriptorProto>, I>>(base?: I): EnumDescriptorProto { + return EnumDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<EnumDescriptorProto>, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) || + []; + message.reservedName = object.reservedName?.map((e) => e) || []; + message.visibility = object.visibility ?? 
0; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange: MessageFns<EnumDescriptorProto_EnumReservedRange> = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.start !== undefined && message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== undefined && message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.start = reader.int32(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.end = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { + start: isSet(object.start) ? globalThis.Number(object.start) : 0, + end: isSet(object.end) ? 
globalThis.Number(object.end) : 0, + }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + if (message.start !== undefined && message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== undefined && message.end !== 0) { + obj.end = Math.round(message.end); + } + return obj; + }, + + create<I extends Exact<DeepPartial<EnumDescriptorProto_EnumReservedRange>, I>>( + base?: I, + ): EnumDescriptorProto_EnumReservedRange { + return EnumDescriptorProto_EnumReservedRange.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<EnumDescriptorProto_EnumReservedRange>, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto: MessageFns<EnumValueDescriptorProto> = { + encode(message: EnumValueDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== undefined && message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.number = reader.int32(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.options = EnumValueOptions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + number: isSet(object.number) ? globalThis.Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.number !== undefined && message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.options !== undefined) { + obj.options = EnumValueOptions.toJSON(message.options); + } + return obj; + }, + + create<I extends Exact<DeepPartial<EnumValueDescriptorProto>, I>>(base?: I): EnumValueDescriptorProto { + return EnumValueDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<EnumValueDescriptorProto>, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto: MessageFns<ServiceDescriptorProto> = { + encode(message: ServiceDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).join(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.options = ServiceOptions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + method: globalThis.Array.isArray(object?.method) + ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.method?.length) { + obj.method = message.method.map((e) => MethodDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = ServiceOptions.toJSON(message.options); + } + return obj; + }, + + create<I extends Exact<DeepPartial<ServiceDescriptorProto>, I>>(base?: I): ServiceDescriptorProto { + return ServiceDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<ServiceDescriptorProto>, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto: MessageFns<MethodDescriptorProto> = { + encode(message: MethodDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== undefined && message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== undefined && message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).join(); + } + if (message.clientStreaming !== undefined && message.clientStreaming !== false) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming !== undefined && message.serverStreaming !== false) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.inputType = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.outputType = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.options = MethodOptions.decode(reader, reader.uint32()); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.clientStreaming = reader.bool(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.serverStreaming = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + inputType: isSet(object.inputType) + ? globalThis.String(object.inputType) + : isSet(object.input_type) + ? globalThis.String(object.input_type) + : "", + outputType: isSet(object.outputType) + ? globalThis.String(object.outputType) + : isSet(object.output_type) + ? globalThis.String(object.output_type) + : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) + ? globalThis.Boolean(object.clientStreaming) + : isSet(object.client_streaming) + ? globalThis.Boolean(object.client_streaming) + : false, + serverStreaming: isSet(object.serverStreaming) + ? globalThis.Boolean(object.serverStreaming) + : isSet(object.server_streaming) + ? 
globalThis.Boolean(object.server_streaming) + : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.inputType !== undefined && message.inputType !== "") { + obj.inputType = message.inputType; + } + if (message.outputType !== undefined && message.outputType !== "") { + obj.outputType = message.outputType; + } + if (message.options !== undefined) { + obj.options = MethodOptions.toJSON(message.options); + } + if (message.clientStreaming !== undefined && message.clientStreaming !== false) { + obj.clientStreaming = message.clientStreaming; + } + if (message.serverStreaming !== undefined && message.serverStreaming !== false) { + obj.serverStreaming = message.serverStreaming; + } + return obj; + }, + + create<I extends Exact<DeepPartial<MethodDescriptorProto>, I>>(base?: I): MethodDescriptorProto { + return MethodDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<MethodDescriptorProto>, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? ""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? 
false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + deprecated: false, + ccEnableArenas: true, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + features: undefined, + uninterpretedOption: [], + }; +} + +export const FileOptions: MessageFns<FileOptions> = { + encode(message: FileOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.javaPackage !== undefined && message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== undefined && message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== undefined && message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices !== undefined && message.ccGenericServices !== false) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices !== undefined && message.javaGenericServices !== false) { + 
writer.uint32(136).bool(message.javaGenericServices); + } + if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== undefined && message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== undefined && message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(402).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.javaPackage = reader.string(); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.javaOuterClassname = reader.string(); + continue; + } + case 10: { + if (tag !== 80) { + break; + } + + message.javaMultipleFiles = reader.bool(); + continue; + } + case 20: { + if (tag !== 160) { + break; + } + + message.javaGenerateEqualsAndHash = reader.bool(); + continue; + } + case 27: { + if (tag !== 216) { + break; + } + + message.javaStringCheckUtf8 = reader.bool(); + continue; + } + case 9: { + if (tag !== 72) { + break; + } + + message.optimizeFor = reader.int32() as any; + continue; + } + case 11: { + if (tag !== 90) { + break; + } + + message.goPackage = reader.string(); + continue; + } + case 16: { + if (tag !== 128) { + break; + } + + message.ccGenericServices = reader.bool(); + continue; + } + case 17: { + if (tag !== 136) { + break; + } + + message.javaGenericServices = reader.bool(); + continue; + } + case 18: { + if (tag !== 144) { + break; + } + + message.pyGenericServices = reader.bool(); + continue; + } + case 23: { + if (tag !== 184) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 31: { + if (tag !== 248) { + break; + } + + message.ccEnableArenas = reader.bool(); + continue; + } + case 36: { + if (tag !== 290) { + break; + } + + message.objcClassPrefix = reader.string(); + continue; + } + case 37: { + if (tag !== 298) { + break; + } + + message.csharpNamespace = reader.string(); + continue; + } + case 39: { + if (tag !== 314) { + break; + } + + message.swiftPrefix = reader.string(); + continue; + } + case 40: { + if (tag !== 322) { + break; + } + + message.phpClassPrefix = reader.string(); + continue; + } + case 41: { + if (tag !== 330) { + break; + } + + message.phpNamespace = reader.string(); + 
continue; + } + case 44: { + if (tag !== 354) { + break; + } + + message.phpMetadataNamespace = reader.string(); + continue; + } + case 45: { + if (tag !== 362) { + break; + } + + message.rubyPackage = reader.string(); + continue; + } + case 50: { + if (tag !== 402) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) + ? globalThis.String(object.javaPackage) + : isSet(object.java_package) + ? globalThis.String(object.java_package) + : "", + javaOuterClassname: isSet(object.javaOuterClassname) + ? globalThis.String(object.javaOuterClassname) + : isSet(object.java_outer_classname) + ? globalThis.String(object.java_outer_classname) + : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) + ? globalThis.Boolean(object.javaMultipleFiles) + : isSet(object.java_multiple_files) + ? globalThis.Boolean(object.java_multiple_files) + : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? globalThis.Boolean(object.javaGenerateEqualsAndHash) + : isSet(object.java_generate_equals_and_hash) + ? globalThis.Boolean(object.java_generate_equals_and_hash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) + ? globalThis.Boolean(object.javaStringCheckUtf8) + : isSet(object.java_string_check_utf8) + ? globalThis.Boolean(object.java_string_check_utf8) + : false, + optimizeFor: isSet(object.optimizeFor) + ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) + : isSet(object.optimize_for) + ? fileOptions_OptimizeModeFromJSON(object.optimize_for) + : 1, + goPackage: isSet(object.goPackage) + ? 
globalThis.String(object.goPackage) + : isSet(object.go_package) + ? globalThis.String(object.go_package) + : "", + ccGenericServices: isSet(object.ccGenericServices) + ? globalThis.Boolean(object.ccGenericServices) + : isSet(object.cc_generic_services) + ? globalThis.Boolean(object.cc_generic_services) + : false, + javaGenericServices: isSet(object.javaGenericServices) + ? globalThis.Boolean(object.javaGenericServices) + : isSet(object.java_generic_services) + ? globalThis.Boolean(object.java_generic_services) + : false, + pyGenericServices: isSet(object.pyGenericServices) + ? globalThis.Boolean(object.pyGenericServices) + : isSet(object.py_generic_services) + ? globalThis.Boolean(object.py_generic_services) + : false, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) + ? globalThis.Boolean(object.ccEnableArenas) + : isSet(object.cc_enable_arenas) + ? globalThis.Boolean(object.cc_enable_arenas) + : true, + objcClassPrefix: isSet(object.objcClassPrefix) + ? globalThis.String(object.objcClassPrefix) + : isSet(object.objc_class_prefix) + ? globalThis.String(object.objc_class_prefix) + : "", + csharpNamespace: isSet(object.csharpNamespace) + ? globalThis.String(object.csharpNamespace) + : isSet(object.csharp_namespace) + ? globalThis.String(object.csharp_namespace) + : "", + swiftPrefix: isSet(object.swiftPrefix) + ? globalThis.String(object.swiftPrefix) + : isSet(object.swift_prefix) + ? globalThis.String(object.swift_prefix) + : "", + phpClassPrefix: isSet(object.phpClassPrefix) + ? globalThis.String(object.phpClassPrefix) + : isSet(object.php_class_prefix) + ? globalThis.String(object.php_class_prefix) + : "", + phpNamespace: isSet(object.phpNamespace) + ? globalThis.String(object.phpNamespace) + : isSet(object.php_namespace) + ? globalThis.String(object.php_namespace) + : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) + ? 
globalThis.String(object.phpMetadataNamespace) + : isSet(object.php_metadata_namespace) + ? globalThis.String(object.php_metadata_namespace) + : "", + rubyPackage: isSet(object.rubyPackage) + ? globalThis.String(object.rubyPackage) + : isSet(object.ruby_package) + ? globalThis.String(object.ruby_package) + : "", + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + if (message.javaPackage !== undefined && message.javaPackage !== "") { + obj.javaPackage = message.javaPackage; + } + if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") { + obj.javaOuterClassname = message.javaOuterClassname; + } + if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) { + obj.javaMultipleFiles = message.javaMultipleFiles; + } + if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) { + obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash; + } + if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) { + obj.javaStringCheckUtf8 = message.javaStringCheckUtf8; + } + if (message.optimizeFor !== undefined && message.optimizeFor !== 1) { + obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor); + } + if (message.goPackage !== undefined && message.goPackage !== "") { + obj.goPackage = message.goPackage; + } + if (message.ccGenericServices !== undefined && message.ccGenericServices !== false) { + obj.ccGenericServices = message.ccGenericServices; + } + if (message.javaGenericServices !== undefined && 
message.javaGenericServices !== false) { + obj.javaGenericServices = message.javaGenericServices; + } + if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) { + obj.pyGenericServices = message.pyGenericServices; + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) { + obj.ccEnableArenas = message.ccEnableArenas; + } + if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") { + obj.objcClassPrefix = message.objcClassPrefix; + } + if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") { + obj.csharpNamespace = message.csharpNamespace; + } + if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") { + obj.swiftPrefix = message.swiftPrefix; + } + if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") { + obj.phpClassPrefix = message.phpClassPrefix; + } + if (message.phpNamespace !== undefined && message.phpNamespace !== "") { + obj.phpNamespace = message.phpNamespace; + } + if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") { + obj.phpMetadataNamespace = message.phpMetadataNamespace; + } + if (message.rubyPackage !== undefined && message.rubyPackage !== "") { + obj.rubyPackage = message.rubyPackage; + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<FileOptions>, I>>(base?: I): FileOptions { + return FileOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FileOptions>, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? 
""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? true; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? ""; + message.features = (object.features !== undefined && object.features !== null) + ? 
FeatureSet.fromPartial(object.features) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + deprecatedLegacyJsonFieldConflicts: false, + features: undefined, + uninterpretedOption: [], + }; +} + +export const MessageOptions: MessageFns<MessageOptions> = { + encode(message: MessageOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry !== undefined && message.mapEntry !== false) { + writer.uint32(56).bool(message.mapEntry); + } + if ( + message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false + ) { + writer.uint32(88).bool(message.deprecatedLegacyJsonFieldConflicts); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(98).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.messageSetWireFormat = reader.bool(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.noStandardDescriptorAccessor = reader.bool(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 7: { + if (tag !== 56) { + break; + } + + message.mapEntry = reader.bool(); + continue; + } + case 11: { + if (tag !== 88) { + break; + } + + message.deprecatedLegacyJsonFieldConflicts = reader.bool(); + continue; + } + case 12: { + if (tag !== 98) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) + ? globalThis.Boolean(object.messageSetWireFormat) + : isSet(object.message_set_wire_format) + ? globalThis.Boolean(object.message_set_wire_format) + : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? globalThis.Boolean(object.noStandardDescriptorAccessor) + : isSet(object.no_standard_descriptor_accessor) + ? globalThis.Boolean(object.no_standard_descriptor_accessor) + : false, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) + ? globalThis.Boolean(object.mapEntry) + : isSet(object.map_entry) + ? globalThis.Boolean(object.map_entry) + : false, + deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts) + ? 
globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts) + : isSet(object.deprecated_legacy_json_field_conflicts) + ? globalThis.Boolean(object.deprecated_legacy_json_field_conflicts) + : false, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) { + obj.messageSetWireFormat = message.messageSetWireFormat; + } + if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) { + obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor; + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.mapEntry !== undefined && message.mapEntry !== false) { + obj.mapEntry = message.mapEntry; + } + if ( + message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false + ) { + obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<MessageOptions>, I>>(base?: I): MessageOptions { + return MessageOptions.fromPartial(base ?? 
({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<MessageOptions>, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? false; + message.deprecatedLegacyJsonFieldConflicts = object.deprecatedLegacyJsonFieldConflicts ?? false; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { + ctype: 0, + packed: false, + jstype: 0, + lazy: false, + unverifiedLazy: false, + deprecated: false, + weak: false, + debugRedact: false, + retention: 0, + targets: [], + editionDefaults: [], + features: undefined, + featureSupport: undefined, + uninterpretedOption: [], + }; +} + +export const FieldOptions: MessageFns<FieldOptions> = { + encode(message: FieldOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.ctype !== undefined && message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed !== undefined && message.packed !== false) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== undefined && message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy !== undefined && message.lazy !== false) { + writer.uint32(40).bool(message.lazy); + } + if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) { + writer.uint32(120).bool(message.unverifiedLazy); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak 
!== undefined && message.weak !== false) { + writer.uint32(80).bool(message.weak); + } + if (message.debugRedact !== undefined && message.debugRedact !== false) { + writer.uint32(128).bool(message.debugRedact); + } + if (message.retention !== undefined && message.retention !== 0) { + writer.uint32(136).int32(message.retention); + } + for (const v of message.targets) { + writer.uint32(152).int32(v!); + } + for (const v of message.editionDefaults) { + FieldOptions_EditionDefault.encode(v!, writer.uint32(162).fork()).join(); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(170).fork()).join(); + } + if (message.featureSupport !== undefined) { + FieldOptions_FeatureSupport.encode(message.featureSupport, writer.uint32(178).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.ctype = reader.int32() as any; + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.packed = reader.bool(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.jstype = reader.int32() as any; + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.lazy = reader.bool(); + continue; + } + case 15: { + if (tag !== 120) { + break; + } + + message.unverifiedLazy = reader.bool(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 10: { + if (tag !== 80) { + break; + } + + message.weak = reader.bool(); + continue; + } + case 16: { + if (tag !== 128) { + break; + } + + message.debugRedact = reader.bool(); + continue; + } + case 17: { + if (tag !== 136) { + break; + } + + message.retention = reader.int32() as any; + continue; + } + case 19: { + if (tag === 152) { + message.targets.push(reader.int32() as any); + + continue; + } + + if (tag === 154) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.targets.push(reader.int32() as any); + } + + continue; + } + + break; + } + case 20: { + if (tag !== 162) { + break; + } + + message.editionDefaults.push(FieldOptions_EditionDefault.decode(reader, reader.uint32())); + continue; + } + case 21: { + if (tag !== 170) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 22: { + if (tag !== 178) { + break; + } + + message.featureSupport = FieldOptions_FeatureSupport.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { 
+ break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? globalThis.Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? globalThis.Boolean(object.lazy) : false, + unverifiedLazy: isSet(object.unverifiedLazy) + ? globalThis.Boolean(object.unverifiedLazy) + : isSet(object.unverified_lazy) + ? globalThis.Boolean(object.unverified_lazy) + : false, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? globalThis.Boolean(object.weak) : false, + debugRedact: isSet(object.debugRedact) + ? globalThis.Boolean(object.debugRedact) + : isSet(object.debug_redact) + ? globalThis.Boolean(object.debug_redact) + : false, + retention: isSet(object.retention) ? fieldOptions_OptionRetentionFromJSON(object.retention) : 0, + targets: globalThis.Array.isArray(object?.targets) + ? object.targets.map((e: any) => fieldOptions_OptionTargetTypeFromJSON(e)) + : [], + editionDefaults: globalThis.Array.isArray(object?.editionDefaults) + ? object.editionDefaults.map((e: any) => FieldOptions_EditionDefault.fromJSON(e)) + : globalThis.Array.isArray(object?.edition_defaults) + ? object.edition_defaults.map((e: any) => FieldOptions_EditionDefault.fromJSON(e)) + : [], + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + featureSupport: isSet(object.featureSupport) + ? FieldOptions_FeatureSupport.fromJSON(object.featureSupport) + : isSet(object.feature_support) + ? FieldOptions_FeatureSupport.fromJSON(object.feature_support) + : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + if (message.ctype !== undefined && message.ctype !== 0) { + obj.ctype = fieldOptions_CTypeToJSON(message.ctype); + } + if (message.packed !== undefined && message.packed !== false) { + obj.packed = message.packed; + } + if (message.jstype !== undefined && message.jstype !== 0) { + obj.jstype = fieldOptions_JSTypeToJSON(message.jstype); + } + if (message.lazy !== undefined && message.lazy !== false) { + obj.lazy = message.lazy; + } + if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) { + obj.unverifiedLazy = message.unverifiedLazy; + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.weak !== undefined && message.weak !== false) { + obj.weak = message.weak; + } + if (message.debugRedact !== undefined && message.debugRedact !== false) { + obj.debugRedact = message.debugRedact; + } + if (message.retention !== undefined && message.retention !== 0) { + obj.retention = fieldOptions_OptionRetentionToJSON(message.retention); + } + if (message.targets?.length) { + obj.targets = message.targets.map((e) => fieldOptions_OptionTargetTypeToJSON(e)); + } + if (message.editionDefaults?.length) { + obj.editionDefaults = message.editionDefaults.map((e) => FieldOptions_EditionDefault.toJSON(e)); + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.featureSupport !== undefined) { + obj.featureSupport = FieldOptions_FeatureSupport.toJSON(message.featureSupport); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + 
return obj; + }, + + create<I extends Exact<DeepPartial<FieldOptions>, I>>(base?: I): FieldOptions { + return FieldOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FieldOptions>, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.unverifiedLazy = object.unverifiedLazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.debugRedact = object.debugRedact ?? false; + message.retention = object.retention ?? 0; + message.targets = object.targets?.map((e) => e) || []; + message.editionDefaults = object.editionDefaults?.map((e) => FieldOptions_EditionDefault.fromPartial(e)) || []; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.featureSupport = (object.featureSupport !== undefined && object.featureSupport !== null) + ? 
FieldOptions_FeatureSupport.fromPartial(object.featureSupport) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions_EditionDefault(): FieldOptions_EditionDefault { + return { edition: 0, value: "" }; +} + +export const FieldOptions_EditionDefault: MessageFns<FieldOptions_EditionDefault> = { + encode(message: FieldOptions_EditionDefault, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.edition !== undefined && message.edition !== 0) { + writer.uint32(24).int32(message.edition); + } + if (message.value !== undefined && message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FieldOptions_EditionDefault { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFieldOptions_EditionDefault(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 3: { + if (tag !== 24) { + break; + } + + message.edition = reader.int32() as any; + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldOptions_EditionDefault { + return { + edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0, + value: isSet(object.value) ? 
globalThis.String(object.value) : "", + }; + }, + + toJSON(message: FieldOptions_EditionDefault): unknown { + const obj: any = {}; + if (message.edition !== undefined && message.edition !== 0) { + obj.edition = editionToJSON(message.edition); + } + if (message.value !== undefined && message.value !== "") { + obj.value = message.value; + } + return obj; + }, + + create<I extends Exact<DeepPartial<FieldOptions_EditionDefault>, I>>(base?: I): FieldOptions_EditionDefault { + return FieldOptions_EditionDefault.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FieldOptions_EditionDefault>, I>>(object: I): FieldOptions_EditionDefault { + const message = createBaseFieldOptions_EditionDefault(); + message.edition = object.edition ?? 0; + message.value = object.value ?? ""; + return message; + }, +}; + +function createBaseFieldOptions_FeatureSupport(): FieldOptions_FeatureSupport { + return { editionIntroduced: 0, editionDeprecated: 0, deprecationWarning: "", editionRemoved: 0 }; +} + +export const FieldOptions_FeatureSupport: MessageFns<FieldOptions_FeatureSupport> = { + encode(message: FieldOptions_FeatureSupport, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) { + writer.uint32(8).int32(message.editionIntroduced); + } + if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) { + writer.uint32(16).int32(message.editionDeprecated); + } + if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") { + writer.uint32(26).string(message.deprecationWarning); + } + if (message.editionRemoved !== undefined && message.editionRemoved !== 0) { + writer.uint32(32).int32(message.editionRemoved); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FieldOptions_FeatureSupport { + const reader = input instanceof BinaryReader ? 
input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFieldOptions_FeatureSupport(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.editionIntroduced = reader.int32() as any; + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.editionDeprecated = reader.int32() as any; + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.deprecationWarning = reader.string(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.editionRemoved = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldOptions_FeatureSupport { + return { + editionIntroduced: isSet(object.editionIntroduced) + ? editionFromJSON(object.editionIntroduced) + : isSet(object.edition_introduced) + ? editionFromJSON(object.edition_introduced) + : 0, + editionDeprecated: isSet(object.editionDeprecated) + ? editionFromJSON(object.editionDeprecated) + : isSet(object.edition_deprecated) + ? editionFromJSON(object.edition_deprecated) + : 0, + deprecationWarning: isSet(object.deprecationWarning) + ? globalThis.String(object.deprecationWarning) + : isSet(object.deprecation_warning) + ? globalThis.String(object.deprecation_warning) + : "", + editionRemoved: isSet(object.editionRemoved) + ? editionFromJSON(object.editionRemoved) + : isSet(object.edition_removed) + ? 
editionFromJSON(object.edition_removed) + : 0, + }; + }, + + toJSON(message: FieldOptions_FeatureSupport): unknown { + const obj: any = {}; + if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) { + obj.editionIntroduced = editionToJSON(message.editionIntroduced); + } + if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) { + obj.editionDeprecated = editionToJSON(message.editionDeprecated); + } + if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") { + obj.deprecationWarning = message.deprecationWarning; + } + if (message.editionRemoved !== undefined && message.editionRemoved !== 0) { + obj.editionRemoved = editionToJSON(message.editionRemoved); + } + return obj; + }, + + create<I extends Exact<DeepPartial<FieldOptions_FeatureSupport>, I>>(base?: I): FieldOptions_FeatureSupport { + return FieldOptions_FeatureSupport.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FieldOptions_FeatureSupport>, I>>(object: I): FieldOptions_FeatureSupport { + const message = createBaseFieldOptions_FeatureSupport(); + message.editionIntroduced = object.editionIntroduced ?? 0; + message.editionDeprecated = object.editionDeprecated ?? 0; + message.deprecationWarning = object.deprecationWarning ?? ""; + message.editionRemoved = object.editionRemoved ?? 
0; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { features: undefined, uninterpretedOption: [] }; +} + +export const OneofOptions: MessageFns<OneofOptions> = { + encode(message: OneofOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(10).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? 
object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<OneofOptions>, I>>(base?: I): OneofOptions { + return OneofOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<OneofOptions>, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { + allowAlias: false, + deprecated: false, + deprecatedLegacyJsonFieldConflicts: false, + features: undefined, + uninterpretedOption: [], + }; +} + +export const EnumOptions: MessageFns<EnumOptions> = { + encode(message: EnumOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.allowAlias !== undefined && message.allowAlias !== false) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(24).bool(message.deprecated); + } + if ( + message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false + ) { + writer.uint32(48).bool(message.deprecatedLegacyJsonFieldConflicts); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(58).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, 
writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: { + if (tag !== 16) { + break; + } + + message.allowAlias = reader.bool(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.deprecatedLegacyJsonFieldConflicts = reader.bool(); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) + ? globalThis.Boolean(object.allowAlias) + : isSet(object.allow_alias) + ? globalThis.Boolean(object.allow_alias) + : false, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts) + ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts) + : isSet(object.deprecated_legacy_json_field_conflicts) + ? globalThis.Boolean(object.deprecated_legacy_json_field_conflicts) + : false, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + if (message.allowAlias !== undefined && message.allowAlias !== false) { + obj.allowAlias = message.allowAlias; + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if ( + message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false + ) { + obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<EnumOptions>, I>>(base?: I): EnumOptions { + return EnumOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<EnumOptions>, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.deprecatedLegacyJsonFieldConflicts = object.deprecatedLegacyJsonFieldConflicts ?? false; + message.features = (object.features !== undefined && object.features !== null) + ? 
FeatureSet.fromPartial(object.features) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { + deprecated: false, + features: undefined, + debugRedact: false, + featureSupport: undefined, + uninterpretedOption: [], + }; +} + +export const EnumValueOptions: MessageFns<EnumValueOptions> = { + encode(message: EnumValueOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(8).bool(message.deprecated); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(18).fork()).join(); + } + if (message.debugRedact !== undefined && message.debugRedact !== false) { + writer.uint32(24).bool(message.debugRedact); + } + if (message.featureSupport !== undefined) { + FieldOptions_FeatureSupport.encode(message.featureSupport, writer.uint32(34).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.debugRedact = reader.bool(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.featureSupport = FieldOptions_FeatureSupport.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + debugRedact: isSet(object.debugRedact) + ? globalThis.Boolean(object.debugRedact) + : isSet(object.debug_redact) + ? globalThis.Boolean(object.debug_redact) + : false, + featureSupport: isSet(object.featureSupport) + ? FieldOptions_FeatureSupport.fromJSON(object.featureSupport) + : isSet(object.feature_support) + ? FieldOptions_FeatureSupport.fromJSON(object.feature_support) + : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? 
object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.debugRedact !== undefined && message.debugRedact !== false) { + obj.debugRedact = message.debugRedact; + } + if (message.featureSupport !== undefined) { + obj.featureSupport = FieldOptions_FeatureSupport.toJSON(message.featureSupport); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<EnumValueOptions>, I>>(base?: I): EnumValueOptions { + return EnumValueOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<EnumValueOptions>, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.debugRedact = object.debugRedact ?? false; + message.featureSupport = (object.featureSupport !== undefined && object.featureSupport !== null) + ? 
FieldOptions_FeatureSupport.fromPartial(object.featureSupport) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { features: undefined, deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions: MessageFns<ServiceOptions> = { + encode(message: ServiceOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(274).fork()).join(); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 34: { + if (tag !== 274) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 33: { + if (tag !== 264) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + deprecated: isSet(object.deprecated) ? 
globalThis.Boolean(object.deprecated) : false, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<ServiceOptions>, I>>(base?: I): ServiceOptions { + return ServiceOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<ServiceOptions>, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, features: undefined, uninterpretedOption: [] }; +} + +export const MethodOptions: MessageFns<MethodOptions> = { + encode(message: MethodOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(282).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: { + if (tag !== 264) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 34: { + if (tag !== 272) { + break; + } + + message.idempotencyLevel = reader.int32() as any; + continue; + } + case 35: { + if (tag !== 282) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : isSet(object.idempotency_level) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotency_level) + : 0, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? 
object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) { + obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel); + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<MethodOptions>, I>>(base?: I): MethodOptions { + return MethodOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<MethodOptions>, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.features = (object.features !== undefined && object.features !== null) + ? 
FeatureSet.fromPartial(object.features) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(0), + aggregateValue: "", + }; +} + +export const UninterpretedOption: MessageFns<UninterpretedOption> = { + encode(message: UninterpretedOption, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).join(); + } + if (message.identifierValue !== undefined && message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== undefined && message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== undefined && message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== undefined && message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue !== undefined && message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== undefined && message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: { + if (tag !== 18) { + break; + } + + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.identifierValue = reader.string(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.positiveIntValue = longToNumber(reader.uint64()); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.negativeIntValue = longToNumber(reader.int64()); + continue; + } + case 6: { + if (tag !== 49) { + break; + } + + message.doubleValue = reader.double(); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.stringValue = reader.bytes(); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.aggregateValue = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: globalThis.Array.isArray(object?.name) + ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) + : [], + identifierValue: isSet(object.identifierValue) + ? globalThis.String(object.identifierValue) + : isSet(object.identifier_value) + ? globalThis.String(object.identifier_value) + : "", + positiveIntValue: isSet(object.positiveIntValue) + ? globalThis.Number(object.positiveIntValue) + : isSet(object.positive_int_value) + ? globalThis.Number(object.positive_int_value) + : 0, + negativeIntValue: isSet(object.negativeIntValue) + ? globalThis.Number(object.negativeIntValue) + : isSet(object.negative_int_value) + ? globalThis.Number(object.negative_int_value) + : 0, + doubleValue: isSet(object.doubleValue) + ? globalThis.Number(object.doubleValue) + : isSet(object.double_value) + ? 
globalThis.Number(object.double_value) + : 0, + stringValue: isSet(object.stringValue) + ? bytesFromBase64(object.stringValue) + : isSet(object.string_value) + ? bytesFromBase64(object.string_value) + : new Uint8Array(0), + aggregateValue: isSet(object.aggregateValue) + ? globalThis.String(object.aggregateValue) + : isSet(object.aggregate_value) + ? globalThis.String(object.aggregate_value) + : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name?.length) { + obj.name = message.name.map((e) => UninterpretedOption_NamePart.toJSON(e)); + } + if (message.identifierValue !== undefined && message.identifierValue !== "") { + obj.identifierValue = message.identifierValue; + } + if (message.positiveIntValue !== undefined && message.positiveIntValue !== 0) { + obj.positiveIntValue = Math.round(message.positiveIntValue); + } + if (message.negativeIntValue !== undefined && message.negativeIntValue !== 0) { + obj.negativeIntValue = Math.round(message.negativeIntValue); + } + if (message.doubleValue !== undefined && message.doubleValue !== 0) { + obj.doubleValue = message.doubleValue; + } + if (message.stringValue !== undefined && message.stringValue.length !== 0) { + obj.stringValue = base64FromBytes(message.stringValue); + } + if (message.aggregateValue !== undefined && message.aggregateValue !== "") { + obj.aggregateValue = message.aggregateValue; + } + return obj; + }, + + create<I extends Exact<DeepPartial<UninterpretedOption>, I>>(base?: I): UninterpretedOption { + return UninterpretedOption.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<UninterpretedOption>, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 
0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(0); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart: MessageFns<UninterpretedOption_NamePart> = { + encode(message: UninterpretedOption_NamePart, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension !== false) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.namePart = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.isExtension = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) + ? globalThis.String(object.namePart) + : isSet(object.name_part) + ? globalThis.String(object.name_part) + : "", + isExtension: isSet(object.isExtension) + ? globalThis.Boolean(object.isExtension) + : isSet(object.is_extension) + ? 
globalThis.Boolean(object.is_extension) + : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + if (message.namePart !== "") { + obj.namePart = message.namePart; + } + if (message.isExtension !== false) { + obj.isExtension = message.isExtension; + } + return obj; + }, + + create<I extends Exact<DeepPartial<UninterpretedOption_NamePart>, I>>(base?: I): UninterpretedOption_NamePart { + return UninterpretedOption_NamePart.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<UninterpretedOption_NamePart>, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseFeatureSet(): FeatureSet { + return { + fieldPresence: 0, + enumType: 0, + repeatedFieldEncoding: 0, + utf8Validation: 0, + messageEncoding: 0, + jsonFormat: 0, + enforceNamingStyle: 0, + defaultSymbolVisibility: 0, + }; +} + +export const FeatureSet: MessageFns<FeatureSet> = { + encode(message: FeatureSet, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.fieldPresence !== undefined && message.fieldPresence !== 0) { + writer.uint32(8).int32(message.fieldPresence); + } + if (message.enumType !== undefined && message.enumType !== 0) { + writer.uint32(16).int32(message.enumType); + } + if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) { + writer.uint32(24).int32(message.repeatedFieldEncoding); + } + if (message.utf8Validation !== undefined && message.utf8Validation !== 0) { + writer.uint32(32).int32(message.utf8Validation); + } + if (message.messageEncoding !== undefined && message.messageEncoding !== 0) { + writer.uint32(40).int32(message.messageEncoding); + } + if (message.jsonFormat !== undefined && message.jsonFormat !== 0) { + writer.uint32(48).int32(message.jsonFormat); + } + if 
(message.enforceNamingStyle !== undefined && message.enforceNamingStyle !== 0) { + writer.uint32(56).int32(message.enforceNamingStyle); + } + if (message.defaultSymbolVisibility !== undefined && message.defaultSymbolVisibility !== 0) { + writer.uint32(64).int32(message.defaultSymbolVisibility); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FeatureSet { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFeatureSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.fieldPresence = reader.int32() as any; + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.enumType = reader.int32() as any; + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.repeatedFieldEncoding = reader.int32() as any; + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.utf8Validation = reader.int32() as any; + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.messageEncoding = reader.int32() as any; + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.jsonFormat = reader.int32() as any; + continue; + } + case 7: { + if (tag !== 56) { + break; + } + + message.enforceNamingStyle = reader.int32() as any; + continue; + } + case 8: { + if (tag !== 64) { + break; + } + + message.defaultSymbolVisibility = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FeatureSet { + return { + fieldPresence: isSet(object.fieldPresence) + ? featureSet_FieldPresenceFromJSON(object.fieldPresence) + : isSet(object.field_presence) + ? featureSet_FieldPresenceFromJSON(object.field_presence) + : 0, + enumType: isSet(object.enumType) + ? 
featureSet_EnumTypeFromJSON(object.enumType) + : isSet(object.enum_type) + ? featureSet_EnumTypeFromJSON(object.enum_type) + : 0, + repeatedFieldEncoding: isSet(object.repeatedFieldEncoding) + ? featureSet_RepeatedFieldEncodingFromJSON(object.repeatedFieldEncoding) + : isSet(object.repeated_field_encoding) + ? featureSet_RepeatedFieldEncodingFromJSON(object.repeated_field_encoding) + : 0, + utf8Validation: isSet(object.utf8Validation) + ? featureSet_Utf8ValidationFromJSON(object.utf8Validation) + : isSet(object.utf8_validation) + ? featureSet_Utf8ValidationFromJSON(object.utf8_validation) + : 0, + messageEncoding: isSet(object.messageEncoding) + ? featureSet_MessageEncodingFromJSON(object.messageEncoding) + : isSet(object.message_encoding) + ? featureSet_MessageEncodingFromJSON(object.message_encoding) + : 0, + jsonFormat: isSet(object.jsonFormat) + ? featureSet_JsonFormatFromJSON(object.jsonFormat) + : isSet(object.json_format) + ? featureSet_JsonFormatFromJSON(object.json_format) + : 0, + enforceNamingStyle: isSet(object.enforceNamingStyle) + ? featureSet_EnforceNamingStyleFromJSON(object.enforceNamingStyle) + : isSet(object.enforce_naming_style) + ? featureSet_EnforceNamingStyleFromJSON(object.enforce_naming_style) + : 0, + defaultSymbolVisibility: isSet(object.defaultSymbolVisibility) + ? featureSet_VisibilityFeature_DefaultSymbolVisibilityFromJSON(object.defaultSymbolVisibility) + : isSet(object.default_symbol_visibility) + ? 
featureSet_VisibilityFeature_DefaultSymbolVisibilityFromJSON(object.default_symbol_visibility) + : 0, + }; + }, + + toJSON(message: FeatureSet): unknown { + const obj: any = {}; + if (message.fieldPresence !== undefined && message.fieldPresence !== 0) { + obj.fieldPresence = featureSet_FieldPresenceToJSON(message.fieldPresence); + } + if (message.enumType !== undefined && message.enumType !== 0) { + obj.enumType = featureSet_EnumTypeToJSON(message.enumType); + } + if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) { + obj.repeatedFieldEncoding = featureSet_RepeatedFieldEncodingToJSON(message.repeatedFieldEncoding); + } + if (message.utf8Validation !== undefined && message.utf8Validation !== 0) { + obj.utf8Validation = featureSet_Utf8ValidationToJSON(message.utf8Validation); + } + if (message.messageEncoding !== undefined && message.messageEncoding !== 0) { + obj.messageEncoding = featureSet_MessageEncodingToJSON(message.messageEncoding); + } + if (message.jsonFormat !== undefined && message.jsonFormat !== 0) { + obj.jsonFormat = featureSet_JsonFormatToJSON(message.jsonFormat); + } + if (message.enforceNamingStyle !== undefined && message.enforceNamingStyle !== 0) { + obj.enforceNamingStyle = featureSet_EnforceNamingStyleToJSON(message.enforceNamingStyle); + } + if (message.defaultSymbolVisibility !== undefined && message.defaultSymbolVisibility !== 0) { + obj.defaultSymbolVisibility = featureSet_VisibilityFeature_DefaultSymbolVisibilityToJSON( + message.defaultSymbolVisibility, + ); + } + return obj; + }, + + create<I extends Exact<DeepPartial<FeatureSet>, I>>(base?: I): FeatureSet { + return FeatureSet.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FeatureSet>, I>>(object: I): FeatureSet { + const message = createBaseFeatureSet(); + message.fieldPresence = object.fieldPresence ?? 0; + message.enumType = object.enumType ?? 0; + message.repeatedFieldEncoding = object.repeatedFieldEncoding ?? 
0; + message.utf8Validation = object.utf8Validation ?? 0; + message.messageEncoding = object.messageEncoding ?? 0; + message.jsonFormat = object.jsonFormat ?? 0; + message.enforceNamingStyle = object.enforceNamingStyle ?? 0; + message.defaultSymbolVisibility = object.defaultSymbolVisibility ?? 0; + return message; + }, +}; + +function createBaseFeatureSet_VisibilityFeature(): FeatureSet_VisibilityFeature { + return {}; +} + +export const FeatureSet_VisibilityFeature: MessageFns<FeatureSet_VisibilityFeature> = { + encode(_: FeatureSet_VisibilityFeature, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FeatureSet_VisibilityFeature { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFeatureSet_VisibilityFeature(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(_: any): FeatureSet_VisibilityFeature { + return {}; + }, + + toJSON(_: FeatureSet_VisibilityFeature): unknown { + const obj: any = {}; + return obj; + }, + + create<I extends Exact<DeepPartial<FeatureSet_VisibilityFeature>, I>>(base?: I): FeatureSet_VisibilityFeature { + return FeatureSet_VisibilityFeature.fromPartial(base ?? 
({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FeatureSet_VisibilityFeature>, I>>(_: I): FeatureSet_VisibilityFeature { + const message = createBaseFeatureSet_VisibilityFeature(); + return message; + }, +}; + +function createBaseFeatureSetDefaults(): FeatureSetDefaults { + return { defaults: [], minimumEdition: 0, maximumEdition: 0 }; +} + +export const FeatureSetDefaults: MessageFns<FeatureSetDefaults> = { + encode(message: FeatureSetDefaults, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.defaults) { + FeatureSetDefaults_FeatureSetEditionDefault.encode(v!, writer.uint32(10).fork()).join(); + } + if (message.minimumEdition !== undefined && message.minimumEdition !== 0) { + writer.uint32(32).int32(message.minimumEdition); + } + if (message.maximumEdition !== undefined && message.maximumEdition !== 0) { + writer.uint32(40).int32(message.maximumEdition); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FeatureSetDefaults { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFeatureSetDefaults(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.defaults.push(FeatureSetDefaults_FeatureSetEditionDefault.decode(reader, reader.uint32())); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.minimumEdition = reader.int32() as any; + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.maximumEdition = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FeatureSetDefaults { + return { + defaults: globalThis.Array.isArray(object?.defaults) + ? 
object.defaults.map((e: any) => FeatureSetDefaults_FeatureSetEditionDefault.fromJSON(e)) + : [], + minimumEdition: isSet(object.minimumEdition) + ? editionFromJSON(object.minimumEdition) + : isSet(object.minimum_edition) + ? editionFromJSON(object.minimum_edition) + : 0, + maximumEdition: isSet(object.maximumEdition) + ? editionFromJSON(object.maximumEdition) + : isSet(object.maximum_edition) + ? editionFromJSON(object.maximum_edition) + : 0, + }; + }, + + toJSON(message: FeatureSetDefaults): unknown { + const obj: any = {}; + if (message.defaults?.length) { + obj.defaults = message.defaults.map((e) => FeatureSetDefaults_FeatureSetEditionDefault.toJSON(e)); + } + if (message.minimumEdition !== undefined && message.minimumEdition !== 0) { + obj.minimumEdition = editionToJSON(message.minimumEdition); + } + if (message.maximumEdition !== undefined && message.maximumEdition !== 0) { + obj.maximumEdition = editionToJSON(message.maximumEdition); + } + return obj; + }, + + create<I extends Exact<DeepPartial<FeatureSetDefaults>, I>>(base?: I): FeatureSetDefaults { + return FeatureSetDefaults.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FeatureSetDefaults>, I>>(object: I): FeatureSetDefaults { + const message = createBaseFeatureSetDefaults(); + message.defaults = object.defaults?.map((e) => FeatureSetDefaults_FeatureSetEditionDefault.fromPartial(e)) || []; + message.minimumEdition = object.minimumEdition ?? 0; + message.maximumEdition = object.maximumEdition ?? 
0; + return message; + }, +}; + +function createBaseFeatureSetDefaults_FeatureSetEditionDefault(): FeatureSetDefaults_FeatureSetEditionDefault { + return { edition: 0, overridableFeatures: undefined, fixedFeatures: undefined }; +} + +export const FeatureSetDefaults_FeatureSetEditionDefault: MessageFns<FeatureSetDefaults_FeatureSetEditionDefault> = { + encode( + message: FeatureSetDefaults_FeatureSetEditionDefault, + writer: BinaryWriter = new BinaryWriter(), + ): BinaryWriter { + if (message.edition !== undefined && message.edition !== 0) { + writer.uint32(24).int32(message.edition); + } + if (message.overridableFeatures !== undefined) { + FeatureSet.encode(message.overridableFeatures, writer.uint32(34).fork()).join(); + } + if (message.fixedFeatures !== undefined) { + FeatureSet.encode(message.fixedFeatures, writer.uint32(42).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FeatureSetDefaults_FeatureSetEditionDefault { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFeatureSetDefaults_FeatureSetEditionDefault(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 3: { + if (tag !== 24) { + break; + } + + message.edition = reader.int32() as any; + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.overridableFeatures = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.fixedFeatures = FeatureSet.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FeatureSetDefaults_FeatureSetEditionDefault { + return { + edition: isSet(object.edition) ? 
editionFromJSON(object.edition) : 0, + overridableFeatures: isSet(object.overridableFeatures) + ? FeatureSet.fromJSON(object.overridableFeatures) + : isSet(object.overridable_features) + ? FeatureSet.fromJSON(object.overridable_features) + : undefined, + fixedFeatures: isSet(object.fixedFeatures) + ? FeatureSet.fromJSON(object.fixedFeatures) + : isSet(object.fixed_features) + ? FeatureSet.fromJSON(object.fixed_features) + : undefined, + }; + }, + + toJSON(message: FeatureSetDefaults_FeatureSetEditionDefault): unknown { + const obj: any = {}; + if (message.edition !== undefined && message.edition !== 0) { + obj.edition = editionToJSON(message.edition); + } + if (message.overridableFeatures !== undefined) { + obj.overridableFeatures = FeatureSet.toJSON(message.overridableFeatures); + } + if (message.fixedFeatures !== undefined) { + obj.fixedFeatures = FeatureSet.toJSON(message.fixedFeatures); + } + return obj; + }, + + create<I extends Exact<DeepPartial<FeatureSetDefaults_FeatureSetEditionDefault>, I>>( + base?: I, + ): FeatureSetDefaults_FeatureSetEditionDefault { + return FeatureSetDefaults_FeatureSetEditionDefault.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FeatureSetDefaults_FeatureSetEditionDefault>, I>>( + object: I, + ): FeatureSetDefaults_FeatureSetEditionDefault { + const message = createBaseFeatureSetDefaults_FeatureSetEditionDefault(); + message.edition = object.edition ?? 0; + message.overridableFeatures = (object.overridableFeatures !== undefined && object.overridableFeatures !== null) + ? FeatureSet.fromPartial(object.overridableFeatures) + : undefined; + message.fixedFeatures = (object.fixedFeatures !== undefined && object.fixedFeatures !== null) + ? 
FeatureSet.fromPartial(object.fixedFeatures) + : undefined; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo: MessageFns<SourceCodeInfo> = { + encode(message: SourceCodeInfo, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: globalThis.Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location?.length) { + obj.location = message.location.map((e) => SourceCodeInfo_Location.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<SourceCodeInfo>, I>>(base?: I): SourceCodeInfo { + return SourceCodeInfo.fromPartial(base ?? 
({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<SourceCodeInfo>, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location: MessageFns<SourceCodeInfo_Location> = { + encode(message: SourceCodeInfo_Location, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.join(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.join(); + if (message.leadingComments !== undefined && message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== undefined && message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag === 8) { + message.path.push(reader.int32()); + + continue; + } + + if (tag === 10) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + + continue; + } + + break; + } + case 2: { + if (tag === 16) { + message.span.push(reader.int32()); + + continue; + } + + if (tag === 18) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + + continue; + } + + break; + } + case 3: { + if (tag !== 26) { + break; + } + + message.leadingComments = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.trailingComments = reader.string(); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.leadingDetachedComments.push(reader.string()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: globalThis.Array.isArray(object?.path) ? object.path.map((e: any) => globalThis.Number(e)) : [], + span: globalThis.Array.isArray(object?.span) ? object.span.map((e: any) => globalThis.Number(e)) : [], + leadingComments: isSet(object.leadingComments) + ? globalThis.String(object.leadingComments) + : isSet(object.leading_comments) + ? globalThis.String(object.leading_comments) + : "", + trailingComments: isSet(object.trailingComments) + ? globalThis.String(object.trailingComments) + : isSet(object.trailing_comments) + ? globalThis.String(object.trailing_comments) + : "", + leadingDetachedComments: globalThis.Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => globalThis.String(e)) + : globalThis.Array.isArray(object?.leading_detached_comments) + ? object.leading_detached_comments.map((e: any) => globalThis.String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path?.length) { + obj.path = message.path.map((e) => Math.round(e)); + } + if (message.span?.length) { + obj.span = message.span.map((e) => Math.round(e)); + } + if (message.leadingComments !== undefined && message.leadingComments !== "") { + obj.leadingComments = message.leadingComments; + } + if (message.trailingComments !== undefined && message.trailingComments !== "") { + obj.trailingComments = message.trailingComments; + } + if (message.leadingDetachedComments?.length) { + obj.leadingDetachedComments = message.leadingDetachedComments; + } + return obj; + }, + + create<I extends Exact<DeepPartial<SourceCodeInfo_Location>, I>>(base?: I): SourceCodeInfo_Location { + return SourceCodeInfo_Location.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<SourceCodeInfo_Location>, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? 
""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo: MessageFns<GeneratedCodeInfo> = { + encode(message: GeneratedCodeInfo, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: globalThis.Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation?.length) { + obj.annotation = message.annotation.map((e) => GeneratedCodeInfo_Annotation.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<GeneratedCodeInfo>, I>>(base?: I): GeneratedCodeInfo { + return GeneratedCodeInfo.fromPartial(base ?? 
({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<GeneratedCodeInfo>, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0, semantic: 0 }; +} + +export const GeneratedCodeInfo_Annotation: MessageFns<GeneratedCodeInfo_Annotation> = { + encode(message: GeneratedCodeInfo_Annotation, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.join(); + if (message.sourceFile !== undefined && message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== undefined && message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== undefined && message.end !== 0) { + writer.uint32(32).int32(message.end); + } + if (message.semantic !== undefined && message.semantic !== 0) { + writer.uint32(40).int32(message.semantic); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag === 8) { + message.path.push(reader.int32()); + + continue; + } + + if (tag === 10) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + + continue; + } + + break; + } + case 2: { + if (tag !== 18) { + break; + } + + message.sourceFile = reader.string(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.begin = reader.int32(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.end = reader.int32(); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.semantic = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: globalThis.Array.isArray(object?.path) ? object.path.map((e: any) => globalThis.Number(e)) : [], + sourceFile: isSet(object.sourceFile) + ? globalThis.String(object.sourceFile) + : isSet(object.source_file) + ? globalThis.String(object.source_file) + : "", + begin: isSet(object.begin) ? globalThis.Number(object.begin) : 0, + end: isSet(object.end) ? globalThis.Number(object.end) : 0, + semantic: isSet(object.semantic) ? 
generatedCodeInfo_Annotation_SemanticFromJSON(object.semantic) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path?.length) { + obj.path = message.path.map((e) => Math.round(e)); + } + if (message.sourceFile !== undefined && message.sourceFile !== "") { + obj.sourceFile = message.sourceFile; + } + if (message.begin !== undefined && message.begin !== 0) { + obj.begin = Math.round(message.begin); + } + if (message.end !== undefined && message.end !== 0) { + obj.end = Math.round(message.end); + } + if (message.semantic !== undefined && message.semantic !== 0) { + obj.semantic = generatedCodeInfo_Annotation_SemanticToJSON(message.semantic); + } + return obj; + }, + + create<I extends Exact<DeepPartial<GeneratedCodeInfo_Annotation>, I>>(base?: I): GeneratedCodeInfo_Annotation { + return GeneratedCodeInfo_Annotation.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<GeneratedCodeInfo_Annotation>, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 0; + message.semantic = object.semantic ?? 
0; + return message; + }, +}; + +function bytesFromBase64(b64: string): Uint8Array { + if ((globalThis as any).Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if ((globalThis as any).Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(globalThis.String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial<T> = T extends Builtin ? T + : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>> + : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>> + : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> } + : Partial<T>; + +type KeysOfUnion<T> = T extends T ? keyof T : never; +export type Exact<P, I extends P> = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never }; + +function longToNumber(int64: { toString(): string }): number { + const num = globalThis.Number(int64.toString()); + if (num > globalThis.Number.MAX_SAFE_INTEGER) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + if (num < globalThis.Number.MIN_SAFE_INTEGER) { + throw new globalThis.Error("Value is smaller than Number.MIN_SAFE_INTEGER"); + } + return num; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns<T> { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create<I extends Exact<DeepPartial<T>, I>>(base?: I): T; + fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T; +} diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/ignite/planet/mars/mars.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/ignite/planet/mars/mars.ts new file mode 100644 index 0000000..b909076 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/ignite/planet/mars/mars.ts @@ -0,0 +1,1203 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.11.2 +// protoc unknown +// source: ignite/planet/mars/mars.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; +import { PageRequest, PageResponse } from "../../../cosmos/base/query/v1beta1/pagination"; + +export const protobufPackage = "ignite.planet.mars"; + +export interface MsgMyMessageRequest { + mytypefield: string; +} + +export interface MsgMyMessageResponse { + mytypefield: string; +} + +export interface MsgBarRequest { + mytypefield: string; +} + +export interface MsgBarResponse { + mytypefield: string; +} + +export interface QuerySimpleRequest { +} + +export interface QuerySimpleResponse { + bar: string; +} + +export interface QuerySimpleParamsRequest { + mytypefield: string; +} + +export interface QuerySimpleParamsResponse { + bar: string; +} + +export interface QueryWithPaginationRequest { + mytypefield: string; + pagination: PageRequest | undefined; +} + +export interface QueryWithPaginationResponse { + pagination: PageResponse | undefined; +} + +export interface QueryWithQueryParamsRequest { + mytypefield: string; + queryParam: string; + mybool: boolean; + myrepeatedbool: boolean[]; +} + +export interface QueryWithQueryParamsResponse { + bar: string; +} + +export interface QueryWithQueryParamsWithPaginationRequest { + mytypefield: string; + queryParam: string; + pagination: PageRequest | undefined; +} + +export interface QueryWithQueryParamsWithPaginationResponse { + bar: string; + pagination: PageResponse | undefined; +} + +export interface AnotherType { + mytypefield: string; +} + +function createBaseMsgMyMessageRequest(): MsgMyMessageRequest { + return { mytypefield: "" }; +} + +export const MsgMyMessageRequest: MessageFns<MsgMyMessageRequest> = { + encode(message: MsgMyMessageRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + return writer; + }, + + 
decode(input: BinaryReader | Uint8Array, length?: number): MsgMyMessageRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgMyMessageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MsgMyMessageRequest { + return { mytypefield: isSet(object.mytypefield) ? globalThis.String(object.mytypefield) : "" }; + }, + + toJSON(message: MsgMyMessageRequest): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + return obj; + }, + + create<I extends Exact<DeepPartial<MsgMyMessageRequest>, I>>(base?: I): MsgMyMessageRequest { + return MsgMyMessageRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<MsgMyMessageRequest>, I>>(object: I): MsgMyMessageRequest { + const message = createBaseMsgMyMessageRequest(); + message.mytypefield = object.mytypefield ?? ""; + return message; + }, +}; + +function createBaseMsgMyMessageResponse(): MsgMyMessageResponse { + return { mytypefield: "" }; +} + +export const MsgMyMessageResponse: MessageFns<MsgMyMessageResponse> = { + encode(message: MsgMyMessageResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MsgMyMessageResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgMyMessageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MsgMyMessageResponse { + return { mytypefield: isSet(object.mytypefield) ? globalThis.String(object.mytypefield) : "" }; + }, + + toJSON(message: MsgMyMessageResponse): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + return obj; + }, + + create<I extends Exact<DeepPartial<MsgMyMessageResponse>, I>>(base?: I): MsgMyMessageResponse { + return MsgMyMessageResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<MsgMyMessageResponse>, I>>(object: I): MsgMyMessageResponse { + const message = createBaseMsgMyMessageResponse(); + message.mytypefield = object.mytypefield ?? ""; + return message; + }, +}; + +function createBaseMsgBarRequest(): MsgBarRequest { + return { mytypefield: "" }; +} + +export const MsgBarRequest: MessageFns<MsgBarRequest> = { + encode(message: MsgBarRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MsgBarRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgBarRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MsgBarRequest { + return { mytypefield: isSet(object.mytypefield) ? globalThis.String(object.mytypefield) : "" }; + }, + + toJSON(message: MsgBarRequest): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + return obj; + }, + + create<I extends Exact<DeepPartial<MsgBarRequest>, I>>(base?: I): MsgBarRequest { + return MsgBarRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<MsgBarRequest>, I>>(object: I): MsgBarRequest { + const message = createBaseMsgBarRequest(); + message.mytypefield = object.mytypefield ?? ""; + return message; + }, +}; + +function createBaseMsgBarResponse(): MsgBarResponse { + return { mytypefield: "" }; +} + +export const MsgBarResponse: MessageFns<MsgBarResponse> = { + encode(message: MsgBarResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MsgBarResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgBarResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MsgBarResponse { + return { mytypefield: isSet(object.mytypefield) ? globalThis.String(object.mytypefield) : "" }; + }, + + toJSON(message: MsgBarResponse): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + return obj; + }, + + create<I extends Exact<DeepPartial<MsgBarResponse>, I>>(base?: I): MsgBarResponse { + return MsgBarResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<MsgBarResponse>, I>>(object: I): MsgBarResponse { + const message = createBaseMsgBarResponse(); + message.mytypefield = object.mytypefield ?? ""; + return message; + }, +}; + +function createBaseQuerySimpleRequest(): QuerySimpleRequest { + return {}; +} + +export const QuerySimpleRequest: MessageFns<QuerySimpleRequest> = { + encode(_: QuerySimpleRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QuerySimpleRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQuerySimpleRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(_: any): QuerySimpleRequest { + return {}; + }, + + toJSON(_: QuerySimpleRequest): unknown { + const obj: any = {}; + return obj; + }, + + create<I extends Exact<DeepPartial<QuerySimpleRequest>, I>>(base?: I): QuerySimpleRequest { + return QuerySimpleRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QuerySimpleRequest>, I>>(_: I): QuerySimpleRequest { + const message = createBaseQuerySimpleRequest(); + return message; + }, +}; + +function createBaseQuerySimpleResponse(): QuerySimpleResponse { + return { bar: "" }; +} + +export const QuerySimpleResponse: MessageFns<QuerySimpleResponse> = { + encode(message: QuerySimpleResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.bar !== "") { + writer.uint32(10).string(message.bar); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QuerySimpleResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySimpleResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.bar = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QuerySimpleResponse { + return { bar: isSet(object.bar) ? 
globalThis.String(object.bar) : "" }; + }, + + toJSON(message: QuerySimpleResponse): unknown { + const obj: any = {}; + if (message.bar !== "") { + obj.bar = message.bar; + } + return obj; + }, + + create<I extends Exact<DeepPartial<QuerySimpleResponse>, I>>(base?: I): QuerySimpleResponse { + return QuerySimpleResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QuerySimpleResponse>, I>>(object: I): QuerySimpleResponse { + const message = createBaseQuerySimpleResponse(); + message.bar = object.bar ?? ""; + return message; + }, +}; + +function createBaseQuerySimpleParamsRequest(): QuerySimpleParamsRequest { + return { mytypefield: "" }; +} + +export const QuerySimpleParamsRequest: MessageFns<QuerySimpleParamsRequest> = { + encode(message: QuerySimpleParamsRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QuerySimpleParamsRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySimpleParamsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QuerySimpleParamsRequest { + return { mytypefield: isSet(object.mytypefield) ? 
globalThis.String(object.mytypefield) : "" }; + }, + + toJSON(message: QuerySimpleParamsRequest): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + return obj; + }, + + create<I extends Exact<DeepPartial<QuerySimpleParamsRequest>, I>>(base?: I): QuerySimpleParamsRequest { + return QuerySimpleParamsRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QuerySimpleParamsRequest>, I>>(object: I): QuerySimpleParamsRequest { + const message = createBaseQuerySimpleParamsRequest(); + message.mytypefield = object.mytypefield ?? ""; + return message; + }, +}; + +function createBaseQuerySimpleParamsResponse(): QuerySimpleParamsResponse { + return { bar: "" }; +} + +export const QuerySimpleParamsResponse: MessageFns<QuerySimpleParamsResponse> = { + encode(message: QuerySimpleParamsResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.bar !== "") { + writer.uint32(10).string(message.bar); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QuerySimpleParamsResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySimpleParamsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.bar = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QuerySimpleParamsResponse { + return { bar: isSet(object.bar) ? 
globalThis.String(object.bar) : "" }; + }, + + toJSON(message: QuerySimpleParamsResponse): unknown { + const obj: any = {}; + if (message.bar !== "") { + obj.bar = message.bar; + } + return obj; + }, + + create<I extends Exact<DeepPartial<QuerySimpleParamsResponse>, I>>(base?: I): QuerySimpleParamsResponse { + return QuerySimpleParamsResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QuerySimpleParamsResponse>, I>>(object: I): QuerySimpleParamsResponse { + const message = createBaseQuerySimpleParamsResponse(); + message.bar = object.bar ?? ""; + return message; + }, +}; + +function createBaseQueryWithPaginationRequest(): QueryWithPaginationRequest { + return { mytypefield: "", pagination: undefined }; +} + +export const QueryWithPaginationRequest: MessageFns<QueryWithPaginationRequest> = { + encode(message: QueryWithPaginationRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QueryWithPaginationRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryWithPaginationRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.pagination = PageRequest.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QueryWithPaginationRequest { + return { + mytypefield: isSet(object.mytypefield) ? globalThis.String(object.mytypefield) : "", + pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryWithPaginationRequest): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + if (message.pagination !== undefined) { + obj.pagination = PageRequest.toJSON(message.pagination); + } + return obj; + }, + + create<I extends Exact<DeepPartial<QueryWithPaginationRequest>, I>>(base?: I): QueryWithPaginationRequest { + return QueryWithPaginationRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QueryWithPaginationRequest>, I>>(object: I): QueryWithPaginationRequest { + const message = createBaseQueryWithPaginationRequest(); + message.mytypefield = object.mytypefield ?? ""; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryWithPaginationResponse(): QueryWithPaginationResponse { + return { pagination: undefined }; +} + +export const QueryWithPaginationResponse: MessageFns<QueryWithPaginationResponse> = { + encode(message: QueryWithPaginationResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QueryWithPaginationResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryWithPaginationResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.pagination = PageResponse.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QueryWithPaginationResponse { + return { pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryWithPaginationResponse): unknown { + const obj: any = {}; + if (message.pagination !== undefined) { + obj.pagination = PageResponse.toJSON(message.pagination); + } + return obj; + }, + + create<I extends Exact<DeepPartial<QueryWithPaginationResponse>, I>>(base?: I): QueryWithPaginationResponse { + return QueryWithPaginationResponse.fromPartial(base ?? 
({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QueryWithPaginationResponse>, I>>(object: I): QueryWithPaginationResponse { + const message = createBaseQueryWithPaginationResponse(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryWithQueryParamsRequest(): QueryWithQueryParamsRequest { + return { mytypefield: "", queryParam: "", mybool: false, myrepeatedbool: [] }; +} + +export const QueryWithQueryParamsRequest: MessageFns<QueryWithQueryParamsRequest> = { + encode(message: QueryWithQueryParamsRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + if (message.queryParam !== "") { + writer.uint32(18).string(message.queryParam); + } + if (message.mybool !== false) { + writer.uint32(24).bool(message.mybool); + } + writer.uint32(34).fork(); + for (const v of message.myrepeatedbool) { + writer.bool(v); + } + writer.join(); + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QueryWithQueryParamsRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryWithQueryParamsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.queryParam = reader.string(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.mybool = reader.bool(); + continue; + } + case 4: { + if (tag === 32) { + message.myrepeatedbool.push(reader.bool()); + + continue; + } + + if (tag === 34) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.myrepeatedbool.push(reader.bool()); + } + + continue; + } + + break; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QueryWithQueryParamsRequest { + return { + mytypefield: isSet(object.mytypefield) ? globalThis.String(object.mytypefield) : "", + queryParam: isSet(object.queryParam) + ? globalThis.String(object.queryParam) + : isSet(object.query_param) + ? globalThis.String(object.query_param) + : "", + mybool: isSet(object.mybool) ? globalThis.Boolean(object.mybool) : false, + myrepeatedbool: globalThis.Array.isArray(object?.myrepeatedbool) + ? object.myrepeatedbool.map((e: any) => globalThis.Boolean(e)) + : [], + }; + }, + + toJSON(message: QueryWithQueryParamsRequest): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + if (message.queryParam !== "") { + obj.queryParam = message.queryParam; + } + if (message.mybool !== false) { + obj.mybool = message.mybool; + } + if (message.myrepeatedbool?.length) { + obj.myrepeatedbool = message.myrepeatedbool; + } + return obj; + }, + + create<I extends Exact<DeepPartial<QueryWithQueryParamsRequest>, I>>(base?: I): QueryWithQueryParamsRequest { + return QueryWithQueryParamsRequest.fromPartial(base ?? 
({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QueryWithQueryParamsRequest>, I>>(object: I): QueryWithQueryParamsRequest { + const message = createBaseQueryWithQueryParamsRequest(); + message.mytypefield = object.mytypefield ?? ""; + message.queryParam = object.queryParam ?? ""; + message.mybool = object.mybool ?? false; + message.myrepeatedbool = object.myrepeatedbool?.map((e) => e) || []; + return message; + }, +}; + +function createBaseQueryWithQueryParamsResponse(): QueryWithQueryParamsResponse { + return { bar: "" }; +} + +export const QueryWithQueryParamsResponse: MessageFns<QueryWithQueryParamsResponse> = { + encode(message: QueryWithQueryParamsResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.bar !== "") { + writer.uint32(10).string(message.bar); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QueryWithQueryParamsResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryWithQueryParamsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.bar = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QueryWithQueryParamsResponse { + return { bar: isSet(object.bar) ? globalThis.String(object.bar) : "" }; + }, + + toJSON(message: QueryWithQueryParamsResponse): unknown { + const obj: any = {}; + if (message.bar !== "") { + obj.bar = message.bar; + } + return obj; + }, + + create<I extends Exact<DeepPartial<QueryWithQueryParamsResponse>, I>>(base?: I): QueryWithQueryParamsResponse { + return QueryWithQueryParamsResponse.fromPartial(base ?? 
({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QueryWithQueryParamsResponse>, I>>(object: I): QueryWithQueryParamsResponse { + const message = createBaseQueryWithQueryParamsResponse(); + message.bar = object.bar ?? ""; + return message; + }, +}; + +function createBaseQueryWithQueryParamsWithPaginationRequest(): QueryWithQueryParamsWithPaginationRequest { + return { mytypefield: "", queryParam: "", pagination: undefined }; +} + +export const QueryWithQueryParamsWithPaginationRequest: MessageFns<QueryWithQueryParamsWithPaginationRequest> = { + encode(message: QueryWithQueryParamsWithPaginationRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + if (message.queryParam !== "") { + writer.uint32(18).string(message.queryParam); + } + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QueryWithQueryParamsWithPaginationRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryWithQueryParamsWithPaginationRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.queryParam = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.pagination = PageRequest.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QueryWithQueryParamsWithPaginationRequest { + return { + mytypefield: isSet(object.mytypefield) ? 
globalThis.String(object.mytypefield) : "", + queryParam: isSet(object.queryParam) + ? globalThis.String(object.queryParam) + : isSet(object.query_param) + ? globalThis.String(object.query_param) + : "", + pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryWithQueryParamsWithPaginationRequest): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + if (message.queryParam !== "") { + obj.queryParam = message.queryParam; + } + if (message.pagination !== undefined) { + obj.pagination = PageRequest.toJSON(message.pagination); + } + return obj; + }, + + create<I extends Exact<DeepPartial<QueryWithQueryParamsWithPaginationRequest>, I>>( + base?: I, + ): QueryWithQueryParamsWithPaginationRequest { + return QueryWithQueryParamsWithPaginationRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QueryWithQueryParamsWithPaginationRequest>, I>>( + object: I, + ): QueryWithQueryParamsWithPaginationRequest { + const message = createBaseQueryWithQueryParamsWithPaginationRequest(); + message.mytypefield = object.mytypefield ?? ""; + message.queryParam = object.queryParam ?? ""; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryWithQueryParamsWithPaginationResponse(): QueryWithQueryParamsWithPaginationResponse { + return { bar: "", pagination: undefined }; +} + +export const QueryWithQueryParamsWithPaginationResponse: MessageFns<QueryWithQueryParamsWithPaginationResponse> = { + encode(message: QueryWithQueryParamsWithPaginationResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.bar !== "") { + writer.uint32(10).string(message.bar); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QueryWithQueryParamsWithPaginationResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryWithQueryParamsWithPaginationResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.bar = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.pagination = PageResponse.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QueryWithQueryParamsWithPaginationResponse { + return { + bar: isSet(object.bar) ? globalThis.String(object.bar) : "", + pagination: isSet(object.pagination) ? 
PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryWithQueryParamsWithPaginationResponse): unknown { + const obj: any = {}; + if (message.bar !== "") { + obj.bar = message.bar; + } + if (message.pagination !== undefined) { + obj.pagination = PageResponse.toJSON(message.pagination); + } + return obj; + }, + + create<I extends Exact<DeepPartial<QueryWithQueryParamsWithPaginationResponse>, I>>( + base?: I, + ): QueryWithQueryParamsWithPaginationResponse { + return QueryWithQueryParamsWithPaginationResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QueryWithQueryParamsWithPaginationResponse>, I>>( + object: I, + ): QueryWithQueryParamsWithPaginationResponse { + const message = createBaseQueryWithQueryParamsWithPaginationResponse(); + message.bar = object.bar ?? ""; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseAnotherType(): AnotherType { + return { mytypefield: "" }; +} + +export const AnotherType: MessageFns<AnotherType> = { + encode(message: AnotherType, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): AnotherType { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseAnotherType(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): AnotherType { + return { mytypefield: isSet(object.mytypefield) ? globalThis.String(object.mytypefield) : "" }; + }, + + toJSON(message: AnotherType): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + return obj; + }, + + create<I extends Exact<DeepPartial<AnotherType>, I>>(base?: I): AnotherType { + return AnotherType.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<AnotherType>, I>>(object: I): AnotherType { + const message = createBaseAnotherType(); + message.mytypefield = object.mytypefield ?? ""; + return message; + }, +}; + +export interface Msg { + MyMessage(request: MsgMyMessageRequest): Promise<MsgMyMessageResponse>; + Bar(request: MsgBarRequest): Promise<MsgBarResponse>; +} + +export const MsgServiceName = "ignite.planet.mars.Msg"; +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + private readonly service: string; + constructor(rpc: Rpc, opts?: { service?: string }) { + this.service = opts?.service || MsgServiceName; + this.rpc = rpc; + this.MyMessage = this.MyMessage.bind(this); + this.Bar = this.Bar.bind(this); + } + MyMessage(request: MsgMyMessageRequest): Promise<MsgMyMessageResponse> { + const data = MsgMyMessageRequest.encode(request).finish(); + const promise = this.rpc.request(this.service, "MyMessage", data); + return promise.then((data) => MsgMyMessageResponse.decode(new BinaryReader(data))); + } + + Bar(request: MsgBarRequest): Promise<MsgBarResponse> { + const data = MsgBarRequest.encode(request).finish(); + const promise = 
this.rpc.request(this.service, "Bar", data); + return promise.then((data) => MsgBarResponse.decode(new BinaryReader(data))); + } +} + +export interface Query { + QuerySimple(request: QuerySimpleRequest): Promise<QuerySimpleResponse>; + QuerySimpleParams(request: QuerySimpleParamsRequest): Promise<QuerySimpleParamsResponse>; + QueryParamsWithPagination(request: QueryWithPaginationRequest): Promise<QueryWithPaginationResponse>; + QueryWithQueryParams(request: QueryWithQueryParamsRequest): Promise<QueryWithQueryParamsResponse>; + QueryWithQueryParamsWithPagination( + request: QueryWithQueryParamsWithPaginationRequest, + ): Promise<QueryWithQueryParamsWithPaginationResponse>; +} + +export const QueryServiceName = "ignite.planet.mars.Query"; +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + private readonly service: string; + constructor(rpc: Rpc, opts?: { service?: string }) { + this.service = opts?.service || QueryServiceName; + this.rpc = rpc; + this.QuerySimple = this.QuerySimple.bind(this); + this.QuerySimpleParams = this.QuerySimpleParams.bind(this); + this.QueryParamsWithPagination = this.QueryParamsWithPagination.bind(this); + this.QueryWithQueryParams = this.QueryWithQueryParams.bind(this); + this.QueryWithQueryParamsWithPagination = this.QueryWithQueryParamsWithPagination.bind(this); + } + QuerySimple(request: QuerySimpleRequest): Promise<QuerySimpleResponse> { + const data = QuerySimpleRequest.encode(request).finish(); + const promise = this.rpc.request(this.service, "QuerySimple", data); + return promise.then((data) => QuerySimpleResponse.decode(new BinaryReader(data))); + } + + QuerySimpleParams(request: QuerySimpleParamsRequest): Promise<QuerySimpleParamsResponse> { + const data = QuerySimpleParamsRequest.encode(request).finish(); + const promise = this.rpc.request(this.service, "QuerySimpleParams", data); + return promise.then((data) => QuerySimpleParamsResponse.decode(new BinaryReader(data))); + } + + 
QueryParamsWithPagination(request: QueryWithPaginationRequest): Promise<QueryWithPaginationResponse> { + const data = QueryWithPaginationRequest.encode(request).finish(); + const promise = this.rpc.request(this.service, "QueryParamsWithPagination", data); + return promise.then((data) => QueryWithPaginationResponse.decode(new BinaryReader(data))); + } + + QueryWithQueryParams(request: QueryWithQueryParamsRequest): Promise<QueryWithQueryParamsResponse> { + const data = QueryWithQueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request(this.service, "QueryWithQueryParams", data); + return promise.then((data) => QueryWithQueryParamsResponse.decode(new BinaryReader(data))); + } + + QueryWithQueryParamsWithPagination( + request: QueryWithQueryParamsWithPaginationRequest, + ): Promise<QueryWithQueryParamsWithPaginationResponse> { + const data = QueryWithQueryParamsWithPaginationRequest.encode(request).finish(); + const promise = this.rpc.request(this.service, "QueryWithQueryParamsWithPagination", data); + return promise.then((data) => QueryWithQueryParamsWithPaginationResponse.decode(new BinaryReader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise<Uint8Array>; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial<T> = T extends Builtin ? T + : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>> + : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>> + : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> } + : Partial<T>; + +type KeysOfUnion<T> = T extends T ? keyof T : never; +export type Exact<P, I extends P> = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns<T> { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create<I extends Exact<DeepPartial<T>, I>>(base?: I): T; + fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T; +} diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/route-name.eta b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/route-name.eta new file mode 100644 index 0000000..291eb38 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/ignite.planet.mars/types/route-name.eta @@ -0,0 +1,45 @@ +<% +const { routeInfo, utils } = it; +const { + operationId, + method, + route, + moduleName, + responsesTypes, + description, + tags, + summary, + pathArgs, +} = routeInfo; +const { _, fmtToJSDocLine, require } = utils; + +const methodAliases = { + get: (pathName, hasPathInserts) => + _.camelCase(`${pathName}_${hasPathInserts ? "detail" : "list"}`), + post: (pathName, hasPathInserts) => _.camelCase(`${pathName}_create`), + put: (pathName, hasPathInserts) => _.camelCase(`${pathName}_update`), + patch: (pathName, hasPathInserts) => _.camelCase(`${pathName}_partial_update`), + delete: (pathName, hasPathInserts) => _.camelCase(`${pathName}_delete`), +}; + +const createCustomOperationId = (method, route, moduleName) => { + const hasPathInserts = /\{(\w){1,}\}/g.test(route); + const splitedRouteBySlash = _.compact(_.replace(route, /\{(\w){1,}\}/g, "").split("/")); + const routeParts = (splitedRouteBySlash.length > 1 + ? splitedRouteBySlash.splice(1) + : splitedRouteBySlash + ).join("_"); + return routeParts.length > 3 && methodAliases[method] + ? 
methodAliases[method](routeParts, hasPathInserts) + : _.camelCase(_.lowerCase(method) + "_" + [moduleName].join("_")) || "index"; +}; + +if (operationId) { + let routeName = operationId.replace('_',''); + return routeName[0].toLowerCase() + routeName.slice(1); +} +if (route === "/") + return _.camelCase(`${_.lowerCase(method)}Root`); + +return createCustomOperationId(method, route, moduleName); +%> \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/index.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/index.ts new file mode 100755 index 0000000..7c9a93f --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/index.ts @@ -0,0 +1,21 @@ +// Generated by Ignite ignite.com/cli +import { Registry } from '@cosmjs/proto-signing' +import { IgniteClient } from "./client"; +import { MissingWalletError } from "./helpers"; +import { IgntModule as IgnitePlanetMars, msgTypes as IgnitePlanetMarsMsgTypes } from './ignite.planet.mars' + + +const Client = IgniteClient.plugin([ + IgnitePlanetMars +]); + +const registry = new Registry([ + ...IgnitePlanetMarsMsgTypes, + +]) + +export { + Client, + registry, + MissingWalletError +} diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/modules.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/modules.ts new file mode 100755 index 0000000..49d2c8b --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/modules.ts @@ -0,0 +1,5 @@ +import { IgniteClient } from "./client"; +import { GeneratedType } from "@cosmjs/proto-signing"; + +export type IgntModuleInterface = { [key: string]: any } +export type IgntModule = (instance: IgniteClient) => { module: IgntModuleInterface, registry: [string, GeneratedType][] } diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/package.json b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/package.json new file mode 100755 index 0000000..726a5cb --- /dev/null +++ 
b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/package.json @@ -0,0 +1,39 @@ +{ + "name": "testdata-testchain-client-ts", + "version": "0.0.1", + "description": "Autogenerated Typescript Client", + "author": "Ignite Codegen <hello@ignite.com>", + "license": "Apache-2.0", + "licenses": [ + { + "type": "Apache-2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0" + } + ], + "main": "lib/index.js", + "publishConfig": { + "access": "public" + }, + "scripts": { + "build": "NODE_OPTIONS='--max-old-space-size=16384' tsc" + }, + "dependencies": { + "@cosmjs/proto-signing": "0.33.1", + "@cosmjs/stargate": "0.33.1", + "@keplr-wallet/types": "^0.12.234", + "axios": "1.9.0", + "buffer": "^6.0.3", + "events": "^3.3.0" + }, + "peerDependencies": { + "@cosmjs/proto-signing": "0.33.1", + "@cosmjs/stargate": "0.33.1" + }, + "devDependencies": { + "@bufbuild/protobuf": "^2.4.0", + "@types/events": "^3.0.3", + "qs": "^6.14.0", + "type-fest": "^4.41.0", + "typescript": "^5.8.3" + } +} diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/tsconfig.json b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/tsconfig.json new file mode 100755 index 0000000..6d679bb --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/tsconfig.json @@ -0,0 +1,13 @@ +{ + "compilerOptions": { + "target": "ES2020", + "module": "ES2020", + "moduleResolution": "node", + "outDir": "./lib", + "declaration": true, + "allowSyntheticDefaultImports": true, + "esModuleInterop": false, + "strict": false, + "skipLibCheck": true + } + } \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/types.d.ts b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/types.d.ts new file mode 100755 index 0000000..b839c08 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/expected_files/ts-client/types.d.ts @@ -0,0 +1,21 @@ +import { Keplr, Window as KeplrWindow } from '@keplr-wallet/types'; + +declare global { + interface 
KeplrIntereactionOptions { + readonly sign?: KeplrSignOptions; + } + + export interface KeplrSignOptions { + readonly preferNoSetFee?: boolean; + readonly preferNoSetMemo?: boolean; + readonly disableBalanceCheck?: boolean; + } + interface CustomKeplr extends Keplr { + enable(chainId: string | string[]): Promise<void>; + + defaultOptions: KeplrIntereactionOptions; + } + interface Window extends KeplrWindow { + keplr: CustomKeplr; + } +} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/testchain/app/app.go b/ignite/pkg/cosmosgen/testdata/testchain/app/app.go new file mode 100644 index 0000000..c426e06 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/app/app.go @@ -0,0 +1,12 @@ +package app + +import ( + "github.com/cosmos/cosmos-sdk/baseapp" + marskeeper "github.com/ignite/planet/x/mars/keeper" +) + +type Foo struct { + baseapp.BaseApp + + MarsKeeper marskeeper.Keeper +} diff --git a/ignite/pkg/cosmosgen/testdata/testchain/docs/static/openapi.json b/ignite/pkg/cosmosgen/testdata/testchain/docs/static/openapi.json new file mode 100644 index 0000000..a866102 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/docs/static/openapi.json @@ -0,0 +1 @@ +{"id":"go.mod","consumes":["application/json"],"produces":["application/json"],"swagger":"2.0","info":{"description":"Chain go.mod REST API","title":"HTTP API Console","contact":{"name":"go.mod"},"version":"version not set"},"paths":{"/ignite/mars/query_simple":{"get":{"tags":["Query"],"operationId":"GoModQuery_QuerySimple","responses":{"200":{"description":"A successful response.","schema":{"$ref":"#/definitions/ignite.planet.mars.QuerySimpleResponse"}},"default":{"description":"An unexpected error 
response.","schema":{"$ref":"#/definitions/google.rpc.Status"}}}}},"/ignite/mars/query_simple/{mytypefield}":{"get":{"tags":["Query"],"operationId":"GoModQuery_QuerySimpleParams","parameters":[{"type":"string","name":"mytypefield","in":"path","required":true}],"responses":{"200":{"description":"A successful response.","schema":{"$ref":"#/definitions/ignite.planet.mars.QuerySimpleParamsResponse"}},"default":{"description":"An unexpected error response.","schema":{"$ref":"#/definitions/google.rpc.Status"}}}}},"/ignite/mars/query_with_params/{mytypefield}":{"get":{"tags":["Query"],"operationId":"GoModQuery_QueryParamsWithPagination","parameters":[{"type":"string","name":"mytypefield","in":"path","required":true},{"type":"string","format":"byte","description":"key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. Only one of offset or key\nshould be set.","name":"pagination.key","in":"query"},{"type":"string","format":"uint64","description":"offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. Only one of offset or key should\nbe set.","name":"pagination.offset","in":"query"},{"type":"string","format":"uint64","description":"limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app.","name":"pagination.limit","in":"query"},{"type":"boolean","description":"count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. 
It is ignored when key\nis set.","name":"pagination.count_total","in":"query"},{"type":"boolean","description":"reverse is set to true if results are to be returned in the descending order.","name":"pagination.reverse","in":"query"}],"responses":{"200":{"description":"A successful response.","schema":{"$ref":"#/definitions/ignite.planet.mars.QueryWithPaginationResponse"}},"default":{"description":"An unexpected error response.","schema":{"$ref":"#/definitions/google.rpc.Status"}}}}},"/ignite/mars/query_with_query_params/{mytypefield}":{"get":{"tags":["Query"],"operationId":"GoModQuery_QueryWithQueryParamsWithPagination","parameters":[{"type":"string","name":"mytypefield","in":"path","required":true},{"type":"string","name":"query_param","in":"query"},{"type":"string","format":"byte","description":"key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. Only one of offset or key\nshould be set.","name":"pagination.key","in":"query"},{"type":"string","format":"uint64","description":"offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. Only one of offset or key should\nbe set.","name":"pagination.offset","in":"query"},{"type":"string","format":"uint64","description":"limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app.","name":"pagination.limit","in":"query"},{"type":"boolean","description":"count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. 
It is ignored when key\nis set.","name":"pagination.count_total","in":"query"},{"type":"boolean","description":"reverse is set to true if results are to be returned in the descending order.","name":"pagination.reverse","in":"query"}],"responses":{"200":{"description":"A successful response.","schema":{"$ref":"#/definitions/ignite.planet.mars.QueryWithQueryParamsWithPaginationResponse"}},"default":{"description":"An unexpected error response.","schema":{"$ref":"#/definitions/google.rpc.Status"}}}}},"/ignite/mars/query_with_query_params/{mytypefield}/{mybool}":{"get":{"tags":["Query"],"operationId":"GoModQuery_QueryWithQueryParams","parameters":[{"type":"string","name":"mytypefield","in":"path","required":true},{"type":"boolean","name":"mybool","in":"path","required":true},{"type":"string","name":"query_param","in":"query"},{"type":"array","items":{"type":"boolean"},"collectionFormat":"multi","name":"myrepeatedbool","in":"query"}],"responses":{"200":{"description":"A successful response.","schema":{"$ref":"#/definitions/ignite.planet.mars.QueryWithQueryParamsResponse"}},"default":{"description":"An unexpected error response.","schema":{"$ref":"#/definitions/google.rpc.Status"}}}}}},"definitions":{"cosmos.base.query.v1beta1.PageRequest":{"description":"message SomeRequest {\n Foo some_parameter = 1;\n PageRequest pagination = 2;\n }","type":"object","title":"PageRequest is to be embedded in gRPC request messages for efficient\npagination. Ex:","properties":{"count_total":{"description":"count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. It is ignored when key\nis set.","type":"boolean"},"key":{"description":"key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. 
Only one of offset or key\nshould be set.","type":"string","format":"byte"},"limit":{"description":"limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app.","type":"string","format":"uint64"},"offset":{"description":"offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. Only one of offset or key should\nbe set.","type":"string","format":"uint64"},"reverse":{"description":"reverse is set to true if results are to be returned in the descending order.","type":"boolean"}}},"cosmos.base.query.v1beta1.PageResponse":{"description":"PageResponse is to be embedded in gRPC response messages where the\ncorresponding request message has used PageRequest.\n\n message SomeResponse {\n repeated Bar results = 1;\n PageResponse page = 2;\n }","type":"object","properties":{"next_key":{"description":"next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently. 
It will be empty if\nthere are no more results.","type":"string","format":"byte"},"total":{"type":"string","format":"uint64","title":"total is total number of results available if PageRequest.count_total\nwas set, its value is undefined otherwise"}}},"google.protobuf.Any":{"type":"object","properties":{"@type":{"type":"string"}},"additionalProperties":{}},"google.rpc.Status":{"type":"object","properties":{"code":{"type":"integer","format":"int32"},"details":{"type":"array","items":{"type":"object","$ref":"#/definitions/google.protobuf.Any"}},"message":{"type":"string"}}},"ignite.planet.mars.MsgBarResponse":{"type":"object","properties":{"mytypefield":{"type":"string"}}},"ignite.planet.mars.MsgMyMessageResponse":{"type":"object","properties":{"mytypefield":{"type":"string"}}},"ignite.planet.mars.QuerySimpleParamsResponse":{"type":"object","properties":{"bar":{"type":"string"}}},"ignite.planet.mars.QuerySimpleResponse":{"type":"object","properties":{"bar":{"type":"string"}}},"ignite.planet.mars.QueryWithPaginationResponse":{"type":"object","properties":{"pagination":{"$ref":"#/definitions/cosmos.base.query.v1beta1.PageResponse"}}},"ignite.planet.mars.QueryWithQueryParamsResponse":{"type":"object","properties":{"bar":{"type":"string"}}},"ignite.planet.mars.QueryWithQueryParamsWithPaginationResponse":{"type":"object","properties":{"bar":{"type":"string"},"pagination":{"$ref":"#/definitions/cosmos.base.query.v1beta1.PageResponse"}}}},"tags":[{"name":"Msg"},{"name":"Query"}]} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/testchain/go.mod b/ignite/pkg/cosmosgen/testdata/testchain/go.mod new file mode 100644 index 0000000..3194883 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/go.mod @@ -0,0 +1,3 @@ +module github.com/ignite/planet + +go 1.16 diff --git a/ignite/pkg/cosmosgen/testdata/testchain/proto/buf.gen.swagger.yaml b/ignite/pkg/cosmosgen/testdata/testchain/proto/buf.gen.swagger.yaml new file mode 100644 index 0000000..373a48a --- 
/dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/proto/buf.gen.swagger.yaml @@ -0,0 +1,19 @@ +# This file is auto-generated by Ignite. You can edit +# the file content but do not change the file name or path. +# +# buf.gen.swagger.yaml +# +version: v2 +plugins: + - local: + [ + "go", + "tool", + "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2", + ] + out: . + opt: + - logtostderr=true + - openapi_naming_strategy=fqn + - json_names_for_fields=false + - generate_unbound_methods=false diff --git a/ignite/pkg/cosmosgen/testdata/testchain/proto/buf.gen.ts.yaml b/ignite/pkg/cosmosgen/testdata/testchain/proto/buf.gen.ts.yaml new file mode 100644 index 0000000..d60c27f --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/proto/buf.gen.ts.yaml @@ -0,0 +1,14 @@ +# This file should be identical to https://github.com/ignite/cli/blob/main/ignite/templates/app/files/%7B%7BprotoDir%7D%7D/buf.gen.ts.yaml +version: v2 +managed: + enabled: true +plugins: + - remote: buf.build/community/stephenh-ts-proto + out: . + opt: + - logtostderr=true + - allow_merge=true + - json_names_for_fields=false + - ts_proto_opt=snakeToCamel=true + - ts_proto_opt=esModuleInterop=true + - ts_proto_out=. diff --git a/ignite/pkg/cosmosgen/testdata/testchain/proto/buf.lock b/ignite/pkg/cosmosgen/testdata/testchain/proto/buf.lock new file mode 100644 index 0000000..d814726 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/proto/buf.lock @@ -0,0 +1,18 @@ +# Generated by buf. DO NOT EDIT. 
+version: v2 +deps: + - name: buf.build/cosmos/cosmos-proto + commit: 04467658e59e44bbb22fe568206e1f70 + digest: b5:8058c0aadbee8c9af67a9cefe86492c6c0b0bd5b4526b0ec820507b91fc9b0b5efbebca97331854576d2d279b0b3f5ed6a7abb0640cb640c4186532239c48fc4 + - name: buf.build/cosmos/cosmos-sdk + commit: 34ac2e8322d44db08830e553ad21b93c + digest: b5:381f54c53f533c6ff074a440a4635af5ac4041eb6533c8234b5395465a209b1ecd2722a004f198bcdde77346e0eb789e56213364bf28600619b86a314719ddfb + - name: buf.build/cosmos/gogo-proto + commit: 88ef6483f90f478fb938c37dde52ece3 + digest: b5:f0c69202c9bca9672dc72a9737ea9bc83744daaed2b3da77e3a95b0e53b86dee76b5a7405b993181d6c863fd64afaca0976a302f700d6c4912eb1692a1782c0a + - name: buf.build/googleapis/googleapis + commit: 61b203b9a9164be9a834f58c37be6f62 + digest: b5:7811a98b35bd2e4ae5c3ac73c8b3d9ae429f3a790da15de188dc98fc2b77d6bb10e45711f14903af9553fa9821dff256054f2e4b7795789265bc476bec2f088c + - name: buf.build/protocolbuffers/wellknowntypes + commit: 3ddd61d1f53d485abd3d3a2b47a62b8e + digest: b5:09e4405493fa16fef2af6b667fcaea9d2280ec44ed4943eddb96fb5a32daa1e8a353331dd4ef33b7df3783d17e912a703d57b73b236cd749d6a87ce83f60e2c9 diff --git a/ignite/pkg/cosmosgen/testdata/testchain/proto/buf.yaml b/ignite/pkg/cosmosgen/testdata/testchain/proto/buf.yaml new file mode 100644 index 0000000..4a88c4c --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/proto/buf.yaml @@ -0,0 +1,24 @@ +breaking: + except: + - EXTENSION_NO_DELETE + - FIELD_SAME_DEFAULT + use: + - FILE +deps: + - buf.build/cosmos/cosmos-sdk + - buf.build/googleapis/googleapis + - buf.build/protocolbuffers/wellknowntypes +lint: + disallow_comment_ignores: true + except: + - COMMENT_FIELD + - RPC_REQUEST_STANDARD_NAME + - RPC_RESPONSE_STANDARD_NAME + - SERVICE_SUFFIX + use: + - COMMENTS + - STANDARD + - FILE_LOWER_SNAKE_CASE +modules: + - path: . 
+version: v2 diff --git a/ignite/pkg/cosmosgen/testdata/testchain/proto/ignite/planet/mars/mars.proto b/ignite/pkg/cosmosgen/testdata/testchain/proto/ignite/planet/mars/mars.proto new file mode 100644 index 0000000..39b8c2d --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/proto/ignite/planet/mars/mars.proto @@ -0,0 +1,99 @@ +syntax = "proto3"; +package ignite.planet.mars; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "google/api/annotations.proto"; + +option go_package = "github.com/ignite/planet/x/mars/types"; + +service Msg { + rpc MyMessage(MsgMyMessageRequest) returns (MsgMyMessageResponse); + + rpc Bar(MsgBarRequest) returns (MsgBarResponse); +} + +message MsgMyMessageRequest { + string mytypefield = 1; +} + +message MsgMyMessageResponse { + string mytypefield = 1; +} + +message MsgBarRequest { + string mytypefield = 1; +} + +message MsgBarResponse { + string mytypefield = 1; +} + +service Query { + rpc QuerySimple(QuerySimpleRequest) returns (QuerySimpleResponse) { + option (google.api.http).get = "/ignite/mars/query_simple"; + } + + rpc QuerySimpleParams(QuerySimpleParamsRequest) returns (QuerySimpleParamsResponse) { + option (google.api.http).get = "/ignite/mars/query_simple/{mytypefield}"; + } + + rpc QueryParamsWithPagination(QueryWithPaginationRequest) returns (QueryWithPaginationResponse) { + option (google.api.http).get = "/ignite/mars/query_with_params/{mytypefield}"; + } + + rpc QueryWithQueryParams(QueryWithQueryParamsRequest) returns (QueryWithQueryParamsResponse) { + option (google.api.http).get = "/ignite/mars/query_with_query_params/{mytypefield}/{mybool}"; + } + + rpc QueryWithQueryParamsWithPagination(QueryWithQueryParamsWithPaginationRequest) returns (QueryWithQueryParamsWithPaginationResponse) { + option (google.api.http).get = "/ignite/mars/query_with_query_params/{mytypefield}"; + } +} + +message QuerySimpleRequest {} + +message QuerySimpleResponse { + string bar = 1; +} + +message QuerySimpleParamsRequest { + 
string mytypefield = 1; +} + +message QuerySimpleParamsResponse { + string bar = 1; +} + +message QueryWithPaginationRequest { + string mytypefield = 1; + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +message QueryWithPaginationResponse { + cosmos.base.query.v1beta1.PageResponse pagination = 1; +} + +message QueryWithQueryParamsRequest { + string mytypefield = 1; + string query_param = 2; + bool mybool = 3; + repeated bool myrepeatedbool = 4; +} + +message QueryWithQueryParamsResponse { + string bar = 1; +} + +message QueryWithQueryParamsWithPaginationRequest { + string mytypefield = 1; + string query_param = 2; + cosmos.base.query.v1beta1.PageRequest pagination = 3; +} + +message QueryWithQueryParamsWithPaginationResponse { + string bar = 1; + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +message AnotherType { + string mytypefield = 1; +} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/client.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/client.ts new file mode 100755 index 0000000..3c30aae --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/client.ts @@ -0,0 +1,164 @@ +/// <reference path="./types.d.ts" /> +import { + GeneratedType, + OfflineSigner, + EncodeObject, + Registry, +} from "@cosmjs/proto-signing"; +import { SigningStargateClient, StdFee } from "@cosmjs/stargate"; +import { Env } from "./env"; +import { UnionToIntersection, Return, Constructor } from "./helpers"; +import { IgntModule } from "./modules"; +import { EventEmitter } from "events"; +import { ChainInfo } from "@keplr-wallet/types"; + +const defaultFee = { + amount: [], + gas: "200000", +}; + +export class IgniteClient extends EventEmitter { + static plugins: IgntModule[] = []; + env: Env; + signer?: OfflineSigner; + registry: Array<[string, GeneratedType]> = []; + static plugin<T extends IgntModule | IgntModule[]>(plugin: T) { + const currentPlugins = this.plugins; + + class AugmentedClient 
extends this { + static plugins = currentPlugins.concat(plugin); + } + + if (Array.isArray(plugin)) { + type Extension = UnionToIntersection<Return<T>['module']> + return AugmentedClient as typeof IgniteClient & Constructor<Extension>; + } + + type Extension = Return<T>['module'] + return AugmentedClient as typeof IgniteClient & Constructor<Extension>; + } + + async signAndBroadcast(msgs: EncodeObject[], fee: StdFee, memo: string) { + if (this.signer) { + const { address } = (await this.signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(this.env.rpcURL, this.signer, { registry: new Registry(this.registry) }); + return await signingClient.signAndBroadcast(address, msgs, fee ? fee : defaultFee, memo) + } else { + throw new Error(" Signer is not present."); + } + } + + constructor(env: Env, signer?: OfflineSigner) { + super(); + this.env = env; + this.setMaxListeners(0); + this.signer = signer; + const classConstructor = this.constructor as typeof IgniteClient; + classConstructor.plugins.forEach(plugin => { + const pluginInstance = plugin(this); + Object.assign(this, pluginInstance.module) + if (this.registry) { + this.registry = this.registry.concat(pluginInstance.registry) + } + }); + } + useSigner(signer: OfflineSigner) { + this.signer = signer; + this.emit("signer-changed", this.signer); + } + removeSigner() { + this.signer = undefined; + this.emit("signer-changed", this.signer); + } + async useKeplr(keplrChainInfo: Partial<ChainInfo> = {}) { + // Using queryClients directly because BaseClient has no knowledge of the modules at this stage + try { + const queryClient = ( + await import("./cosmos.base.tendermint.v1beta1/module") + ).queryClient; + const bankQueryClient = (await import("./cosmos.bank.v1beta1/module")) + .queryClient; + const stakingQueryClient = (await import("./cosmos.staking.v1beta1/module")).queryClient; + const stakingqc = stakingQueryClient({ addr: this.env.apiURL }); + const staking = await (await 
stakingqc.queryParams()).data; + const qc = queryClient({ addr: this.env.apiURL }); + const node_info = await (await qc.serviceGetNodeInfo()).data; + const chainId = node_info.default_node_info?.network ?? ""; + const chainName = chainId?.toUpperCase() + " Network"; + const bankqc = bankQueryClient({ addr: this.env.apiURL }); + const tokens = await (await bankqc.queryTotalSupply()).data; + const addrPrefix = this.env.prefix ?? "cosmos"; + const rpc = this.env.rpcURL; + const rest = this.env.apiURL; + + let bip44 = { + coinType: 118, + }; + + let bech32Config = { + bech32PrefixAccAddr: addrPrefix, + bech32PrefixAccPub: addrPrefix + "pub", + bech32PrefixValAddr: addrPrefix + "valoper", + bech32PrefixValPub: addrPrefix + "valoperpub", + bech32PrefixConsAddr: addrPrefix + "valcons", + bech32PrefixConsPub: addrPrefix + "valconspub", + }; + + let currencies = + tokens.supply?.map((x) => { + const y = { + coinDenom: x.denom?.toUpperCase() ?? "", + coinMinimalDenom: x.denom ?? "", + coinDecimals: 0, + }; + return y; + }) ?? []; + + let stakeCurrency = { + coinDenom: staking.params?.bond_denom?.toUpperCase() ?? "", + coinMinimalDenom: staking.params?.bond_denom ?? "", + coinDecimals: 0, + }; + + let feeCurrencies = + tokens.supply?.map((x) => { + const y = { + coinDenom: x.denom?.toUpperCase() ?? "", + coinMinimalDenom: x.denom ?? "", + coinDecimals: 0, + }; + return y; + }) ?? []; + + if (chainId) { + const suggestOptions: ChainInfo = { + chainId, + chainName, + rpc, + rest, + stakeCurrency, + bip44, + bech32Config, + currencies, + feeCurrencies, + ...keplrChainInfo, + }; + await window.keplr.experimentalSuggestChain(suggestOptions); + + window.keplr.defaultOptions = { + sign: { + preferNoSetFee: true, + preferNoSetMemo: true, + }, + }; + } + await window.keplr.enable(chainId); + this.signer = window.keplr.getOfflineSigner(chainId); + this.emit("signer-changed", this.signer); + } catch (e) { + throw new Error( + "Could not load tendermint, staking and bank modules. 
Please ensure your client loads them to use useKeplr()" + ); + } + } +} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/env.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/env.ts new file mode 100755 index 0000000..dbd876a --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/env.ts @@ -0,0 +1,7 @@ +import { OfflineSigner } from "@cosmjs/proto-signing"; + +export interface Env { + apiURL: string + rpcURL: string + prefix?: string +} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/helpers.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/helpers.ts new file mode 100755 index 0000000..80e1ecf --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/helpers.ts @@ -0,0 +1,32 @@ +export type Constructor<T> = new (...args: any[]) => T; + +export type AnyFunction = (...args: any) => any; + +export type UnionToIntersection<Union> = + (Union extends any + ? (argument: Union) => void + : never + ) extends (argument: infer Intersection) => void + ? Intersection + : never; + +export type Return<T> = + T extends AnyFunction + ? ReturnType<T> + : T extends AnyFunction[] + ? 
UnionToIntersection<ReturnType<T[number]>> + : never + + +export const MissingWalletError = new Error("wallet is required"); + +export function getStructure(template) { + let structure = { fields: [] as Array<unknown>} + for (const [key, value] of Object.entries(template)) { + let field: any = {} + field.name = key + field.type = typeof value + structure.fields.push(field) + } + return structure +} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/index.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/index.ts new file mode 100755 index 0000000..4705917 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/index.ts @@ -0,0 +1,6 @@ +import IgntModule from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { IgntModule, msgTypes, txClient, queryClient, registry }; diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/module.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/module.ts new file mode 100755 index 0000000..87bf764 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/module.ts @@ -0,0 +1,163 @@ +// Generated by Ignite ignite.com/cli + +import { SigningStargateClient, DeliverTxResponse, StdFee } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; +import { MsgMyMessageRequest } from "./types/ignite/planet/mars/mars"; +import { MsgBarRequest } from "./types/ignite/planet/mars/mars"; + +import { AnotherType as typeAnotherType} from "./types" + +export { MsgMyMessageRequest, MsgBarRequest }; + +type sendMsgMyMessageRequestParams = { + 
value: MsgMyMessageRequest, + fee?: StdFee, + memo?: string +}; + +type sendMsgBarRequestParams = { + value: MsgBarRequest, + fee?: StdFee, + memo?: string +}; + + +type msgMyMessageRequestParams = { + value: MsgMyMessageRequest, +}; + +type msgBarRequestParams = { + value: MsgBarRequest, +}; + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + async sendMsgMyMessageRequest({ value, fee, memo }: sendMsgMyMessageRequestParams): Promise<DeliverTxResponse> { + if (!signer) { + throw new Error('TxClient:sendMsgMyMessageRequest: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry}); + let msg = this.msgMyMessageRequest({ value: MsgMyMessageRequest.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgMyMessageRequest: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgBarRequest({ value, fee, memo }: sendMsgBarRequestParams): Promise<DeliverTxResponse> { + if (!signer) { + throw new Error('TxClient:sendMsgBarRequest: Unable to sign Tx. 
Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry}); + let msg = this.msgBarRequest({ value: MsgBarRequest.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgBarRequest: Could not broadcast Tx: '+ e.message) + } + }, + + + msgMyMessageRequest({ value }: msgMyMessageRequestParams): EncodeObject { + try { + return { typeUrl: "/ignite.planet.mars.MsgMyMessageRequest", value: MsgMyMessageRequest.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgMyMessageRequest: Could not create message: ' + e.message) + } + }, + + msgBarRequest({ value }: msgBarRequestParams): EncodeObject { + try { + return { typeUrl: "/ignite.planet.mars.MsgBarRequest", value: MsgBarRequest.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgBarRequest: Could not create message: ' + e.message) + } + }, + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType<typeof queryClient>; + public tx: ReturnType<typeof txClient>; + public structure: Record<string,unknown>; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + AnotherType: getStructure(typeAnotherType.fromPartial({})), + + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? 
"cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const IgntModule = (test: IgniteClient) => { + return { + module: { + IgnitePlanetMars: new SDKModule(test) + }, + registry: msgTypes + } +} +export default IgntModule; \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/registry.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/registry.ts new file mode 100755 index 0000000..32e45de --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/registry.ts @@ -0,0 +1,11 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; +import { MsgMyMessageRequest } from "./types/ignite/planet/mars/mars"; +import { MsgBarRequest } from "./types/ignite/planet/mars/mars"; + +const msgTypes: Array<[string, GeneratedType]> = [ + ["/ignite.planet.mars.MsgMyMessageRequest", MsgMyMessageRequest], + ["/ignite.planet.mars.MsgBarRequest", MsgBarRequest], + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/rest.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/rest.ts new file mode 100755 index 0000000..da5286d --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/rest.ts @@ -0,0 +1,298 @@ +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; +import { QuerySimpleResponse } from "./types/ignite/planet/mars/mars"; +import { QuerySimpleParamsResponse } from "./types/ignite/planet/mars/mars"; +import { QueryWithPaginationResponse } from "./types/ignite/planet/mars/mars"; +import { QueryWithQueryParamsResponse } from "./types/ignite/planet/mars/mars"; +import { QueryWithQueryParamsWithPaginationResponse } from "./types/ignite/planet/mars/mars"; + +import { QuerySimpleRequest } from "./types/ignite/planet/mars/mars"; +import { 
QuerySimpleParamsRequest } from "./types/ignite/planet/mars/mars"; +import { QueryWithPaginationRequest } from "./types/ignite/planet/mars/mars"; +import { QueryWithQueryParamsRequest } from "./types/ignite/planet/mars/mars"; +import { QueryWithQueryParamsWithPaginationRequest } from "./types/ignite/planet/mars/mars"; + + +import type {SnakeCasedPropertiesDeep} from 'type-fest'; + +export type QueryParamsType = Record<string | number, any>; + +export type FlattenObject<TValue> = CollapseEntries<CreateObjectEntries<TValue, TValue>>; + +type Entry = { key: string; value: unknown }; +type EmptyEntry<TValue> = { key: ''; value: TValue }; +type ExcludedTypes = Date | Set<unknown> | Map<unknown, unknown>; +type ArrayEncoder = `[${bigint}]`; + +type EscapeArrayKey<TKey extends string> = TKey extends `${infer TKeyBefore}.${ArrayEncoder}${infer TKeyAfter}` + ? EscapeArrayKey<`${TKeyBefore}${ArrayEncoder}${TKeyAfter}`> + : TKey; + +// Transforms entries to one flattened type +type CollapseEntries<TEntry extends Entry> = { + [E in TEntry as EscapeArrayKey<E['key']>]: E['value']; +}; + +// Transforms array type to object +type CreateArrayEntry<TValue, TValueInitial> = OmitItself< + TValue extends unknown[] ? { [k: ArrayEncoder]: TValue[number] } : TValue, + TValueInitial +>; + +// Omit the type that references itself +type OmitItself<TValue, TValueInitial> = TValue extends TValueInitial + ? EmptyEntry<TValue> + : OmitExcludedTypes<TValue, TValueInitial>; + +// Omit the type that is listed in ExcludedTypes union +type OmitExcludedTypes<TValue, TValueInitial> = TValue extends ExcludedTypes + ? EmptyEntry<TValue> + : CreateObjectEntries<TValue, TValueInitial>; + +type CreateObjectEntries<TValue, TValueInitial> = TValue extends object + ? { + // Checks that Key is of type string + [TKey in keyof TValue]-?: TKey extends string + ? // Nested key can be an object, run recursively to the bottom + CreateArrayEntry<TValue[TKey], TValueInitial> extends infer TNestedValue + ? 
TNestedValue extends Entry + ? TNestedValue['key'] extends '' + ? { + key: TKey; + value: TNestedValue['value']; + } + : + | { + key: `${TKey}.${TNestedValue['key']}`; + value: TNestedValue['value']; + } + | { + key: TKey; + value: TValue[TKey]; + } + : never + : never + : never; + }[keyof TValue] // Builds entry for each key + : EmptyEntry<TValue>; + +export type ChangeProtoToJSPrimitives<T extends object> = { + [key in keyof T]: T[key] extends Uint8Array | Date ? string : T[key] extends object ? ChangeProtoToJSPrimitives<T[key]>: T[key]; + // ^^^^ This line is used to convert Uint8Array to string, if you want to keep Uint8Array as is, you can remove this line +} + +export interface FullRequestParams extends Omit<AxiosRequestConfig, "data" | "params" | "url" | "responseType"> { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. 
response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit<FullRequestParams, "body" | "method" | "query" | "path">; + +export interface ApiConfig<SecurityDataType = unknown> extends Omit<AxiosRequestConfig, "data" | "cancelToken"> { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise<AxiosRequestConfig | void> | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient<SecurityDataType = unknown> { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig<SecurityDataType>["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig<SecurityDataType> = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers ), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + } as AxiosRequestConfig; + } + + private createFormData(input: Record<string, unknown>): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? 
JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async <T = any>({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise<AxiosResponse<T>> => { + const secureParams = + ((typeof secure === "boolean" ? secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record<string, unknown>); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? { "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title ignite.planet.mars + */ +export class Api<SecurityDataType extends unknown> extends HttpClient<SecurityDataType> { + /** + * QueryQuerySimple + * + * @tags Query + * @name queryQuerySimple + * @request GET:/ignite/mars/query_simple + */ + queryQuerySimple = ( + query?: Record<string, any>, + params: RequestParams = {}, + ) => + this.request<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<QuerySimpleResponse>>>({ + path: `/ignite/mars/query_simple`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * QueryQuerySimpleParams + * + * @tags Query + * @name queryQuerySimpleParams + * @request GET:/ignite/mars/query_simple/{mytypefield} + */ + queryQuerySimpleParams = (mytypefield: string, + query?: Record<string, any>, + params: RequestParams = {}, + ) => + 
this.request<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<QuerySimpleParamsResponse>>>({ + path: `/ignite/mars/query_simple/${mytypefield}`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * QueryQueryParamsWithPagination + * + * @tags Query + * @name queryQueryParamsWithPagination + * @request GET:/ignite/mars/query_with_params/{mytypefield} + */ + queryQueryParamsWithPagination = (mytypefield: string, + query?: Omit<FlattenObject<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<QueryWithPaginationRequest>>>,"mytypefield">, + params: RequestParams = {}, + ) => + this.request<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<QueryWithPaginationResponse>>>({ + path: `/ignite/mars/query_with_params/${mytypefield}`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * QueryQueryWithQueryParams + * + * @tags Query + * @name queryQueryWithQueryParams + * @request GET:/ignite/mars/query_with_query_params/{mytypefield}/{mybool} + */ + queryQueryWithQueryParams = (mytypefield: string, mybool: string, + query?: Omit<FlattenObject<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<QueryWithQueryParamsRequest>>>,"mytypefield" | "mybool">, + params: RequestParams = {}, + ) => + this.request<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<QueryWithQueryParamsResponse>>>({ + path: `/ignite/mars/query_with_query_params/${mytypefield}/${mybool}`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * QueryQueryWithQueryParamsWithPagination + * + * @tags Query + * @name queryQueryWithQueryParamsWithPagination + * @request GET:/ignite/mars/query_with_query_params/{mytypefield} + */ + queryQueryWithQueryParamsWithPagination = (mytypefield: string, + query?: Omit<FlattenObject<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<QueryWithQueryParamsWithPaginationRequest>>>,"mytypefield">, + params: RequestParams = {}, + ) => + 
this.request<SnakeCasedPropertiesDeep<ChangeProtoToJSPrimitives<QueryWithQueryParamsWithPaginationResponse>>>({ + path: `/ignite/mars/query_with_query_params/${mytypefield}`, + method: "GET", + query: query, + format: "json", + ...params, + }); + +} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types.ts new file mode 100755 index 0000000..d4c7883 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types.ts @@ -0,0 +1,7 @@ +import { AnotherType } from "./types/ignite/planet/mars/mars" + + +export { + AnotherType, + + } \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/cosmos/base/query/v1beta1/pagination.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/cosmos/base/query/v1beta1/pagination.ts new file mode 100644 index 0000000..9766c4e --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/cosmos/base/query/v1beta1/pagination.ts @@ -0,0 +1,340 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.11.2 +// protoc unknown +// source: cosmos/base/query/v1beta1/pagination.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "cosmos.base.query.v1beta1"; + +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. 
+ */ + key: Uint8Array; + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + */ + offset: number; + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + */ + limit: number; + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + countTotal: boolean; + /** reverse is set to true if results are to be returned in the descending order. */ + reverse: boolean; +} + +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. 
+ */ + nextKey: Uint8Array; + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + */ + total: number; +} + +function createBasePageRequest(): PageRequest { + return { key: new Uint8Array(0), offset: 0, limit: 0, countTotal: false, reverse: false }; +} + +export const PageRequest: MessageFns<PageRequest> = { + encode(message: PageRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + if (message.offset !== 0) { + writer.uint32(16).uint64(message.offset); + } + if (message.limit !== 0) { + writer.uint32(24).uint64(message.limit); + } + if (message.countTotal !== false) { + writer.uint32(32).bool(message.countTotal); + } + if (message.reverse !== false) { + writer.uint32(40).bool(message.reverse); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PageRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.bytes(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.offset = longToNumber(reader.uint64()); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.limit = longToNumber(reader.uint64()); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.countTotal = reader.bool(); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.reverse = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): PageRequest { + return { + key: isSet(object.key) ? 
bytesFromBase64(object.key) : new Uint8Array(0), + offset: isSet(object.offset) ? globalThis.Number(object.offset) : 0, + limit: isSet(object.limit) ? globalThis.Number(object.limit) : 0, + countTotal: isSet(object.countTotal) + ? globalThis.Boolean(object.countTotal) + : isSet(object.count_total) + ? globalThis.Boolean(object.count_total) + : false, + reverse: isSet(object.reverse) ? globalThis.Boolean(object.reverse) : false, + }; + }, + + toJSON(message: PageRequest): unknown { + const obj: any = {}; + if (message.key.length !== 0) { + obj.key = base64FromBytes(message.key); + } + if (message.offset !== 0) { + obj.offset = Math.round(message.offset); + } + if (message.limit !== 0) { + obj.limit = Math.round(message.limit); + } + if (message.countTotal !== false) { + obj.countTotal = message.countTotal; + } + if (message.reverse !== false) { + obj.reverse = message.reverse; + } + return obj; + }, + + create<I extends Exact<DeepPartial<PageRequest>, I>>(base?: I): PageRequest { + return PageRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<PageRequest>, I>>(object: I): PageRequest { + const message = createBasePageRequest(); + message.key = object.key ?? new Uint8Array(0); + message.offset = object.offset ?? 0; + message.limit = object.limit ?? 0; + message.countTotal = object.countTotal ?? false; + message.reverse = object.reverse ?? false; + return message; + }, +}; + +function createBasePageResponse(): PageResponse { + return { nextKey: new Uint8Array(0), total: 0 }; +} + +export const PageResponse: MessageFns<PageResponse> = { + encode(message: PageResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.nextKey.length !== 0) { + writer.uint32(10).bytes(message.nextKey); + } + if (message.total !== 0) { + writer.uint32(16).uint64(message.total); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PageResponse { + const reader = input instanceof BinaryReader ? 
input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.nextKey = reader.bytes(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.total = longToNumber(reader.uint64()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): PageResponse { + return { + nextKey: isSet(object.nextKey) + ? bytesFromBase64(object.nextKey) + : isSet(object.next_key) + ? bytesFromBase64(object.next_key) + : new Uint8Array(0), + total: isSet(object.total) ? globalThis.Number(object.total) : 0, + }; + }, + + toJSON(message: PageResponse): unknown { + const obj: any = {}; + if (message.nextKey.length !== 0) { + obj.nextKey = base64FromBytes(message.nextKey); + } + if (message.total !== 0) { + obj.total = Math.round(message.total); + } + return obj; + }, + + create<I extends Exact<DeepPartial<PageResponse>, I>>(base?: I): PageResponse { + return PageResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<PageResponse>, I>>(object: I): PageResponse { + const message = createBasePageResponse(); + message.nextKey = object.nextKey ?? new Uint8Array(0); + message.total = object.total ?? 
0; + return message; + }, +}; + +function bytesFromBase64(b64: string): Uint8Array { + if ((globalThis as any).Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if ((globalThis as any).Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(globalThis.String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial<T> = T extends Builtin ? T + : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>> + : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>> + : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> } + : Partial<T>; + +type KeysOfUnion<T> = T extends T ? keyof T : never; +export type Exact<P, I extends P> = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never }; + +function longToNumber(int64: { toString(): string }): number { + const num = globalThis.Number(int64.toString()); + if (num > globalThis.Number.MAX_SAFE_INTEGER) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + if (num < globalThis.Number.MIN_SAFE_INTEGER) { + throw new globalThis.Error("Value is smaller than Number.MIN_SAFE_INTEGER"); + } + return num; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns<T> { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create<I extends Exact<DeepPartial<T>, I>>(base?: I): T; + fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T; +} diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/cosmos_proto/cosmos.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/cosmos_proto/cosmos.ts new file mode 100644 index 0000000..3d799ec --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/cosmos_proto/cosmos.ts @@ -0,0 +1,309 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.11.2 +// protoc unknown +// source: cosmos_proto/cosmos.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. 
+ * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. + */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor: MessageFns<InterfaceDescriptor> = { + encode(message: InterfaceDescriptor, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.description = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + description: isSet(object.description) ? globalThis.String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.description !== "") { + obj.description = message.description; + } + return obj; + }, + + create<I extends Exact<DeepPartial<InterfaceDescriptor>, I>>(base?: I): InterfaceDescriptor { + return InterfaceDescriptor.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<InterfaceDescriptor>, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? 
""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor: MessageFns<ScalarDescriptor> = { + encode(message: ScalarDescriptor, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.join(); + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.description = reader.string(); + continue; + } + case 3: { + if (tag === 24) { + message.fieldType.push(reader.int32() as any); + + continue; + } + + if (tag === 26) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + + continue; + } + + break; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + description: isSet(object.description) ? globalThis.String(object.description) : "", + fieldType: globalThis.Array.isArray(object?.fieldType) + ? object.fieldType.map((e: any) => scalarTypeFromJSON(e)) + : globalThis.Array.isArray(object?.field_type) + ? 
object.field_type.map((e: any) => scalarTypeFromJSON(e)) + : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.description !== "") { + obj.description = message.description; + } + if (message.fieldType?.length) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<ScalarDescriptor>, I>>(base?: I): ScalarDescriptor { + return ScalarDescriptor.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<ScalarDescriptor>, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial<T> = T extends Builtin ? T + : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>> + : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>> + : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> } + : Partial<T>; + +type KeysOfUnion<T> = T extends T ? keyof T : never; +export type Exact<P, I extends P> = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns<T> { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create<I extends Exact<DeepPartial<T>, I>>(base?: I): T; + fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T; +} diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/google/api/annotations.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/google/api/annotations.ts new file mode 100644 index 0000000..a205c7c --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/google/api/annotations.ts @@ -0,0 +1,9 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.11.2 +// protoc unknown +// source: google/api/annotations.proto + +/* eslint-disable */ + +export const protobufPackage = "google.api"; diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/google/api/http.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/google/api/http.ts new file mode 100644 index 0000000..2c3ad9d --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/google/api/http.ts @@ -0,0 +1,778 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.11.2 +// protoc unknown +// source: google/api/http.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. 
It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. + */ + rules: HttpRule[]; + /** + * When set to true, URL path parameters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. + */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * gRPC Transcoding + * + * gRPC Transcoding is a feature for mapping between a gRPC method and one or + * more HTTP REST endpoints. It allows developers to build a single API service + * that supports both gRPC APIs and REST APIs. Many systems, including [Google + * APIs](https://github.com/googleapis/googleapis), + * [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC + * Gateway](https://github.com/grpc-ecosystem/grpc-gateway), + * and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature + * and use it for large scale production services. + * + * `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies + * how different portions of the gRPC request message are mapped to the URL + * path, URL query parameters, and HTTP request body. It also controls how the + * gRPC response message is mapped to the HTTP response body. `HttpRule` is + * typically specified as an `google.api.http` annotation on the gRPC method. + * + * Each mapping specifies a URL path template and an HTTP method. The path + * template may refer to one or more fields in the gRPC request message, as long + * as each field is a non-repeated field with a primitive (non-message) type. 
+ * The path template controls how fields of the request message are mapped to + * the URL path. + * + * Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/{name=messages/*}" + * }; + * } + * } + * message GetMessageRequest { + * string name = 1; // Mapped to URL path. + * } + * message Message { + * string text = 1; // The resource content. + * } + * + * This enables an HTTP REST to gRPC mapping as below: + * + * - HTTP: `GET /v1/messages/123456` + * - gRPC: `GetMessage(name: "messages/123456")` + * + * Any fields in the request message which are not bound by the path template + * automatically become HTTP query parameters if there is no HTTP request body. + * For example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get:"/v1/messages/{message_id}" + * }; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // Mapped to URL path. + * int64 revision = 2; // Mapped to URL query parameter `revision`. + * SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * - HTTP: `GET /v1/messages/123456?revision=2&sub.subfield=foo` + * - gRPC: `GetMessage(message_id: "123456" revision: 2 sub: + * SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to URL query parameters must have a + * primitive type or a repeated primitive type or a non-repeated message type. + * In the case of a repeated type, the parameter can be repeated in the URL + * as `...?param=A¶m=B`. In the case of a message type, each field of the + * message is mapped to a separate parameter, such as + * `...?foo.a=A&foo.b=B&foo.c=C`. + * + * For HTTP methods that allow a request body, the `body` field + * specifies the mapping. 
Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * - HTTP: `PATCH /v1/messages/123456 { "text": "Hi!" }` + * - gRPC: `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * - HTTP: `PATCH /v1/messages/123456 { "text": "Hi!" }` + * - gRPC: `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice when + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. 
Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC mappings: + * + * - HTTP: `GET /v1/messages/123456` + * - gRPC: `GetMessage(message_id: "123456")` + * + * - HTTP: `GET /v1/users/me/messages/123456` + * - gRPC: `GetMessage(user_id: "me" message_id: "123456")` + * + * Rules for HTTP mapping + * + * 1. Leaf request fields (recursive expansion nested messages in the request + * message) are classified into three categories: + * - Fields referred by the path template. They are passed via the URL path. + * - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They + * are passed via the HTTP + * request body. + * - All other fields are passed via the URL query parameters, and the + * parameter name is the field path in the request message. A repeated + * field can be represented as multiple query parameters under the same + * name. + * 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL + * query parameter, all fields + * are passed via URL path and HTTP request body. + * 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP + * request body, all + * fields are passed via URL path and URL query parameters. + * + * Path template syntax + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single URL path segment. 
The syntax `**` matches + * zero or more URL path segments, which must be the last part of the URL path + * except the `Verb`. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` + * contains any reserved character, such characters should be percent-encoded + * before the matching. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path on the client + * side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The + * server side does the reverse decoding. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{var}`. + * + * If a variable contains multiple path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path on the + * client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. + * The server side does the reverse decoding, except "%2F" and "%2f" are left + * unchanged. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{+var}`. + * + * Using gRPC API Service Configuration + * + * gRPC API Service Configuration (service config) is a configuration language + * for configuring a gRPC service to become a user-facing product. The + * service config is simply the YAML representation of the `google.api.Service` + * proto message. + * + * As an alternative to annotating your proto file, you can configure gRPC + * transcoding in your service config YAML files. 
You do this by specifying a + * `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same + * effect as the proto annotation. This can be particularly useful if you + * have a proto that is reused in multiple services. Note that any transcoding + * specified in the service config will override any matching transcoding + * configuration in the proto. + * + * The following example selects a gRPC method and applies an `HttpRule` to it: + * + * http: + * rules: + * - selector: example.v1.Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * Special notes + * + * When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the + * proto to JSON conversion must follow the [proto3 + * specification](https://developers.google.com/protocol-buffers/docs/proto3#json). + * + * While the single segment variable follows the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String + * Expansion, the multi segment variable **does not** follow RFC 6570 Section + * 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. As the result, gRPC Transcoding uses a custom encoding + * for multi segment variables. + * + * The path variables **must not** refer to any repeated or mapped field, + * because client libraries are not capable of handling such variable expansion. + * + * The path variables **must not** capture the leading "/" character. The reason + * is that the most common use case "{var}" does not capture the leading "/" + * character. For consistency, all path variables must share the same behavior. + * + * Repeated message fields must not be mapped to URL query parameters, because + * no client library can support such complicated mapping. + * + * If an API needs to use a JSON array for request or response body, it can map + * the request or response body to a repeated field. 
However, some gRPC + * Transcoding implementations may not support this feature. + */ +export interface HttpRule { + /** + * Selects a method to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax + * details. + */ + selector: string; + /** + * Maps to HTTP GET. Used for listing and getting information about + * resources. + */ + get?: + | string + | undefined; + /** Maps to HTTP PUT. Used for replacing a resource. */ + put?: + | string + | undefined; + /** Maps to HTTP POST. Used for creating a resource or performing an action. */ + post?: + | string + | undefined; + /** Maps to HTTP DELETE. Used for deleting a resource. */ + delete?: + | string + | undefined; + /** Maps to HTTP PATCH. Used for updating a resource. */ + patch?: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom?: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP request + * body, or `*` for mapping all request fields not captured by the path + * pattern to the HTTP body, or omitted for not having any HTTP request body. + * + * NOTE: the referred field must be present at the top-level of the request + * message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * response body. When omitted, the entire response message will be used + * as the HTTP response body. + * + * NOTE: The referred field must be present at the top-level of the response + * message type. + */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. 
Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http: MessageFns<Http> = { + encode(message: Http, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).join(); + } + if (message.fullyDecodeReservedExpansion !== false) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Http { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.rules.push(HttpRule.decode(reader, reader.uint32())); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.fullyDecodeReservedExpansion = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: globalThis.Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? globalThis.Boolean(object.fullyDecodeReservedExpansion) + : isSet(object.fully_decode_reserved_expansion) + ? 
globalThis.Boolean(object.fully_decode_reserved_expansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules?.length) { + obj.rules = message.rules.map((e) => HttpRule.toJSON(e)); + } + if (message.fullyDecodeReservedExpansion !== false) { + obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion; + } + return obj; + }, + + create<I extends Exact<DeepPartial<Http>, I>>(base?: I): Http { + return Http.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<Http>, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule: MessageFns<HttpRule> = { + encode(message: HttpRule, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).join(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of 
message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.selector = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.get = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.put = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.post = reader.string(); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.delete = reader.string(); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.patch = reader.string(); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.body = reader.string(); + continue; + } + case 12: { + if (tag !== 98) { + break; + } + + message.responseBody = reader.string(); + continue; + } + case 11: { + if (tag !== 90) { + break; + } + + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? globalThis.String(object.selector) : "", + get: isSet(object.get) ? globalThis.String(object.get) : undefined, + put: isSet(object.put) ? globalThis.String(object.put) : undefined, + post: isSet(object.post) ? 
globalThis.String(object.post) : undefined, + delete: isSet(object.delete) ? globalThis.String(object.delete) : undefined, + patch: isSet(object.patch) ? globalThis.String(object.patch) : undefined, + custom: isSet(object.custom) ? CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? globalThis.String(object.body) : "", + responseBody: isSet(object.responseBody) + ? globalThis.String(object.responseBody) + : isSet(object.response_body) + ? globalThis.String(object.response_body) + : "", + additionalBindings: globalThis.Array.isArray(object?.additionalBindings) + ? object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : globalThis.Array.isArray(object?.additional_bindings) + ? object.additional_bindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + if (message.selector !== "") { + obj.selector = message.selector; + } + if (message.get !== undefined) { + obj.get = message.get; + } + if (message.put !== undefined) { + obj.put = message.put; + } + if (message.post !== undefined) { + obj.post = message.post; + } + if (message.delete !== undefined) { + obj.delete = message.delete; + } + if (message.patch !== undefined) { + obj.patch = message.patch; + } + if (message.custom !== undefined) { + obj.custom = CustomHttpPattern.toJSON(message.custom); + } + if (message.body !== "") { + obj.body = message.body; + } + if (message.responseBody !== "") { + obj.responseBody = message.responseBody; + } + if (message.additionalBindings?.length) { + obj.additionalBindings = message.additionalBindings.map((e) => HttpRule.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<HttpRule>, I>>(base?: I): HttpRule { + return HttpRule.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<HttpRule>, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? 
""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? ""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern: MessageFns<CustomHttpPattern> = { + encode(message: CustomHttpPattern, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.kind = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.path = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { + kind: isSet(object.kind) ? globalThis.String(object.kind) : "", + path: isSet(object.path) ? 
globalThis.String(object.path) : "", + }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + if (message.kind !== "") { + obj.kind = message.kind; + } + if (message.path !== "") { + obj.path = message.path; + } + return obj; + }, + + create<I extends Exact<DeepPartial<CustomHttpPattern>, I>>(base?: I): CustomHttpPattern { + return CustomHttpPattern.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<CustomHttpPattern>, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial<T> = T extends Builtin ? T + : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>> + : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>> + : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> } + : Partial<T>; + +type KeysOfUnion<T> = T extends T ? keyof T : never; +export type Exact<P, I extends P> = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns<T> { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create<I extends Exact<DeepPartial<T>, I>>(base?: I): T; + fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T; +} diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/google/protobuf/descriptor.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/google/protobuf/descriptor.ts new file mode 100644 index 0000000..5e7370b --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/google/protobuf/descriptor.ts @@ -0,0 +1,7277 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.11.2 +// protoc unknown +// source: google/protobuf/descriptor.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "google.protobuf"; + +/** The full set of known editions. */ +export enum Edition { + /** EDITION_UNKNOWN - A placeholder for an unknown edition value. */ + EDITION_UNKNOWN = 0, + /** + * EDITION_LEGACY - A placeholder edition for specifying default behaviors *before* a feature + * was first introduced. This is effectively an "infinite past". + */ + EDITION_LEGACY = 900, + /** + * EDITION_PROTO2 - Legacy syntax "editions". These pre-date editions, but behave much like + * distinct editions. These can't be used to specify the edition of proto + * files, but feature definitions must supply proto2/proto3 defaults for + * backwards compatibility. 
+ */ + EDITION_PROTO2 = 998, + EDITION_PROTO3 = 999, + /** + * EDITION_2023 - Editions that have been released. The specific values are arbitrary and + * should not be depended on, but they will always be time-ordered for easy + * comparison. + */ + EDITION_2023 = 1000, + EDITION_2024 = 1001, + /** + * EDITION_1_TEST_ONLY - Placeholder editions for testing feature resolution. These should not be + * used or relied on outside of tests. + */ + EDITION_1_TEST_ONLY = 1, + EDITION_2_TEST_ONLY = 2, + EDITION_99997_TEST_ONLY = 99997, + EDITION_99998_TEST_ONLY = 99998, + EDITION_99999_TEST_ONLY = 99999, + /** + * EDITION_MAX - Placeholder for specifying unbounded edition support. This should only + * ever be used by plugins that can expect to never require any changes to + * support a new edition. + */ + EDITION_MAX = 2147483647, + UNRECOGNIZED = -1, +} + +export function editionFromJSON(object: any): Edition { + switch (object) { + case 0: + case "EDITION_UNKNOWN": + return Edition.EDITION_UNKNOWN; + case 900: + case "EDITION_LEGACY": + return Edition.EDITION_LEGACY; + case 998: + case "EDITION_PROTO2": + return Edition.EDITION_PROTO2; + case 999: + case "EDITION_PROTO3": + return Edition.EDITION_PROTO3; + case 1000: + case "EDITION_2023": + return Edition.EDITION_2023; + case 1001: + case "EDITION_2024": + return Edition.EDITION_2024; + case 1: + case "EDITION_1_TEST_ONLY": + return Edition.EDITION_1_TEST_ONLY; + case 2: + case "EDITION_2_TEST_ONLY": + return Edition.EDITION_2_TEST_ONLY; + case 99997: + case "EDITION_99997_TEST_ONLY": + return Edition.EDITION_99997_TEST_ONLY; + case 99998: + case "EDITION_99998_TEST_ONLY": + return Edition.EDITION_99998_TEST_ONLY; + case 99999: + case "EDITION_99999_TEST_ONLY": + return Edition.EDITION_99999_TEST_ONLY; + case 2147483647: + case "EDITION_MAX": + return Edition.EDITION_MAX; + case -1: + case "UNRECOGNIZED": + default: + return Edition.UNRECOGNIZED; + } +} + +export function editionToJSON(object: Edition): string { + switch 
(object) { + case Edition.EDITION_UNKNOWN: + return "EDITION_UNKNOWN"; + case Edition.EDITION_LEGACY: + return "EDITION_LEGACY"; + case Edition.EDITION_PROTO2: + return "EDITION_PROTO2"; + case Edition.EDITION_PROTO3: + return "EDITION_PROTO3"; + case Edition.EDITION_2023: + return "EDITION_2023"; + case Edition.EDITION_2024: + return "EDITION_2024"; + case Edition.EDITION_1_TEST_ONLY: + return "EDITION_1_TEST_ONLY"; + case Edition.EDITION_2_TEST_ONLY: + return "EDITION_2_TEST_ONLY"; + case Edition.EDITION_99997_TEST_ONLY: + return "EDITION_99997_TEST_ONLY"; + case Edition.EDITION_99998_TEST_ONLY: + return "EDITION_99998_TEST_ONLY"; + case Edition.EDITION_99999_TEST_ONLY: + return "EDITION_99999_TEST_ONLY"; + case Edition.EDITION_MAX: + return "EDITION_MAX"; + case Edition.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * Describes the 'visibility' of a symbol with respect to the proto import + * system. Symbols can only be imported when the visibility rules do not prevent + * it (ex: local symbols cannot be imported). Visibility modifiers can only set + * on `message` and `enum` as they are the only types available to be referenced + * from other files. 
+ */ +export enum SymbolVisibility { + VISIBILITY_UNSET = 0, + VISIBILITY_LOCAL = 1, + VISIBILITY_EXPORT = 2, + UNRECOGNIZED = -1, +} + +export function symbolVisibilityFromJSON(object: any): SymbolVisibility { + switch (object) { + case 0: + case "VISIBILITY_UNSET": + return SymbolVisibility.VISIBILITY_UNSET; + case 1: + case "VISIBILITY_LOCAL": + return SymbolVisibility.VISIBILITY_LOCAL; + case 2: + case "VISIBILITY_EXPORT": + return SymbolVisibility.VISIBILITY_EXPORT; + case -1: + case "UNRECOGNIZED": + default: + return SymbolVisibility.UNRECOGNIZED; + } +} + +export function symbolVisibilityToJSON(object: SymbolVisibility): string { + switch (object) { + case SymbolVisibility.VISIBILITY_UNSET: + return "VISIBILITY_UNSET"; + case SymbolVisibility.VISIBILITY_LOCAL: + return "VISIBILITY_LOCAL"; + case SymbolVisibility.VISIBILITY_EXPORT: + return "VISIBILITY_EXPORT"; + case SymbolVisibility.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name?: + | string + | undefined; + /** e.g. "foo", "foo.bar", etc. */ + package?: + | string + | undefined; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** + * Names of files imported by this file purely for the purpose of providing + * option extensions. These are excluded from the dependency list above. + */ + optionDependency: string[]; + /** All top-level definitions in this file. 
*/ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options?: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo?: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. + * The supported values are "proto2", "proto3", and "editions". + * + * If `edition` is present, this value must be "editions". + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + syntax?: + | string + | undefined; + /** + * The edition of the proto file. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + edition?: Edition | undefined; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name?: string | undefined; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options?: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; + /** Support for `export` and `local` keywords on enums. */ + visibility?: SymbolVisibility | undefined; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. 
*/ + start?: + | number + | undefined; + /** Exclusive. */ + end?: number | undefined; + options?: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start?: + | number + | undefined; + /** Exclusive. */ + end?: number | undefined; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; + /** + * For external users: DO NOT USE. We are in the process of open sourcing + * extension declaration and executing internal cleanups before it can be + * used externally. + */ + declaration: ExtensionRangeOptions_Declaration[]; + /** Any features defined in the specific edition. */ + features?: + | FeatureSet + | undefined; + /** + * The verification state of the range. + * TODO: flip the default to DECLARATION once all empty ranges + * are marked as UNVERIFIED. + */ + verification?: ExtensionRangeOptions_VerificationState | undefined; +} + +/** The verification state of the extension range. */ +export enum ExtensionRangeOptions_VerificationState { + /** DECLARATION - All the extensions of the range must be declared. 
*/ + DECLARATION = 0, + UNVERIFIED = 1, + UNRECOGNIZED = -1, +} + +export function extensionRangeOptions_VerificationStateFromJSON(object: any): ExtensionRangeOptions_VerificationState { + switch (object) { + case 0: + case "DECLARATION": + return ExtensionRangeOptions_VerificationState.DECLARATION; + case 1: + case "UNVERIFIED": + return ExtensionRangeOptions_VerificationState.UNVERIFIED; + case -1: + case "UNRECOGNIZED": + default: + return ExtensionRangeOptions_VerificationState.UNRECOGNIZED; + } +} + +export function extensionRangeOptions_VerificationStateToJSON(object: ExtensionRangeOptions_VerificationState): string { + switch (object) { + case ExtensionRangeOptions_VerificationState.DECLARATION: + return "DECLARATION"; + case ExtensionRangeOptions_VerificationState.UNVERIFIED: + return "UNVERIFIED"; + case ExtensionRangeOptions_VerificationState.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface ExtensionRangeOptions_Declaration { + /** The extension number declared within the extension range. */ + number?: + | number + | undefined; + /** + * The fully-qualified name of the extension field. There must be a leading + * dot in front of the full name. + */ + fullName?: + | string + | undefined; + /** + * The fully-qualified type name of the extension field. Unlike + * Metadata.type, Declaration.type must have a leading dot for messages + * and enums. + */ + type?: + | string + | undefined; + /** + * If true, indicates that the number is reserved in the extension range, + * and any extension field with the number will fail to compile. Set this + * when a declared extension field is deleted. + */ + reserved?: + | boolean + | undefined; + /** + * If true, indicates that the extension must be defined as repeated. + * Otherwise the extension must be defined as optional. + */ + repeated?: boolean | undefined; +} + +/** Describes a field within a message. 
*/ +export interface FieldDescriptorProto { + name?: string | undefined; + number?: number | undefined; + label?: + | FieldDescriptorProto_Label + | undefined; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type?: + | FieldDescriptorProto_Type + | undefined; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName?: + | string + | undefined; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee?: + | string + | undefined; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + */ + defaultValue?: + | string + | undefined; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex?: + | number + | undefined; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName?: string | undefined; + options?: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. 
+ * + * When proto3_optional is true, this field must belong to a oneof to signal + * to old proto3 clients that presence is tracked for this field. This oneof + * is known as a "synthetic" oneof, and this field must be its sole member + * (each proto3 optional field gets its own synthetic oneof). Synthetic oneofs + * exist in the descriptor only, and do not generate any API. Synthetic oneofs + * must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional?: boolean | undefined; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported after google.protobuf. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. 
In Editions, the group wire format + * can be enabled via the `message_encoding` feature. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. */ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + 
return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REPEATED = 3, + /** + * LABEL_REQUIRED - The required label is only allowed in google.protobuf. In proto3 and Editions + * it's explicitly prohibited. In Editions, the `field_presence` feature + * can be used to get this behavior. 
+ */ + LABEL_REQUIRED = 2, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name?: string | undefined; + options?: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name?: string | undefined; + value: EnumValueDescriptorProto[]; + options?: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; + /** Support for `export` and `local` keywords on enums. */ + visibility?: SymbolVisibility | undefined; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. 
+ * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start?: + | number + | undefined; + /** Inclusive. */ + end?: number | undefined; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name?: string | undefined; + number?: number | undefined; + options?: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name?: string | undefined; + method: MethodDescriptorProto[]; + options?: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name?: + | string + | undefined; + /** + * Input and output type names. These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType?: string | undefined; + outputType?: string | undefined; + options?: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming?: + | boolean + | undefined; + /** Identifies if server streams multiple server messages */ + serverStreaming?: boolean | undefined; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage?: + | string + | undefined; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. 
+ * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname?: + | string + | undefined; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles?: + | boolean + | undefined; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash?: + | boolean + | undefined; + /** + * A proto2 file can set this to true to opt in to UTF-8 checking for Java, + * which will throw an exception if invalid UTF-8 is parsed from the wire or + * assigned to a string field. + * + * TODO: clarify exactly what kinds of field types this option + * applies to, and update these docs accordingly. + * + * Proto3 files already perform these checks. Setting the option explicitly to + * false has no effect: it cannot be used to opt proto3 files out of UTF-8 + * checks. + */ + javaStringCheckUtf8?: boolean | undefined; + optimizeFor?: + | FileOptions_OptimizeMode + | undefined; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage?: + | string + | undefined; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. 
They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices?: boolean | undefined; + javaGenericServices?: boolean | undefined; + pyGenericServices?: + | boolean + | undefined; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated?: + | boolean + | undefined; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas?: + | boolean + | undefined; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix?: + | string + | undefined; + /** Namespace for generated classes; defaults to the package. */ + csharpNamespace?: + | string + | undefined; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix?: + | string + | undefined; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix?: + | string + | undefined; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. 
When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace?: + | string + | undefined; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace?: + | string + | undefined; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage?: + | string + | undefined; + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat?: + | boolean + | undefined; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor?: + | boolean + | undefined; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated?: + | boolean + | undefined; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map<KeyType, ValueType> map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry?: + | boolean + | undefined; + /** + * Enable the legacy handling of JSON field name conflicts. This lowercases + * and strips underscored from the fields before comparison in proto3 only. + * The new behavior takes `json_name` into account and applies to proto2 as + * well. + * + * This should only be used as a temporary measure against broken builds due + * to the change in behavior for JSON field name conflicts. + * + * TODO This is legacy behavior we plan to remove once downstream + * teams have had time to migrate. + * + * @deprecated + */ + deprecatedLegacyJsonFieldConflicts?: + | boolean + | undefined; + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. 
Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * NOTE: ctype is deprecated. Use `features.(pb.cpp).string_type` instead. + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is only implemented to support use of + * [ctype=CORD] and [ctype=STRING] (the default) on non-repeated fields of + * type "bytes" in the open source release. + * TODO: make ctype actually deprecated. + */ + ctype?: + | FieldOptions_CType + | undefined; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. This option is prohibited in + * Editions, but the `repeated_field_encoding` feature can be used to control + * the behavior. + */ + packed?: + | boolean + | undefined; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. 
+ * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype?: + | FieldOptions_JSType + | undefined; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that lazy message fields are still eagerly verified to check + * ill-formed wireformat or missing required fields. Calling IsInitialized() + * on the outer message would fail if the inner message has missing required + * fields. Failed verification would result in parsing failure (except when + * uninitialized messages are acceptable). + */ + lazy?: + | boolean + | undefined; + /** + * unverified_lazy does no correctness checks on the byte stream. This should + * only be used where lazy with verification is prohibitive for performance + * reasons. + */ + unverifiedLazy?: + | boolean + | undefined; + /** + * Is this field deprecated? 
+ * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated?: + | boolean + | undefined; + /** For Google-internal migration only. Do not use. */ + weak?: + | boolean + | undefined; + /** + * Indicate that the field value should not be printed out when using debug + * formats, e.g. when the field contains sensitive credentials. + */ + debugRedact?: boolean | undefined; + retention?: FieldOptions_OptionRetention | undefined; + targets: FieldOptions_OptionTargetType[]; + editionDefaults: FieldOptions_EditionDefault[]; + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: FeatureSet | undefined; + featureSupport?: + | FieldOptions_FeatureSupport + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + /** + * CORD - The option [ctype=CORD] may be applied to a non-repeated field of type + * "bytes". It indicates that in C++, the data should be stored in a Cord + * instead of a string. For very large strings, this may reduce memory + * fragmentation. It may also allow better performance when parsing from a + * Cord, or when parsing with aliasing enabled, as the parsed Cord may then + * alias the original buffer. 
+ */ + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. */ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** If set to RETENTION_SOURCE, the option will be omitted from the binary. 
*/ +export enum FieldOptions_OptionRetention { + RETENTION_UNKNOWN = 0, + RETENTION_RUNTIME = 1, + RETENTION_SOURCE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_OptionRetentionFromJSON(object: any): FieldOptions_OptionRetention { + switch (object) { + case 0: + case "RETENTION_UNKNOWN": + return FieldOptions_OptionRetention.RETENTION_UNKNOWN; + case 1: + case "RETENTION_RUNTIME": + return FieldOptions_OptionRetention.RETENTION_RUNTIME; + case 2: + case "RETENTION_SOURCE": + return FieldOptions_OptionRetention.RETENTION_SOURCE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_OptionRetention.UNRECOGNIZED; + } +} + +export function fieldOptions_OptionRetentionToJSON(object: FieldOptions_OptionRetention): string { + switch (object) { + case FieldOptions_OptionRetention.RETENTION_UNKNOWN: + return "RETENTION_UNKNOWN"; + case FieldOptions_OptionRetention.RETENTION_RUNTIME: + return "RETENTION_RUNTIME"; + case FieldOptions_OptionRetention.RETENTION_SOURCE: + return "RETENTION_SOURCE"; + case FieldOptions_OptionRetention.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * This indicates the types of entities that the field may apply to when used + * as an option. If it is unset, then the field may be freely used as an + * option on any kind of entity. 
+ */ +export enum FieldOptions_OptionTargetType { + TARGET_TYPE_UNKNOWN = 0, + TARGET_TYPE_FILE = 1, + TARGET_TYPE_EXTENSION_RANGE = 2, + TARGET_TYPE_MESSAGE = 3, + TARGET_TYPE_FIELD = 4, + TARGET_TYPE_ONEOF = 5, + TARGET_TYPE_ENUM = 6, + TARGET_TYPE_ENUM_ENTRY = 7, + TARGET_TYPE_SERVICE = 8, + TARGET_TYPE_METHOD = 9, + UNRECOGNIZED = -1, +} + +export function fieldOptions_OptionTargetTypeFromJSON(object: any): FieldOptions_OptionTargetType { + switch (object) { + case 0: + case "TARGET_TYPE_UNKNOWN": + return FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN; + case 1: + case "TARGET_TYPE_FILE": + return FieldOptions_OptionTargetType.TARGET_TYPE_FILE; + case 2: + case "TARGET_TYPE_EXTENSION_RANGE": + return FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE; + case 3: + case "TARGET_TYPE_MESSAGE": + return FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE; + case 4: + case "TARGET_TYPE_FIELD": + return FieldOptions_OptionTargetType.TARGET_TYPE_FIELD; + case 5: + case "TARGET_TYPE_ONEOF": + return FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF; + case 6: + case "TARGET_TYPE_ENUM": + return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM; + case 7: + case "TARGET_TYPE_ENUM_ENTRY": + return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY; + case 8: + case "TARGET_TYPE_SERVICE": + return FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE; + case 9: + case "TARGET_TYPE_METHOD": + return FieldOptions_OptionTargetType.TARGET_TYPE_METHOD; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_OptionTargetType.UNRECOGNIZED; + } +} + +export function fieldOptions_OptionTargetTypeToJSON(object: FieldOptions_OptionTargetType): string { + switch (object) { + case FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN: + return "TARGET_TYPE_UNKNOWN"; + case FieldOptions_OptionTargetType.TARGET_TYPE_FILE: + return "TARGET_TYPE_FILE"; + case FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE: + return "TARGET_TYPE_EXTENSION_RANGE"; + case 
FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE: + return "TARGET_TYPE_MESSAGE"; + case FieldOptions_OptionTargetType.TARGET_TYPE_FIELD: + return "TARGET_TYPE_FIELD"; + case FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF: + return "TARGET_TYPE_ONEOF"; + case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM: + return "TARGET_TYPE_ENUM"; + case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY: + return "TARGET_TYPE_ENUM_ENTRY"; + case FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE: + return "TARGET_TYPE_SERVICE"; + case FieldOptions_OptionTargetType.TARGET_TYPE_METHOD: + return "TARGET_TYPE_METHOD"; + case FieldOptions_OptionTargetType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface FieldOptions_EditionDefault { + edition?: + | Edition + | undefined; + /** Textproto value. */ + value?: string | undefined; +} + +/** Information about the support window of a feature. */ +export interface FieldOptions_FeatureSupport { + /** + * The edition that this feature was first available in. In editions + * earlier than this one, the default assigned to EDITION_LEGACY will be + * used, and proto files will not be able to override it. + */ + editionIntroduced?: + | Edition + | undefined; + /** + * The edition this feature becomes deprecated in. Using this after this + * edition may trigger warnings. + */ + editionDeprecated?: + | Edition + | undefined; + /** + * The deprecation warning text if this feature is used after the edition it + * was marked deprecated in. + */ + deprecationWarning?: + | string + | undefined; + /** + * The edition this feature is no longer available in. In editions after + * this one, the last default assigned will be used, and proto files will + * not be able to override it. + */ + editionRemoved?: Edition | undefined; +} + +export interface OneofOptions { + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. 
Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias?: + | boolean + | undefined; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated?: + | boolean + | undefined; + /** + * Enable the legacy handling of JSON field name conflicts. This lowercases + * and strips underscored from the fields before comparison in proto3 only. + * The new behavior takes `json_name` into account and applies to proto2 as + * well. + * TODO Remove this legacy behavior once downstream teams have + * had time to migrate. + * + * @deprecated + */ + deprecatedLegacyJsonFieldConflicts?: + | boolean + | undefined; + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated?: + | boolean + | undefined; + /** + * Any features defined in the specific edition. 
+ * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** + * Indicate that fields annotated with this enum value should not be printed + * out when using debug formats, e.g. when the field contains sensitive + * credentials. + */ + debugRedact?: + | boolean + | undefined; + /** Information about the support window of a feature value. */ + featureSupport?: + | FieldOptions_FeatureSupport + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated?: + | boolean + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated?: boolean | undefined; + idempotencyLevel?: + | MethodOptions_IdempotencyLevel + | undefined; + /** + * Any features defined in the specific edition. 
+ * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. + */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. 
This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. + */ + identifierValue?: string | undefined; + positiveIntValue?: number | undefined; + negativeIntValue?: number | undefined; + doubleValue?: number | undefined; + stringValue?: Uint8Array | undefined; + aggregateValue?: string | undefined; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents + * "foo.(bar.baz).moo". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * TODO Enums in C++ gencode (and potentially other languages) are + * not well scoped. This means that each of the feature enums below can clash + * with each other. The short names we've chosen maximize call-site + * readability, but leave us very open to this scenario. A future feature will + * be designed and implemented to handle this, hopefully before we ever hit a + * conflict here. 
+ */ +export interface FeatureSet { + fieldPresence?: FeatureSet_FieldPresence | undefined; + enumType?: FeatureSet_EnumType | undefined; + repeatedFieldEncoding?: FeatureSet_RepeatedFieldEncoding | undefined; + utf8Validation?: FeatureSet_Utf8Validation | undefined; + messageEncoding?: FeatureSet_MessageEncoding | undefined; + jsonFormat?: FeatureSet_JsonFormat | undefined; + enforceNamingStyle?: FeatureSet_EnforceNamingStyle | undefined; + defaultSymbolVisibility?: FeatureSet_VisibilityFeature_DefaultSymbolVisibility | undefined; +} + +export enum FeatureSet_FieldPresence { + FIELD_PRESENCE_UNKNOWN = 0, + EXPLICIT = 1, + IMPLICIT = 2, + LEGACY_REQUIRED = 3, + UNRECOGNIZED = -1, +} + +export function featureSet_FieldPresenceFromJSON(object: any): FeatureSet_FieldPresence { + switch (object) { + case 0: + case "FIELD_PRESENCE_UNKNOWN": + return FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN; + case 1: + case "EXPLICIT": + return FeatureSet_FieldPresence.EXPLICIT; + case 2: + case "IMPLICIT": + return FeatureSet_FieldPresence.IMPLICIT; + case 3: + case "LEGACY_REQUIRED": + return FeatureSet_FieldPresence.LEGACY_REQUIRED; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_FieldPresence.UNRECOGNIZED; + } +} + +export function featureSet_FieldPresenceToJSON(object: FeatureSet_FieldPresence): string { + switch (object) { + case FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN: + return "FIELD_PRESENCE_UNKNOWN"; + case FeatureSet_FieldPresence.EXPLICIT: + return "EXPLICIT"; + case FeatureSet_FieldPresence.IMPLICIT: + return "IMPLICIT"; + case FeatureSet_FieldPresence.LEGACY_REQUIRED: + return "LEGACY_REQUIRED"; + case FeatureSet_FieldPresence.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_EnumType { + ENUM_TYPE_UNKNOWN = 0, + OPEN = 1, + CLOSED = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_EnumTypeFromJSON(object: any): FeatureSet_EnumType { + switch (object) { + case 0: + case "ENUM_TYPE_UNKNOWN": + return 
FeatureSet_EnumType.ENUM_TYPE_UNKNOWN; + case 1: + case "OPEN": + return FeatureSet_EnumType.OPEN; + case 2: + case "CLOSED": + return FeatureSet_EnumType.CLOSED; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_EnumType.UNRECOGNIZED; + } +} + +export function featureSet_EnumTypeToJSON(object: FeatureSet_EnumType): string { + switch (object) { + case FeatureSet_EnumType.ENUM_TYPE_UNKNOWN: + return "ENUM_TYPE_UNKNOWN"; + case FeatureSet_EnumType.OPEN: + return "OPEN"; + case FeatureSet_EnumType.CLOSED: + return "CLOSED"; + case FeatureSet_EnumType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_RepeatedFieldEncoding { + REPEATED_FIELD_ENCODING_UNKNOWN = 0, + PACKED = 1, + EXPANDED = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_RepeatedFieldEncodingFromJSON(object: any): FeatureSet_RepeatedFieldEncoding { + switch (object) { + case 0: + case "REPEATED_FIELD_ENCODING_UNKNOWN": + return FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN; + case 1: + case "PACKED": + return FeatureSet_RepeatedFieldEncoding.PACKED; + case 2: + case "EXPANDED": + return FeatureSet_RepeatedFieldEncoding.EXPANDED; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_RepeatedFieldEncoding.UNRECOGNIZED; + } +} + +export function featureSet_RepeatedFieldEncodingToJSON(object: FeatureSet_RepeatedFieldEncoding): string { + switch (object) { + case FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN: + return "REPEATED_FIELD_ENCODING_UNKNOWN"; + case FeatureSet_RepeatedFieldEncoding.PACKED: + return "PACKED"; + case FeatureSet_RepeatedFieldEncoding.EXPANDED: + return "EXPANDED"; + case FeatureSet_RepeatedFieldEncoding.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_Utf8Validation { + UTF8_VALIDATION_UNKNOWN = 0, + VERIFY = 2, + NONE = 3, + UNRECOGNIZED = -1, +} + +export function featureSet_Utf8ValidationFromJSON(object: any): FeatureSet_Utf8Validation { + 
switch (object) { + case 0: + case "UTF8_VALIDATION_UNKNOWN": + return FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN; + case 2: + case "VERIFY": + return FeatureSet_Utf8Validation.VERIFY; + case 3: + case "NONE": + return FeatureSet_Utf8Validation.NONE; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_Utf8Validation.UNRECOGNIZED; + } +} + +export function featureSet_Utf8ValidationToJSON(object: FeatureSet_Utf8Validation): string { + switch (object) { + case FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN: + return "UTF8_VALIDATION_UNKNOWN"; + case FeatureSet_Utf8Validation.VERIFY: + return "VERIFY"; + case FeatureSet_Utf8Validation.NONE: + return "NONE"; + case FeatureSet_Utf8Validation.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_MessageEncoding { + MESSAGE_ENCODING_UNKNOWN = 0, + LENGTH_PREFIXED = 1, + DELIMITED = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_MessageEncodingFromJSON(object: any): FeatureSet_MessageEncoding { + switch (object) { + case 0: + case "MESSAGE_ENCODING_UNKNOWN": + return FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN; + case 1: + case "LENGTH_PREFIXED": + return FeatureSet_MessageEncoding.LENGTH_PREFIXED; + case 2: + case "DELIMITED": + return FeatureSet_MessageEncoding.DELIMITED; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_MessageEncoding.UNRECOGNIZED; + } +} + +export function featureSet_MessageEncodingToJSON(object: FeatureSet_MessageEncoding): string { + switch (object) { + case FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN: + return "MESSAGE_ENCODING_UNKNOWN"; + case FeatureSet_MessageEncoding.LENGTH_PREFIXED: + return "LENGTH_PREFIXED"; + case FeatureSet_MessageEncoding.DELIMITED: + return "DELIMITED"; + case FeatureSet_MessageEncoding.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_JsonFormat { + JSON_FORMAT_UNKNOWN = 0, + ALLOW = 1, + LEGACY_BEST_EFFORT = 2, + UNRECOGNIZED = -1, +} + +export 
function featureSet_JsonFormatFromJSON(object: any): FeatureSet_JsonFormat { + switch (object) { + case 0: + case "JSON_FORMAT_UNKNOWN": + return FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN; + case 1: + case "ALLOW": + return FeatureSet_JsonFormat.ALLOW; + case 2: + case "LEGACY_BEST_EFFORT": + return FeatureSet_JsonFormat.LEGACY_BEST_EFFORT; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_JsonFormat.UNRECOGNIZED; + } +} + +export function featureSet_JsonFormatToJSON(object: FeatureSet_JsonFormat): string { + switch (object) { + case FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN: + return "JSON_FORMAT_UNKNOWN"; + case FeatureSet_JsonFormat.ALLOW: + return "ALLOW"; + case FeatureSet_JsonFormat.LEGACY_BEST_EFFORT: + return "LEGACY_BEST_EFFORT"; + case FeatureSet_JsonFormat.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_EnforceNamingStyle { + ENFORCE_NAMING_STYLE_UNKNOWN = 0, + STYLE2024 = 1, + STYLE_LEGACY = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_EnforceNamingStyleFromJSON(object: any): FeatureSet_EnforceNamingStyle { + switch (object) { + case 0: + case "ENFORCE_NAMING_STYLE_UNKNOWN": + return FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN; + case 1: + case "STYLE2024": + return FeatureSet_EnforceNamingStyle.STYLE2024; + case 2: + case "STYLE_LEGACY": + return FeatureSet_EnforceNamingStyle.STYLE_LEGACY; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_EnforceNamingStyle.UNRECOGNIZED; + } +} + +export function featureSet_EnforceNamingStyleToJSON(object: FeatureSet_EnforceNamingStyle): string { + switch (object) { + case FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN: + return "ENFORCE_NAMING_STYLE_UNKNOWN"; + case FeatureSet_EnforceNamingStyle.STYLE2024: + return "STYLE2024"; + case FeatureSet_EnforceNamingStyle.STYLE_LEGACY: + return "STYLE_LEGACY"; + case FeatureSet_EnforceNamingStyle.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface 
FeatureSet_VisibilityFeature { +} + +export enum FeatureSet_VisibilityFeature_DefaultSymbolVisibility { + DEFAULT_SYMBOL_VISIBILITY_UNKNOWN = 0, + /** EXPORT_ALL - Default pre-EDITION_2024, all UNSET visibility are export. */ + EXPORT_ALL = 1, + /** EXPORT_TOP_LEVEL - All top-level symbols default to export, nested default to local. */ + EXPORT_TOP_LEVEL = 2, + /** LOCAL_ALL - All symbols default to local. */ + LOCAL_ALL = 3, + /** + * STRICT - All symbols local by default. Nested types cannot be exported. + * With special case caveat for message { enum {} reserved 1 to max; } + * This is the recommended setting for new protos. + */ + STRICT = 4, + UNRECOGNIZED = -1, +} + +export function featureSet_VisibilityFeature_DefaultSymbolVisibilityFromJSON( + object: any, +): FeatureSet_VisibilityFeature_DefaultSymbolVisibility { + switch (object) { + case 0: + case "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN": + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.DEFAULT_SYMBOL_VISIBILITY_UNKNOWN; + case 1: + case "EXPORT_ALL": + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.EXPORT_ALL; + case 2: + case "EXPORT_TOP_LEVEL": + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.EXPORT_TOP_LEVEL; + case 3: + case "LOCAL_ALL": + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.LOCAL_ALL; + case 4: + case "STRICT": + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.STRICT; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.UNRECOGNIZED; + } +} + +export function featureSet_VisibilityFeature_DefaultSymbolVisibilityToJSON( + object: FeatureSet_VisibilityFeature_DefaultSymbolVisibility, +): string { + switch (object) { + case FeatureSet_VisibilityFeature_DefaultSymbolVisibility.DEFAULT_SYMBOL_VISIBILITY_UNKNOWN: + return "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN"; + case FeatureSet_VisibilityFeature_DefaultSymbolVisibility.EXPORT_ALL: + return "EXPORT_ALL"; + case 
FeatureSet_VisibilityFeature_DefaultSymbolVisibility.EXPORT_TOP_LEVEL: + return "EXPORT_TOP_LEVEL"; + case FeatureSet_VisibilityFeature_DefaultSymbolVisibility.LOCAL_ALL: + return "LOCAL_ALL"; + case FeatureSet_VisibilityFeature_DefaultSymbolVisibility.STRICT: + return "STRICT"; + case FeatureSet_VisibilityFeature_DefaultSymbolVisibility.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A compiled specification for the defaults of a set of features. These + * messages are generated from FeatureSet extensions and can be used to seed + * feature resolution. The resolution with this object becomes a simple search + * for the closest matching edition, followed by proto merges. + */ +export interface FeatureSetDefaults { + defaults: FeatureSetDefaults_FeatureSetEditionDefault[]; + /** + * The minimum supported edition (inclusive) when this was constructed. + * Editions before this will not have defaults. + */ + minimumEdition?: + | Edition + | undefined; + /** + * The maximum known edition (inclusive) when this was constructed. Editions + * after this will not have reliable defaults. + */ + maximumEdition?: Edition | undefined; +} + +/** + * A map from every known edition with a unique set of defaults to its + * defaults. Not all editions may be contained here. For a given edition, + * the defaults at the closest matching edition ordered at or before it should + * be used. This field must be in strict ascending order by edition. + */ +export interface FeatureSetDefaults_FeatureSetEditionDefault { + edition?: + | Edition + | undefined; + /** Defaults of features that can be overridden in this edition. */ + overridableFeatures?: + | FeatureSet + | undefined; + /** Defaults of features that can't be overridden in this edition. */ + fixedFeatures?: FeatureSet | undefined; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. 
+ */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. 
Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition appears. + * For example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). + */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. 
+ * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to moo. + * // + * // Another line attached to moo. + * optional double moo = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to moo or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. * / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments?: string | undefined; + trailingComments?: string | undefined; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. 
+ */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile?: + | string + | undefined; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin?: + | number + | undefined; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified object. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end?: number | undefined; + semantic?: GeneratedCodeInfo_Annotation_Semantic | undefined; +} + +/** + * Represents the identified object's effect on the element in the original + * .proto file. + */ +export enum GeneratedCodeInfo_Annotation_Semantic { + /** NONE - There is no effect or the effect is indescribable. */ + NONE = 0, + /** SET - The element is set or otherwise mutated. */ + SET = 1, + /** ALIAS - An alias to the element is returned. 
*/ + ALIAS = 2, + UNRECOGNIZED = -1, +} + +export function generatedCodeInfo_Annotation_SemanticFromJSON(object: any): GeneratedCodeInfo_Annotation_Semantic { + switch (object) { + case 0: + case "NONE": + return GeneratedCodeInfo_Annotation_Semantic.NONE; + case 1: + case "SET": + return GeneratedCodeInfo_Annotation_Semantic.SET; + case 2: + case "ALIAS": + return GeneratedCodeInfo_Annotation_Semantic.ALIAS; + case -1: + case "UNRECOGNIZED": + default: + return GeneratedCodeInfo_Annotation_Semantic.UNRECOGNIZED; + } +} + +export function generatedCodeInfo_Annotation_SemanticToJSON(object: GeneratedCodeInfo_Annotation_Semantic): string { + switch (object) { + case GeneratedCodeInfo_Annotation_Semantic.NONE: + return "NONE"; + case GeneratedCodeInfo_Annotation_Semantic.SET: + return "SET"; + case GeneratedCodeInfo_Annotation_Semantic.ALIAS: + return "ALIAS"; + case GeneratedCodeInfo_Annotation_Semantic.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet: MessageFns<FileDescriptorSet> = { + encode(message: FileDescriptorSet, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { + file: globalThis.Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [], + }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file?.length) { + obj.file = message.file.map((e) => FileDescriptorProto.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<FileDescriptorSet>, I>>(base?: I): FileDescriptorSet { + return FileDescriptorSet.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FileDescriptorSet>, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + optionDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + edition: 0, + }; +} + +export const FileDescriptorProto: MessageFns<FileDescriptorProto> = { + encode(message: FileDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== undefined && message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + 
writer.uint32(26).string(v!); + } + for (const v of message.publicDependency) { + writer.uint32(80).int32(v!); + } + for (const v of message.weakDependency) { + writer.uint32(88).int32(v!); + } + for (const v of message.optionDependency) { + writer.uint32(122).string(v!); + } + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).join(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).join(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).join(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).join(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).join(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).join(); + } + if (message.syntax !== undefined && message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + if (message.edition !== undefined && message.edition !== 0) { + writer.uint32(112).int32(message.edition); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.package = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.dependency.push(reader.string()); + continue; + } + case 10: { + if (tag === 80) { + message.publicDependency.push(reader.int32()); + + continue; + } + + if (tag === 82) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + + continue; + } + + break; + } + case 11: { + if (tag === 88) { + message.weakDependency.push(reader.int32()); + + continue; + } + + if (tag === 90) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + + continue; + } + + break; + } + case 15: { + if (tag !== 122) { + break; + } + + message.optionDependency.push(reader.string()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.options = FileOptions.decode(reader, reader.uint32()); + continue; + } + case 9: { + if (tag !== 74) { + break; + } + + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + continue; + } + case 12: { + if 
(tag !== 98) { + break; + } + + message.syntax = reader.string(); + continue; + } + case 14: { + if (tag !== 112) { + break; + } + + message.edition = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + package: isSet(object.package) ? globalThis.String(object.package) : "", + dependency: globalThis.Array.isArray(object?.dependency) + ? object.dependency.map((e: any) => globalThis.String(e)) + : [], + publicDependency: globalThis.Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => globalThis.Number(e)) + : globalThis.Array.isArray(object?.public_dependency) + ? object.public_dependency.map((e: any) => globalThis.Number(e)) + : [], + weakDependency: globalThis.Array.isArray(object?.weakDependency) + ? object.weakDependency.map((e: any) => globalThis.Number(e)) + : globalThis.Array.isArray(object?.weak_dependency) + ? object.weak_dependency.map((e: any) => globalThis.Number(e)) + : [], + optionDependency: globalThis.Array.isArray(object?.optionDependency) + ? object.optionDependency.map((e: any) => globalThis.String(e)) + : globalThis.Array.isArray(object?.option_dependency) + ? object.option_dependency.map((e: any) => globalThis.String(e)) + : [], + messageType: globalThis.Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : globalThis.Array.isArray(object?.message_type) + ? object.message_type.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: globalThis.Array.isArray(object?.enumType) + ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) + : globalThis.Array.isArray(object?.enum_type) + ? object.enum_type.map((e: any) => EnumDescriptorProto.fromJSON(e)) + : [], + service: globalThis.Array.isArray(object?.service) + ? 
object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) + : [], + extension: globalThis.Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) + ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) + : isSet(object.source_code_info) + ? SourceCodeInfo.fromJSON(object.source_code_info) + : undefined, + syntax: isSet(object.syntax) ? globalThis.String(object.syntax) : "", + edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0, + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.package !== undefined && message.package !== "") { + obj.package = message.package; + } + if (message.dependency?.length) { + obj.dependency = message.dependency; + } + if (message.publicDependency?.length) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } + if (message.weakDependency?.length) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } + if (message.optionDependency?.length) { + obj.optionDependency = message.optionDependency; + } + if (message.messageType?.length) { + obj.messageType = message.messageType.map((e) => DescriptorProto.toJSON(e)); + } + if (message.enumType?.length) { + obj.enumType = message.enumType.map((e) => EnumDescriptorProto.toJSON(e)); + } + if (message.service?.length) { + obj.service = message.service.map((e) => ServiceDescriptorProto.toJSON(e)); + } + if (message.extension?.length) { + obj.extension = message.extension.map((e) => FieldDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = FileOptions.toJSON(message.options); + } + if (message.sourceCodeInfo !== undefined) { + obj.sourceCodeInfo = 
SourceCodeInfo.toJSON(message.sourceCodeInfo); + } + if (message.syntax !== undefined && message.syntax !== "") { + obj.syntax = message.syntax; + } + if (message.edition !== undefined && message.edition !== 0) { + obj.edition = editionToJSON(message.edition); + } + return obj; + }, + + create<I extends Exact<DeepPartial<FileDescriptorProto>, I>>(base?: I): FileDescriptorProto { + return FileDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FileDescriptorProto>, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? ""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.optionDependency = object.optionDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + message.edition = object.edition ?? 
0; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + visibility: 0, + }; +} + +export const DescriptorProto: MessageFns<DescriptorProto> = { + encode(message: DescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).join(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).join(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).join(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).join(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).join(); + } + for (const v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).join(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).join(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).join(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + if (message.visibility !== undefined && message.visibility !== 0) { + writer.uint32(88).int32(message.visibility); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.options = MessageOptions.decode(reader, reader.uint32()); + continue; + } + case 9: { + if (tag !== 74) { + break; + } + + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + continue; + } + case 10: { + if (tag !== 82) { + break; + } + + message.reservedName.push(reader.string()); + continue; + } + case 11: { + if (tag !== 88) { + break; + } + + message.visibility = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + field: globalThis.Array.isArray(object?.field) + ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + extension: globalThis.Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: globalThis.Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : globalThis.Array.isArray(object?.nested_type) + ? object.nested_type.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: globalThis.Array.isArray(object?.enumType) + ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) + : globalThis.Array.isArray(object?.enum_type) + ? object.enum_type.map((e: any) => EnumDescriptorProto.fromJSON(e)) + : [], + extensionRange: globalThis.Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : globalThis.Array.isArray(object?.extension_range) + ? object.extension_range.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: globalThis.Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : globalThis.Array.isArray(object?.oneof_decl) + ? object.oneof_decl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: globalThis.Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : globalThis.Array.isArray(object?.reserved_range) + ? object.reserved_range.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: globalThis.Array.isArray(object?.reservedName) + ? object.reservedName.map((e: any) => globalThis.String(e)) + : globalThis.Array.isArray(object?.reserved_name) + ? object.reserved_name.map((e: any) => globalThis.String(e)) + : [], + visibility: isSet(object.visibility) ? 
symbolVisibilityFromJSON(object.visibility) : 0, + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.field?.length) { + obj.field = message.field.map((e) => FieldDescriptorProto.toJSON(e)); + } + if (message.extension?.length) { + obj.extension = message.extension.map((e) => FieldDescriptorProto.toJSON(e)); + } + if (message.nestedType?.length) { + obj.nestedType = message.nestedType.map((e) => DescriptorProto.toJSON(e)); + } + if (message.enumType?.length) { + obj.enumType = message.enumType.map((e) => EnumDescriptorProto.toJSON(e)); + } + if (message.extensionRange?.length) { + obj.extensionRange = message.extensionRange.map((e) => DescriptorProto_ExtensionRange.toJSON(e)); + } + if (message.oneofDecl?.length) { + obj.oneofDecl = message.oneofDecl.map((e) => OneofDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = MessageOptions.toJSON(message.options); + } + if (message.reservedRange?.length) { + obj.reservedRange = message.reservedRange.map((e) => DescriptorProto_ReservedRange.toJSON(e)); + } + if (message.reservedName?.length) { + obj.reservedName = message.reservedName; + } + if (message.visibility !== undefined && message.visibility !== 0) { + obj.visibility = symbolVisibilityToJSON(message.visibility); + } + return obj; + }, + + create<I extends Exact<DeepPartial<DescriptorProto>, I>>(base?: I): DescriptorProto { + return DescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<DescriptorProto>, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? 
""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + message.visibility = object.visibility ?? 0; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange: MessageFns<DescriptorProto_ExtensionRange> = { + encode(message: DescriptorProto_ExtensionRange, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.start !== undefined && message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== undefined && message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.start = reader.int32(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.end = reader.int32(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? globalThis.Number(object.start) : 0, + end: isSet(object.end) ? globalThis.Number(object.end) : 0, + options: isSet(object.options) ? ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + if (message.start !== undefined && message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== undefined && message.end !== 0) { + obj.end = Math.round(message.end); + } + if (message.options !== undefined) { + obj.options = ExtensionRangeOptions.toJSON(message.options); + } + return obj; + }, + + create<I extends Exact<DeepPartial<DescriptorProto_ExtensionRange>, I>>(base?: I): DescriptorProto_ExtensionRange { + return DescriptorProto_ExtensionRange.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<DescriptorProto_ExtensionRange>, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? 
ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange: MessageFns<DescriptorProto_ReservedRange> = { + encode(message: DescriptorProto_ReservedRange, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.start !== undefined && message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== undefined && message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.start = reader.int32(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.end = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { + start: isSet(object.start) ? globalThis.Number(object.start) : 0, + end: isSet(object.end) ? 
globalThis.Number(object.end) : 0, + }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + if (message.start !== undefined && message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== undefined && message.end !== 0) { + obj.end = Math.round(message.end); + } + return obj; + }, + + create<I extends Exact<DeepPartial<DescriptorProto_ReservedRange>, I>>(base?: I): DescriptorProto_ReservedRange { + return DescriptorProto_ReservedRange.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<DescriptorProto_ReservedRange>, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [], declaration: [], features: undefined, verification: 1 }; +} + +export const ExtensionRangeOptions: MessageFns<ExtensionRangeOptions> = { + encode(message: ExtensionRangeOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + for (const v of message.declaration) { + ExtensionRangeOptions_Declaration.encode(v!, writer.uint32(18).fork()).join(); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(402).fork()).join(); + } + if (message.verification !== undefined && message.verification !== 1) { + writer.uint32(24).int32(message.verification); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.declaration.push(ExtensionRangeOptions_Declaration.decode(reader, reader.uint32())); + continue; + } + case 50: { + if (tag !== 402) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.verification = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + declaration: globalThis.Array.isArray(object?.declaration) + ? object.declaration.map((e: any) => ExtensionRangeOptions_Declaration.fromJSON(e)) + : [], + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + verification: isSet(object.verification) + ? 
extensionRangeOptions_VerificationStateFromJSON(object.verification) + : 1, + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + if (message.declaration?.length) { + obj.declaration = message.declaration.map((e) => ExtensionRangeOptions_Declaration.toJSON(e)); + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.verification !== undefined && message.verification !== 1) { + obj.verification = extensionRangeOptions_VerificationStateToJSON(message.verification); + } + return obj; + }, + + create<I extends Exact<DeepPartial<ExtensionRangeOptions>, I>>(base?: I): ExtensionRangeOptions { + return ExtensionRangeOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<ExtensionRangeOptions>, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + message.declaration = object.declaration?.map((e) => ExtensionRangeOptions_Declaration.fromPartial(e)) || []; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.verification = object.verification ?? 
1; + return message; + }, +}; + +function createBaseExtensionRangeOptions_Declaration(): ExtensionRangeOptions_Declaration { + return { number: 0, fullName: "", type: "", reserved: false, repeated: false }; +} + +export const ExtensionRangeOptions_Declaration: MessageFns<ExtensionRangeOptions_Declaration> = { + encode(message: ExtensionRangeOptions_Declaration, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.number !== undefined && message.number !== 0) { + writer.uint32(8).int32(message.number); + } + if (message.fullName !== undefined && message.fullName !== "") { + writer.uint32(18).string(message.fullName); + } + if (message.type !== undefined && message.type !== "") { + writer.uint32(26).string(message.type); + } + if (message.reserved !== undefined && message.reserved !== false) { + writer.uint32(40).bool(message.reserved); + } + if (message.repeated !== undefined && message.repeated !== false) { + writer.uint32(48).bool(message.repeated); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ExtensionRangeOptions_Declaration { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions_Declaration(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.number = reader.int32(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.fullName = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.type = reader.string(); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.reserved = reader.bool(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.repeated = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions_Declaration { + return { + number: isSet(object.number) ? globalThis.Number(object.number) : 0, + fullName: isSet(object.fullName) + ? globalThis.String(object.fullName) + : isSet(object.full_name) + ? globalThis.String(object.full_name) + : "", + type: isSet(object.type) ? globalThis.String(object.type) : "", + reserved: isSet(object.reserved) ? globalThis.Boolean(object.reserved) : false, + repeated: isSet(object.repeated) ? 
globalThis.Boolean(object.repeated) : false, + }; + }, + + toJSON(message: ExtensionRangeOptions_Declaration): unknown { + const obj: any = {}; + if (message.number !== undefined && message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.fullName !== undefined && message.fullName !== "") { + obj.fullName = message.fullName; + } + if (message.type !== undefined && message.type !== "") { + obj.type = message.type; + } + if (message.reserved !== undefined && message.reserved !== false) { + obj.reserved = message.reserved; + } + if (message.repeated !== undefined && message.repeated !== false) { + obj.repeated = message.repeated; + } + return obj; + }, + + create<I extends Exact<DeepPartial<ExtensionRangeOptions_Declaration>, I>>( + base?: I, + ): ExtensionRangeOptions_Declaration { + return ExtensionRangeOptions_Declaration.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<ExtensionRangeOptions_Declaration>, I>>( + object: I, + ): ExtensionRangeOptions_Declaration { + const message = createBaseExtensionRangeOptions_Declaration(); + message.number = object.number ?? 0; + message.fullName = object.fullName ?? ""; + message.type = object.type ?? ""; + message.reserved = object.reserved ?? false; + message.repeated = object.repeated ?? 
false; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto: MessageFns<FieldDescriptorProto> = { + encode(message: FieldDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== undefined && message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== undefined && message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== undefined && message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== undefined && message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== undefined && message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== undefined && message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== undefined && message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== undefined && message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).join(); + } + if (message.proto3Optional !== undefined && message.proto3Optional !== false) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.number = reader.int32(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.label = reader.int32() as any; + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.type = reader.int32() as any; + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.typeName = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.extendee = reader.string(); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.defaultValue = reader.string(); + continue; + } + case 9: { + if (tag !== 72) { + break; + } + + message.oneofIndex = reader.int32(); + continue; + } + case 10: { + if (tag !== 82) { + break; + } + + message.jsonName = reader.string(); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.options = FieldOptions.decode(reader, reader.uint32()); + continue; + } + case 17: { + if (tag !== 136) { + break; + } + + message.proto3Optional = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + number: isSet(object.number) ? globalThis.Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) + ? globalThis.String(object.typeName) + : isSet(object.type_name) + ? globalThis.String(object.type_name) + : "", + extendee: isSet(object.extendee) ? 
globalThis.String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) + ? globalThis.String(object.defaultValue) + : isSet(object.default_value) + ? globalThis.String(object.default_value) + : "", + oneofIndex: isSet(object.oneofIndex) + ? globalThis.Number(object.oneofIndex) + : isSet(object.oneof_index) + ? globalThis.Number(object.oneof_index) + : 0, + jsonName: isSet(object.jsonName) + ? globalThis.String(object.jsonName) + : isSet(object.json_name) + ? globalThis.String(object.json_name) + : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) + ? globalThis.Boolean(object.proto3Optional) + : isSet(object.proto3_optional) + ? globalThis.Boolean(object.proto3_optional) + : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.number !== undefined && message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.label !== undefined && message.label !== 1) { + obj.label = fieldDescriptorProto_LabelToJSON(message.label); + } + if (message.type !== undefined && message.type !== 1) { + obj.type = fieldDescriptorProto_TypeToJSON(message.type); + } + if (message.typeName !== undefined && message.typeName !== "") { + obj.typeName = message.typeName; + } + if (message.extendee !== undefined && message.extendee !== "") { + obj.extendee = message.extendee; + } + if (message.defaultValue !== undefined && message.defaultValue !== "") { + obj.defaultValue = message.defaultValue; + } + if (message.oneofIndex !== undefined && message.oneofIndex !== 0) { + obj.oneofIndex = Math.round(message.oneofIndex); + } + if (message.jsonName !== undefined && message.jsonName !== "") { + obj.jsonName = message.jsonName; + } + if (message.options !== undefined) { + obj.options = FieldOptions.toJSON(message.options); + } + if 
(message.proto3Optional !== undefined && message.proto3Optional !== false) { + obj.proto3Optional = message.proto3Optional; + } + return obj; + }, + + create<I extends Exact<DeepPartial<FieldDescriptorProto>, I>>(base?: I): FieldDescriptorProto { + return FieldDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FieldDescriptorProto>, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto: MessageFns<OneofDescriptorProto> = { + encode(message: OneofDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.options = OneofOptions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.options !== undefined) { + obj.options = OneofOptions.toJSON(message.options); + } + return obj; + }, + + create<I extends Exact<DeepPartial<OneofDescriptorProto>, I>>(base?: I): OneofDescriptorProto { + return OneofDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<OneofDescriptorProto>, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [], visibility: 0 }; +} + +export const EnumDescriptorProto: MessageFns<EnumDescriptorProto> = { + encode(message: EnumDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).join(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).join(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).join(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + if (message.visibility !== undefined && message.visibility !== 0) { + writer.uint32(48).int32(message.visibility); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.options = EnumOptions.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.reservedName.push(reader.string()); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.visibility = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + value: globalThis.Array.isArray(object?.value) + ? object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: globalThis.Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : globalThis.Array.isArray(object?.reserved_range) + ? object.reserved_range.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: globalThis.Array.isArray(object?.reservedName) + ? object.reservedName.map((e: any) => globalThis.String(e)) + : globalThis.Array.isArray(object?.reserved_name) + ? object.reserved_name.map((e: any) => globalThis.String(e)) + : [], + visibility: isSet(object.visibility) ? 
symbolVisibilityFromJSON(object.visibility) : 0, + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.value?.length) { + obj.value = message.value.map((e) => EnumValueDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = EnumOptions.toJSON(message.options); + } + if (message.reservedRange?.length) { + obj.reservedRange = message.reservedRange.map((e) => EnumDescriptorProto_EnumReservedRange.toJSON(e)); + } + if (message.reservedName?.length) { + obj.reservedName = message.reservedName; + } + if (message.visibility !== undefined && message.visibility !== 0) { + obj.visibility = symbolVisibilityToJSON(message.visibility); + } + return obj; + }, + + create<I extends Exact<DeepPartial<EnumDescriptorProto>, I>>(base?: I): EnumDescriptorProto { + return EnumDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<EnumDescriptorProto>, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) || + []; + message.reservedName = object.reservedName?.map((e) => e) || []; + message.visibility = object.visibility ?? 
0; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange: MessageFns<EnumDescriptorProto_EnumReservedRange> = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.start !== undefined && message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== undefined && message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.start = reader.int32(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.end = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { + start: isSet(object.start) ? globalThis.Number(object.start) : 0, + end: isSet(object.end) ? 
globalThis.Number(object.end) : 0, + }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + if (message.start !== undefined && message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== undefined && message.end !== 0) { + obj.end = Math.round(message.end); + } + return obj; + }, + + create<I extends Exact<DeepPartial<EnumDescriptorProto_EnumReservedRange>, I>>( + base?: I, + ): EnumDescriptorProto_EnumReservedRange { + return EnumDescriptorProto_EnumReservedRange.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<EnumDescriptorProto_EnumReservedRange>, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto: MessageFns<EnumValueDescriptorProto> = { + encode(message: EnumValueDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== undefined && message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.number = reader.int32(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.options = EnumValueOptions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + number: isSet(object.number) ? globalThis.Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.number !== undefined && message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.options !== undefined) { + obj.options = EnumValueOptions.toJSON(message.options); + } + return obj; + }, + + create<I extends Exact<DeepPartial<EnumValueDescriptorProto>, I>>(base?: I): EnumValueDescriptorProto { + return EnumValueDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<EnumValueDescriptorProto>, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto: MessageFns<ServiceDescriptorProto> = { + encode(message: ServiceDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).join(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.options = ServiceOptions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + method: globalThis.Array.isArray(object?.method) + ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.method?.length) { + obj.method = message.method.map((e) => MethodDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = ServiceOptions.toJSON(message.options); + } + return obj; + }, + + create<I extends Exact<DeepPartial<ServiceDescriptorProto>, I>>(base?: I): ServiceDescriptorProto { + return ServiceDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<ServiceDescriptorProto>, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto: MessageFns<MethodDescriptorProto> = { + encode(message: MethodDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== undefined && message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== undefined && message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).join(); + } + if (message.clientStreaming !== undefined && message.clientStreaming !== false) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming !== undefined && message.serverStreaming !== false) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.inputType = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.outputType = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.options = MethodOptions.decode(reader, reader.uint32()); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.clientStreaming = reader.bool(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.serverStreaming = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + inputType: isSet(object.inputType) + ? globalThis.String(object.inputType) + : isSet(object.input_type) + ? globalThis.String(object.input_type) + : "", + outputType: isSet(object.outputType) + ? globalThis.String(object.outputType) + : isSet(object.output_type) + ? globalThis.String(object.output_type) + : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) + ? globalThis.Boolean(object.clientStreaming) + : isSet(object.client_streaming) + ? globalThis.Boolean(object.client_streaming) + : false, + serverStreaming: isSet(object.serverStreaming) + ? globalThis.Boolean(object.serverStreaming) + : isSet(object.server_streaming) + ? 
globalThis.Boolean(object.server_streaming) + : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.inputType !== undefined && message.inputType !== "") { + obj.inputType = message.inputType; + } + if (message.outputType !== undefined && message.outputType !== "") { + obj.outputType = message.outputType; + } + if (message.options !== undefined) { + obj.options = MethodOptions.toJSON(message.options); + } + if (message.clientStreaming !== undefined && message.clientStreaming !== false) { + obj.clientStreaming = message.clientStreaming; + } + if (message.serverStreaming !== undefined && message.serverStreaming !== false) { + obj.serverStreaming = message.serverStreaming; + } + return obj; + }, + + create<I extends Exact<DeepPartial<MethodDescriptorProto>, I>>(base?: I): MethodDescriptorProto { + return MethodDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<MethodDescriptorProto>, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? ""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? 
false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + deprecated: false, + ccEnableArenas: true, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + features: undefined, + uninterpretedOption: [], + }; +} + +export const FileOptions: MessageFns<FileOptions> = { + encode(message: FileOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.javaPackage !== undefined && message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== undefined && message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== undefined && message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices !== undefined && message.ccGenericServices !== false) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices !== undefined && message.javaGenericServices !== false) { + 
writer.uint32(136).bool(message.javaGenericServices); + } + if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== undefined && message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== undefined && message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(402).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.javaPackage = reader.string(); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.javaOuterClassname = reader.string(); + continue; + } + case 10: { + if (tag !== 80) { + break; + } + + message.javaMultipleFiles = reader.bool(); + continue; + } + case 20: { + if (tag !== 160) { + break; + } + + message.javaGenerateEqualsAndHash = reader.bool(); + continue; + } + case 27: { + if (tag !== 216) { + break; + } + + message.javaStringCheckUtf8 = reader.bool(); + continue; + } + case 9: { + if (tag !== 72) { + break; + } + + message.optimizeFor = reader.int32() as any; + continue; + } + case 11: { + if (tag !== 90) { + break; + } + + message.goPackage = reader.string(); + continue; + } + case 16: { + if (tag !== 128) { + break; + } + + message.ccGenericServices = reader.bool(); + continue; + } + case 17: { + if (tag !== 136) { + break; + } + + message.javaGenericServices = reader.bool(); + continue; + } + case 18: { + if (tag !== 144) { + break; + } + + message.pyGenericServices = reader.bool(); + continue; + } + case 23: { + if (tag !== 184) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 31: { + if (tag !== 248) { + break; + } + + message.ccEnableArenas = reader.bool(); + continue; + } + case 36: { + if (tag !== 290) { + break; + } + + message.objcClassPrefix = reader.string(); + continue; + } + case 37: { + if (tag !== 298) { + break; + } + + message.csharpNamespace = reader.string(); + continue; + } + case 39: { + if (tag !== 314) { + break; + } + + message.swiftPrefix = reader.string(); + continue; + } + case 40: { + if (tag !== 322) { + break; + } + + message.phpClassPrefix = reader.string(); + continue; + } + case 41: { + if (tag !== 330) { + break; + } + + message.phpNamespace = reader.string(); + 
continue; + } + case 44: { + if (tag !== 354) { + break; + } + + message.phpMetadataNamespace = reader.string(); + continue; + } + case 45: { + if (tag !== 362) { + break; + } + + message.rubyPackage = reader.string(); + continue; + } + case 50: { + if (tag !== 402) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) + ? globalThis.String(object.javaPackage) + : isSet(object.java_package) + ? globalThis.String(object.java_package) + : "", + javaOuterClassname: isSet(object.javaOuterClassname) + ? globalThis.String(object.javaOuterClassname) + : isSet(object.java_outer_classname) + ? globalThis.String(object.java_outer_classname) + : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) + ? globalThis.Boolean(object.javaMultipleFiles) + : isSet(object.java_multiple_files) + ? globalThis.Boolean(object.java_multiple_files) + : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? globalThis.Boolean(object.javaGenerateEqualsAndHash) + : isSet(object.java_generate_equals_and_hash) + ? globalThis.Boolean(object.java_generate_equals_and_hash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) + ? globalThis.Boolean(object.javaStringCheckUtf8) + : isSet(object.java_string_check_utf8) + ? globalThis.Boolean(object.java_string_check_utf8) + : false, + optimizeFor: isSet(object.optimizeFor) + ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) + : isSet(object.optimize_for) + ? fileOptions_OptimizeModeFromJSON(object.optimize_for) + : 1, + goPackage: isSet(object.goPackage) + ? 
globalThis.String(object.goPackage) + : isSet(object.go_package) + ? globalThis.String(object.go_package) + : "", + ccGenericServices: isSet(object.ccGenericServices) + ? globalThis.Boolean(object.ccGenericServices) + : isSet(object.cc_generic_services) + ? globalThis.Boolean(object.cc_generic_services) + : false, + javaGenericServices: isSet(object.javaGenericServices) + ? globalThis.Boolean(object.javaGenericServices) + : isSet(object.java_generic_services) + ? globalThis.Boolean(object.java_generic_services) + : false, + pyGenericServices: isSet(object.pyGenericServices) + ? globalThis.Boolean(object.pyGenericServices) + : isSet(object.py_generic_services) + ? globalThis.Boolean(object.py_generic_services) + : false, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) + ? globalThis.Boolean(object.ccEnableArenas) + : isSet(object.cc_enable_arenas) + ? globalThis.Boolean(object.cc_enable_arenas) + : true, + objcClassPrefix: isSet(object.objcClassPrefix) + ? globalThis.String(object.objcClassPrefix) + : isSet(object.objc_class_prefix) + ? globalThis.String(object.objc_class_prefix) + : "", + csharpNamespace: isSet(object.csharpNamespace) + ? globalThis.String(object.csharpNamespace) + : isSet(object.csharp_namespace) + ? globalThis.String(object.csharp_namespace) + : "", + swiftPrefix: isSet(object.swiftPrefix) + ? globalThis.String(object.swiftPrefix) + : isSet(object.swift_prefix) + ? globalThis.String(object.swift_prefix) + : "", + phpClassPrefix: isSet(object.phpClassPrefix) + ? globalThis.String(object.phpClassPrefix) + : isSet(object.php_class_prefix) + ? globalThis.String(object.php_class_prefix) + : "", + phpNamespace: isSet(object.phpNamespace) + ? globalThis.String(object.phpNamespace) + : isSet(object.php_namespace) + ? globalThis.String(object.php_namespace) + : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) + ? 
globalThis.String(object.phpMetadataNamespace) + : isSet(object.php_metadata_namespace) + ? globalThis.String(object.php_metadata_namespace) + : "", + rubyPackage: isSet(object.rubyPackage) + ? globalThis.String(object.rubyPackage) + : isSet(object.ruby_package) + ? globalThis.String(object.ruby_package) + : "", + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + if (message.javaPackage !== undefined && message.javaPackage !== "") { + obj.javaPackage = message.javaPackage; + } + if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") { + obj.javaOuterClassname = message.javaOuterClassname; + } + if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) { + obj.javaMultipleFiles = message.javaMultipleFiles; + } + if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) { + obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash; + } + if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) { + obj.javaStringCheckUtf8 = message.javaStringCheckUtf8; + } + if (message.optimizeFor !== undefined && message.optimizeFor !== 1) { + obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor); + } + if (message.goPackage !== undefined && message.goPackage !== "") { + obj.goPackage = message.goPackage; + } + if (message.ccGenericServices !== undefined && message.ccGenericServices !== false) { + obj.ccGenericServices = message.ccGenericServices; + } + if (message.javaGenericServices !== undefined && 
message.javaGenericServices !== false) { + obj.javaGenericServices = message.javaGenericServices; + } + if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) { + obj.pyGenericServices = message.pyGenericServices; + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) { + obj.ccEnableArenas = message.ccEnableArenas; + } + if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") { + obj.objcClassPrefix = message.objcClassPrefix; + } + if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") { + obj.csharpNamespace = message.csharpNamespace; + } + if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") { + obj.swiftPrefix = message.swiftPrefix; + } + if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") { + obj.phpClassPrefix = message.phpClassPrefix; + } + if (message.phpNamespace !== undefined && message.phpNamespace !== "") { + obj.phpNamespace = message.phpNamespace; + } + if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") { + obj.phpMetadataNamespace = message.phpMetadataNamespace; + } + if (message.rubyPackage !== undefined && message.rubyPackage !== "") { + obj.rubyPackage = message.rubyPackage; + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<FileOptions>, I>>(base?: I): FileOptions { + return FileOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FileOptions>, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? 
""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? true; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? ""; + message.features = (object.features !== undefined && object.features !== null) + ? 
FeatureSet.fromPartial(object.features) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + deprecatedLegacyJsonFieldConflicts: false, + features: undefined, + uninterpretedOption: [], + }; +} + +export const MessageOptions: MessageFns<MessageOptions> = { + encode(message: MessageOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry !== undefined && message.mapEntry !== false) { + writer.uint32(56).bool(message.mapEntry); + } + if ( + message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false + ) { + writer.uint32(88).bool(message.deprecatedLegacyJsonFieldConflicts); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(98).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.messageSetWireFormat = reader.bool(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.noStandardDescriptorAccessor = reader.bool(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 7: { + if (tag !== 56) { + break; + } + + message.mapEntry = reader.bool(); + continue; + } + case 11: { + if (tag !== 88) { + break; + } + + message.deprecatedLegacyJsonFieldConflicts = reader.bool(); + continue; + } + case 12: { + if (tag !== 98) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) + ? globalThis.Boolean(object.messageSetWireFormat) + : isSet(object.message_set_wire_format) + ? globalThis.Boolean(object.message_set_wire_format) + : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? globalThis.Boolean(object.noStandardDescriptorAccessor) + : isSet(object.no_standard_descriptor_accessor) + ? globalThis.Boolean(object.no_standard_descriptor_accessor) + : false, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) + ? globalThis.Boolean(object.mapEntry) + : isSet(object.map_entry) + ? globalThis.Boolean(object.map_entry) + : false, + deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts) + ? 
globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts) + : isSet(object.deprecated_legacy_json_field_conflicts) + ? globalThis.Boolean(object.deprecated_legacy_json_field_conflicts) + : false, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) { + obj.messageSetWireFormat = message.messageSetWireFormat; + } + if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) { + obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor; + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.mapEntry !== undefined && message.mapEntry !== false) { + obj.mapEntry = message.mapEntry; + } + if ( + message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false + ) { + obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<MessageOptions>, I>>(base?: I): MessageOptions { + return MessageOptions.fromPartial(base ?? 
({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<MessageOptions>, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? false; + message.deprecatedLegacyJsonFieldConflicts = object.deprecatedLegacyJsonFieldConflicts ?? false; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { + ctype: 0, + packed: false, + jstype: 0, + lazy: false, + unverifiedLazy: false, + deprecated: false, + weak: false, + debugRedact: false, + retention: 0, + targets: [], + editionDefaults: [], + features: undefined, + featureSupport: undefined, + uninterpretedOption: [], + }; +} + +export const FieldOptions: MessageFns<FieldOptions> = { + encode(message: FieldOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.ctype !== undefined && message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed !== undefined && message.packed !== false) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== undefined && message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy !== undefined && message.lazy !== false) { + writer.uint32(40).bool(message.lazy); + } + if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) { + writer.uint32(120).bool(message.unverifiedLazy); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak 
!== undefined && message.weak !== false) { + writer.uint32(80).bool(message.weak); + } + if (message.debugRedact !== undefined && message.debugRedact !== false) { + writer.uint32(128).bool(message.debugRedact); + } + if (message.retention !== undefined && message.retention !== 0) { + writer.uint32(136).int32(message.retention); + } + for (const v of message.targets) { + writer.uint32(152).int32(v!); + } + for (const v of message.editionDefaults) { + FieldOptions_EditionDefault.encode(v!, writer.uint32(162).fork()).join(); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(170).fork()).join(); + } + if (message.featureSupport !== undefined) { + FieldOptions_FeatureSupport.encode(message.featureSupport, writer.uint32(178).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.ctype = reader.int32() as any; + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.packed = reader.bool(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.jstype = reader.int32() as any; + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.lazy = reader.bool(); + continue; + } + case 15: { + if (tag !== 120) { + break; + } + + message.unverifiedLazy = reader.bool(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 10: { + if (tag !== 80) { + break; + } + + message.weak = reader.bool(); + continue; + } + case 16: { + if (tag !== 128) { + break; + } + + message.debugRedact = reader.bool(); + continue; + } + case 17: { + if (tag !== 136) { + break; + } + + message.retention = reader.int32() as any; + continue; + } + case 19: { + if (tag === 152) { + message.targets.push(reader.int32() as any); + + continue; + } + + if (tag === 154) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.targets.push(reader.int32() as any); + } + + continue; + } + + break; + } + case 20: { + if (tag !== 162) { + break; + } + + message.editionDefaults.push(FieldOptions_EditionDefault.decode(reader, reader.uint32())); + continue; + } + case 21: { + if (tag !== 170) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 22: { + if (tag !== 178) { + break; + } + + message.featureSupport = FieldOptions_FeatureSupport.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { 
+ break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? globalThis.Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? globalThis.Boolean(object.lazy) : false, + unverifiedLazy: isSet(object.unverifiedLazy) + ? globalThis.Boolean(object.unverifiedLazy) + : isSet(object.unverified_lazy) + ? globalThis.Boolean(object.unverified_lazy) + : false, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? globalThis.Boolean(object.weak) : false, + debugRedact: isSet(object.debugRedact) + ? globalThis.Boolean(object.debugRedact) + : isSet(object.debug_redact) + ? globalThis.Boolean(object.debug_redact) + : false, + retention: isSet(object.retention) ? fieldOptions_OptionRetentionFromJSON(object.retention) : 0, + targets: globalThis.Array.isArray(object?.targets) + ? object.targets.map((e: any) => fieldOptions_OptionTargetTypeFromJSON(e)) + : [], + editionDefaults: globalThis.Array.isArray(object?.editionDefaults) + ? object.editionDefaults.map((e: any) => FieldOptions_EditionDefault.fromJSON(e)) + : globalThis.Array.isArray(object?.edition_defaults) + ? object.edition_defaults.map((e: any) => FieldOptions_EditionDefault.fromJSON(e)) + : [], + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + featureSupport: isSet(object.featureSupport) + ? FieldOptions_FeatureSupport.fromJSON(object.featureSupport) + : isSet(object.feature_support) + ? FieldOptions_FeatureSupport.fromJSON(object.feature_support) + : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + if (message.ctype !== undefined && message.ctype !== 0) { + obj.ctype = fieldOptions_CTypeToJSON(message.ctype); + } + if (message.packed !== undefined && message.packed !== false) { + obj.packed = message.packed; + } + if (message.jstype !== undefined && message.jstype !== 0) { + obj.jstype = fieldOptions_JSTypeToJSON(message.jstype); + } + if (message.lazy !== undefined && message.lazy !== false) { + obj.lazy = message.lazy; + } + if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) { + obj.unverifiedLazy = message.unverifiedLazy; + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.weak !== undefined && message.weak !== false) { + obj.weak = message.weak; + } + if (message.debugRedact !== undefined && message.debugRedact !== false) { + obj.debugRedact = message.debugRedact; + } + if (message.retention !== undefined && message.retention !== 0) { + obj.retention = fieldOptions_OptionRetentionToJSON(message.retention); + } + if (message.targets?.length) { + obj.targets = message.targets.map((e) => fieldOptions_OptionTargetTypeToJSON(e)); + } + if (message.editionDefaults?.length) { + obj.editionDefaults = message.editionDefaults.map((e) => FieldOptions_EditionDefault.toJSON(e)); + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.featureSupport !== undefined) { + obj.featureSupport = FieldOptions_FeatureSupport.toJSON(message.featureSupport); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + 
return obj; + }, + + create<I extends Exact<DeepPartial<FieldOptions>, I>>(base?: I): FieldOptions { + return FieldOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FieldOptions>, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.unverifiedLazy = object.unverifiedLazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.debugRedact = object.debugRedact ?? false; + message.retention = object.retention ?? 0; + message.targets = object.targets?.map((e) => e) || []; + message.editionDefaults = object.editionDefaults?.map((e) => FieldOptions_EditionDefault.fromPartial(e)) || []; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.featureSupport = (object.featureSupport !== undefined && object.featureSupport !== null) + ? 
FieldOptions_FeatureSupport.fromPartial(object.featureSupport) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions_EditionDefault(): FieldOptions_EditionDefault { + return { edition: 0, value: "" }; +} + +export const FieldOptions_EditionDefault: MessageFns<FieldOptions_EditionDefault> = { + encode(message: FieldOptions_EditionDefault, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.edition !== undefined && message.edition !== 0) { + writer.uint32(24).int32(message.edition); + } + if (message.value !== undefined && message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FieldOptions_EditionDefault { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFieldOptions_EditionDefault(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 3: { + if (tag !== 24) { + break; + } + + message.edition = reader.int32() as any; + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldOptions_EditionDefault { + return { + edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0, + value: isSet(object.value) ? 
globalThis.String(object.value) : "", + }; + }, + + toJSON(message: FieldOptions_EditionDefault): unknown { + const obj: any = {}; + if (message.edition !== undefined && message.edition !== 0) { + obj.edition = editionToJSON(message.edition); + } + if (message.value !== undefined && message.value !== "") { + obj.value = message.value; + } + return obj; + }, + + create<I extends Exact<DeepPartial<FieldOptions_EditionDefault>, I>>(base?: I): FieldOptions_EditionDefault { + return FieldOptions_EditionDefault.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FieldOptions_EditionDefault>, I>>(object: I): FieldOptions_EditionDefault { + const message = createBaseFieldOptions_EditionDefault(); + message.edition = object.edition ?? 0; + message.value = object.value ?? ""; + return message; + }, +}; + +function createBaseFieldOptions_FeatureSupport(): FieldOptions_FeatureSupport { + return { editionIntroduced: 0, editionDeprecated: 0, deprecationWarning: "", editionRemoved: 0 }; +} + +export const FieldOptions_FeatureSupport: MessageFns<FieldOptions_FeatureSupport> = { + encode(message: FieldOptions_FeatureSupport, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) { + writer.uint32(8).int32(message.editionIntroduced); + } + if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) { + writer.uint32(16).int32(message.editionDeprecated); + } + if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") { + writer.uint32(26).string(message.deprecationWarning); + } + if (message.editionRemoved !== undefined && message.editionRemoved !== 0) { + writer.uint32(32).int32(message.editionRemoved); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FieldOptions_FeatureSupport { + const reader = input instanceof BinaryReader ? 
input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFieldOptions_FeatureSupport(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.editionIntroduced = reader.int32() as any; + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.editionDeprecated = reader.int32() as any; + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.deprecationWarning = reader.string(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.editionRemoved = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldOptions_FeatureSupport { + return { + editionIntroduced: isSet(object.editionIntroduced) + ? editionFromJSON(object.editionIntroduced) + : isSet(object.edition_introduced) + ? editionFromJSON(object.edition_introduced) + : 0, + editionDeprecated: isSet(object.editionDeprecated) + ? editionFromJSON(object.editionDeprecated) + : isSet(object.edition_deprecated) + ? editionFromJSON(object.edition_deprecated) + : 0, + deprecationWarning: isSet(object.deprecationWarning) + ? globalThis.String(object.deprecationWarning) + : isSet(object.deprecation_warning) + ? globalThis.String(object.deprecation_warning) + : "", + editionRemoved: isSet(object.editionRemoved) + ? editionFromJSON(object.editionRemoved) + : isSet(object.edition_removed) + ? 
editionFromJSON(object.edition_removed) + : 0, + }; + }, + + toJSON(message: FieldOptions_FeatureSupport): unknown { + const obj: any = {}; + if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) { + obj.editionIntroduced = editionToJSON(message.editionIntroduced); + } + if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) { + obj.editionDeprecated = editionToJSON(message.editionDeprecated); + } + if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") { + obj.deprecationWarning = message.deprecationWarning; + } + if (message.editionRemoved !== undefined && message.editionRemoved !== 0) { + obj.editionRemoved = editionToJSON(message.editionRemoved); + } + return obj; + }, + + create<I extends Exact<DeepPartial<FieldOptions_FeatureSupport>, I>>(base?: I): FieldOptions_FeatureSupport { + return FieldOptions_FeatureSupport.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FieldOptions_FeatureSupport>, I>>(object: I): FieldOptions_FeatureSupport { + const message = createBaseFieldOptions_FeatureSupport(); + message.editionIntroduced = object.editionIntroduced ?? 0; + message.editionDeprecated = object.editionDeprecated ?? 0; + message.deprecationWarning = object.deprecationWarning ?? ""; + message.editionRemoved = object.editionRemoved ?? 
0; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { features: undefined, uninterpretedOption: [] }; +} + +export const OneofOptions: MessageFns<OneofOptions> = { + encode(message: OneofOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(10).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? 
object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<OneofOptions>, I>>(base?: I): OneofOptions { + return OneofOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<OneofOptions>, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { + allowAlias: false, + deprecated: false, + deprecatedLegacyJsonFieldConflicts: false, + features: undefined, + uninterpretedOption: [], + }; +} + +export const EnumOptions: MessageFns<EnumOptions> = { + encode(message: EnumOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.allowAlias !== undefined && message.allowAlias !== false) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(24).bool(message.deprecated); + } + if ( + message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false + ) { + writer.uint32(48).bool(message.deprecatedLegacyJsonFieldConflicts); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(58).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, 
writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: { + if (tag !== 16) { + break; + } + + message.allowAlias = reader.bool(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.deprecatedLegacyJsonFieldConflicts = reader.bool(); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) + ? globalThis.Boolean(object.allowAlias) + : isSet(object.allow_alias) + ? globalThis.Boolean(object.allow_alias) + : false, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts) + ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts) + : isSet(object.deprecated_legacy_json_field_conflicts) + ? globalThis.Boolean(object.deprecated_legacy_json_field_conflicts) + : false, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + if (message.allowAlias !== undefined && message.allowAlias !== false) { + obj.allowAlias = message.allowAlias; + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if ( + message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false + ) { + obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<EnumOptions>, I>>(base?: I): EnumOptions { + return EnumOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<EnumOptions>, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.deprecatedLegacyJsonFieldConflicts = object.deprecatedLegacyJsonFieldConflicts ?? false; + message.features = (object.features !== undefined && object.features !== null) + ? 
FeatureSet.fromPartial(object.features) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { + deprecated: false, + features: undefined, + debugRedact: false, + featureSupport: undefined, + uninterpretedOption: [], + }; +} + +export const EnumValueOptions: MessageFns<EnumValueOptions> = { + encode(message: EnumValueOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(8).bool(message.deprecated); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(18).fork()).join(); + } + if (message.debugRedact !== undefined && message.debugRedact !== false) { + writer.uint32(24).bool(message.debugRedact); + } + if (message.featureSupport !== undefined) { + FieldOptions_FeatureSupport.encode(message.featureSupport, writer.uint32(34).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.debugRedact = reader.bool(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.featureSupport = FieldOptions_FeatureSupport.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + debugRedact: isSet(object.debugRedact) + ? globalThis.Boolean(object.debugRedact) + : isSet(object.debug_redact) + ? globalThis.Boolean(object.debug_redact) + : false, + featureSupport: isSet(object.featureSupport) + ? FieldOptions_FeatureSupport.fromJSON(object.featureSupport) + : isSet(object.feature_support) + ? FieldOptions_FeatureSupport.fromJSON(object.feature_support) + : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? 
object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.debugRedact !== undefined && message.debugRedact !== false) { + obj.debugRedact = message.debugRedact; + } + if (message.featureSupport !== undefined) { + obj.featureSupport = FieldOptions_FeatureSupport.toJSON(message.featureSupport); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<EnumValueOptions>, I>>(base?: I): EnumValueOptions { + return EnumValueOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<EnumValueOptions>, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.debugRedact = object.debugRedact ?? false; + message.featureSupport = (object.featureSupport !== undefined && object.featureSupport !== null) + ? 
FieldOptions_FeatureSupport.fromPartial(object.featureSupport) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { features: undefined, deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions: MessageFns<ServiceOptions> = { + encode(message: ServiceOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(274).fork()).join(); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 34: { + if (tag !== 274) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 33: { + if (tag !== 264) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + deprecated: isSet(object.deprecated) ? 
globalThis.Boolean(object.deprecated) : false, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<ServiceOptions>, I>>(base?: I): ServiceOptions { + return ServiceOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<ServiceOptions>, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, features: undefined, uninterpretedOption: [] }; +} + +export const MethodOptions: MessageFns<MethodOptions> = { + encode(message: MethodOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(282).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: { + if (tag !== 264) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 34: { + if (tag !== 272) { + break; + } + + message.idempotencyLevel = reader.int32() as any; + continue; + } + case 35: { + if (tag !== 282) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : isSet(object.idempotency_level) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotency_level) + : 0, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : globalThis.Array.isArray(object?.uninterpreted_option) + ? 
object.uninterpreted_option.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) { + obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel); + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<MethodOptions>, I>>(base?: I): MethodOptions { + return MethodOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<MethodOptions>, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.features = (object.features !== undefined && object.features !== null) + ? 
FeatureSet.fromPartial(object.features) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(0), + aggregateValue: "", + }; +} + +export const UninterpretedOption: MessageFns<UninterpretedOption> = { + encode(message: UninterpretedOption, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).join(); + } + if (message.identifierValue !== undefined && message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== undefined && message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== undefined && message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== undefined && message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue !== undefined && message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== undefined && message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: { + if (tag !== 18) { + break; + } + + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.identifierValue = reader.string(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.positiveIntValue = longToNumber(reader.uint64()); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.negativeIntValue = longToNumber(reader.int64()); + continue; + } + case 6: { + if (tag !== 49) { + break; + } + + message.doubleValue = reader.double(); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.stringValue = reader.bytes(); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.aggregateValue = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: globalThis.Array.isArray(object?.name) + ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) + : [], + identifierValue: isSet(object.identifierValue) + ? globalThis.String(object.identifierValue) + : isSet(object.identifier_value) + ? globalThis.String(object.identifier_value) + : "", + positiveIntValue: isSet(object.positiveIntValue) + ? globalThis.Number(object.positiveIntValue) + : isSet(object.positive_int_value) + ? globalThis.Number(object.positive_int_value) + : 0, + negativeIntValue: isSet(object.negativeIntValue) + ? globalThis.Number(object.negativeIntValue) + : isSet(object.negative_int_value) + ? globalThis.Number(object.negative_int_value) + : 0, + doubleValue: isSet(object.doubleValue) + ? globalThis.Number(object.doubleValue) + : isSet(object.double_value) + ? 
globalThis.Number(object.double_value) + : 0, + stringValue: isSet(object.stringValue) + ? bytesFromBase64(object.stringValue) + : isSet(object.string_value) + ? bytesFromBase64(object.string_value) + : new Uint8Array(0), + aggregateValue: isSet(object.aggregateValue) + ? globalThis.String(object.aggregateValue) + : isSet(object.aggregate_value) + ? globalThis.String(object.aggregate_value) + : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name?.length) { + obj.name = message.name.map((e) => UninterpretedOption_NamePart.toJSON(e)); + } + if (message.identifierValue !== undefined && message.identifierValue !== "") { + obj.identifierValue = message.identifierValue; + } + if (message.positiveIntValue !== undefined && message.positiveIntValue !== 0) { + obj.positiveIntValue = Math.round(message.positiveIntValue); + } + if (message.negativeIntValue !== undefined && message.negativeIntValue !== 0) { + obj.negativeIntValue = Math.round(message.negativeIntValue); + } + if (message.doubleValue !== undefined && message.doubleValue !== 0) { + obj.doubleValue = message.doubleValue; + } + if (message.stringValue !== undefined && message.stringValue.length !== 0) { + obj.stringValue = base64FromBytes(message.stringValue); + } + if (message.aggregateValue !== undefined && message.aggregateValue !== "") { + obj.aggregateValue = message.aggregateValue; + } + return obj; + }, + + create<I extends Exact<DeepPartial<UninterpretedOption>, I>>(base?: I): UninterpretedOption { + return UninterpretedOption.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<UninterpretedOption>, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 
0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(0); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart: MessageFns<UninterpretedOption_NamePart> = { + encode(message: UninterpretedOption_NamePart, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension !== false) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.namePart = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.isExtension = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) + ? globalThis.String(object.namePart) + : isSet(object.name_part) + ? globalThis.String(object.name_part) + : "", + isExtension: isSet(object.isExtension) + ? globalThis.Boolean(object.isExtension) + : isSet(object.is_extension) + ? 
globalThis.Boolean(object.is_extension) + : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + if (message.namePart !== "") { + obj.namePart = message.namePart; + } + if (message.isExtension !== false) { + obj.isExtension = message.isExtension; + } + return obj; + }, + + create<I extends Exact<DeepPartial<UninterpretedOption_NamePart>, I>>(base?: I): UninterpretedOption_NamePart { + return UninterpretedOption_NamePart.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<UninterpretedOption_NamePart>, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseFeatureSet(): FeatureSet { + return { + fieldPresence: 0, + enumType: 0, + repeatedFieldEncoding: 0, + utf8Validation: 0, + messageEncoding: 0, + jsonFormat: 0, + enforceNamingStyle: 0, + defaultSymbolVisibility: 0, + }; +} + +export const FeatureSet: MessageFns<FeatureSet> = { + encode(message: FeatureSet, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.fieldPresence !== undefined && message.fieldPresence !== 0) { + writer.uint32(8).int32(message.fieldPresence); + } + if (message.enumType !== undefined && message.enumType !== 0) { + writer.uint32(16).int32(message.enumType); + } + if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) { + writer.uint32(24).int32(message.repeatedFieldEncoding); + } + if (message.utf8Validation !== undefined && message.utf8Validation !== 0) { + writer.uint32(32).int32(message.utf8Validation); + } + if (message.messageEncoding !== undefined && message.messageEncoding !== 0) { + writer.uint32(40).int32(message.messageEncoding); + } + if (message.jsonFormat !== undefined && message.jsonFormat !== 0) { + writer.uint32(48).int32(message.jsonFormat); + } + if 
(message.enforceNamingStyle !== undefined && message.enforceNamingStyle !== 0) { + writer.uint32(56).int32(message.enforceNamingStyle); + } + if (message.defaultSymbolVisibility !== undefined && message.defaultSymbolVisibility !== 0) { + writer.uint32(64).int32(message.defaultSymbolVisibility); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FeatureSet { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFeatureSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.fieldPresence = reader.int32() as any; + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.enumType = reader.int32() as any; + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.repeatedFieldEncoding = reader.int32() as any; + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.utf8Validation = reader.int32() as any; + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.messageEncoding = reader.int32() as any; + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.jsonFormat = reader.int32() as any; + continue; + } + case 7: { + if (tag !== 56) { + break; + } + + message.enforceNamingStyle = reader.int32() as any; + continue; + } + case 8: { + if (tag !== 64) { + break; + } + + message.defaultSymbolVisibility = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FeatureSet { + return { + fieldPresence: isSet(object.fieldPresence) + ? featureSet_FieldPresenceFromJSON(object.fieldPresence) + : isSet(object.field_presence) + ? featureSet_FieldPresenceFromJSON(object.field_presence) + : 0, + enumType: isSet(object.enumType) + ? 
featureSet_EnumTypeFromJSON(object.enumType) + : isSet(object.enum_type) + ? featureSet_EnumTypeFromJSON(object.enum_type) + : 0, + repeatedFieldEncoding: isSet(object.repeatedFieldEncoding) + ? featureSet_RepeatedFieldEncodingFromJSON(object.repeatedFieldEncoding) + : isSet(object.repeated_field_encoding) + ? featureSet_RepeatedFieldEncodingFromJSON(object.repeated_field_encoding) + : 0, + utf8Validation: isSet(object.utf8Validation) + ? featureSet_Utf8ValidationFromJSON(object.utf8Validation) + : isSet(object.utf8_validation) + ? featureSet_Utf8ValidationFromJSON(object.utf8_validation) + : 0, + messageEncoding: isSet(object.messageEncoding) + ? featureSet_MessageEncodingFromJSON(object.messageEncoding) + : isSet(object.message_encoding) + ? featureSet_MessageEncodingFromJSON(object.message_encoding) + : 0, + jsonFormat: isSet(object.jsonFormat) + ? featureSet_JsonFormatFromJSON(object.jsonFormat) + : isSet(object.json_format) + ? featureSet_JsonFormatFromJSON(object.json_format) + : 0, + enforceNamingStyle: isSet(object.enforceNamingStyle) + ? featureSet_EnforceNamingStyleFromJSON(object.enforceNamingStyle) + : isSet(object.enforce_naming_style) + ? featureSet_EnforceNamingStyleFromJSON(object.enforce_naming_style) + : 0, + defaultSymbolVisibility: isSet(object.defaultSymbolVisibility) + ? featureSet_VisibilityFeature_DefaultSymbolVisibilityFromJSON(object.defaultSymbolVisibility) + : isSet(object.default_symbol_visibility) + ? 
featureSet_VisibilityFeature_DefaultSymbolVisibilityFromJSON(object.default_symbol_visibility) + : 0, + }; + }, + + toJSON(message: FeatureSet): unknown { + const obj: any = {}; + if (message.fieldPresence !== undefined && message.fieldPresence !== 0) { + obj.fieldPresence = featureSet_FieldPresenceToJSON(message.fieldPresence); + } + if (message.enumType !== undefined && message.enumType !== 0) { + obj.enumType = featureSet_EnumTypeToJSON(message.enumType); + } + if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) { + obj.repeatedFieldEncoding = featureSet_RepeatedFieldEncodingToJSON(message.repeatedFieldEncoding); + } + if (message.utf8Validation !== undefined && message.utf8Validation !== 0) { + obj.utf8Validation = featureSet_Utf8ValidationToJSON(message.utf8Validation); + } + if (message.messageEncoding !== undefined && message.messageEncoding !== 0) { + obj.messageEncoding = featureSet_MessageEncodingToJSON(message.messageEncoding); + } + if (message.jsonFormat !== undefined && message.jsonFormat !== 0) { + obj.jsonFormat = featureSet_JsonFormatToJSON(message.jsonFormat); + } + if (message.enforceNamingStyle !== undefined && message.enforceNamingStyle !== 0) { + obj.enforceNamingStyle = featureSet_EnforceNamingStyleToJSON(message.enforceNamingStyle); + } + if (message.defaultSymbolVisibility !== undefined && message.defaultSymbolVisibility !== 0) { + obj.defaultSymbolVisibility = featureSet_VisibilityFeature_DefaultSymbolVisibilityToJSON( + message.defaultSymbolVisibility, + ); + } + return obj; + }, + + create<I extends Exact<DeepPartial<FeatureSet>, I>>(base?: I): FeatureSet { + return FeatureSet.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FeatureSet>, I>>(object: I): FeatureSet { + const message = createBaseFeatureSet(); + message.fieldPresence = object.fieldPresence ?? 0; + message.enumType = object.enumType ?? 0; + message.repeatedFieldEncoding = object.repeatedFieldEncoding ?? 
0; + message.utf8Validation = object.utf8Validation ?? 0; + message.messageEncoding = object.messageEncoding ?? 0; + message.jsonFormat = object.jsonFormat ?? 0; + message.enforceNamingStyle = object.enforceNamingStyle ?? 0; + message.defaultSymbolVisibility = object.defaultSymbolVisibility ?? 0; + return message; + }, +}; + +function createBaseFeatureSet_VisibilityFeature(): FeatureSet_VisibilityFeature { + return {}; +} + +export const FeatureSet_VisibilityFeature: MessageFns<FeatureSet_VisibilityFeature> = { + encode(_: FeatureSet_VisibilityFeature, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FeatureSet_VisibilityFeature { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFeatureSet_VisibilityFeature(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(_: any): FeatureSet_VisibilityFeature { + return {}; + }, + + toJSON(_: FeatureSet_VisibilityFeature): unknown { + const obj: any = {}; + return obj; + }, + + create<I extends Exact<DeepPartial<FeatureSet_VisibilityFeature>, I>>(base?: I): FeatureSet_VisibilityFeature { + return FeatureSet_VisibilityFeature.fromPartial(base ?? 
({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FeatureSet_VisibilityFeature>, I>>(_: I): FeatureSet_VisibilityFeature { + const message = createBaseFeatureSet_VisibilityFeature(); + return message; + }, +}; + +function createBaseFeatureSetDefaults(): FeatureSetDefaults { + return { defaults: [], minimumEdition: 0, maximumEdition: 0 }; +} + +export const FeatureSetDefaults: MessageFns<FeatureSetDefaults> = { + encode(message: FeatureSetDefaults, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.defaults) { + FeatureSetDefaults_FeatureSetEditionDefault.encode(v!, writer.uint32(10).fork()).join(); + } + if (message.minimumEdition !== undefined && message.minimumEdition !== 0) { + writer.uint32(32).int32(message.minimumEdition); + } + if (message.maximumEdition !== undefined && message.maximumEdition !== 0) { + writer.uint32(40).int32(message.maximumEdition); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FeatureSetDefaults { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFeatureSetDefaults(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.defaults.push(FeatureSetDefaults_FeatureSetEditionDefault.decode(reader, reader.uint32())); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.minimumEdition = reader.int32() as any; + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.maximumEdition = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FeatureSetDefaults { + return { + defaults: globalThis.Array.isArray(object?.defaults) + ? 
object.defaults.map((e: any) => FeatureSetDefaults_FeatureSetEditionDefault.fromJSON(e)) + : [], + minimumEdition: isSet(object.minimumEdition) + ? editionFromJSON(object.minimumEdition) + : isSet(object.minimum_edition) + ? editionFromJSON(object.minimum_edition) + : 0, + maximumEdition: isSet(object.maximumEdition) + ? editionFromJSON(object.maximumEdition) + : isSet(object.maximum_edition) + ? editionFromJSON(object.maximum_edition) + : 0, + }; + }, + + toJSON(message: FeatureSetDefaults): unknown { + const obj: any = {}; + if (message.defaults?.length) { + obj.defaults = message.defaults.map((e) => FeatureSetDefaults_FeatureSetEditionDefault.toJSON(e)); + } + if (message.minimumEdition !== undefined && message.minimumEdition !== 0) { + obj.minimumEdition = editionToJSON(message.minimumEdition); + } + if (message.maximumEdition !== undefined && message.maximumEdition !== 0) { + obj.maximumEdition = editionToJSON(message.maximumEdition); + } + return obj; + }, + + create<I extends Exact<DeepPartial<FeatureSetDefaults>, I>>(base?: I): FeatureSetDefaults { + return FeatureSetDefaults.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FeatureSetDefaults>, I>>(object: I): FeatureSetDefaults { + const message = createBaseFeatureSetDefaults(); + message.defaults = object.defaults?.map((e) => FeatureSetDefaults_FeatureSetEditionDefault.fromPartial(e)) || []; + message.minimumEdition = object.minimumEdition ?? 0; + message.maximumEdition = object.maximumEdition ?? 
0; + return message; + }, +}; + +function createBaseFeatureSetDefaults_FeatureSetEditionDefault(): FeatureSetDefaults_FeatureSetEditionDefault { + return { edition: 0, overridableFeatures: undefined, fixedFeatures: undefined }; +} + +export const FeatureSetDefaults_FeatureSetEditionDefault: MessageFns<FeatureSetDefaults_FeatureSetEditionDefault> = { + encode( + message: FeatureSetDefaults_FeatureSetEditionDefault, + writer: BinaryWriter = new BinaryWriter(), + ): BinaryWriter { + if (message.edition !== undefined && message.edition !== 0) { + writer.uint32(24).int32(message.edition); + } + if (message.overridableFeatures !== undefined) { + FeatureSet.encode(message.overridableFeatures, writer.uint32(34).fork()).join(); + } + if (message.fixedFeatures !== undefined) { + FeatureSet.encode(message.fixedFeatures, writer.uint32(42).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FeatureSetDefaults_FeatureSetEditionDefault { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFeatureSetDefaults_FeatureSetEditionDefault(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 3: { + if (tag !== 24) { + break; + } + + message.edition = reader.int32() as any; + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.overridableFeatures = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.fixedFeatures = FeatureSet.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FeatureSetDefaults_FeatureSetEditionDefault { + return { + edition: isSet(object.edition) ? 
editionFromJSON(object.edition) : 0, + overridableFeatures: isSet(object.overridableFeatures) + ? FeatureSet.fromJSON(object.overridableFeatures) + : isSet(object.overridable_features) + ? FeatureSet.fromJSON(object.overridable_features) + : undefined, + fixedFeatures: isSet(object.fixedFeatures) + ? FeatureSet.fromJSON(object.fixedFeatures) + : isSet(object.fixed_features) + ? FeatureSet.fromJSON(object.fixed_features) + : undefined, + }; + }, + + toJSON(message: FeatureSetDefaults_FeatureSetEditionDefault): unknown { + const obj: any = {}; + if (message.edition !== undefined && message.edition !== 0) { + obj.edition = editionToJSON(message.edition); + } + if (message.overridableFeatures !== undefined) { + obj.overridableFeatures = FeatureSet.toJSON(message.overridableFeatures); + } + if (message.fixedFeatures !== undefined) { + obj.fixedFeatures = FeatureSet.toJSON(message.fixedFeatures); + } + return obj; + }, + + create<I extends Exact<DeepPartial<FeatureSetDefaults_FeatureSetEditionDefault>, I>>( + base?: I, + ): FeatureSetDefaults_FeatureSetEditionDefault { + return FeatureSetDefaults_FeatureSetEditionDefault.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<FeatureSetDefaults_FeatureSetEditionDefault>, I>>( + object: I, + ): FeatureSetDefaults_FeatureSetEditionDefault { + const message = createBaseFeatureSetDefaults_FeatureSetEditionDefault(); + message.edition = object.edition ?? 0; + message.overridableFeatures = (object.overridableFeatures !== undefined && object.overridableFeatures !== null) + ? FeatureSet.fromPartial(object.overridableFeatures) + : undefined; + message.fixedFeatures = (object.fixedFeatures !== undefined && object.fixedFeatures !== null) + ? 
FeatureSet.fromPartial(object.fixedFeatures) + : undefined; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo: MessageFns<SourceCodeInfo> = { + encode(message: SourceCodeInfo, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: globalThis.Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location?.length) { + obj.location = message.location.map((e) => SourceCodeInfo_Location.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<SourceCodeInfo>, I>>(base?: I): SourceCodeInfo { + return SourceCodeInfo.fromPartial(base ?? 
({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<SourceCodeInfo>, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location: MessageFns<SourceCodeInfo_Location> = { + encode(message: SourceCodeInfo_Location, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.join(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.join(); + if (message.leadingComments !== undefined && message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== undefined && message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag === 8) { + message.path.push(reader.int32()); + + continue; + } + + if (tag === 10) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + + continue; + } + + break; + } + case 2: { + if (tag === 16) { + message.span.push(reader.int32()); + + continue; + } + + if (tag === 18) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + + continue; + } + + break; + } + case 3: { + if (tag !== 26) { + break; + } + + message.leadingComments = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.trailingComments = reader.string(); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.leadingDetachedComments.push(reader.string()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: globalThis.Array.isArray(object?.path) ? object.path.map((e: any) => globalThis.Number(e)) : [], + span: globalThis.Array.isArray(object?.span) ? object.span.map((e: any) => globalThis.Number(e)) : [], + leadingComments: isSet(object.leadingComments) + ? globalThis.String(object.leadingComments) + : isSet(object.leading_comments) + ? globalThis.String(object.leading_comments) + : "", + trailingComments: isSet(object.trailingComments) + ? globalThis.String(object.trailingComments) + : isSet(object.trailing_comments) + ? globalThis.String(object.trailing_comments) + : "", + leadingDetachedComments: globalThis.Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => globalThis.String(e)) + : globalThis.Array.isArray(object?.leading_detached_comments) + ? object.leading_detached_comments.map((e: any) => globalThis.String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path?.length) { + obj.path = message.path.map((e) => Math.round(e)); + } + if (message.span?.length) { + obj.span = message.span.map((e) => Math.round(e)); + } + if (message.leadingComments !== undefined && message.leadingComments !== "") { + obj.leadingComments = message.leadingComments; + } + if (message.trailingComments !== undefined && message.trailingComments !== "") { + obj.trailingComments = message.trailingComments; + } + if (message.leadingDetachedComments?.length) { + obj.leadingDetachedComments = message.leadingDetachedComments; + } + return obj; + }, + + create<I extends Exact<DeepPartial<SourceCodeInfo_Location>, I>>(base?: I): SourceCodeInfo_Location { + return SourceCodeInfo_Location.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<SourceCodeInfo_Location>, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? 
""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo: MessageFns<GeneratedCodeInfo> = { + encode(message: GeneratedCodeInfo, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: globalThis.Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation?.length) { + obj.annotation = message.annotation.map((e) => GeneratedCodeInfo_Annotation.toJSON(e)); + } + return obj; + }, + + create<I extends Exact<DeepPartial<GeneratedCodeInfo>, I>>(base?: I): GeneratedCodeInfo { + return GeneratedCodeInfo.fromPartial(base ?? 
({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<GeneratedCodeInfo>, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0, semantic: 0 }; +} + +export const GeneratedCodeInfo_Annotation: MessageFns<GeneratedCodeInfo_Annotation> = { + encode(message: GeneratedCodeInfo_Annotation, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.join(); + if (message.sourceFile !== undefined && message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== undefined && message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== undefined && message.end !== 0) { + writer.uint32(32).int32(message.end); + } + if (message.semantic !== undefined && message.semantic !== 0) { + writer.uint32(40).int32(message.semantic); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag === 8) { + message.path.push(reader.int32()); + + continue; + } + + if (tag === 10) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + + continue; + } + + break; + } + case 2: { + if (tag !== 18) { + break; + } + + message.sourceFile = reader.string(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.begin = reader.int32(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.end = reader.int32(); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.semantic = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: globalThis.Array.isArray(object?.path) ? object.path.map((e: any) => globalThis.Number(e)) : [], + sourceFile: isSet(object.sourceFile) + ? globalThis.String(object.sourceFile) + : isSet(object.source_file) + ? globalThis.String(object.source_file) + : "", + begin: isSet(object.begin) ? globalThis.Number(object.begin) : 0, + end: isSet(object.end) ? globalThis.Number(object.end) : 0, + semantic: isSet(object.semantic) ? 
generatedCodeInfo_Annotation_SemanticFromJSON(object.semantic) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path?.length) { + obj.path = message.path.map((e) => Math.round(e)); + } + if (message.sourceFile !== undefined && message.sourceFile !== "") { + obj.sourceFile = message.sourceFile; + } + if (message.begin !== undefined && message.begin !== 0) { + obj.begin = Math.round(message.begin); + } + if (message.end !== undefined && message.end !== 0) { + obj.end = Math.round(message.end); + } + if (message.semantic !== undefined && message.semantic !== 0) { + obj.semantic = generatedCodeInfo_Annotation_SemanticToJSON(message.semantic); + } + return obj; + }, + + create<I extends Exact<DeepPartial<GeneratedCodeInfo_Annotation>, I>>(base?: I): GeneratedCodeInfo_Annotation { + return GeneratedCodeInfo_Annotation.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<GeneratedCodeInfo_Annotation>, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 0; + message.semantic = object.semantic ?? 
0; + return message; + }, +}; + +function bytesFromBase64(b64: string): Uint8Array { + if ((globalThis as any).Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if ((globalThis as any).Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(globalThis.String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial<T> = T extends Builtin ? T + : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>> + : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>> + : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> } + : Partial<T>; + +type KeysOfUnion<T> = T extends T ? keyof T : never; +export type Exact<P, I extends P> = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never }; + +function longToNumber(int64: { toString(): string }): number { + const num = globalThis.Number(int64.toString()); + if (num > globalThis.Number.MAX_SAFE_INTEGER) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + if (num < globalThis.Number.MIN_SAFE_INTEGER) { + throw new globalThis.Error("Value is smaller than Number.MIN_SAFE_INTEGER"); + } + return num; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns<T> { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create<I extends Exact<DeepPartial<T>, I>>(base?: I): T; + fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T; +} diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/ignite/planet/mars/mars.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/ignite/planet/mars/mars.ts new file mode 100644 index 0000000..b909076 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/ignite/planet/mars/mars.ts @@ -0,0 +1,1203 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.11.2 +// protoc unknown +// source: ignite/planet/mars/mars.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; +import { PageRequest, PageResponse } from "../../../cosmos/base/query/v1beta1/pagination"; + +export const protobufPackage = "ignite.planet.mars"; + +export interface MsgMyMessageRequest { + mytypefield: string; +} + +export interface MsgMyMessageResponse { + mytypefield: string; +} + +export interface MsgBarRequest { + mytypefield: string; +} + +export interface MsgBarResponse { + mytypefield: string; +} + +export interface QuerySimpleRequest { +} + +export interface QuerySimpleResponse { + bar: string; +} + +export interface QuerySimpleParamsRequest { + mytypefield: string; +} + +export interface QuerySimpleParamsResponse { + bar: string; +} + +export interface QueryWithPaginationRequest { + mytypefield: string; + pagination: PageRequest | undefined; +} + +export interface QueryWithPaginationResponse { + pagination: PageResponse | undefined; +} + +export interface QueryWithQueryParamsRequest { + mytypefield: string; + queryParam: string; + mybool: boolean; + myrepeatedbool: boolean[]; +} + +export interface QueryWithQueryParamsResponse { + bar: string; +} + +export interface QueryWithQueryParamsWithPaginationRequest { + mytypefield: string; + queryParam: string; + pagination: PageRequest | undefined; +} + +export interface QueryWithQueryParamsWithPaginationResponse { + bar: string; + pagination: PageResponse | undefined; +} + +export interface AnotherType { + mytypefield: string; +} + +function createBaseMsgMyMessageRequest(): MsgMyMessageRequest { + return { mytypefield: "" }; +} + +export const MsgMyMessageRequest: MessageFns<MsgMyMessageRequest> = { + encode(message: MsgMyMessageRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + return writer; + }, + + 
decode(input: BinaryReader | Uint8Array, length?: number): MsgMyMessageRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgMyMessageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MsgMyMessageRequest { + return { mytypefield: isSet(object.mytypefield) ? globalThis.String(object.mytypefield) : "" }; + }, + + toJSON(message: MsgMyMessageRequest): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + return obj; + }, + + create<I extends Exact<DeepPartial<MsgMyMessageRequest>, I>>(base?: I): MsgMyMessageRequest { + return MsgMyMessageRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<MsgMyMessageRequest>, I>>(object: I): MsgMyMessageRequest { + const message = createBaseMsgMyMessageRequest(); + message.mytypefield = object.mytypefield ?? ""; + return message; + }, +}; + +function createBaseMsgMyMessageResponse(): MsgMyMessageResponse { + return { mytypefield: "" }; +} + +export const MsgMyMessageResponse: MessageFns<MsgMyMessageResponse> = { + encode(message: MsgMyMessageResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MsgMyMessageResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgMyMessageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MsgMyMessageResponse { + return { mytypefield: isSet(object.mytypefield) ? globalThis.String(object.mytypefield) : "" }; + }, + + toJSON(message: MsgMyMessageResponse): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + return obj; + }, + + create<I extends Exact<DeepPartial<MsgMyMessageResponse>, I>>(base?: I): MsgMyMessageResponse { + return MsgMyMessageResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<MsgMyMessageResponse>, I>>(object: I): MsgMyMessageResponse { + const message = createBaseMsgMyMessageResponse(); + message.mytypefield = object.mytypefield ?? ""; + return message; + }, +}; + +function createBaseMsgBarRequest(): MsgBarRequest { + return { mytypefield: "" }; +} + +export const MsgBarRequest: MessageFns<MsgBarRequest> = { + encode(message: MsgBarRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MsgBarRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgBarRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MsgBarRequest { + return { mytypefield: isSet(object.mytypefield) ? globalThis.String(object.mytypefield) : "" }; + }, + + toJSON(message: MsgBarRequest): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + return obj; + }, + + create<I extends Exact<DeepPartial<MsgBarRequest>, I>>(base?: I): MsgBarRequest { + return MsgBarRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<MsgBarRequest>, I>>(object: I): MsgBarRequest { + const message = createBaseMsgBarRequest(); + message.mytypefield = object.mytypefield ?? ""; + return message; + }, +}; + +function createBaseMsgBarResponse(): MsgBarResponse { + return { mytypefield: "" }; +} + +export const MsgBarResponse: MessageFns<MsgBarResponse> = { + encode(message: MsgBarResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MsgBarResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgBarResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MsgBarResponse { + return { mytypefield: isSet(object.mytypefield) ? globalThis.String(object.mytypefield) : "" }; + }, + + toJSON(message: MsgBarResponse): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + return obj; + }, + + create<I extends Exact<DeepPartial<MsgBarResponse>, I>>(base?: I): MsgBarResponse { + return MsgBarResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<MsgBarResponse>, I>>(object: I): MsgBarResponse { + const message = createBaseMsgBarResponse(); + message.mytypefield = object.mytypefield ?? ""; + return message; + }, +}; + +function createBaseQuerySimpleRequest(): QuerySimpleRequest { + return {}; +} + +export const QuerySimpleRequest: MessageFns<QuerySimpleRequest> = { + encode(_: QuerySimpleRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QuerySimpleRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQuerySimpleRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(_: any): QuerySimpleRequest { + return {}; + }, + + toJSON(_: QuerySimpleRequest): unknown { + const obj: any = {}; + return obj; + }, + + create<I extends Exact<DeepPartial<QuerySimpleRequest>, I>>(base?: I): QuerySimpleRequest { + return QuerySimpleRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QuerySimpleRequest>, I>>(_: I): QuerySimpleRequest { + const message = createBaseQuerySimpleRequest(); + return message; + }, +}; + +function createBaseQuerySimpleResponse(): QuerySimpleResponse { + return { bar: "" }; +} + +export const QuerySimpleResponse: MessageFns<QuerySimpleResponse> = { + encode(message: QuerySimpleResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.bar !== "") { + writer.uint32(10).string(message.bar); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QuerySimpleResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySimpleResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.bar = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QuerySimpleResponse { + return { bar: isSet(object.bar) ? 
globalThis.String(object.bar) : "" }; + }, + + toJSON(message: QuerySimpleResponse): unknown { + const obj: any = {}; + if (message.bar !== "") { + obj.bar = message.bar; + } + return obj; + }, + + create<I extends Exact<DeepPartial<QuerySimpleResponse>, I>>(base?: I): QuerySimpleResponse { + return QuerySimpleResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QuerySimpleResponse>, I>>(object: I): QuerySimpleResponse { + const message = createBaseQuerySimpleResponse(); + message.bar = object.bar ?? ""; + return message; + }, +}; + +function createBaseQuerySimpleParamsRequest(): QuerySimpleParamsRequest { + return { mytypefield: "" }; +} + +export const QuerySimpleParamsRequest: MessageFns<QuerySimpleParamsRequest> = { + encode(message: QuerySimpleParamsRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QuerySimpleParamsRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySimpleParamsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QuerySimpleParamsRequest { + return { mytypefield: isSet(object.mytypefield) ? 
globalThis.String(object.mytypefield) : "" }; + }, + + toJSON(message: QuerySimpleParamsRequest): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + return obj; + }, + + create<I extends Exact<DeepPartial<QuerySimpleParamsRequest>, I>>(base?: I): QuerySimpleParamsRequest { + return QuerySimpleParamsRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QuerySimpleParamsRequest>, I>>(object: I): QuerySimpleParamsRequest { + const message = createBaseQuerySimpleParamsRequest(); + message.mytypefield = object.mytypefield ?? ""; + return message; + }, +}; + +function createBaseQuerySimpleParamsResponse(): QuerySimpleParamsResponse { + return { bar: "" }; +} + +export const QuerySimpleParamsResponse: MessageFns<QuerySimpleParamsResponse> = { + encode(message: QuerySimpleParamsResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.bar !== "") { + writer.uint32(10).string(message.bar); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QuerySimpleParamsResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySimpleParamsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.bar = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QuerySimpleParamsResponse { + return { bar: isSet(object.bar) ? 
globalThis.String(object.bar) : "" }; + }, + + toJSON(message: QuerySimpleParamsResponse): unknown { + const obj: any = {}; + if (message.bar !== "") { + obj.bar = message.bar; + } + return obj; + }, + + create<I extends Exact<DeepPartial<QuerySimpleParamsResponse>, I>>(base?: I): QuerySimpleParamsResponse { + return QuerySimpleParamsResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QuerySimpleParamsResponse>, I>>(object: I): QuerySimpleParamsResponse { + const message = createBaseQuerySimpleParamsResponse(); + message.bar = object.bar ?? ""; + return message; + }, +}; + +function createBaseQueryWithPaginationRequest(): QueryWithPaginationRequest { + return { mytypefield: "", pagination: undefined }; +} + +export const QueryWithPaginationRequest: MessageFns<QueryWithPaginationRequest> = { + encode(message: QueryWithPaginationRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QueryWithPaginationRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryWithPaginationRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.pagination = PageRequest.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QueryWithPaginationRequest { + return { + mytypefield: isSet(object.mytypefield) ? globalThis.String(object.mytypefield) : "", + pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryWithPaginationRequest): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + if (message.pagination !== undefined) { + obj.pagination = PageRequest.toJSON(message.pagination); + } + return obj; + }, + + create<I extends Exact<DeepPartial<QueryWithPaginationRequest>, I>>(base?: I): QueryWithPaginationRequest { + return QueryWithPaginationRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QueryWithPaginationRequest>, I>>(object: I): QueryWithPaginationRequest { + const message = createBaseQueryWithPaginationRequest(); + message.mytypefield = object.mytypefield ?? ""; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryWithPaginationResponse(): QueryWithPaginationResponse { + return { pagination: undefined }; +} + +export const QueryWithPaginationResponse: MessageFns<QueryWithPaginationResponse> = { + encode(message: QueryWithPaginationResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QueryWithPaginationResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryWithPaginationResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.pagination = PageResponse.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QueryWithPaginationResponse { + return { pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryWithPaginationResponse): unknown { + const obj: any = {}; + if (message.pagination !== undefined) { + obj.pagination = PageResponse.toJSON(message.pagination); + } + return obj; + }, + + create<I extends Exact<DeepPartial<QueryWithPaginationResponse>, I>>(base?: I): QueryWithPaginationResponse { + return QueryWithPaginationResponse.fromPartial(base ?? 
({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QueryWithPaginationResponse>, I>>(object: I): QueryWithPaginationResponse { + const message = createBaseQueryWithPaginationResponse(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryWithQueryParamsRequest(): QueryWithQueryParamsRequest { + return { mytypefield: "", queryParam: "", mybool: false, myrepeatedbool: [] }; +} + +export const QueryWithQueryParamsRequest: MessageFns<QueryWithQueryParamsRequest> = { + encode(message: QueryWithQueryParamsRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + if (message.queryParam !== "") { + writer.uint32(18).string(message.queryParam); + } + if (message.mybool !== false) { + writer.uint32(24).bool(message.mybool); + } + writer.uint32(34).fork(); + for (const v of message.myrepeatedbool) { + writer.bool(v); + } + writer.join(); + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QueryWithQueryParamsRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryWithQueryParamsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.queryParam = reader.string(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.mybool = reader.bool(); + continue; + } + case 4: { + if (tag === 32) { + message.myrepeatedbool.push(reader.bool()); + + continue; + } + + if (tag === 34) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.myrepeatedbool.push(reader.bool()); + } + + continue; + } + + break; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QueryWithQueryParamsRequest { + return { + mytypefield: isSet(object.mytypefield) ? globalThis.String(object.mytypefield) : "", + queryParam: isSet(object.queryParam) + ? globalThis.String(object.queryParam) + : isSet(object.query_param) + ? globalThis.String(object.query_param) + : "", + mybool: isSet(object.mybool) ? globalThis.Boolean(object.mybool) : false, + myrepeatedbool: globalThis.Array.isArray(object?.myrepeatedbool) + ? object.myrepeatedbool.map((e: any) => globalThis.Boolean(e)) + : [], + }; + }, + + toJSON(message: QueryWithQueryParamsRequest): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + if (message.queryParam !== "") { + obj.queryParam = message.queryParam; + } + if (message.mybool !== false) { + obj.mybool = message.mybool; + } + if (message.myrepeatedbool?.length) { + obj.myrepeatedbool = message.myrepeatedbool; + } + return obj; + }, + + create<I extends Exact<DeepPartial<QueryWithQueryParamsRequest>, I>>(base?: I): QueryWithQueryParamsRequest { + return QueryWithQueryParamsRequest.fromPartial(base ?? 
({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QueryWithQueryParamsRequest>, I>>(object: I): QueryWithQueryParamsRequest { + const message = createBaseQueryWithQueryParamsRequest(); + message.mytypefield = object.mytypefield ?? ""; + message.queryParam = object.queryParam ?? ""; + message.mybool = object.mybool ?? false; + message.myrepeatedbool = object.myrepeatedbool?.map((e) => e) || []; + return message; + }, +}; + +function createBaseQueryWithQueryParamsResponse(): QueryWithQueryParamsResponse { + return { bar: "" }; +} + +export const QueryWithQueryParamsResponse: MessageFns<QueryWithQueryParamsResponse> = { + encode(message: QueryWithQueryParamsResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.bar !== "") { + writer.uint32(10).string(message.bar); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QueryWithQueryParamsResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryWithQueryParamsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.bar = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QueryWithQueryParamsResponse { + return { bar: isSet(object.bar) ? globalThis.String(object.bar) : "" }; + }, + + toJSON(message: QueryWithQueryParamsResponse): unknown { + const obj: any = {}; + if (message.bar !== "") { + obj.bar = message.bar; + } + return obj; + }, + + create<I extends Exact<DeepPartial<QueryWithQueryParamsResponse>, I>>(base?: I): QueryWithQueryParamsResponse { + return QueryWithQueryParamsResponse.fromPartial(base ?? 
({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QueryWithQueryParamsResponse>, I>>(object: I): QueryWithQueryParamsResponse { + const message = createBaseQueryWithQueryParamsResponse(); + message.bar = object.bar ?? ""; + return message; + }, +}; + +function createBaseQueryWithQueryParamsWithPaginationRequest(): QueryWithQueryParamsWithPaginationRequest { + return { mytypefield: "", queryParam: "", pagination: undefined }; +} + +export const QueryWithQueryParamsWithPaginationRequest: MessageFns<QueryWithQueryParamsWithPaginationRequest> = { + encode(message: QueryWithQueryParamsWithPaginationRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + if (message.queryParam !== "") { + writer.uint32(18).string(message.queryParam); + } + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QueryWithQueryParamsWithPaginationRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryWithQueryParamsWithPaginationRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.queryParam = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.pagination = PageRequest.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QueryWithQueryParamsWithPaginationRequest { + return { + mytypefield: isSet(object.mytypefield) ? 
globalThis.String(object.mytypefield) : "", + queryParam: isSet(object.queryParam) + ? globalThis.String(object.queryParam) + : isSet(object.query_param) + ? globalThis.String(object.query_param) + : "", + pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryWithQueryParamsWithPaginationRequest): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + if (message.queryParam !== "") { + obj.queryParam = message.queryParam; + } + if (message.pagination !== undefined) { + obj.pagination = PageRequest.toJSON(message.pagination); + } + return obj; + }, + + create<I extends Exact<DeepPartial<QueryWithQueryParamsWithPaginationRequest>, I>>( + base?: I, + ): QueryWithQueryParamsWithPaginationRequest { + return QueryWithQueryParamsWithPaginationRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QueryWithQueryParamsWithPaginationRequest>, I>>( + object: I, + ): QueryWithQueryParamsWithPaginationRequest { + const message = createBaseQueryWithQueryParamsWithPaginationRequest(); + message.mytypefield = object.mytypefield ?? ""; + message.queryParam = object.queryParam ?? ""; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryWithQueryParamsWithPaginationResponse(): QueryWithQueryParamsWithPaginationResponse { + return { bar: "", pagination: undefined }; +} + +export const QueryWithQueryParamsWithPaginationResponse: MessageFns<QueryWithQueryParamsWithPaginationResponse> = { + encode(message: QueryWithQueryParamsWithPaginationResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.bar !== "") { + writer.uint32(10).string(message.bar); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): QueryWithQueryParamsWithPaginationResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryWithQueryParamsWithPaginationResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.bar = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.pagination = PageResponse.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): QueryWithQueryParamsWithPaginationResponse { + return { + bar: isSet(object.bar) ? globalThis.String(object.bar) : "", + pagination: isSet(object.pagination) ? 
PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryWithQueryParamsWithPaginationResponse): unknown { + const obj: any = {}; + if (message.bar !== "") { + obj.bar = message.bar; + } + if (message.pagination !== undefined) { + obj.pagination = PageResponse.toJSON(message.pagination); + } + return obj; + }, + + create<I extends Exact<DeepPartial<QueryWithQueryParamsWithPaginationResponse>, I>>( + base?: I, + ): QueryWithQueryParamsWithPaginationResponse { + return QueryWithQueryParamsWithPaginationResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<QueryWithQueryParamsWithPaginationResponse>, I>>( + object: I, + ): QueryWithQueryParamsWithPaginationResponse { + const message = createBaseQueryWithQueryParamsWithPaginationResponse(); + message.bar = object.bar ?? ""; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseAnotherType(): AnotherType { + return { mytypefield: "" }; +} + +export const AnotherType: MessageFns<AnotherType> = { + encode(message: AnotherType, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.mytypefield !== "") { + writer.uint32(10).string(message.mytypefield); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): AnotherType { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseAnotherType(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.mytypefield = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): AnotherType { + return { mytypefield: isSet(object.mytypefield) ? globalThis.String(object.mytypefield) : "" }; + }, + + toJSON(message: AnotherType): unknown { + const obj: any = {}; + if (message.mytypefield !== "") { + obj.mytypefield = message.mytypefield; + } + return obj; + }, + + create<I extends Exact<DeepPartial<AnotherType>, I>>(base?: I): AnotherType { + return AnotherType.fromPartial(base ?? ({} as any)); + }, + fromPartial<I extends Exact<DeepPartial<AnotherType>, I>>(object: I): AnotherType { + const message = createBaseAnotherType(); + message.mytypefield = object.mytypefield ?? ""; + return message; + }, +}; + +export interface Msg { + MyMessage(request: MsgMyMessageRequest): Promise<MsgMyMessageResponse>; + Bar(request: MsgBarRequest): Promise<MsgBarResponse>; +} + +export const MsgServiceName = "ignite.planet.mars.Msg"; +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + private readonly service: string; + constructor(rpc: Rpc, opts?: { service?: string }) { + this.service = opts?.service || MsgServiceName; + this.rpc = rpc; + this.MyMessage = this.MyMessage.bind(this); + this.Bar = this.Bar.bind(this); + } + MyMessage(request: MsgMyMessageRequest): Promise<MsgMyMessageResponse> { + const data = MsgMyMessageRequest.encode(request).finish(); + const promise = this.rpc.request(this.service, "MyMessage", data); + return promise.then((data) => MsgMyMessageResponse.decode(new BinaryReader(data))); + } + + Bar(request: MsgBarRequest): Promise<MsgBarResponse> { + const data = MsgBarRequest.encode(request).finish(); + const promise = 
this.rpc.request(this.service, "Bar", data); + return promise.then((data) => MsgBarResponse.decode(new BinaryReader(data))); + } +} + +export interface Query { + QuerySimple(request: QuerySimpleRequest): Promise<QuerySimpleResponse>; + QuerySimpleParams(request: QuerySimpleParamsRequest): Promise<QuerySimpleParamsResponse>; + QueryParamsWithPagination(request: QueryWithPaginationRequest): Promise<QueryWithPaginationResponse>; + QueryWithQueryParams(request: QueryWithQueryParamsRequest): Promise<QueryWithQueryParamsResponse>; + QueryWithQueryParamsWithPagination( + request: QueryWithQueryParamsWithPaginationRequest, + ): Promise<QueryWithQueryParamsWithPaginationResponse>; +} + +export const QueryServiceName = "ignite.planet.mars.Query"; +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + private readonly service: string; + constructor(rpc: Rpc, opts?: { service?: string }) { + this.service = opts?.service || QueryServiceName; + this.rpc = rpc; + this.QuerySimple = this.QuerySimple.bind(this); + this.QuerySimpleParams = this.QuerySimpleParams.bind(this); + this.QueryParamsWithPagination = this.QueryParamsWithPagination.bind(this); + this.QueryWithQueryParams = this.QueryWithQueryParams.bind(this); + this.QueryWithQueryParamsWithPagination = this.QueryWithQueryParamsWithPagination.bind(this); + } + QuerySimple(request: QuerySimpleRequest): Promise<QuerySimpleResponse> { + const data = QuerySimpleRequest.encode(request).finish(); + const promise = this.rpc.request(this.service, "QuerySimple", data); + return promise.then((data) => QuerySimpleResponse.decode(new BinaryReader(data))); + } + + QuerySimpleParams(request: QuerySimpleParamsRequest): Promise<QuerySimpleParamsResponse> { + const data = QuerySimpleParamsRequest.encode(request).finish(); + const promise = this.rpc.request(this.service, "QuerySimpleParams", data); + return promise.then((data) => QuerySimpleParamsResponse.decode(new BinaryReader(data))); + } + + 
QueryParamsWithPagination(request: QueryWithPaginationRequest): Promise<QueryWithPaginationResponse> { + const data = QueryWithPaginationRequest.encode(request).finish(); + const promise = this.rpc.request(this.service, "QueryParamsWithPagination", data); + return promise.then((data) => QueryWithPaginationResponse.decode(new BinaryReader(data))); + } + + QueryWithQueryParams(request: QueryWithQueryParamsRequest): Promise<QueryWithQueryParamsResponse> { + const data = QueryWithQueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request(this.service, "QueryWithQueryParams", data); + return promise.then((data) => QueryWithQueryParamsResponse.decode(new BinaryReader(data))); + } + + QueryWithQueryParamsWithPagination( + request: QueryWithQueryParamsWithPaginationRequest, + ): Promise<QueryWithQueryParamsWithPaginationResponse> { + const data = QueryWithQueryParamsWithPaginationRequest.encode(request).finish(); + const promise = this.rpc.request(this.service, "QueryWithQueryParamsWithPagination", data); + return promise.then((data) => QueryWithQueryParamsWithPaginationResponse.decode(new BinaryReader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise<Uint8Array>; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial<T> = T extends Builtin ? T + : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>> + : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>> + : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> } + : Partial<T>; + +type KeysOfUnion<T> = T extends T ? keyof T : never; +export type Exact<P, I extends P> = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns<T> { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create<I extends Exact<DeepPartial<T>, I>>(base?: I): T; + fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T; +} diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/route-name.eta b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/route-name.eta new file mode 100644 index 0000000..291eb38 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/ignite.planet.mars/types/route-name.eta @@ -0,0 +1,45 @@ +<% +const { routeInfo, utils } = it; +const { + operationId, + method, + route, + moduleName, + responsesTypes, + description, + tags, + summary, + pathArgs, +} = routeInfo; +const { _, fmtToJSDocLine, require } = utils; + +const methodAliases = { + get: (pathName, hasPathInserts) => + _.camelCase(`${pathName}_${hasPathInserts ? "detail" : "list"}`), + post: (pathName, hasPathInserts) => _.camelCase(`${pathName}_create`), + put: (pathName, hasPathInserts) => _.camelCase(`${pathName}_update`), + patch: (pathName, hasPathInserts) => _.camelCase(`${pathName}_partial_update`), + delete: (pathName, hasPathInserts) => _.camelCase(`${pathName}_delete`), +}; + +const createCustomOperationId = (method, route, moduleName) => { + const hasPathInserts = /\{(\w){1,}\}/g.test(route); + const splitedRouteBySlash = _.compact(_.replace(route, /\{(\w){1,}\}/g, "").split("/")); + const routeParts = (splitedRouteBySlash.length > 1 + ? splitedRouteBySlash.splice(1) + : splitedRouteBySlash + ).join("_"); + return routeParts.length > 3 && methodAliases[method] + ? 
methodAliases[method](routeParts, hasPathInserts) + : _.camelCase(_.lowerCase(method) + "_" + [moduleName].join("_")) || "index"; +}; + +if (operationId) { + let routeName = operationId.replace('_',''); + return routeName[0].toLowerCase() + routeName.slice(1); +} +if (route === "/") + return _.camelCase(`${_.lowerCase(method)}Root`); + +return createCustomOperationId(method, route, moduleName); +%> \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/index.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/index.ts new file mode 100755 index 0000000..7c9a93f --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/index.ts @@ -0,0 +1,21 @@ +// Generated by Ignite ignite.com/cli +import { Registry } from '@cosmjs/proto-signing' +import { IgniteClient } from "./client"; +import { MissingWalletError } from "./helpers"; +import { IgntModule as IgnitePlanetMars, msgTypes as IgnitePlanetMarsMsgTypes } from './ignite.planet.mars' + + +const Client = IgniteClient.plugin([ + IgnitePlanetMars +]); + +const registry = new Registry([ + ...IgnitePlanetMarsMsgTypes, + +]) + +export { + Client, + registry, + MissingWalletError +} diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/modules.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/modules.ts new file mode 100755 index 0000000..49d2c8b --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/modules.ts @@ -0,0 +1,5 @@ +import { IgniteClient } from "./client"; +import { GeneratedType } from "@cosmjs/proto-signing"; + +export type IgntModuleInterface = { [key: string]: any } +export type IgntModule = (instance: IgniteClient) => { module: IgntModuleInterface, registry: [string, GeneratedType][] } diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/package.json b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/package.json new file mode 100755 index 0000000..726a5cb --- /dev/null +++ 
b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/package.json @@ -0,0 +1,39 @@ +{ + "name": "testdata-testchain-client-ts", + "version": "0.0.1", + "description": "Autogenerated Typescript Client", + "author": "Ignite Codegen <hello@ignite.com>", + "license": "Apache-2.0", + "licenses": [ + { + "type": "Apache-2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0" + } + ], + "main": "lib/index.js", + "publishConfig": { + "access": "public" + }, + "scripts": { + "build": "NODE_OPTIONS='--max-old-space-size=16384' tsc" + }, + "dependencies": { + "@cosmjs/proto-signing": "0.33.1", + "@cosmjs/stargate": "0.33.1", + "@keplr-wallet/types": "^0.12.234", + "axios": "1.9.0", + "buffer": "^6.0.3", + "events": "^3.3.0" + }, + "peerDependencies": { + "@cosmjs/proto-signing": "0.33.1", + "@cosmjs/stargate": "0.33.1" + }, + "devDependencies": { + "@bufbuild/protobuf": "^2.4.0", + "@types/events": "^3.0.3", + "qs": "^6.14.0", + "type-fest": "^4.41.0", + "typescript": "^5.8.3" + } +} diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/tsconfig.json b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/tsconfig.json new file mode 100755 index 0000000..6d679bb --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/tsconfig.json @@ -0,0 +1,13 @@ +{ + "compilerOptions": { + "target": "ES2020", + "module": "ES2020", + "moduleResolution": "node", + "outDir": "./lib", + "declaration": true, + "allowSyntheticDefaultImports": true, + "esModuleInterop": false, + "strict": false, + "skipLibCheck": true + } + } \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/testchain/ts-client/types.d.ts b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/types.d.ts new file mode 100755 index 0000000..b839c08 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/ts-client/types.d.ts @@ -0,0 +1,21 @@ +import { Keplr, Window as KeplrWindow } from '@keplr-wallet/types'; + +declare global { + interface KeplrIntereactionOptions { + readonly sign?: 
KeplrSignOptions; + } + + export interface KeplrSignOptions { + readonly preferNoSetFee?: boolean; + readonly preferNoSetMemo?: boolean; + readonly disableBalanceCheck?: boolean; + } + interface CustomKeplr extends Keplr { + enable(chainId: string | string[]): Promise<void>; + + defaultOptions: KeplrIntereactionOptions; + } + interface Window extends KeplrWindow { + keplr: CustomKeplr; + } +} \ No newline at end of file diff --git a/ignite/pkg/cosmosgen/testdata/testchain/x/mars/keeper/query.go b/ignite/pkg/cosmosgen/testdata/testchain/x/mars/keeper/query.go new file mode 100644 index 0000000..337ae26 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/x/mars/keeper/query.go @@ -0,0 +1,29 @@ +package keeper + +import ( + "context" + + "github.com/ignite/planet/x/mars/types" +) + +type Keeper struct{} + +func (k Keeper) QuerySimple(goCtx context.Context, req *types.QuerySimpleRequest) (*types.QuerySimpleResponse, error) { + return nil, nil +} + +func (k Keeper) QuerySimpleParams(goCtx context.Context, req *types.QuerySimpleParamsRequest) (*types.QuerySimpleParamsResponse, error) { + return nil, nil +} + +func (k Keeper) QueryParamsWithPagination(goCtx context.Context, req *types.QueryWithPaginationRequest) (*types.QueryWithPaginationResponse, error) { + return nil, nil +} + +func (k Keeper) QueryWithQueryParams(goCtx context.Context, req *types.QueryWithQueryParamsRequest) (*types.QueryWithQueryParamsResponse, error) { + return nil, nil +} + +func (k Keeper) QueryWithQueryParamsWithPagination(goCtx context.Context, req *types.QueryWithQueryParamsWithPaginationRequest) (*types.QueryWithQueryParamsWithPaginationResponse, error) { + return nil, nil +} diff --git a/ignite/pkg/cosmosgen/testdata/testchain/x/mars/types/types.go b/ignite/pkg/cosmosgen/testdata/testchain/x/mars/types/types.go new file mode 100644 index 0000000..3b27b55 --- /dev/null +++ b/ignite/pkg/cosmosgen/testdata/testchain/x/mars/types/types.go @@ -0,0 +1,14 @@ +package types + +type ( + 
QuerySimpleRequest struct{} + QuerySimpleResponse struct{} + QuerySimpleParamsRequest struct{} + QuerySimpleParamsResponse struct{} + QueryWithPaginationRequest struct{} + QueryWithPaginationResponse struct{} + QueryWithQueryParamsRequest struct{} + QueryWithQueryParamsResponse struct{} + QueryWithQueryParamsWithPaginationRequest struct{} + QueryWithQueryParamsWithPaginationResponse struct{} +) diff --git a/ignite/pkg/cosmosgen/webtemplates.go b/ignite/pkg/cosmosgen/webtemplates.go new file mode 100644 index 0000000..9d63a4c --- /dev/null +++ b/ignite/pkg/cosmosgen/webtemplates.go @@ -0,0 +1,12 @@ +package cosmosgen + +import ( + webtemplates "github.com/ignite/web" + + "github.com/ignite/cli/v29/ignite/pkg/localfs" +) + +// Vue scaffolds a Vue.js app for a chain. +func Vue(path string) error { + return localfs.Save(webtemplates.VueBoilerplate(), path) +} diff --git a/ignite/pkg/cosmostxcollector/adapter/adapter.go b/ignite/pkg/cosmostxcollector/adapter/adapter.go new file mode 100644 index 0000000..00e4988 --- /dev/null +++ b/ignite/pkg/cosmostxcollector/adapter/adapter.go @@ -0,0 +1,42 @@ +package adapter + +import ( + "context" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosclient" + "github.com/ignite/cli/v29/ignite/pkg/cosmostxcollector/query" +) + +// Saver is the interface that wraps the transactions save method. +// +//go:generate mockery --name Saver --case underscore --with-expecter --output ../mocks +type Saver interface { + // Save a list of transactions into a data backend. + Save(context.Context, []cosmosclient.TX) error +} + +// Adapter defines the interface for data backend adapters. +type Adapter interface { + Saver + + // GetType returns the adapter type. + GetType() string + + // Init initializes the adapter. + // During initialization the adapter creates or updates the data backend schema + // required to save the metrics and performs any initialization required previous + // to use the adapter. 
+ // This method must be called at least once to set up the initial database schema. + // Calling it when a schema already exists updates the existing schema to the + // latest version if the current one is older. + Init(context.Context) error + + // GetLatestHeight returns the height of the latest block known by the data backend. + GetLatestHeight(context.Context) (int64, error) + + // QueryEvents executes an event query in the data backend. + QueryEvents(context.Context, query.EventQuery) ([]query.Event, error) + + // Query executes a query in the data backend. + Query(context.Context, query.Query) (query.Cursor, error) +} diff --git a/ignite/pkg/cosmostxcollector/adapter/adapter_test.go b/ignite/pkg/cosmostxcollector/adapter/adapter_test.go new file mode 100644 index 0000000..6aa550d --- /dev/null +++ b/ignite/pkg/cosmostxcollector/adapter/adapter_test.go @@ -0,0 +1,33 @@ +package adapter + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosclient" + "github.com/ignite/cli/v29/ignite/pkg/cosmostxcollector/query" +) + +type testAdapter struct{} + +func (testAdapter) Save(context.Context, []cosmosclient.TX) error { return nil } +func (testAdapter) GetType() string { return "test" } +func (testAdapter) Init(context.Context) error { return nil } +func (testAdapter) GetLatestHeight(context.Context) (int64, error) { return 1, nil } +func (testAdapter) QueryEvents(context.Context, query.EventQuery) ([]query.Event, error) { + return nil, nil +} + +func (testAdapter) Query(context.Context, query.Query) (query.Cursor, error) { + return nil, nil +} + +func TestTestAdapterImplementsInterfaces(t *testing.T) { + var _ Saver = testAdapter{} + var a Adapter = testAdapter{} + + require.Equal(t, "test", a.GetType()) + require.NoError(t, a.Save(context.Background(), nil)) +} diff --git a/ignite/pkg/cosmostxcollector/adapter/postgres/filters.go b/ignite/pkg/cosmostxcollector/adapter/postgres/filters.go new 
file mode 100644 index 0000000..6726b9a --- /dev/null +++ b/ignite/pkg/cosmostxcollector/adapter/postgres/filters.go @@ -0,0 +1,142 @@ +package postgres + +import ( + "fmt" + "strconv" + + "github.com/lib/pq" +) + +const ( + FieldEventAttrName = "attribute.name" + FieldEventAttrValue = "attribute.value" + FieldEventTXHash = "event.tx_hash" + FieldEventType = "event.type" +) + +const ( + filterPlaceholder = "?" +) + +// Modifier defines a function that can be used to modify a field name or value. +type Modifier func(field string) string + +// CastJSONToText modifier casts a JSON/JSONB field to text. +func CastJSONToText(f string) string { + return fmt.Sprintf("%s::text", f) +} + +// CastJSONToNumeric modifier casts a JSON/JSONB field to numeric. +func CastJSONToNumeric(f string) string { + return fmt.Sprintf("%s::numeric", f) +} + +// FilterOption defines an option for filters. +type FilterOption func(*Filter) + +// WithModifiers assigns one or more field modifier functions to the filter. +// Field modifiers can be used to change the behavior of a filtered field. +func WithModifiers(m ...Modifier) FilterOption { + return func(f *Filter) { + f.modifiers = m + } +} + +// NewFilter creates a new generic equality filter. +func NewFilter(field string, value any, options ...FilterOption) Filter { + f := Filter{ + field: field, + value: value, + } + + for _, o := range options { + o(&f) + } + + return f +} + +// Filter defines a generic equality filter. 
+type Filter struct { + field string + value any + modifiers []Modifier +} + +func (f Filter) String() string { + return fmt.Sprintf("%s = %s", f.applyModifiers(f.field), filterPlaceholder) +} + +func (f Filter) Field() string { + return f.field +} + +func (f Filter) Value() any { + return f.value +} + +func (f Filter) applyModifiers(field string) string { + // Apply all the field modifiers in order + for _, m := range f.modifiers { + field = m(field) + } + + return field +} + +// NewStringSliceFilter creates a new string slice equality filter. +func NewStringSliceFilter(field string, values []string) SliceFilter { + return SliceFilter{ + Filter: NewFilter(field, pq.Array(values)), + } +} + +// NewIntSliceFilter creates a new int64 slice equality filter. +func NewIntSliceFilter(field string, values []int64) SliceFilter { + return SliceFilter{ + Filter: NewFilter(field, pq.Array(values)), + } +} + +// SliceFilter defines a generic slice/array equality filter. +type SliceFilter struct { + Filter +} + +func (f SliceFilter) String() string { + return fmt.Sprintf("%s = ANY(%s)", f.applyModifiers(f.field), filterPlaceholder) +} + +func (f SliceFilter) Value() any { + return f.Filter.Value() +} + +// FilterByEventType creates a new filter to match events by type. +func FilterByEventType(eventType string) Filter { + return NewFilter(FieldEventType, eventType) +} + +// FilterByEventTXs creates a new filter to match events by TX hashes. +func FilterByEventTXs(hashes ...string) SliceFilter { + return NewStringSliceFilter(FieldEventTXHash, hashes) +} + +// FilterByEventAttrName creates a new filter to match events by attribute name. +func FilterByEventAttrName(name string) Filter { + return NewFilter(FieldEventAttrName, name) +} + +// FilterByEventAttrValue creates a new filter to match events by attribute value. 
+func FilterByEventAttrValue(v string) Filter { + // The string value must be quoted to match with the JSONB text + v = strconv.Quote(v) + + // Use a field modifier to cast the event attribute value JSONB field to text + return NewFilter(FieldEventAttrValue, v, WithModifiers(CastJSONToText)) +} + +// FilterByEventAttrValueInt creates a new filter to match events by attribute value. +func FilterByEventAttrValueInt(v int64) Filter { + // Use a field modifier to cast the event attribute value JSONB field to numeric + return NewFilter(FieldEventAttrValue, v, WithModifiers(CastJSONToNumeric)) +} diff --git a/ignite/pkg/cosmostxcollector/adapter/postgres/filters_test.go b/ignite/pkg/cosmostxcollector/adapter/postgres/filters_test.go new file mode 100644 index 0000000..e9ffc9f --- /dev/null +++ b/ignite/pkg/cosmostxcollector/adapter/postgres/filters_test.go @@ -0,0 +1,52 @@ +package postgres_test + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cosmostxcollector/adapter/postgres" +) + +func TestFilter(t *testing.T) { + // Arrange + name := "string_field" + value := "test" + repr := fmt.Sprintf("%s = ?", name) + + // Act + filter := postgres.NewFilter(name, value) + + // Assert + require.Equal(t, repr, filter.String()) + require.Equal(t, name, filter.Field()) + require.Equal(t, value, filter.Value()) +} + +func TestFilterModifiers(t *testing.T) { + cases := []struct { + name string + modifier postgres.Modifier + want string + }{ + { + name: "CastJSONToText", + modifier: postgres.CastJSONToText, + want: "field::text = ?", + }, + { + name: "CastJSONToNumeric", + modifier: postgres.CastJSONToNumeric, + want: "field::numeric = ?", + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + field := postgres.NewFilter("field", nil, postgres.WithModifiers(tt.modifier)) + + require.EqualValues(t, tt.want, field.String()) + }) + } +} diff --git 
a/ignite/pkg/cosmostxcollector/adapter/postgres/parsers.go b/ignite/pkg/cosmostxcollector/adapter/postgres/parsers.go new file mode 100644 index 0000000..2b04302 --- /dev/null +++ b/ignite/pkg/cosmostxcollector/adapter/postgres/parsers.go @@ -0,0 +1,185 @@ +package postgres + +import ( + "fmt" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/cosmostxcollector/query" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +const ( + eventAttrPrefix = "attribute." + + sqlSelectAll = "SELECT *" + sqlWhereTrue = "WHERE true" + + tplSelectEventsSQL = ` + SELECT event.id, event.index, event.tx_hash, event.type, event.created_at + FROM event INNER JOIN tx ON event.tx_hash = tx.hash + %s + ORDER BY tx.height, tx.index, event.index + ` + tplSelectEventsWithAttrSQL = ` + SELECT DISTINCT events.* + FROM ( + SELECT event.id, event.index, event.tx_hash, event.type, event.created_at + FROM event + INNER JOIN tx ON event.tx_hash = tx.hash + INNER JOIN attribute ON event.id = attribute.event_id + %s + ORDER BY tx.height, tx.index, event.index + ) AS events + ` +) + +var ( + ErrUnknownEntity = errors.New("unknown query entity") + ErrInvalidSortOrder = errors.New("invalid query sort order") +) + +// TODO: Use an SQL builder/parser to build the queries? 
+func parseQuery(q query.Query) (string, error) { + sections := []string{ + // Add SELECT + parseFields(q.Fields()), + // Add FROM + parseFrom(q), + } + + // Add WHERE + sections = append(sections, parseFilters(q.Filters())) + + // Add ORDER BY + sortBy, err := parseSortBy(q.SortBy()) + if err != nil { + return "", err + } + + if sortBy != "" { + sections = append(sections, sortBy) + } + + // Add LIMIT/OFFSET + if s, ok := parsePaging(q); ok { + sections = append(sections, s) + } + + return strings.Join(sections, " "), nil +} + +func parseEventQuery(q query.EventQuery) string { + sql := tplSelectEventsSQL + filters := q.Filters() + + // Check if any of the filters references an event attribute + // and if so add the required INNER JOIN to the raw SQL query. + // The JOIN is not present by default to improve events queries. + for _, f := range filters { + if strings.HasPrefix(f.Field(), eventAttrPrefix) { + sql = tplSelectEventsWithAttrSQL + + break + } + } + + // Add SELECT + sections := []string{ + fmt.Sprintf(sql, parseFilters(q.Filters())), + } + + // Add LIMIT/OFFSET + if s, ok := parsePaging(q); ok { + sections = append(sections, s) + } + + return strings.Join(sections, " ") +} + +func parseFields(fields []string) string { + if len(fields) == 0 { + // By default select all fields + return sqlSelectAll + } + + return fmt.Sprintf("SELECT DISTINCT %s", strings.Join(fields, ", ")) +} + +func parseFrom(q query.Query) string { + // Init the function call placeholders for the arguments + args := q.Args() + placeholders := make([]string, len(args)) + for i := range args { + placeholders[i] = fmt.Sprintf("$%d", i+1) + } + + // When there are arguments it means it is a postgres function + // call otherwise the call is treated as a table or view. 
+ s := fmt.Sprintf("FROM %s", q.Name()) + if len(placeholders) > 0 { + s = fmt.Sprintf("%s(%s)", s, strings.Join(placeholders, ", ")) + } + + return s +} + +func parseFilters(filters []query.Filter) string { + if len(filters) == 0 { + return sqlWhereTrue + } + + pos := 0 + items := make([]string, len(filters)) + + for i, f := range filters { + // Render the filter, so it can be applied to the query + expr := f.String() + + // When the filter has a value replace the "?" by a positional + // postgres placeholder like "$1", "$2", and so on + if v := f.Value(); v != nil { + index := strings.LastIndex(expr, filterPlaceholder) + expr = expr[:index] + fmt.Sprintf("$%d", pos+1) + expr[index+1:] + pos++ + } + + items[i] = expr + } + + return fmt.Sprintf("WHERE %s", strings.Join(items, " AND ")) +} + +func parseSortBy(sortInfo []query.SortBy) (string, error) { + if len(sortInfo) == 0 { + return "", nil + } + + var items []string + + for _, s := range sortInfo { + if s.Order != query.SortOrderAsc && s.Order != query.SortOrderDesc { + return "", ErrInvalidSortOrder + } + + items = append(items, fmt.Sprintf("%s %s", s.Field, s.Order)) + } + + return fmt.Sprintf("ORDER BY %s", strings.Join(items, ", ")), nil +} + +func parsePaging(q query.Pager) (string, bool) { + if !q.IsPagingEnabled() { + return "", false + } + + // Get the current page and make sure that the page number is valid + page := q.AtPage() + if page == 0 { + page = 1 + } + + limit := q.PageSize() + offset := limit * (page - 1) + + return fmt.Sprintf("LIMIT %d OFFSET %d", limit, offset), true +} diff --git a/ignite/pkg/cosmostxcollector/adapter/postgres/postgres.go b/ignite/pkg/cosmostxcollector/adapter/postgres/postgres.go new file mode 100644 index 0000000..2728d1a --- /dev/null +++ b/ignite/pkg/cosmostxcollector/adapter/postgres/postgres.go @@ -0,0 +1,424 @@ +package postgres + +import ( + "context" + "database/sql" + "embed" + "encoding/json" + "fmt" + "net/url" + + "github.com/lib/pq" + + ctypes 
"github.com/cometbft/cometbft/rpc/core/types" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosclient" + "github.com/ignite/cli/v29/ignite/pkg/cosmostxcollector/query" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +const ( + DefaultPort = 5432 + DefaultHost = "127.0.0.1" +) + +const ( + adapterType = "postgres" + + sqlSelectBlockHeight = ` + SELECT COALESCE(MAX(height), 0) + FROM tx + ` + sqlSelectEventAttrs = ` + SELECT event_id, name, value FROM attribute + WHERE event_id = ANY($1) + ORDER BY event_id + ` + sqlInsertTX = ` + INSERT INTO tx (hash, index, height, block_time) + VALUES ($1, $2, $3, $4) + ` + sqlInsertEvent = ` + INSERT INTO event (tx_hash, type, index) + VALUES ($1, $2, $3) RETURNING id + ` + sqlInsertEventAttr = ` + INSERT INTO attribute (event_id, name, value) + VALUES ($1, $2, $3) + ` + sqlInsertRawTX = ` + INSERT INTO raw_tx (hash, data) + VALUES ($1, $2) + ` +) + +//go:embed schemas/* +var fsSchemas embed.FS + +// ErrClosed is returned when database connection is not open. +var ErrClosed = errors.New("no database connection") + +// Option defines an option for the adapter. +type Option func(*Adapter) + +// WithHost configures a database host name or IP. +func WithHost(host string) Option { + return func(a *Adapter) { + a.host = host + } +} + +// WithPort configures a database port. +func WithPort(port uint) Option { + return func(a *Adapter) { + a.port = port + } +} + +// WithUser configures a database user. +func WithUser(user string) Option { + return func(a *Adapter) { + a.user = user + } +} + +// WithPassword configures a database password. +func WithPassword(password string) Option { + return func(a *Adapter) { + a.password = password + } +} + +// WithParams configures extra database parameters. +func WithParams(params map[string]string) Option { + return func(a *Adapter) { + a.params = params + } +} + +// NewAdapter creates a new PostgreSQL adapter. 
+func NewAdapter(database string, options ...Option) (Adapter, error) { + adapter := Adapter{ + host: DefaultHost, + port: DefaultPort, + database: database, + schemas: NewSchemas(fsSchemas, ""), + } + + for _, o := range options { + o(&adapter) + } + + db, err := sql.Open("postgres", createPostgresURI(adapter)) + if err != nil { + return Adapter{}, err + } + + adapter.db = db + + return adapter, nil +} + +// Adapter implements a data backend adapter for PostgreSQL. +type Adapter struct { + host, user, password, database string + port uint + params map[string]string + db *sql.DB + schemas Schemas +} + +// UpdateSchema updates the database schema to the latest version available. +// It applies all available schemas that were not applied already. +func (a Adapter) UpdateSchema(ctx context.Context, s Schemas) error { + db, err := a.getDB() + if err != nil { + return err + } + + // Create the schema table if it doesn't exist + if _, err := db.ExecContext(ctx, s.GetTableDDL()); err != nil { + return errors.Errorf("failed to check schema table: %w", err) + } + + // Get the current schema version + var v uint64 + if err := db.QueryRowContext(ctx, s.GetSchemaVersionSQL()).Scan(&v); err != nil { + return errors.Errorf("failed to read current schema version: %w", err) + } + + return s.WalkFrom(v+1, func(version uint64, script []byte) error { + if _, err := db.ExecContext(ctx, string(script)); err != nil { + return errors.Errorf("error applying schema version %d: %w", version, err) + } + + return nil + }) +} + +func (a Adapter) GetType() string { + return adapterType +} + +func (a Adapter) Init(ctx context.Context) error { + return a.UpdateSchema(ctx, a.schemas) +} + +func (a Adapter) Save(ctx context.Context, txs []cosmosclient.TX) error { + db, err := a.getDB() + if err != nil { + return err + } + + // Start a transaction + sqlTx, err := db.BeginTx(ctx, nil) + if err != nil { + return err + } + + // Rollback won't have any effect if the transaction is committed before + 
defer sqlTx.Rollback() //nolint:errcheck + + // Prepare insert statements to speed up "bulk" saving times + txStmt, err := sqlTx.PrepareContext(ctx, sqlInsertTX) + if err != nil { + return err + } + + defer txStmt.Close() + + evtStmt, err := sqlTx.PrepareContext(ctx, sqlInsertEvent) + if err != nil { + return err + } + + defer evtStmt.Close() + + attrStmt, err := sqlTx.PrepareContext(ctx, sqlInsertEventAttr) + if err != nil { + return err + } + + defer attrStmt.Close() + + // All the transactions are saved within the context of the same database + // transactions and because of that either all block transactions are + // saved or none of them. + for _, tx := range txs { + if err := saveRawTX(ctx, sqlTx, tx.Raw); err != nil { + return err + } + + if err := saveTX(ctx, txStmt, evtStmt, attrStmt, tx); err != nil { + return err + } + } + + return sqlTx.Commit() +} + +func (a Adapter) GetLatestHeight(ctx context.Context) (height int64, err error) { + db, err := a.getDB() + if err != nil { + return 0, err + } + + row := db.QueryRowContext(ctx, sqlSelectBlockHeight) + if err = row.Scan(&height); err != nil { + return 0, err + } + + return height, nil +} + +func (a Adapter) QueryEvents(ctx context.Context, q query.EventQuery) ([]query.Event, error) { + db, err := a.getDB() + if err != nil { + return nil, err + } + + sql := parseEventQuery(q) + args := extractEventQueryArgs(q) + rows, err := db.QueryContext(ctx, sql, args...) + if err != nil { + return nil, err + } + + var ( + events []query.Event + eventIDs []int64 + + // Keep an index of the event position within the events slice + // to find them later when updating their attributes. 
+ eventIndexes = make(map[int64]int) + ) + + for i := 0; rows.Next(); i++ { + e := query.Event{} + if err := rows.Scan(&e.ID, &e.Index, &e.TXHash, &e.Type, &e.CreatedAt); err != nil { + return nil, errors.Errorf("failed to read event: %w", err) + } + + events = append(events, e) + eventIDs = append(eventIDs, e.ID) + + eventIndexes[e.ID] = i + } + + // Don't query attributes when there are no events + if len(events) == 0 { + return events, nil + } + + // Select the attributes for the events that matched the query + rows, err = db.QueryContext(ctx, sqlSelectEventAttrs, pq.Array(eventIDs)) + if err != nil { + return nil, err + } + + // Update the attributes of the selected events + for rows.Next() { + var ( + eventID int64 + name string + value []byte + ) + + if err := rows.Scan(&eventID, &name, &value); err != nil { + return nil, errors.Errorf("failed to read event attribute: %w", err) + } + + i := eventIndexes[eventID] + events[i].Attributes = append(events[i].Attributes, query.NewAttribute(name, value)) + } + + return events, nil +} + +func (a Adapter) Query(ctx context.Context, q query.Query) (query.Cursor, error) { + db, err := a.getDB() + if err != nil { + return nil, err + } + + sql, err := parseQuery(q) + if err != nil { + return nil, err + } + + args := extractQueryArgs(q) + rows, err := db.QueryContext(ctx, sql, args...) 
+ if err != nil { + return nil, err + } + + return rows, nil +} + +func (a Adapter) getDB() (*sql.DB, error) { + if a.db == nil { + return nil, ErrClosed + } + + return a.db, nil +} + +func createPostgresURI(a Adapter) string { + uri := url.URL{ + Scheme: adapterType, + Host: fmt.Sprintf("%s:%d", a.host, a.port), + Path: a.database, + } + + if a.user != "" { + if a.password != "" { + uri.User = url.UserPassword(a.user, a.password) + } else { + uri.User = url.User(a.user) + } + } + + // Add extra params as query arguments + if a.params != nil { + val := url.Values{} + for k, v := range a.params { + val.Set(k, v) + } + + uri.RawQuery = val.Encode() + } + + return uri.String() +} + +func saveRawTX(ctx context.Context, sqlTx *sql.Tx, rtx *ctypes.ResultTx) error { + hash := rtx.Hash.String() + raw, err := json.Marshal(rtx) + if err != nil { + return errors.Errorf("failed to encode raw TX %s: %w", hash, err) + } + + if _, err := sqlTx.ExecContext(ctx, sqlInsertRawTX, hash, raw); err != nil { + return errors.Errorf("error saving raw TX %s: %w", hash, err) + } + + return nil +} + +func saveTX(ctx context.Context, txStmt, evtStmt, attrStmt *sql.Stmt, tx cosmosclient.TX) error { + hash := tx.Raw.Hash.String() + if _, err := txStmt.ExecContext(ctx, hash, tx.Raw.Index, tx.Raw.Height, tx.BlockTime); err != nil { + return errors.Errorf("error saving TX %s: %w", hash, err) + } + + events, err := tx.GetEvents() + if err != nil { + return err + } + + for i, evt := range events { + var evtID int + + row := evtStmt.QueryRowContext(ctx, hash, evt.Type, i) + if err := row.Err(); err != nil { + return errors.Errorf("error saving event '%s': %w", evt.Type, err) + } + + if err := row.Scan(&evtID); err != nil { + return errors.Errorf("error reading event ID: %w", err) + } + + for _, attr := range evt.Attributes { + if _, err := attrStmt.ExecContext(ctx, evtID, attr.Key, attr.Value); err != nil { + return errors.Errorf("error saving event attr '%s.%s': %w", evt.Type, attr.Key, err) + } + } 
+ } + + return nil +} + +func extractQueryArgs(q query.Query) []any { + // When the query is a call to a postgres function + // add the arguments before the filter values + args := q.Args() + + // Add the values from the filters + for _, f := range q.Filters() { + if a := f.Value(); a != nil { + args = append(args, a) + } + } + + return args +} + +func extractEventQueryArgs(q query.EventQuery) (args []any) { + for _, f := range q.Filters() { + if a := f.Value(); a != nil { + args = append(args, a) + } + } + + return args +} diff --git a/ignite/pkg/cosmostxcollector/adapter/postgres/postgres_test.go b/ignite/pkg/cosmostxcollector/adapter/postgres/postgres_test.go new file mode 100644 index 0000000..56d4b37 --- /dev/null +++ b/ignite/pkg/cosmostxcollector/adapter/postgres/postgres_test.go @@ -0,0 +1,550 @@ +package postgres + +import ( + "context" + "database/sql" + "encoding/hex" + "encoding/json" + "fmt" + "testing" + "testing/fstest" + "time" + + "github.com/DATA-DOG/go-sqlmock" + abci "github.com/cometbft/cometbft/abci/types" + ctypes "github.com/cometbft/cometbft/rpc/core/types" + "github.com/lib/pq" + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosclient" + "github.com/ignite/cli/v29/ignite/pkg/cosmostxcollector/query" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +var ( + eventFields = []string{"id", "index", "tx_hash", "type", "created_at"} + eventAttrFields = []string{"event_id", "name", "value"} +) + +func TestUpdateSchema(t *testing.T) { + // Arrange + db, mock := createMatchEqualSQLMock(t) + defer db.Close() + + ctx := context.Background() + tplSchemaScript := `BEGIN; + INSERT INTO schema(version) + VALUES(%d) + ;%sCOMMIT;` + + // Arrange: Schema files + schemasData := []string{"/* FOO */", "/* BAR */"} + fs := fstest.MapFS{ + "schemas/1.sql": &fstest.MapFile{Data: []byte(schemasData[0])}, + "schemas/2.sql": &fstest.MapFile{Data: []byte(schemasData[1])}, + } + s := NewSchemas(fs, "") + + // Arrange: Prepare 
database adapter + adapter := Adapter{ + db: db, + schemas: s, + } + + // Arrange: Database mock and expectations + mock. + ExpectExec(s.GetTableDDL()). + WillReturnResult( + // DDL execution won't affect any rows or IDs + sqlmock.NewResult(0, 0), + ) + mock. + ExpectQuery(s.GetSchemaVersionSQL()). + WillReturnRows( + // Zero is returned to signal that there are no versions applied. + // When no versions are applied schema walk will start from version 1. + sqlmock.NewRows([]string{"version"}).AddRow(uint64(0)), + ) + + for i, data := range schemasData { + version := i + 1 + script := fmt.Sprintf(tplSchemaScript, version, data) + + // Add database mock and expectation for the current schema version + mock. + ExpectExec(script). + WillReturnResult(sqlmock.NewResult(0, 0)) + } + + // Act + err := adapter.UpdateSchema(ctx, s) + + // Assert + require.NoError(t, err) + require.NoError(t, mock.ExpectationsWereMet()) +} + +func TestSave(t *testing.T) { + // Arrange + db, mock := createMatchEqualSQLMock(t) + defer db.Close() + + adapter := Adapter{db: db} + ctx := context.Background() + hash := "F2564C78071E26643AE9B3E2A19FA0DC10D4D9E873AA0BE808660123F11A1E78" + + // Arrange: A Cosmos client TX to save + evtAttr := abci.EventAttribute{ + Key: "recipient", + Value: "cosmos1crje20aj4gxdtyct7z3knxqry2jqt2fuaey6u5", + } + evt := abci.Event{ + Type: "transfer", + Attributes: []abci.EventAttribute{evtAttr}, + } + + h, _ := hex.DecodeString(hash) // TODO: How to properly generate TX hash for the result? 
+ tx := cosmosclient.TX{ + // Tendermint API search result + Raw: &ctypes.ResultTx{ + Hash: h, + Height: 1, + Index: 0, + TxResult: abci.ExecTxResult{ + Events: []abci.Event{evt}, + }, + }, + } + + // Arrange: JSON of the raw transaction result + jsonResTX, err := json.Marshal(tx.Raw) + require.NoError(t, err) + + // Arrange: Database mock and expectations for prepared SQL statements + mock.ExpectBegin() + + txStmt := mock.ExpectPrepare(` + INSERT INTO tx (hash, index, height, block_time) + VALUES ($1, $2, $3, $4) + `) + evtStmt := mock.ExpectPrepare(` + INSERT INTO event (tx_hash, type, index) + VALUES ($1, $2, $3) RETURNING id + `) + attrStmt := mock.ExpectPrepare(` + INSERT INTO attribute (event_id, name, value) + VALUES ($1, $2, $3) + `) + + // Arrange: Database mock and expectations for INSERT statement executions + insertResult := sqlmock.NewResult(0, 1) + evtIndex := 0 + evtID := int64(1) + jsonEvtAttrValue := []byte(fmt.Sprintf(`"%s"`, evtAttr.Value)) + + mock. + ExpectExec(` + INSERT INTO raw_tx (hash, data) + VALUES ($1, $2) + `). + WithArgs(tx.Raw.Hash.String(), jsonResTX). + WillReturnResult(insertResult) + + txStmt. + ExpectExec(). + WithArgs(hash, tx.Raw.Index, tx.Raw.Height, tx.BlockTime). + WillReturnResult(insertResult) + evtStmt. + ExpectQuery(). + WithArgs(hash, evt.Type, evtIndex). + WillReturnRows( + sqlmock.NewRows([]string{"event_id"}).AddRow(evtID), + ) + attrStmt. + ExpectExec(). + WithArgs(evtID, evtAttr.Key, jsonEvtAttrValue). + WillReturnResult(insertResult) + + mock.ExpectCommit() + + // Act + err = adapter.Save(ctx, []cosmosclient.TX{tx}) + + // Assert + require.NoError(t, err) + require.NoError(t, mock.ExpectationsWereMet()) +} + +func TestGetLatestHeight(t *testing.T) { + // Arrange + db, mock := createMatchEqualSQLMock(t) + defer db.Close() + + adapter := Adapter{db: db} + ctx := context.Background() + + // Arrange: Database mock and expectations + wantHeight := int64(42) + + mock. + ExpectQuery(sqlSelectBlockHeight). 
+ WillReturnRows( + sqlmock.NewRows([]string{"height"}).AddRow(wantHeight), + ) + + // Act + height, err := adapter.GetLatestHeight(ctx) + + // Assert + require.NoError(t, err) + require.Equal(t, wantHeight, height) + require.NoError(t, mock.ExpectationsWereMet()) +} + +func TestQuery(t *testing.T) { + // Arrange + var rowValue string + + db, mock := createMatchEqualSQLMock(t) + defer db.Close() + + adapter := Adapter{db: db} + ctx := context.Background() + + // Arrange: Query + qry := query.New("baz", query.Fields("foo")) + + // Arrange: Database mock and expectations + wantRowValue := "expected" + fields := []string{"foo"} + rows := sqlmock.NewRows(fields).AddRow(wantRowValue) + + mock. + ExpectQuery(` + SELECT DISTINCT foo + FROM baz + WHERE true + LIMIT 30 OFFSET 0 + `). + WillReturnRows(rows) + + // Act + cr, err := adapter.Query(ctx, qry) + if cr.Next() { + err = cr.Scan(&rowValue) + require.NoError(t, err) + } + + // Assert + require.NoError(t, err, "expected no query errors on execution") + require.Equal(t, wantRowValue, rowValue) + require.NoError(t, mock.ExpectationsWereMet()) +} + +func TestQueryCursor(t *testing.T) { + // Arrange + var ( + rowValue string + cursorNextSucceeded bool + err error + ) + + db, mock := createMatchEqualSQLMock(t) + defer db.Close() + + adapter := Adapter{db: db} + ctx := context.Background() + + // Arrange: Query + qry := query.New("baz", query.Fields("foo")) + + // Arrange: Database mock and expectations + wantRowValue := "expected" + fields := []string{"foo"} + rows := sqlmock.NewRows(fields).AddRow(wantRowValue) + + mock. + ExpectQuery(` + SELECT DISTINCT foo + FROM baz + WHERE true + LIMIT 30 OFFSET 0 + `). 
+ WillReturnRows(rows) + + // Act + cr, _ := adapter.Query(ctx, qry) + if cursorNextSucceeded = cr.Next(); cursorNextSucceeded { + err = cr.Scan(&rowValue) + } + + // Assert + require.True(t, cursorNextSucceeded, "expected cursor.Next() to succeed") + require.NoError(t, err, "expected no scan errors on execution") + require.Equal(t, wantRowValue, rowValue) + require.NoError(t, mock.ExpectationsWereMet()) +} + +func TestQueryWithFilter(t *testing.T) { + // Arrange + db, mock := createMatchEqualSQLMock(t) + defer db.Close() + + adapter := Adapter{db: db} + ctx := context.Background() + + // Arrange: Query + wantArg := "bar" + + qry := query.New( + "baz", + query.Fields("foo"), + query.WithFilters( + NewFilter("foo", wantArg), + ), + ) + + // Arrange: Database mock and expectations + fields := []string{"baz"} + rows := sqlmock.NewRows(fields) + + mock. + ExpectQuery(` + SELECT DISTINCT foo + FROM baz + WHERE foo = $1 + LIMIT 30 OFFSET 0 + `). + WithArgs(wantArg). + WillReturnRows(rows) + + // Act + _, err := adapter.Query(ctx, qry) + + // Assert + require.NoError(t, err, "expected no query errors on execution") + require.NoError(t, mock.ExpectationsWereMet()) +} + +func TestQueryError(t *testing.T) { + // Arrange + db, mock := createMatchEqualSQLMock(t) + defer db.Close() + + adapter := Adapter{db: db} + ctx := context.Background() + + // Arrange: Query + qry := query.New("baz", query.Fields("foo"), query.WithoutPaging()) + + // Arrange: Database mock and expectations + wantErr := errors.New("expected error") + + mock. + ExpectQuery("SELECT DISTINCT foo FROM baz WHERE true"). 
+ WillReturnError(wantErr) + + // Act + _, err := adapter.Query(ctx, qry) + + // Assert + require.Equal(t, wantErr, err) + require.NoError(t, mock.ExpectationsWereMet()) +} + +func TestQueryRowError(t *testing.T) { + // Arrange + db, mock := createMatchEqualSQLMock(t) + defer db.Close() + + adapter := Adapter{db: db} + ctx := context.Background() + cols := []string{"name"} + + // Arrange: Query + qry := query.New("baz", query.Fields(cols[0]), query.WithoutPaging()) + + // Arrange: Database mock and expectations + wantErr := errors.New("expected error") + + row := sqlmock. + NewRows(cols). + AddRow("foo"). + RowError(0, wantErr) + + mock. + ExpectQuery("SELECT DISTINCT name FROM baz WHERE true"). + WillReturnRows(row) + + // Act + cr, err := adapter.Query(ctx, qry) + + // Assert + require.NoError(t, err, "expected no query errors on execution") + require.False(t, cr.Next(), "expected cursor.Next() to fail") + require.Equal(t, wantErr, cr.Err()) + require.NoError(t, mock.ExpectationsWereMet()) +} + +func TestEventQuery(t *testing.T) { + // Arrange + db, mock := createMatchEqualSQLMock(t) + defer db.Close() + + adapter := Adapter{db: db} + ctx := context.Background() + + // Arrange: Database mocks + attrName := "foo" + attrValue := []byte("42") + event := query.Event{ + ID: 1, + TXHash: "ABC123", + Index: 0, + Type: "test", + Attributes: []query.Attribute{ + query.NewAttribute(attrName, attrValue), + }, + CreatedAt: time.Now(), + } + + eventRows := sqlmock. + NewRows(eventFields). + AddRow(event.ID, event.Index, event.TXHash, event.Type, event.CreatedAt) + eventAttrRows := sqlmock. + NewRows(eventAttrFields). + AddRow(event.ID, attrName, attrValue) + + mock. + ExpectQuery(` + SELECT event.id, event.index, event.tx_hash, event.type, event.created_at + FROM event INNER JOIN tx ON event.tx_hash = tx.hash + WHERE true + ORDER BY tx.height, tx.index, event.index + LIMIT 30 OFFSET 0 + `). + WillReturnRows(eventRows) + mock. 
+ ExpectQuery(` + SELECT event_id, name, value FROM attribute + WHERE event_id = ANY($1) + ORDER BY event_id + `). + WillReturnRows(eventAttrRows). + WithArgs(pq.Array([]int64{event.ID})) + + // Arrange: Expectations + wantEvents := []query.Event{event} + + // Arrange: Query + qry := query.NewEventQuery() + + // Act + events, err := adapter.QueryEvents(ctx, qry) + + // Assert + require.NoError(t, err, "expected no query errors on execution") + require.Len(t, events, 1) + require.Equal(t, wantEvents, events) + require.NoError(t, mock.ExpectationsWereMet()) +} + +func TestEventQueryWithFilters(t *testing.T) { + // Arrange + db, mock := createMatchEqualSQLMock(t) + defer db.Close() + + adapter := Adapter{db: db} + ctx := context.Background() + + // Arrange: Database mocks + indexValue := 2 + typeValue := "chain.test.Test" + hashValues := []string{"HASH1", "HASH2"} + eventRows := sqlmock.NewRows(eventFields) + + mock. + ExpectQuery(` + SELECT event.id, event.index, event.tx_hash, event.type, event.created_at + FROM event INNER JOIN tx ON event.tx_hash = tx.hash + WHERE event.index = $1 AND event.type = $2 AND event.tx_hash = ANY($3) + ORDER BY tx.height, tx.index, event.index + LIMIT 30 OFFSET 0 + `). + WillReturnRows(eventRows). 
+ WithArgs(indexValue, typeValue, pq.Array(hashValues)) + + // Arrange: Query + qry := query.NewEventQuery( + query.WithFilters( + NewFilter("event.index", indexValue), + FilterByEventType(typeValue), + FilterByEventTXs(hashValues...), + ), + ) + + // Act + events, err := adapter.QueryEvents(ctx, qry) + + // Assert + require.NoError(t, err, "expected no query errors on execution") + require.Len(t, events, 0) + require.NoError(t, mock.ExpectationsWereMet()) +} + +func TestEventQueryWithEventAttrFilters(t *testing.T) { + // Arrange + db, mock := createMatchEqualSQLMock(t) + defer db.Close() + + adapter := Adapter{db: db} + ctx := context.Background() + + // Arrange: Database mocks + attrNameValue := "foo" + attrValue := int64(42) + eventRows := sqlmock.NewRows(eventFields) + + mock. + ExpectQuery(` + SELECT DISTINCT events.* + FROM ( + SELECT event.id, event.index, event.tx_hash, event.type, event.created_at + FROM event + INNER JOIN tx ON event.tx_hash = tx.hash + INNER JOIN attribute ON event.id = attribute.event_id + WHERE attribute.name = $1 AND attribute.name = $2 AND attribute.value::numeric = $3 + ORDER BY tx.height, tx.index, event.index + ) AS events + LIMIT 30 OFFSET 0 + `). + WillReturnRows(eventRows). 
+ WithArgs(attrNameValue, attrNameValue, attrValue) + + // Arrange: Query + qry := query.NewEventQuery( + query.WithFilters( + NewFilter("attribute.name", attrNameValue), + FilterByEventAttrName(attrNameValue), + FilterByEventAttrValueInt(attrValue), + ), + ) + + // Act + events, err := adapter.QueryEvents(ctx, qry) + + // Assert + require.NoError(t, err, "expected no query errors on execution") + require.Len(t, events, 0) + require.NoError(t, mock.ExpectationsWereMet()) +} + +func createMatchEqualSQLMock(t *testing.T) (*sql.DB, sqlmock.Sqlmock) { + t.Helper() + db, mock, err := sqlmock.New( + sqlmock.QueryMatcherOption(sqlmock.QueryMatcherEqual), + ) + require.NoError(t, err) + + return db, mock +} diff --git a/ignite/pkg/cosmostxcollector/adapter/postgres/schemas.go b/ignite/pkg/cosmostxcollector/adapter/postgres/schemas.go new file mode 100644 index 0000000..d2db476 --- /dev/null +++ b/ignite/pkg/cosmostxcollector/adapter/postgres/schemas.go @@ -0,0 +1,203 @@ +package postgres + +import ( + "bytes" + "fmt" + "io/fs" + "path/filepath" + "sort" + "strconv" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// SchemasDir defines the name for the embedded schema directory. +const SchemasDir = "schemas" + +const ( + defaultSchemasTableName = "schema" + + sqlBeginTX = "BEGIN" + sqlCommitTX = "COMMIT" + sqlCommandSuffix = ";" + + tplSchemaInsertSQL = ` + INSERT INTO %s(version) + VALUES(%d) + ` + tplSchemaTableDDL = ` + CREATE TABLE IF NOT EXISTS %[1]v ( + version SMALLINT NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + + CONSTRAINT %[1]v_pk PRIMARY KEY (version) + ) + ` + tplSchemaVersionSQL = ` + SELECT COALESCE(MAX(version), 0) + FROM %s + ` +) + +// SchemasWalkFunc is the type of the function called by WalkFrom. +type SchemasWalkFunc func(version uint64, script []byte) error + +// NewSchemas creates a new embedded SQL schema manager. +// The embedded FS is used to iterate the schema files. 
+// By default, the applied schema versions are stored in the "schema" +// table but the name can have a prefix namespace when different +// packages are storing the schemas in the same database. +func NewSchemas(fs fs.FS, namespace string) Schemas { + tableName := defaultSchemasTableName + if namespace != "" { + tableName = fmt.Sprintf("%s_%s", namespace, tableName) + } + + return Schemas{tableName, fs} +} + +// Schemas defines a type to manage versioning of embedded SQL schemas. +// Each schema file must live inside the embedded schemas directory and the name +// of each schema file must be numeric, where the number represents the version. +type Schemas struct { + tableName string + fs fs.FS +} + +// GetTableDDL returns the DDL to create the schemas table. +func (s Schemas) GetTableDDL() string { + return fmt.Sprintf(tplSchemaTableDDL, s.tableName) +} + +// GetSchemaVersionSQL returns the SQL query to get the current schema version. +func (s Schemas) GetSchemaVersionSQL() string { + return fmt.Sprintf(tplSchemaVersionSQL, s.tableName) +} + +// WalkFrom calls a function for SQL schemas starting from a specific version. +// This is useful to apply newer schemas that are not yet applied. 
+func (s Schemas) WalkFrom(fromVersion uint64, fn SchemasWalkFunc) error { + // Stores schema file paths by version + paths := map[uint64]string{} + + // Index the paths to the schemas with the matching versions + err := fs.WalkDir(s.fs, SchemasDir, func(path string, _ fs.DirEntry, err error) error { + if err != nil { + return errors.Errorf("failed to read schema %s: %w", path, err) + } + + if path == SchemasDir { + return nil + } + + // Extract the schema file version from the file name + version := extractSchemaVersion(path) + if version == 0 { + return errors.Errorf("invalid schema file name '%s'", path) + } + + if fromVersion <= version { + paths[version] = path + } + + return nil + }) + if err != nil { + return err + } + + if len(paths) == 0 { + return nil + } + + for _, ver := range sortedSchemaVersions(paths) { + p := paths[ver] + + // Read the SQL script from the schema file + script, err := fs.ReadFile(s.fs, p) + if err != nil { + return errors.Errorf("failed to read schema '%s': %w", p, err) + } + + // Create the SQL script to change the schema to the + // current version within a single transaction + b := ScriptBuilder{} + b.BeginTX() + b.AppendCommand(s.getSchemaVersionInsertSQL(ver)) + b.AppendScript(script) + b.CommitTX() + + if err := fn(ver, b.Bytes()); err != nil { + return err + } + } + + return nil +} + +func (s Schemas) getSchemaVersionInsertSQL(version uint64) string { + return fmt.Sprintf(tplSchemaInsertSQL, s.tableName, version) +} + +// ScriptBuilder builds database DDL/SQL scripts that execute multiple commands. +type ScriptBuilder struct { + buf bytes.Buffer +} + +// BeginTX appends a command to start a database transaction. +func (b *ScriptBuilder) BeginTX() { + b.AppendCommand(sqlBeginTX) +} + +// CommitTX appends a command to commit a database transaction. +func (b *ScriptBuilder) CommitTX() { + b.AppendCommand(sqlCommitTX) +} + +// AppendCommand appends a command to the script. 
+func (b *ScriptBuilder) AppendCommand(cmd string) { + if strings.HasSuffix(cmd, sqlCommandSuffix) { + b.buf.WriteString(cmd) + } else { + b.buf.WriteString(cmd + sqlCommandSuffix) + } +} + +// AppendScript appends a database DDL/SQL script. +func (b *ScriptBuilder) AppendScript(s []byte) { + b.buf.Write(s) +} + +// Bytes returns the whole script as bytes. +func (b *ScriptBuilder) Bytes() []byte { + return b.buf.Bytes() +} + +func extractSchemaVersion(fileName string) uint64 { + name := strings.TrimSuffix( + filepath.Base(fileName), + filepath.Ext(fileName), + ) + + // The names of the schema files MUST be numeric + version, err := strconv.ParseUint(name, 10, 0) + if err != nil { + return 0 + } + + return version +} + +func sortedSchemaVersions(paths map[uint64]string) []uint64 { + versions := make([]uint64, 0, len(paths)) + for ver := range paths { + versions = append(versions, ver) + } + + sort.Slice(versions, func(i, j int) bool { + return versions[i] < versions[j] + }) + + return versions +} diff --git a/ignite/pkg/cosmostxcollector/adapter/postgres/schemas/1.sql b/ignite/pkg/cosmostxcollector/adapter/postgres/schemas/1.sql new file mode 100644 index 0000000..4b38b11 --- /dev/null +++ b/ignite/pkg/cosmostxcollector/adapter/postgres/schemas/1.sql @@ -0,0 +1,46 @@ +CREATE TABLE tx ( + hash CHAR(64) NOT NULL, + "index" BIGINT NOT NULL, + height BIGINT NOT NULL, + block_time TIMESTAMP NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + + CONSTRAINT tx_pk PRIMARY KEY (hash) +); + +CREATE INDEX tx_height_idx ON tx (height); + +CREATE SEQUENCE event_id_seq AS INTEGER; + +CREATE TABLE event ( + id INTEGER NOT NULL DEFAULT nextval('event_id_seq'), + tx_hash CHAR(64) NOT NULL, + "type" VARCHAR NOT NULL, + "index" SMALLINT NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + + CONSTRAINT event_pk PRIMARY KEY (id), + CONSTRAINT event_tx_fk FOREIGN KEY (tx_hash) REFERENCES tx (hash) ON DELETE CASCADE +); + +ALTER SEQUENCE event_id_seq OWNED BY event.id; + 
+CREATE INDEX event_type_idx ON event ("type"); + +CREATE TABLE attribute ( + event_id INTEGER NOT NULL, + name VARCHAR NOT NULL, + value JSONB NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + + CONSTRAINT attribute_pk PRIMARY KEY (event_id, name), + CONSTRAINT attribute_event_fk FOREIGN KEY (event_id) REFERENCES event (id) ON DELETE CASCADE +); + +CREATE TABLE raw_tx ( + hash CHAR(64) NOT NULL, + data TEXT NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + + CONSTRAINT raw_tx_pk PRIMARY KEY (hash) +); diff --git a/ignite/pkg/cosmostxcollector/adapter/postgres/schemas_test.go b/ignite/pkg/cosmostxcollector/adapter/postgres/schemas_test.go new file mode 100644 index 0000000..9d1ec17 --- /dev/null +++ b/ignite/pkg/cosmostxcollector/adapter/postgres/schemas_test.go @@ -0,0 +1,128 @@ +package postgres_test + +import ( + "bytes" + "fmt" + "testing" + "testing/fstest" + + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cosmostxcollector/adapter/postgres" +) + +func TestSchemasWalk(t *testing.T) { + // Arrange: Scripts data by version + data := map[uint]string{ + 1: "/* TEST-V1 */", + 2: "/* TEST-V2 */", + } + + // Arrange: Script argument matchers + matchByDataV1 := mock.MatchedBy(func(script []byte) bool { + return bytes.Contains(script, []byte(data[1])) + }) + matchByDataV2 := mock.MatchedBy(func(script []byte) bool { + return bytes.Contains(script, []byte(data[2])) + }) + + // Prepare the walk function mock + m := mock.Mock{} + m.Test(t) + m.On("fn", uint64(1), matchByDataV1).Return(nil) + m.On("fn", uint64(2), matchByDataV2).Return(nil) + + fn := func(version uint64, script []byte) error { + return m.MethodCalled("fn", version, script).Error(0) + } + + // Arrange: A new schema that contains SQL scripts for three versions + fs := fstest.MapFS{ + "schemas/1.sql": &fstest.MapFile{Data: []byte(data[1])}, + "schemas/2.sql": &fstest.MapFile{Data: []byte(data[2])}, + } + s := 
postgres.NewSchemas(fs, "") + + // Act + err := s.WalkFrom(1, fn) + + // Assert + require.NoError(t, err) + m.AssertExpectations(t) +} + +func TestSchemasWalkOrder(t *testing.T) { + // Arrange: Scripts data by version + data := map[uint]string{ + 1: "/* TEST-V1 */", + 2: "/* TEST-V2 */", + 10: "/* TEST-V10 */", + } + + // Arrange: Script argument matchers + matchByDataV1 := mock.MatchedBy(func(script []byte) bool { + return bytes.Contains(script, []byte(data[1])) + }) + matchByDataV2 := mock.MatchedBy(func(script []byte) bool { + return bytes.Contains(script, []byte(data[2])) + }) + matchByDataV10 := mock.MatchedBy(func(script []byte) bool { + return bytes.Contains(script, []byte(data[10])) + }) + + // Arrange: Walk function mock + m := mock.Mock{} + m.Test(t) + m.On("fn", uint64(1), matchByDataV1).Return(nil) + m.On("fn", uint64(2), matchByDataV2).Return(nil) + m.On("fn", uint64(10), matchByDataV10).Return(nil) + + var versions []uint64 + + fn := func(ver uint64, script []byte) error { + versions = append(versions, ver) + + return m.MethodCalled("fn", ver, script).Error(0) + } + + // Arrange: A new schema that contains SQL scripts for three versions + fs := fstest.MapFS{ + "schemas/1.sql": &fstest.MapFile{Data: []byte(data[1])}, + "schemas/2.sql": &fstest.MapFile{Data: []byte(data[2])}, + "schemas/10.sql": &fstest.MapFile{Data: []byte(data[10])}, + } + s := postgres.NewSchemas(fs, "") + + // Act + err := s.WalkFrom(1, fn) + + // Assert + require.NoError(t, err) + require.IsIncreasing(t, versions) + m.AssertExpectations(t) +} + +func TestScriptBuilder(t *testing.T) { + // Arrange + s1 := "SCRIPT-1;" + s2 := "SCRIPT-2;" + c1 := "COMMAND-1" + c2 := "COMMAND-2" + + b := postgres.ScriptBuilder{} + b.BeginTX() + b.AppendScript([]byte(s1)) + b.AppendScript([]byte(s2)) + b.AppendCommand(c1) + b.AppendCommand(c2) + b.CommitTX() + + want := fmt.Sprintf("BEGIN;%s%s%s;%s;COMMIT;", s1, s2, c1, c2) + + // Act + script := b.Bytes() + + // Assert + require.EqualValues(t, 
[]byte(want), script) +} diff --git a/ignite/pkg/cosmostxcollector/collector.go b/ignite/pkg/cosmostxcollector/collector.go new file mode 100644 index 0000000..ce09294 --- /dev/null +++ b/ignite/pkg/cosmostxcollector/collector.go @@ -0,0 +1,58 @@ +package cosmostxcollector + +import ( + "context" + + "golang.org/x/sync/errgroup" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosclient" + "github.com/ignite/cli/v29/ignite/pkg/cosmostxcollector/adapter" +) + +// TXsCollector defines the interface for Cosmos clients that support collection of transactions. +// +//go:generate mockery --name TXsCollector --filename txs_collector.go --with-expecter +type TXsCollector interface { + CollectTXs(ctx context.Context, fromHeight int64, tc chan<- []cosmosclient.TX) error +} + +// New creates a new Cosmos transaction collector. +func New(db adapter.Saver, client TXsCollector) Collector { + return Collector{db, client} +} + +// Collector defines a type to collect and save Cosmos transactions in a data backend. +type Collector struct { + db adapter.Saver + client TXsCollector +} + +// Collect gathers transactions for all blocks starting from a specific height. +// Each group of block transactions is saved sequentially after being collected. +func (c Collector) Collect(ctx context.Context, fromHeight int64) error { + tc := make(chan []cosmosclient.TX) + wg, ctx := errgroup.WithContext(ctx) + + // Start collecting block transactions. + // The transactions channel is closed by the client when all transactions + // are collected or when an error occurs during the collection. + wg.Go(func() error { + return c.client.CollectTXs(ctx, fromHeight, tc) + }) + + // The transactions for each block are saved in "bulks" so they are not + // kept in memory. Also, they are saved sequentially to avoid block height + // gaps that can occur if a group of transactions from a previous block + // fail to be saved. 
+ wg.Go(func() error { + for txs := range tc { + if err := c.db.Save(ctx, txs); err != nil { + return err + } + } + + return nil + }) + + return wg.Wait() +} diff --git a/ignite/pkg/cosmostxcollector/collector_test.go b/ignite/pkg/cosmostxcollector/collector_test.go new file mode 100644 index 0000000..f938e35 --- /dev/null +++ b/ignite/pkg/cosmostxcollector/collector_test.go @@ -0,0 +1,135 @@ +package cosmostxcollector_test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosclient" + "github.com/ignite/cli/v29/ignite/pkg/cosmostxcollector" + "github.com/ignite/cli/v29/ignite/pkg/cosmostxcollector/mocks" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func TestCollector(t *testing.T) { + // Arrange + var ( + savedTXs [][]cosmosclient.TX + + fromHeight int64 = 1 + ) + + txs := [][]cosmosclient.TX{{}, {}} + + client := mocks.NewTXsCollector(t) + client.EXPECT(). + CollectTXs( + mock.Anything, + fromHeight, + mock.AnythingOfType("chan<- []cosmosclient.TX"), + ). + Run(func(_ context.Context, _ int64, tc chan<- []cosmosclient.TX) { + defer close(tc) + + // Send the collected block transactions + tc <- txs[0] + tc <- txs[1] + }). + Return(nil). + Times(1) + + db := mocks.NewSaver(t) + db.EXPECT(). + Save( + mock.Anything, + mock.AnythingOfType("[]cosmosclient.TX"), + ). + Run(func(_ context.Context, txs []cosmosclient.TX) { + // Save the transactions + savedTXs = append(savedTXs, txs) + }). + Return(nil). + Times(2) + + c := cosmostxcollector.New(db, client) + ctx := context.Background() + + // Act + err := c.Collect(ctx, fromHeight) + + // Assert + require.NoError(t, err) + require.Equal(t, savedTXs, txs) +} + +func TestCollectorWithCollectError(t *testing.T) { + // Arrange + wantErr := errors.New("expected error") + + client := mocks.NewTXsCollector(t) + client.EXPECT(). 
+ CollectTXs( + mock.Anything, + mock.AnythingOfType("int64"), + mock.AnythingOfType("chan<- []cosmosclient.TX"), + ). + Run(func(_ context.Context, _ int64, tc chan<- []cosmosclient.TX) { + close(tc) + }). + Return(wantErr). + Times(1) + + db := mocks.NewSaver(t) + c := cosmostxcollector.New(db, client) + ctx := context.Background() + + // Act + err := c.Collect(ctx, 1) + + // Assert + require.ErrorIs(t, err, wantErr) + + db.AssertNotCalled(t, "Save", mock.Anything, mock.AnythingOfType("[]cosmosclient.TX")) +} + +func TestCollectorWithSaveError(t *testing.T) { + // Arrange + wantErr := errors.New("expected error") + var txs []cosmosclient.TX + + client := mocks.NewTXsCollector(t) + client.EXPECT(). + CollectTXs( + mock.Anything, + mock.AnythingOfType("int64"), + mock.AnythingOfType("chan<- []cosmosclient.TX"), + ). + Run(func(_ context.Context, _ int64, tc chan<- []cosmosclient.TX) { + defer close(tc) + + // Send the collected block transactions + tc <- txs + }). + Return(nil). + Times(1) + + db := mocks.NewSaver(t) + db.EXPECT(). + Save( + mock.Anything, + mock.AnythingOfType("[]cosmosclient.TX"), + ). + Return(wantErr). + Times(1) + + c := cosmostxcollector.New(db, client) + ctx := context.Background() + + // Act + err := c.Collect(ctx, 1) + + // Assert + require.ErrorIs(t, err, wantErr) +} diff --git a/ignite/pkg/cosmostxcollector/mocks/saver.go b/ignite/pkg/cosmostxcollector/mocks/saver.go new file mode 100644 index 0000000..aa3acd7 --- /dev/null +++ b/ignite/pkg/cosmostxcollector/mocks/saver.go @@ -0,0 +1,85 @@ +// Code generated by mockery v2.53.3. DO NOT EDIT. 
package mocks

import (
	context "context"

	mock "github.com/stretchr/testify/mock"

	cosmosclient "github.com/ignite/cli/v29/ignite/pkg/cosmosclient"
)

// Saver is an autogenerated mock type for the Saver type
type Saver struct {
	mock.Mock
}

// Saver_Expecter provides a fluent, type-safe API for recording expectations.
type Saver_Expecter struct {
	mock *mock.Mock
}

// EXPECT returns an expecter that records typed expectations on the mock.
func (_m *Saver) EXPECT() *Saver_Expecter {
	return &Saver_Expecter{mock: &_m.Mock}
}

// Save provides a mock function with given fields: _a0, _a1
func (_m *Saver) Save(_a0 context.Context, _a1 []cosmosclient.TX) error {
	ret := _m.Called(_a0, _a1)

	if len(ret) == 0 {
		panic("no return value specified for Save")
	}

	var r0 error
	if rf, ok := ret.Get(0).(func(context.Context, []cosmosclient.TX) error); ok {
		r0 = rf(_a0, _a1)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}

// Saver_Save_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Save'
type Saver_Save_Call struct {
	*mock.Call
}

// Save is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 []cosmosclient.TX
func (_e *Saver_Expecter) Save(_a0 interface{}, _a1 interface{}) *Saver_Save_Call {
	return &Saver_Save_Call{Call: _e.mock.On("Save", _a0, _a1)}
}

// Run registers a callback invoked with the typed call arguments.
func (_c *Saver_Save_Call) Run(run func(_a0 context.Context, _a1 []cosmosclient.TX)) *Saver_Save_Call {
	_c.Call.Run(func(args mock.Arguments) {
		run(args[0].(context.Context), args[1].([]cosmosclient.TX))
	})
	return _c
}

// Return sets the error value returned by Save.
func (_c *Saver_Save_Call) Return(_a0 error) *Saver_Save_Call {
	_c.Call.Return(_a0)
	return _c
}

// RunAndReturn derives the return value from the typed callback.
func (_c *Saver_Save_Call) RunAndReturn(run func(context.Context, []cosmosclient.TX) error) *Saver_Save_Call {
	_c.Call.Return(run)
	return _c
}

// NewSaver creates a new instance of Saver. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func NewSaver(t interface {
	mock.TestingT
	Cleanup(func())
}) *Saver {
	mock := &Saver{}
	mock.Mock.Test(t)

	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}

// --- ignite/pkg/cosmostxcollector/mocks/txs_collector.go ---
// Code generated by mockery v2.53.3. DO NOT EDIT.

package mocks

import (
	context "context"

	cosmosclient "github.com/ignite/cli/v29/ignite/pkg/cosmosclient"

	mock "github.com/stretchr/testify/mock"
)

// TXsCollector is an autogenerated mock type for the TXsCollector type
type TXsCollector struct {
	mock.Mock
}

// TXsCollector_Expecter provides a fluent, type-safe API for recording expectations.
type TXsCollector_Expecter struct {
	mock *mock.Mock
}

// EXPECT returns an expecter that records typed expectations on the mock.
func (_m *TXsCollector) EXPECT() *TXsCollector_Expecter {
	return &TXsCollector_Expecter{mock: &_m.Mock}
}

// CollectTXs provides a mock function with given fields: ctx, fromHeight, tc
func (_m *TXsCollector) CollectTXs(ctx context.Context, fromHeight int64, tc chan<- []cosmosclient.TX) error {
	ret := _m.Called(ctx, fromHeight, tc)

	if len(ret) == 0 {
		panic("no return value specified for CollectTXs")
	}

	var r0 error
	if rf, ok := ret.Get(0).(func(context.Context, int64, chan<- []cosmosclient.TX) error); ok {
		r0 = rf(ctx, fromHeight, tc)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}

// TXsCollector_CollectTXs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CollectTXs'
type TXsCollector_CollectTXs_Call struct {
	*mock.Call
}

// CollectTXs is a helper method to define mock.On call
// - ctx context.Context
// - fromHeight int64
// - tc chan<- []cosmosclient.TX
func (_e *TXsCollector_Expecter) CollectTXs(ctx interface{}, fromHeight interface{}, tc interface{}) *TXsCollector_CollectTXs_Call {
	return &TXsCollector_CollectTXs_Call{Call: _e.mock.On("CollectTXs", ctx, fromHeight, tc)}
}

// Run registers a callback invoked with the typed call arguments.
func (_c *TXsCollector_CollectTXs_Call) Run(run func(ctx context.Context, fromHeight int64, tc chan<- []cosmosclient.TX)) *TXsCollector_CollectTXs_Call {
	_c.Call.Run(func(args mock.Arguments) {
		run(args[0].(context.Context), args[1].(int64), args[2].(chan<- []cosmosclient.TX))
	})
	return _c
}

// Return sets the error value returned by CollectTXs.
func (_c *TXsCollector_CollectTXs_Call) Return(_a0 error) *TXsCollector_CollectTXs_Call {
	_c.Call.Return(_a0)
	return _c
}

// RunAndReturn derives the return value from the typed callback.
func (_c *TXsCollector_CollectTXs_Call) RunAndReturn(run func(context.Context, int64, chan<- []cosmosclient.TX) error) *TXsCollector_CollectTXs_Call {
	_c.Call.Return(run)
	return _c
}

// NewTXsCollector creates a new instance of TXsCollector. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func NewTXsCollector(t interface {
	mock.TestingT
	Cleanup(func())
}) *TXsCollector {
	mock := &TXsCollector{}
	mock.Mock.Test(t)

	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}

// --- ignite/pkg/cosmostxcollector/query/cursor.go ---

package query

// Cursor defines a cursor to iterate query results.
// It mirrors the database/sql Rows iteration contract.
type Cursor interface {
	// Err returns the last error seen by the Cursor, or nil if no error has occurred.
	Err() error

	// Next prepares the next result to be read with the Scan method.
	// It returns true on success, or false if there is no next result row or an error
	// happened while preparing it. Err should be consulted to distinguish between the
	// two cases.
	//
	// Every call to Scan, even the first one, must be preceded by a call to Next.
	Next() bool

	// Scan copies the query row into the pointed values.
	Scan(values ...any) error

	// Close closes the cursor preventing further iterations.
	// If Next is called and returns false and there are no further result sets,
	// the cursor is closed automatically and it will suffice to check the result of Err.
	// This method is idempotent meaning that after the first call, any subsequent calls
	// will not change the state.
	Close() error
}

// --- ignite/pkg/cosmostxcollector/query/event.go ---

package query

import (
	"encoding/json"
	"time"
)

// Event defines a transaction event.
type Event struct {
	ID         int64       // backend-assigned event identifier
	TXHash     string      // hash of the transaction that emitted the event
	Index      uint64      // position of the event within the transaction
	Type       string      // event type name
	Attributes []Attribute // event attributes, values JSON-encoded
	CreatedAt  time.Time   // creation timestamp recorded by the backend
}

// NewAttribute creates a new transaction event attribute.
// The value is expected to be JSON-encoded (see Attribute.Value).
func NewAttribute(name string, value []byte) Attribute {
	return Attribute{
		value: value,
		Name:  name,
	}
}

// Attribute defines a transaction event attribute.
type Attribute struct {
	// value holds the raw JSON-encoded attribute payload.
	value []byte

	Name string
}

// Value returns the attribute value.
// Event attribute values are originally encoded as JSON.
// This method decodes the event value into its Go representation.
// A nil payload yields (nil, nil) instead of a JSON decoding error.
func (a Attribute) Value() (v any, err error) {
	if a.value == nil {
		return
	}

	err = json.Unmarshal(a.value, &v)
	return
}

// NewEventQuery creates a new query that selects events.
func NewEventQuery(options ...Option) EventQuery {
	return New("event", options...)
}
+type EventQuery interface { + Pager + Filterer +} diff --git a/ignite/pkg/cosmostxcollector/query/query.go b/ignite/pkg/cosmostxcollector/query/query.go new file mode 100644 index 0000000..9f03219 --- /dev/null +++ b/ignite/pkg/cosmostxcollector/query/query.go @@ -0,0 +1,199 @@ +package query + +import "fmt" + +const ( + // DefaultPageSize defines the default number of results to select per page. + DefaultPageSize = 30 + + SortOrderAsc = "asc" + SortOrderDesc = "desc" +) + +// Pager describes support for paging query results. +type Pager interface { + // PageSize returns the size for each query result set. + PageSize() uint32 + + // AtPage returns the result set page to query. + AtPage() uint32 + + // IsPagingEnabled checks if the query results should be paginated. + IsPagingEnabled() bool +} + +// SortBy contains info on how to sort query results. +type SortBy struct { + Field string + Order string +} + +// Filter describes a filter to apply to a query. +type Filter interface { + fmt.Stringer + + // Field returns the name of the filtered field. + Field() string + + // Value returns the value to use for filtering. + Value() any +} + +// Filterer describes support for filtering query results. +type Filterer interface { + // Filters returns the list of filters to apply to the query. + Filters() []Filter +} + +type queryOptions struct { + pageSize uint32 + atPage uint32 + args []any + fields []string + sortBy []SortBy + filters []Filter +} + +// Option configures queries. +type Option func(*Query) + +// AtPage assigns a page to select. +// Pages start from page one, so assigning page zero selects the first page. +func AtPage(page uint32) Option { + return func(q *Query) { + if page == 0 { + q.options.atPage = 1 + } else { + q.options.atPage = page + } + } +} + +// WithPageSize assigns the number of results to select per page. +// The default page size is used when size zero is assigned. 
+func WithPageSize(size uint32) Option { + return func(q *Query) { + if size == 0 { + q.options.pageSize = DefaultPageSize + } else { + q.options.pageSize = size + } + } +} + +// WithoutPaging disables the paging of results. +// All results are selected when paging is disabled. +func WithoutPaging() Option { + return func(q *Query) { + q.options.pageSize = 0 + } +} + +// WithFilters adds one or more filters to apply to the query. +func WithFilters(f ...Filter) Option { + return func(q *Query) { + q.options.filters = f + } +} + +// WithArgs adds one or more arguments to the query. +func WithArgs(args ...any) Option { + return func(q *Query) { + q.options.args = args + } +} + +// Fields assigns the field names to query. +// The default is to select all fields. +func Fields(fields ...string) Option { + return func(q *Query) { + q.options.fields = fields + } +} + +// SortByFields orders the query by one or more fields. +// Use `WithSortBy` option when multiple order by directions are needed. +func SortByFields(order string, fields ...string) Option { + return func(q *Query) { + for _, f := range fields { + q.options.sortBy = append(q.options.sortBy, SortBy{ + Field: f, + Order: order, + }) + } + } +} + +// WithSortBy orders the query by one or more fields. +func WithSortBy(o ...SortBy) Option { + return func(q *Query) { + q.options.sortBy = append(q.options.sortBy, o...) + } +} + +// New creates a new query that selects results from an entity. +// The name is the name of an entity which depending on the data backend have +// different meanings. In a relational database the name should be a table, +// function or view, while in a NoSQL database it should be a collection. +func New(name string, options ...Option) Query { + q := Query{ + name: name, + options: queryOptions{ + pageSize: DefaultPageSize, + atPage: 1, + }, + } + + for _, apply := range options { + apply(&q) + } + + return q +} + +// Query describes how to select values from a data backend. 
+type Query struct { + name string + options queryOptions +} + +// Name returns the name of the database table, collection, view or function to select. +func (q Query) Name() string { + return q.name +} + +// Args returns the arguments for query. +// Arguments are used when the query calls a function in the data backend. +func (q Query) Args() []any { + return q.options.args +} + +// Fields returns list of field names to select. +func (q Query) Fields() []string { + return q.options.fields +} + +// SortBy returns the sort info for the query. +func (q Query) SortBy() []SortBy { + return q.options.sortBy +} + +// PageSize returns the size for each query result set. +func (q Query) PageSize() uint32 { + return q.options.pageSize +} + +// AtPage returns the result set page to query. +func (q Query) AtPage() uint32 { + return q.options.atPage +} + +// Filters returns the list of filters to apply to the query. +func (q Query) Filters() []Filter { + return q.options.filters +} + +// IsPagingEnabled checks if the query results should be paginated. 
+func (q Query) IsPagingEnabled() bool { + return q.options.pageSize > 0 +} diff --git a/ignite/pkg/cosmostxcollector/query/query_test.go b/ignite/pkg/cosmostxcollector/query/query_test.go new file mode 100644 index 0000000..2614a87 --- /dev/null +++ b/ignite/pkg/cosmostxcollector/query/query_test.go @@ -0,0 +1,66 @@ +package query_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cosmostxcollector/query" +) + +func TestQuery(t *testing.T) { + // Arrange + name := "entity" + field := "foo" + + // Act + qry := query.New(name, query.Fields(field)) + + // Assert + require.Equal(t, name, qry.Name()) + require.Equal(t, []string{field}, qry.Fields()) + require.Nil(t, qry.SortBy()) + require.Nil(t, qry.Filters()) + require.Nil(t, qry.Args()) + require.True(t, qry.IsPagingEnabled()) + require.EqualValues(t, query.DefaultPageSize, qry.PageSize()) + require.EqualValues(t, 1, qry.AtPage()) +} + +func TestPaging(t *testing.T) { + // Arrange + var ( + page uint32 = 42 + pageSize uint32 = 100 + ) + + // Act + qry := query.New( + "name", + query.WithPageSize(pageSize), + query.AtPage(page), + ) + + // Assert + require.True(t, qry.IsPagingEnabled()) + require.EqualValues(t, pageSize, qry.PageSize()) + require.EqualValues(t, page, qry.AtPage()) +} + +func TestDisablePaging(t *testing.T) { + // Act + qry := query.New("name", query.WithoutPaging()) + + // Assert + require.False(t, qry.IsPagingEnabled()) + require.EqualValues(t, 0, qry.PageSize()) +} + +func TestAtPageZero(t *testing.T) { + // Act + qry := query.New("name", query.AtPage(0)) + + // Assert + require.True(t, qry.IsPagingEnabled()) + require.EqualValues(t, 1, qry.AtPage()) +} diff --git a/ignite/pkg/cosmosver/cosmosver.go b/ignite/pkg/cosmosver/cosmosver.go new file mode 100644 index 0000000..34a47f0 --- /dev/null +++ b/ignite/pkg/cosmosver/cosmosver.go @@ -0,0 +1,84 @@ +package cosmosver + +import ( + "strings" + + "github.com/blang/semver/v4" +) + +const 
prefix = "v" + +// Version represents a range of Cosmos SDK versions. +type Version struct { + // Version is the exact sdk version string. + Version string + + // Semantic is the parsed version. + Semantic semver.Version +} + +var ( + StargateFortyVersion = newVersion("0.40.0") + StargateFortyFourVersion = newVersion("0.44.0-alpha") + StargateFortyFiveThreeVersion = newVersion("0.45.3") + StargateFortySevenTwoVersion = newVersion("0.47.2") + StargateFiftyVersion = newVersion("0.50.0") // 0.50.0 has been retracted and replaced with 0.50.1, but we keep it here for compatibility with pseudo versions. + StargateFiftyThreeVersion = newVersion("0.53.0") +) + +var ( + // Versions is a list of known, sorted Cosmos-SDK versions. + Versions = []Version{ + StargateFortyVersion, + StargateFortyFourVersion, + StargateFortyFiveThreeVersion, + StargateFortySevenTwoVersion, + StargateFiftyVersion, + StargateFiftyThreeVersion, // NOTE: v0.50 and v0.53 are API compatible but consensus incompatible when using the new features from v0.53. + } + + // Latest is the latest known version of the Cosmos-SDK. + Latest = Versions[len(Versions)-1] +) + +func newVersion(version string) Version { + return Version{ + Version: "v" + version, + Semantic: semver.MustParse(version), + } +} + +// Parse parses a Cosmos-SDK version. +func Parse(version string) (v Version, err error) { + v.Version = version + + if v.Semantic, err = semver.Parse(strings.TrimPrefix(version, prefix)); err != nil { + return v, err + } + + return +} + +// GTE checks if v is greater than or equal to version. +func (v Version) GTE(version Version) bool { + return v.Semantic.GTE(version.Semantic) +} + +// LT checks if v is less than version. +func (v Version) LT(version Version) bool { + return v.Semantic.LT(version.Semantic) +} + +// LTE checks if v is less than or equal to version. +func (v Version) LTE(version Version) bool { + return v.Semantic.LTE(version.Semantic) +} + +// Is checks if v is equal to version. 
+func (v Version) Is(version Version) bool { + return v.Semantic.EQ(version.Semantic) +} + +func (v Version) String() string { + return v.Version +} diff --git a/ignite/pkg/cosmosver/detect.go b/ignite/pkg/cosmosver/detect.go new file mode 100644 index 0000000..a5d269c --- /dev/null +++ b/ignite/pkg/cosmosver/detect.go @@ -0,0 +1,56 @@ +package cosmosver + +import ( + "regexp" + + "github.com/ignite/cli/v29/ignite/pkg/gomodule" +) + +var ( + // CosmosSDKRepoName defines the name of the Cosmos SDK repository. + // THE MUKAN PATCH: also matches mukan-sdk forks. + CosmosSDKRepoName = "cosmos-sdk|mukan-sdk" + // CosmosModulePath defines Cosmos SDK import path. + // THE MUKAN PATCH: accepts both the original Cosmos path and the Mukan sovereign path. + CosmosModulePath = "github.com/cosmos/cosmos-sdk" + // MukanSDKModulePath defines the Mukan sovereign SDK import path. + MukanSDKModulePath = "git.cw.tr/mukan-network/mukan-sdk" + // CosmosSDKModulePathPattern defines a regexp pattern for Cosmos SDK import path. + // THE MUKAN PATCH: matches both github.com/cosmos/cosmos-sdk and git.cw.tr/mukan-network/mukan-sdk. + CosmosSDKModulePathPattern = regexp.MustCompile(CosmosSDKRepoName + "$") +) + +// Detect detects major version of Cosmos SDK. +// If the Cosmos SDK is replaced with a fork, it returns the version of the fork. +// If the Cosmos SDK is replaced with a local fork, it returns its non resolved version. 
+func Detect(appPath string) (version Version, err error) { + parsed, err := gomodule.ParseAt(appPath) + if err != nil { + return version, err + } + + versions, err := gomodule.ResolveDependencies(parsed, false) + if err != nil { + return version, err + } + + for _, v := range versions { + if CosmosSDKModulePathPattern.MatchString(v.Path) { + // an empty version means that Cosmos SDK is replaced with a local fork + // we fallback to use the non resolved go import of the Cosmos SDK + if v.Version == "" { + for _, r := range parsed.Require { + if r.Mod.Path == CosmosModulePath { + v.Version = r.Mod.Version + } + } + } + + if version, err = Parse(v.Version); err != nil { + return version, err + } + } + } + + return +} diff --git a/ignite/pkg/cosmosver/detect_test.go b/ignite/pkg/cosmosver/detect_test.go new file mode 100644 index 0000000..dd4bba6 --- /dev/null +++ b/ignite/pkg/cosmosver/detect_test.go @@ -0,0 +1,26 @@ +package cosmosver_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosver" +) + +func TestDetect(t *testing.T) { + _, err := cosmosver.Detect(".") + require.Error(t, err) + + v, err := cosmosver.Detect("testdata/chain") + require.NoError(t, err) + require.Equal(t, "v0.47.3", v.Version) + + v, err = cosmosver.Detect("testdata/chain-sdk-fork") + require.NoError(t, err) + require.Equal(t, "v0.50.1-rollkit-v0.11.6-no-fraud-proofs", v.Version) + + v, err = cosmosver.Detect("testdata/chain-sdk-local-fork") + require.NoError(t, err) + require.Equal(t, "v0.50.2", v.Version) +} diff --git a/ignite/pkg/cosmosver/testdata/chain-sdk-fork/go.mod b/ignite/pkg/cosmosver/testdata/chain-sdk-fork/go.mod new file mode 100644 index 0000000..b586c36 --- /dev/null +++ b/ignite/pkg/cosmosver/testdata/chain-sdk-fork/go.mod @@ -0,0 +1,170 @@ +module my-new-chain + +go 1.23 + +require ( + cosmossdk.io/api v0.7.2 + github.com/cometbft/cometbft v0.38.1 + github.com/cometbft/cometbft-db v0.8.0 + 
github.com/cosmos/cosmos-sdk v0.47.3 + github.com/cosmos/gogoproto v1.4.11 + github.com/cosmos/ibc-go/v7 v7.1.0 + github.com/golang/protobuf v1.5.3 + github.com/gorilla/mux v1.8.1 + github.com/grpc-ecosystem/grpc-gateway v1.16.0 + github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.2 + github.com/spf13/cast v1.5.1 + github.com/spf13/cobra v1.8.0 + github.com/spf13/pflag v1.0.5 + github.com/stretchr/testify v1.8.4 + google.golang.org/genproto v0.0.0-20231016165738-49dd2c1f3d0b + google.golang.org/grpc v1.59.0 + gopkg.in/yaml.v2 v2.4.0 +) + +require ( + cloud.google.com/go v0.110.8 // indirect + cloud.google.com/go/compute v1.23.1 // indirect + cloud.google.com/go/compute/metadata v0.2.3 // indirect + cloud.google.com/go/iam v1.1.3 // indirect + cloud.google.com/go/storage v1.30.1 // indirect + cosmossdk.io/core v0.11.0 // indirect + cosmossdk.io/depinject v1.0.0-alpha.4 // indirect + cosmossdk.io/errors v1.0.0 // indirect + cosmossdk.io/math v1.2.0 // indirect + cosmossdk.io/tools/rosetta v0.2.1 // indirect + filippo.io/edwards25519 v1.0.0 // indirect + github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect + github.com/99designs/keyring v1.2.1 // indirect + github.com/ChainSafe/go-schnorrkel v0.0.0-20200405005733-88cbf1b4c40d // indirect + github.com/armon/go-metrics v0.4.1 // indirect + github.com/aws/aws-sdk-go v1.44.203 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect + github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 // indirect + github.com/btcsuite/btcd/btcec/v2 v2.3.2 // indirect + github.com/cenkalti/backoff/v4 v4.2.1 // indirect + github.com/cespare/xxhash v1.1.0 // indirect + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/chzyer/readline v1.5.1 // indirect + github.com/cockroachdb/apd/v2 v2.0.2 // indirect + github.com/coinbase/rosetta-sdk-go/types v1.0.0 // indirect + github.com/confio/ics23/go v0.9.0 // indirect + 
github.com/cosmos/btcutil v1.0.5 // indirect + github.com/cosmos/cosmos-proto v1.0.0-beta.3 // indirect + github.com/cosmos/go-bip39 v1.0.0 // indirect + github.com/cosmos/gogogateway v1.2.0 // indirect + github.com/cosmos/iavl v1.0.0 // indirect + github.com/cosmos/ics23/go v0.10.0 // indirect + github.com/cosmos/ledger-cosmos-go v0.13.3 // indirect + github.com/cosmos/rosetta-sdk-go v0.10.0 // indirect + github.com/creachadair/taskgroup v0.4.2 // indirect + github.com/danieljoos/wincred v1.1.2 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 // indirect + github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect + github.com/dgraph-io/badger/v2 v2.2007.4 // indirect + github.com/dgraph-io/ristretto v0.1.1 // indirect + github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/dvsekhvalnov/jose2go v1.5.0 // indirect + github.com/felixge/httpsnoop v1.0.2 // indirect + github.com/fsnotify/fsnotify v1.6.0 // indirect + github.com/ghodss/yaml v1.0.0 // indirect + github.com/go-kit/kit v0.13.0 // indirect + github.com/go-kit/log v0.2.1 // indirect + github.com/go-logfmt/logfmt v0.6.0 // indirect + github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect + github.com/gogo/googleapis v1.4.1 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/glog v1.1.2 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/mock v1.6.0 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/google/btree v1.1.2 // indirect + github.com/google/go-cmp v0.6.0 // indirect + github.com/google/orderedcode v0.0.1 // indirect + github.com/google/uuid v1.3.1 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.1 // indirect + github.com/googleapis/gax-go/v2 v2.12.0 // indirect + 
github.com/gorilla/handlers v1.5.1 // indirect + github.com/gorilla/websocket v1.5.1 // indirect + github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect + github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect + github.com/gtank/merlin v0.1.1 // indirect + github.com/gtank/ristretto255 v0.1.2 // indirect + github.com/hashicorp/go-cleanhttp v0.5.2 // indirect + github.com/hashicorp/go-getter v1.7.1 // indirect + github.com/hashicorp/go-immutable-radix v1.3.1 // indirect + github.com/hashicorp/go-safetemp v1.0.0 // indirect + github.com/hashicorp/go-version v1.6.0 // indirect + github.com/hashicorp/golang-lru v1.0.2 // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/hdevalence/ed25519consensus v0.1.0 // indirect + github.com/huandu/skiplist v1.2.0 // indirect + github.com/improbable-eng/grpc-web v0.15.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/jmhodges/levigo v1.0.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/compress v1.17.2 // indirect + github.com/lib/pq v1.10.7 // indirect + github.com/libp2p/go-buffer-pool v0.1.0 // indirect + github.com/magiconair/properties v1.8.7 // indirect + github.com/manifoldco/promptui v0.9.0 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect + github.com/mimoo/StrobeGo v0.0.0-20210601165009-122bf33a46e0 // indirect + github.com/minio/highwayhash v1.0.2 // indirect + github.com/mitchellh/go-homedir v1.1.0 // indirect + github.com/mitchellh/go-testing-interface v1.14.1 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/mtibben/percent v0.2.1 // indirect + github.com/pelletier/go-toml/v2 v2.1.0 // indirect + 
github.com/petermattis/goid v0.0.0-20230904192822-1876fd5063bc // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/prometheus/client_golang v1.17.0 // indirect + github.com/prometheus/client_model v0.5.0 // indirect + github.com/prometheus/common v0.45.0 // indirect + github.com/prometheus/procfs v0.12.0 // indirect + github.com/rakyll/statik v0.1.7 // indirect + github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect + github.com/rs/cors v1.10.1 // indirect + github.com/sasha-s/go-deadlock v0.3.1 // indirect + github.com/spf13/afero v1.10.0 // indirect + github.com/spf13/jwalterweatherman v1.1.0 // indirect + github.com/spf13/viper v1.17.0 // indirect + github.com/subosito/gotenv v1.6.0 // indirect + github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d // indirect + github.com/tecbot/gorocksdb v0.0.0-20191217155057-f0fad39f321c // indirect + github.com/tendermint/go-amino v0.16.0 // indirect + github.com/tidwall/btree v1.7.0 // indirect + github.com/ulikunitz/xz v0.5.11 // indirect + github.com/zondax/hid v0.9.2 // indirect + github.com/zondax/ledger-go v0.14.3 // indirect + go.etcd.io/bbolt v1.3.7 // indirect + go.opencensus.io v0.24.0 // indirect + golang.org/x/crypto v0.16.0 // indirect + golang.org/x/exp v0.0.0-20231006140011-7918f672742d // indirect + golang.org/x/net v0.19.0 // indirect + golang.org/x/oauth2 v0.12.0 // indirect + golang.org/x/sys v0.15.0 // indirect + golang.org/x/term v0.15.0 // indirect + golang.org/x/text v0.14.0 // indirect + golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect + google.golang.org/api v0.143.0 // indirect + google.golang.org/appengine v1.6.7 // indirect + google.golang.org/protobuf v1.31.0 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + nhooyr.io/websocket v1.8.7 // indirect + pgregory.net/rapid v1.1.0 // indirect + sigs.k8s.io/yaml 
v1.3.0 // indirect +) + +replace github.com/syndtr/goleveldb => github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 + +replace github.com/cosmos/cosmos-sdk => github.com/rollkit/cosmos-sdk v0.50.1-rollkit-v0.11.6-no-fraud-proofs diff --git a/ignite/pkg/cosmosver/testdata/chain-sdk-local-fork/go.mod b/ignite/pkg/cosmosver/testdata/chain-sdk-local-fork/go.mod new file mode 100644 index 0000000..b4abd12 --- /dev/null +++ b/ignite/pkg/cosmosver/testdata/chain-sdk-local-fork/go.mod @@ -0,0 +1,242 @@ +module my-new-chain + +go 1.23 + +replace github.com/cosmos/cosmos-sdk => ./my/local/fork/cosmos-sdk + +require ( + cosmossdk.io/api v0.7.2 + cosmossdk.io/client/v2 v2.0.0-beta.1 + cosmossdk.io/core v0.11.0 + cosmossdk.io/depinject v1.0.0-alpha.4 + cosmossdk.io/log v1.2.1 + cosmossdk.io/store v1.0.1 + cosmossdk.io/tools/confix v0.1.1 + cosmossdk.io/x/circuit v0.1.0 + cosmossdk.io/x/evidence v0.1.0 + cosmossdk.io/x/feegrant v0.1.0 + cosmossdk.io/x/upgrade v0.1.1 + github.com/bufbuild/buf v1.32.1 + github.com/cometbft/cometbft v0.38.2 + github.com/cosmos/cosmos-db v1.0.0 + github.com/cosmos/cosmos-proto v1.0.0-beta.3 + github.com/cosmos/cosmos-sdk v0.50.2 + github.com/cosmos/gogoproto v1.4.11 + github.com/cosmos/ibc-go/modules/capability v1.0.0 + github.com/cosmos/ibc-go/v8 v8.0.0 + github.com/gorilla/mux v1.8.1 + github.com/grpc-ecosystem/grpc-gateway v1.16.0 + github.com/grpc-ecosystem/grpc-gateway/v2 v2.18.1 + github.com/spf13/cobra v1.8.0 + github.com/spf13/pflag v1.0.5 + github.com/spf13/viper v1.17.0 + github.com/stretchr/testify v1.8.4 + google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.3.0 + google.golang.org/protobuf v1.31.0 +) + +require ( + cloud.google.com/go v0.110.9 // indirect + cloud.google.com/go/compute v1.23.2 // indirect + cloud.google.com/go/compute/metadata v0.2.3 // indirect + cloud.google.com/go/iam v1.1.4 // indirect + cloud.google.com/go/storage v1.30.1 // indirect + connectrpc.com/connect v1.11.1 // indirect + connectrpc.com/otelconnect 
v0.6.0 // indirect + cosmossdk.io/collections v0.4.0 // indirect + cosmossdk.io/errors v1.0.0 // indirect + cosmossdk.io/math v1.2.0 // indirect + cosmossdk.io/x/tx v0.12.0 // indirect + filippo.io/edwards25519 v1.0.0 // indirect + github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect + github.com/99designs/keyring v1.2.1 // indirect + github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect + github.com/DataDog/datadog-go v3.2.0+incompatible // indirect + github.com/DataDog/zstd v1.5.5 // indirect + github.com/Microsoft/go-winio v0.6.1 // indirect + github.com/aws/aws-sdk-go v1.44.224 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect + github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 // indirect + github.com/bits-and-blooms/bitset v1.8.0 // indirect + github.com/btcsuite/btcd/btcec/v2 v2.3.2 // indirect + github.com/bufbuild/protocompile v0.6.0 // indirect + github.com/cenkalti/backoff/v4 v4.1.3 // indirect + github.com/cespare/xxhash v1.1.0 // indirect + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/chzyer/readline v1.5.1 // indirect + github.com/cockroachdb/apd/v2 v2.0.2 // indirect + github.com/cockroachdb/errors v1.11.1 // indirect + github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b // indirect + github.com/cockroachdb/pebble v0.0.0-20231102162011-844f0582c2eb // indirect + github.com/cockroachdb/redact v1.1.5 // indirect + github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 // indirect + github.com/cometbft/cometbft-db v0.9.1 // indirect + github.com/containerd/stargz-snapshotter/estargz v0.14.3 // indirect + github.com/cosmos/btcutil v1.0.5 // indirect + github.com/cosmos/go-bip39 v1.0.0 // indirect + github.com/cosmos/gogogateway v1.2.0 // indirect + github.com/cosmos/iavl v1.0.0 // indirect + github.com/cosmos/ics23/go v0.10.0 // indirect + 
github.com/cosmos/ledger-cosmos-go v0.13.3 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect + github.com/creachadair/atomicfile v0.3.1 // indirect + github.com/creachadair/tomledit v0.0.24 // indirect + github.com/danieljoos/wincred v1.2.0 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 // indirect + github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect + github.com/dgraph-io/badger/v2 v2.2007.4 // indirect + github.com/dgraph-io/ristretto v0.1.1 // indirect + github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 // indirect + github.com/distribution/reference v0.5.0 // indirect + github.com/docker/cli v24.0.6+incompatible // indirect + github.com/docker/distribution v2.8.3+incompatible // indirect + github.com/docker/docker v24.0.6+incompatible // indirect + github.com/docker/docker-credential-helpers v0.8.0 // indirect + github.com/docker/go-connections v0.4.0 // indirect + github.com/docker/go-units v0.5.0 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/dvsekhvalnov/jose2go v1.5.0 // indirect + github.com/emicklei/dot v1.6.0 // indirect + github.com/fatih/color v1.15.0 // indirect + github.com/felixge/fgprof v0.9.3 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/fsnotify/fsnotify v1.6.0 // indirect + github.com/getsentry/sentry-go v0.25.0 // indirect + github.com/ghodss/yaml v1.0.0 // indirect + github.com/go-chi/chi/v5 v5.0.10 // indirect + github.com/go-kit/kit v0.12.0 // indirect + github.com/go-kit/log v0.2.1 // indirect + github.com/go-logfmt/logfmt v0.6.0 // indirect + github.com/go-logr/logr v1.2.4 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect + github.com/gofrs/uuid/v5 v5.0.0 // indirect + github.com/gogo/googleapis v1.4.1 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + 
github.com/golang/glog v1.2.0 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/mock v1.6.0 // indirect + github.com/golang/protobuf v1.5.3 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/google/btree v1.1.2 // indirect + github.com/google/go-cmp v0.6.0 // indirect + github.com/google/go-containerregistry v0.16.1 // indirect + github.com/google/orderedcode v0.0.1 // indirect + github.com/google/pprof v0.0.0-20230926050212-f7f687d19a98 // indirect + github.com/google/s2a-go v0.1.7 // indirect + github.com/google/uuid v1.3.1 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.1 // indirect + github.com/googleapis/gax-go/v2 v2.12.0 // indirect + github.com/gorilla/handlers v1.5.2 // indirect + github.com/gorilla/websocket v1.5.0 // indirect + github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect + github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect + github.com/hashicorp/go-cleanhttp v0.5.2 // indirect + github.com/hashicorp/go-getter v1.7.3 // indirect + github.com/hashicorp/go-hclog v1.5.0 // indirect + github.com/hashicorp/go-immutable-radix v1.3.1 // indirect + github.com/hashicorp/go-metrics v0.5.2 // indirect + github.com/hashicorp/go-plugin v1.5.2 // indirect + github.com/hashicorp/go-safetemp v1.0.0 // indirect + github.com/hashicorp/go-version v1.6.0 // indirect + github.com/hashicorp/golang-lru v1.0.2 // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/hashicorp/yamux v0.1.1 // indirect + github.com/hdevalence/ed25519consensus v0.1.0 // indirect + github.com/huandu/skiplist v1.2.0 // indirect + github.com/iancoleman/strcase v0.3.0 // indirect + github.com/improbable-eng/grpc-web v0.15.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/jdx/go-netrc v1.0.0 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/jmhodges/levigo v1.0.0 // indirect + 
github.com/klauspost/compress v1.17.4 // indirect + github.com/klauspost/pgzip v1.2.6 // indirect + github.com/kr/pretty v0.3.1 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/lib/pq v1.10.7 // indirect + github.com/libp2p/go-buffer-pool v0.1.0 // indirect + github.com/linxGnu/grocksdb v1.8.6 // indirect + github.com/magiconair/properties v1.8.7 // indirect + github.com/manifoldco/promptui v0.9.0 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 // indirect + github.com/minio/highwayhash v1.0.2 // indirect + github.com/mitchellh/go-homedir v1.1.0 // indirect + github.com/mitchellh/go-testing-interface v1.14.1 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/moby/term v0.5.0 // indirect + github.com/morikuni/aec v1.0.0 // indirect + github.com/mtibben/percent v0.2.1 // indirect + github.com/oasisprotocol/curve25519-voi v0.0.0-20230904125328-1f23a7beb09a // indirect + github.com/oklog/run v1.1.0 // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.1.0-rc5 // indirect + github.com/pelletier/go-toml/v2 v2.1.0 // indirect + github.com/petermattis/goid v0.0.0-20230904192822-1876fd5063bc // indirect + github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pkg/profile v1.7.0 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/prometheus/client_golang v1.17.0 // indirect + github.com/prometheus/client_model v0.5.0 // indirect + github.com/prometheus/common v0.45.0 // indirect + github.com/prometheus/procfs v0.12.0 // indirect + github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect + github.com/rogpeppe/go-internal v1.11.0 // indirect + github.com/rs/cors v1.10.1 // indirect + github.com/rs/zerolog 
v1.31.0 // indirect + github.com/russross/blackfriday/v2 v2.1.0 // indirect + github.com/sagikazarmark/locafero v0.3.0 // indirect + github.com/sagikazarmark/slog-shim v0.1.0 // indirect + github.com/sasha-s/go-deadlock v0.3.1 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect + github.com/sourcegraph/conc v0.3.0 // indirect + github.com/spf13/afero v1.10.0 // indirect + github.com/spf13/cast v1.5.1 // indirect + github.com/subosito/gotenv v1.6.0 // indirect + github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d // indirect + github.com/tendermint/go-amino v0.16.0 // indirect + github.com/tetratelabs/wazero v1.5.0 // indirect + github.com/tidwall/btree v1.7.0 // indirect + github.com/ulikunitz/xz v0.5.11 // indirect + github.com/vbatts/tar-split v0.11.5 // indirect + github.com/zondax/hid v0.9.2 // indirect + github.com/zondax/ledger-go v0.14.3 // indirect + go.etcd.io/bbolt v1.3.8 // indirect + go.opencensus.io v0.24.0 // indirect + go.opentelemetry.io/otel v1.19.0 // indirect + go.opentelemetry.io/otel/metric v1.19.0 // indirect + go.opentelemetry.io/otel/sdk v1.19.0 // indirect + go.opentelemetry.io/otel/trace v1.19.0 // indirect + go.uber.org/atomic v1.11.0 // indirect + go.uber.org/multierr v1.11.0 // indirect + go.uber.org/zap v1.26.0 // indirect + golang.org/x/crypto v0.16.0 // indirect + golang.org/x/exp v0.0.0-20231006140011-7918f672742d // indirect + golang.org/x/mod v0.13.0 // indirect + golang.org/x/net v0.19.0 // indirect + golang.org/x/oauth2 v0.13.0 // indirect + golang.org/x/sync v0.5.0 // indirect + golang.org/x/sys v0.15.0 // indirect + golang.org/x/term v0.15.0 // indirect + golang.org/x/text v0.14.0 // indirect + golang.org/x/tools v0.14.0 // indirect + golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect + google.golang.org/api v0.143.0 // indirect + google.golang.org/appengine v1.6.7 // indirect + google.golang.org/genproto v0.0.0-20231030173426-d783a09b4405 // indirect + 
google.golang.org/genproto/googleapis/api v0.0.0-20231106174013-bbf56f31fb17 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20231030173426-d783a09b4405 // indirect + google.golang.org/grpc v1.59.0 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + gotest.tools/v3 v3.5.1 // indirect + nhooyr.io/websocket v1.8.6 // indirect + pgregory.net/rapid v1.1.0 // indirect + sigs.k8s.io/yaml v1.4.0 // indirect +) diff --git a/ignite/pkg/cosmosver/testdata/chain/go.mod b/ignite/pkg/cosmosver/testdata/chain/go.mod new file mode 100644 index 0000000..6dfe6aa --- /dev/null +++ b/ignite/pkg/cosmosver/testdata/chain/go.mod @@ -0,0 +1,168 @@ +module my-new-chain + +go 1.19 + +require ( + cosmossdk.io/api v0.3.1 + github.com/cometbft/cometbft v0.37.2 + github.com/cometbft/cometbft-db v0.7.0 + github.com/cosmos/cosmos-sdk v0.47.3 + github.com/cosmos/gogoproto v1.4.10 + github.com/cosmos/ibc-go/v7 v7.1.0 + github.com/golang/protobuf v1.5.3 + github.com/gorilla/mux v1.8.0 + github.com/grpc-ecosystem/grpc-gateway v1.16.0 + github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.2 + github.com/spf13/cast v1.5.0 + github.com/spf13/cobra v1.6.1 + github.com/spf13/pflag v1.0.5 + github.com/stretchr/testify v1.8.2 + google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4 + google.golang.org/grpc v1.55.0 + gopkg.in/yaml.v2 v2.4.0 +) + +require ( + cloud.google.com/go v0.110.0 // indirect + cloud.google.com/go/compute v1.18.0 // indirect + cloud.google.com/go/compute/metadata v0.2.3 // indirect + cloud.google.com/go/iam v0.12.0 // indirect + cloud.google.com/go/storage v1.29.0 // indirect + cosmossdk.io/core v0.5.1 // indirect + cosmossdk.io/depinject v1.0.0-alpha.3 // indirect + cosmossdk.io/errors v1.0.0-beta.7 // indirect + cosmossdk.io/math v1.0.1 // indirect + cosmossdk.io/tools/rosetta v0.2.1 // indirect + filippo.io/edwards25519 v1.0.0 // indirect + github.com/99designs/go-keychain 
v0.0.0-20191008050251-8e49817e8af4 // indirect + github.com/99designs/keyring v1.2.1 // indirect + github.com/ChainSafe/go-schnorrkel v0.0.0-20200405005733-88cbf1b4c40d // indirect + github.com/armon/go-metrics v0.4.1 // indirect + github.com/aws/aws-sdk-go v1.44.203 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect + github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 // indirect + github.com/btcsuite/btcd/btcec/v2 v2.3.2 // indirect + github.com/cenkalti/backoff/v4 v4.1.3 // indirect + github.com/cespare/xxhash v1.1.0 // indirect + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/chzyer/readline v1.5.1 // indirect + github.com/cockroachdb/apd/v2 v2.0.2 // indirect + github.com/coinbase/rosetta-sdk-go/types v1.0.0 // indirect + github.com/confio/ics23/go v0.9.0 // indirect + github.com/cosmos/btcutil v1.0.5 // indirect + github.com/cosmos/cosmos-proto v1.0.0-beta.2 // indirect + github.com/cosmos/go-bip39 v1.0.0 // indirect + github.com/cosmos/gogogateway v1.2.0 // indirect + github.com/cosmos/iavl v0.20.0 // indirect + github.com/cosmos/ics23/go v0.10.0 // indirect + github.com/cosmos/ledger-cosmos-go v0.12.1 // indirect + github.com/cosmos/rosetta-sdk-go v0.10.0 // indirect + github.com/creachadair/taskgroup v0.4.2 // indirect + github.com/danieljoos/wincred v1.1.2 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 // indirect + github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect + github.com/dgraph-io/badger/v2 v2.2007.4 // indirect + github.com/dgraph-io/ristretto v0.1.1 // indirect + github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/dvsekhvalnov/jose2go v1.5.0 // indirect + github.com/felixge/httpsnoop v1.0.2 // indirect + github.com/fsnotify/fsnotify v1.6.0 // indirect + 
github.com/ghodss/yaml v1.0.0 // indirect + github.com/go-kit/kit v0.12.0 // indirect + github.com/go-kit/log v0.2.1 // indirect + github.com/go-logfmt/logfmt v0.6.0 // indirect + github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect + github.com/gogo/googleapis v1.4.1 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/glog v1.1.0 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/mock v1.6.0 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/google/btree v1.1.2 // indirect + github.com/google/go-cmp v0.5.9 // indirect + github.com/google/orderedcode v0.0.1 // indirect + github.com/google/uuid v1.3.0 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.2.3 // indirect + github.com/googleapis/gax-go/v2 v2.7.0 // indirect + github.com/gorilla/handlers v1.5.1 // indirect + github.com/gorilla/websocket v1.5.0 // indirect + github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 // indirect + github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect + github.com/gtank/merlin v0.1.1 // indirect + github.com/gtank/ristretto255 v0.1.2 // indirect + github.com/hashicorp/go-cleanhttp v0.5.2 // indirect + github.com/hashicorp/go-getter v1.7.1 // indirect + github.com/hashicorp/go-immutable-radix v1.3.1 // indirect + github.com/hashicorp/go-safetemp v1.0.0 // indirect + github.com/hashicorp/go-version v1.6.0 // indirect + github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/hdevalence/ed25519consensus v0.1.0 // indirect + github.com/huandu/skiplist v1.2.0 // indirect + github.com/improbable-eng/grpc-web v0.15.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/jmhodges/levigo v1.0.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + 
github.com/klauspost/compress v1.16.3 // indirect + github.com/lib/pq v1.10.7 // indirect + github.com/libp2p/go-buffer-pool v0.1.0 // indirect + github.com/magiconair/properties v1.8.7 // indirect + github.com/manifoldco/promptui v0.9.0 // indirect + github.com/mattn/go-isatty v0.0.18 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect + github.com/mimoo/StrobeGo v0.0.0-20210601165009-122bf33a46e0 // indirect + github.com/minio/highwayhash v1.0.2 // indirect + github.com/mitchellh/go-homedir v1.1.0 // indirect + github.com/mitchellh/go-testing-interface v1.14.1 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/mtibben/percent v0.2.1 // indirect + github.com/pelletier/go-toml/v2 v2.0.7 // indirect + github.com/petermattis/goid v0.0.0-20230317030725-371a4b8eda08 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/prometheus/client_golang v1.14.0 // indirect + github.com/prometheus/client_model v0.3.0 // indirect + github.com/prometheus/common v0.42.0 // indirect + github.com/prometheus/procfs v0.9.0 // indirect + github.com/rakyll/statik v0.1.7 // indirect + github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect + github.com/rs/cors v1.8.3 // indirect + github.com/sasha-s/go-deadlock v0.3.1 // indirect + github.com/spf13/afero v1.9.3 // indirect + github.com/spf13/jwalterweatherman v1.1.0 // indirect + github.com/spf13/viper v1.15.0 // indirect + github.com/subosito/gotenv v1.4.2 // indirect + github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d // indirect + github.com/tecbot/gorocksdb v0.0.0-20191217155057-f0fad39f321c // indirect + github.com/tendermint/go-amino v0.16.0 // indirect + github.com/tidwall/btree v1.6.0 // indirect + github.com/ulikunitz/xz v0.5.11 // indirect + 
github.com/zondax/hid v0.9.1 // indirect + github.com/zondax/ledger-go v0.14.1 // indirect + go.etcd.io/bbolt v1.3.7 // indirect + go.opencensus.io v0.24.0 // indirect + golang.org/x/crypto v0.8.0 // indirect + golang.org/x/exp v0.0.0-20230515195305-f3d0a9c9a5cc // indirect + golang.org/x/net v0.9.0 // indirect + golang.org/x/oauth2 v0.6.0 // indirect + golang.org/x/sys v0.7.0 // indirect + golang.org/x/term v0.7.0 // indirect + golang.org/x/text v0.9.0 // indirect + golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect + google.golang.org/api v0.110.0 // indirect + google.golang.org/appengine v1.6.7 // indirect + google.golang.org/protobuf v1.30.0 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + nhooyr.io/websocket v1.8.6 // indirect + pgregory.net/rapid v0.5.5 // indirect + sigs.k8s.io/yaml v1.3.0 // indirect +) + +replace github.com/syndtr/goleveldb => github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 diff --git a/ignite/pkg/ctxticker/ctxticker.go b/ignite/pkg/ctxticker/ctxticker.go new file mode 100644 index 0000000..9e430ca --- /dev/null +++ b/ignite/pkg/ctxticker/ctxticker.go @@ -0,0 +1,32 @@ +package ctxticker + +import ( + "context" + "time" +) + +// Do calls fn every d until ctx canceled or fn returns with a non-nil error. +func Do(ctx context.Context, d time.Duration, fn func() error) error { + ticker := time.NewTicker(d) + defer ticker.Stop() + + for { + select { + case <-ctx.Done(): + return ctx.Err() + + case <-ticker.C: + if err := fn(); err != nil { + return err + } + } + } +} + +// DoNow is same as Do except it makes +1 call to fn on start. 
+func DoNow(ctx context.Context, d time.Duration, fn func() error) error { + if err := fn(); err != nil { + return err + } + return Do(ctx, d, fn) +} diff --git a/ignite/pkg/ctxticker/ctxticker_test.go b/ignite/pkg/ctxticker/ctxticker_test.go new file mode 100644 index 0000000..91e60d5 --- /dev/null +++ b/ignite/pkg/ctxticker/ctxticker_test.go @@ -0,0 +1,42 @@ +package ctxticker + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func TestDoNow(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + var callCount int + + require.Error(t, context.Canceled, DoNow(ctx, time.Millisecond, func() error { + if callCount == 3 { + cancel() + return nil + } + callCount++ + return nil + })) + + require.True(t, callCount >= 3) +} + +func TestDoNowError(t *testing.T) { + errDone := errors.New("done") + var callCount int + + require.Error(t, errDone, DoNow(context.Background(), time.Millisecond, func() error { + if callCount == 3 { + return errDone + } + callCount++ + return nil + })) + + require.True(t, callCount >= 3) +} diff --git a/ignite/pkg/debugger/server.go b/ignite/pkg/debugger/server.go new file mode 100644 index 0000000..e69ec8b --- /dev/null +++ b/ignite/pkg/debugger/server.go @@ -0,0 +1,184 @@ +package debugger + +import ( + "context" + "net" + + "github.com/go-delve/delve/pkg/logflags" + "github.com/go-delve/delve/pkg/terminal" + "github.com/go-delve/delve/service" + "github.com/go-delve/delve/service/debugger" + "github.com/go-delve/delve/service/rpc2" + "github.com/go-delve/delve/service/rpccommon" + "golang.org/x/sync/errgroup" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +const ( + // DefaultAddress defines the default debug server address. + DefaultAddress = "127.0.0.1:30500" + + // DefaultWorkingDir defines the default directory to use as + // working dir when running the app binary that will be debugged. + DefaultWorkingDir = "." 
+) + +// Option configures debugging. +type Option func(*debuggerOptions) + +type debuggerOptions struct { + disconnectChan chan struct{} + address, workingDir string + listener net.Listener + binaryArgs []string + clientRunHook, serverStartHook func() +} + +// Address sets the address for the debug server. +func Address(address string) Option { + return func(o *debuggerOptions) { + o.address = address + } +} + +// DisconnectChannel sets the channel used by the server to signal when the client disconnects. +func DisconnectChannel(c chan struct{}) Option { + return func(o *debuggerOptions) { + o.disconnectChan = c + } +} + +// Listener sets a custom listener to serve requests. +func Listener(l net.Listener) Option { + return func(o *debuggerOptions) { + o.listener = l + } +} + +// WorkingDir sets the working directory of the new process. +func WorkingDir(path string) Option { + return func(o *debuggerOptions) { + o.workingDir = path + } +} + +// BinaryArgs sets command line argument for the new process. +func BinaryArgs(args ...string) Option { + return func(o *debuggerOptions) { + o.binaryArgs = args + } +} + +// ClientRunHook sets a function to be executed right before debug client is run. +func ClientRunHook(fn func()) Option { + return func(o *debuggerOptions) { + o.clientRunHook = fn + } +} + +// ServerStartHook sets a function to be executed right before debug server starts. +func ServerStartHook(fn func()) Option { + return func(o *debuggerOptions) { + o.serverStartHook = fn + } +} + +// Start starts a debug server. +func Start(ctx context.Context, binaryPath string, options ...Option) (err error) { + o := applyDebuggerOptions(options...) 
+ + listener := o.listener + if listener == nil { + var c net.ListenConfig + + listener, err = c.Listen(ctx, "tcp", o.address) + if err != nil { + return err + } + + defer listener.Close() + } + + if err = disableDelveLogging(); err != nil { + return err + } + + server := rpccommon.NewServer(&service.Config{ + Listener: listener, + AcceptMulti: false, + APIVersion: 2, + CheckLocalConnUser: true, + DisconnectChan: o.disconnectChan, + ProcessArgs: append([]string{binaryPath}, o.binaryArgs...), + Debugger: debugger.Config{ + WorkingDir: o.workingDir, + Backend: "default", + }, + }) + + if o.serverStartHook != nil { + o.serverStartHook() + } + + if err = server.Run(); err != nil { + return errors.Errorf("failed to run debug server: %w", err) + } + + defer server.Stop() //nolint:errcheck + + // Wait until the context is done or the connected client disconnects + select { + case <-ctx.Done(): + case <-o.disconnectChan: + } + + return nil +} + +// Run runs a debug client. +func Run(ctx context.Context, binaryPath string, options ...Option) error { + listener, conn := service.ListenerPipe() + defer listener.Close() + + o := applyDebuggerOptions(options...) + + options = append(options, Listener(listener)) + g, ctx := errgroup.WithContext(ctx) + + // Start the debugger server + g.Go(func() error { + return Start(ctx, binaryPath, options...) 
+ }) + + // Start the debug client + g.Go(func() error { + client := rpc2.NewClientFromConn(conn) + term := terminal.New(client, nil) + + if o.clientRunHook != nil { + o.clientRunHook() + } + + _, err := term.Run() + return err + }) + + return g.Wait() +} + +func applyDebuggerOptions(options ...Option) debuggerOptions { + o := debuggerOptions{ + address: DefaultAddress, + workingDir: DefaultWorkingDir, + disconnectChan: make(chan struct{}), + } + for _, apply := range options { + apply(&o) + } + return o +} + +func disableDelveLogging() error { + return logflags.Setup(false, "", "") +} diff --git a/ignite/pkg/debugger/server_test.go b/ignite/pkg/debugger/server_test.go new file mode 100644 index 0000000..416a965 --- /dev/null +++ b/ignite/pkg/debugger/server_test.go @@ -0,0 +1,52 @@ +package debugger + +import ( + "net" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestApplyDebuggerOptionsDefaults(t *testing.T) { + o := applyDebuggerOptions() + require.Equal(t, DefaultAddress, o.address) + require.Equal(t, DefaultWorkingDir, o.workingDir) + require.NotNil(t, o.disconnectChan) +} + +func TestApplyDebuggerOptionsWithOverrides(t *testing.T) { + c := make(chan struct{}) + l, err := net.Listen("tcp", "127.0.0.1:0") + require.NoError(t, err) + t.Cleanup(func() { + require.NoError(t, l.Close()) + }) + + clientRan := false + serverStarted := false + + o := applyDebuggerOptions( + Address("127.0.0.1:9999"), + DisconnectChannel(c), + Listener(l), + WorkingDir("/tmp/work"), + BinaryArgs("a", "b"), + ClientRunHook(func() { clientRan = true }), + ServerStartHook(func() { serverStarted = true }), + ) + + require.Equal(t, "127.0.0.1:9999", o.address) + require.Equal(t, c, o.disconnectChan) + require.Equal(t, l, o.listener) + require.Equal(t, "/tmp/work", o.workingDir) + require.Equal(t, []string{"a", "b"}, o.binaryArgs) + + o.clientRunHook() + o.serverStartHook() + require.True(t, clientRan) + require.True(t, serverStarted) +} + +func TestDisableDelveLogging(t 
*testing.T) { + require.NoError(t, disableDelveLogging()) +} diff --git a/ignite/pkg/dircache/cache.go b/ignite/pkg/dircache/cache.go new file mode 100644 index 0000000..db7ca07 --- /dev/null +++ b/ignite/pkg/dircache/cache.go @@ -0,0 +1,111 @@ +package dircache + +import ( + "crypto/sha256" + "fmt" + "os" + "path/filepath" + + "github.com/otiai10/copy" + + "github.com/ignite/cli/v29/ignite/config" + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/dirchange" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +var ErrCacheNotFound = errors.New("cache not found") + +type Cache struct { + path string + storageCache cache.Cache[string] +} + +// New creates a new Buf based on the installed binary. +func New(cacheStorage cache.Storage, dir, specNamespace string) (Cache, error) { + path, err := cachePath() + if err != nil { + return Cache{}, err + } + path = filepath.Join(path, dir) + if err := os.MkdirAll(path, 0o755); err != nil && !os.IsExist(err) { + return Cache{}, err + } + + return Cache{ + path: path, + storageCache: cache.New[string](cacheStorage, specNamespace), + }, nil +} + +// ClearCache remove the cache path. +func ClearCache() error { + path, err := cachePath() + if err != nil { + return err + } + return os.RemoveAll(path) +} + +// cachePath returns the cache path. +func cachePath() (string, error) { + globalPath, err := config.DirPath() + if err != nil { + return "", err + } + return filepath.Join(globalPath, "cache"), nil +} + +// cacheKey create the cache key. 
+func cacheKey(src string, keys ...string) (string, error) { + checksum, err := dirchange.ChecksumFromPaths(src, "") + if err != nil { + return "", err + } + + h := sha256.New() + if _, err := h.Write(checksum); err != nil { + return "", err + } + for _, key := range keys { + if _, err := h.Write([]byte(key)); err != nil { + return "", err + } + } + return fmt.Sprintf("%x", h.Sum(nil)), nil +} + +// CopyTo gets the cache folder based on the cache key from the storage and copies the folder to the output. +func (c Cache) CopyTo(src, output string, keys ...string) (string, error) { + key, err := cacheKey(src, keys...) + if err != nil { + return key, err + } + + cachedPath, err := c.storageCache.Get(key) + if errors.Is(err, cache.ErrorNotFound) { + return key, ErrCacheNotFound + } else if err != nil { + return key, err + } + + if err := copy.Copy(cachedPath, output); err != nil { + return "", errors.Wrapf(err, "get dir cache cannot copy path %s to %s", cachedPath, output) + } + return key, nil +} + +// Save copies the source to the cache folder and saves the path into the storage based on the key. 
+func (c Cache) Save(src, key string) error { + path := filepath.Join(c.path, key) + if err := os.Mkdir(path, 0o700); os.IsExist(err) { + return nil + } else if err != nil { + return err + } + + if err := copy.Copy(src, path); err != nil { + return errors.Wrapf(err, "save dir cache cannot copy path %s to %s", src, path) + } + return c.storageCache.Put(key, path) +} diff --git a/ignite/pkg/dircache/cache_test.go b/ignite/pkg/dircache/cache_test.go new file mode 100644 index 0000000..c3a046f --- /dev/null +++ b/ignite/pkg/dircache/cache_test.go @@ -0,0 +1,87 @@ +package dircache + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func Test_cacheKey(t *testing.T) { + wd, err := os.Getwd() + require.NoError(t, err) + wd = filepath.Join(wd, "testdata") + + type args struct { + src string + keys []string + } + tests := []struct { + name string + args args + want string + err error + }{ + { + name: "no keys", + args: args{ + src: wd, + }, + want: "4cf0539ac24f8ebc9ee17b81d0ea880e55d2ba98a4e355affe3c3f8a0cdb01ee", + }, + { + name: "one key", + args: args{ + src: wd, + keys: []string{"test"}, + }, + want: "dc7b4e68b7b9d827b3833845202818a11a1105542a3551052c012d815a64e7ae", + }, + { + name: "two keys", + args: args{ + src: wd, + keys: []string{"test1", "test2"}, + }, + want: "a017b975dd0a30efc7fbc515af9b3c37657c20a509fd5771111d4c0e43d373b0", + }, + { + name: "duplicated keys", + args: args{ + src: wd, + keys: []string{"test", "test"}, + }, + want: "26ce20a6c4563963fd646121948cd62137a143317c970a52a3ec8ed9979c868d", + }, + { + name: "many keys", + args: args{ + src: wd, + keys: []string{"test1", "test2", "test3", "test4", "test5", "test6", "test6"}, + }, + want: "f9cd1468363ff902bdd5a93c9c7c43c83c9074796486306a7da046a082314121", + }, + { + name: "invalid source", + args: args{ + src: "invalid_source", + }, + err: errors.New("no file in specified paths"), + }, + } + for _, tt := range 
tests { + t.Run(tt.name, func(t *testing.T) { + got, err := cacheKey(tt.args.src, tt.args.keys...) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} diff --git a/ignite/pkg/dircache/testdata/subdata/subfile b/ignite/pkg/dircache/testdata/subdata/subfile new file mode 100644 index 0000000..9aae36d --- /dev/null +++ b/ignite/pkg/dircache/testdata/subdata/subfile @@ -0,0 +1 @@ +subtest \ No newline at end of file diff --git a/ignite/pkg/dircache/testdata/testfile b/ignite/pkg/dircache/testdata/testfile new file mode 100644 index 0000000..aa19560 --- /dev/null +++ b/ignite/pkg/dircache/testdata/testfile @@ -0,0 +1 @@ +test \ No newline at end of file diff --git a/ignite/pkg/dirchange/dirchange.go b/ignite/pkg/dirchange/dirchange.go new file mode 100644 index 0000000..2bec2a4 --- /dev/null +++ b/ignite/pkg/dirchange/dirchange.go @@ -0,0 +1,116 @@ +package dirchange + +import ( + "bytes" + "crypto/sha256" + "os" + "path/filepath" + + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +var ErrNoFile = errors.New("no file in specified paths") + +// SaveDirChecksum saves the md5 checksum of the provided paths (directories or files) in the provided cache. +// If checksumSavePath directory doesn't exist, it is created. +// Paths are relative to workdir. If workdir is empty, string paths are absolute. +func SaveDirChecksum(checksumCache cache.Cache[[]byte], cacheKey string, workdir string, paths ...string) error { + checksum, err := ChecksumFromPaths(workdir, paths...) + if err != nil { + return err + } + + // save checksum + return checksumCache.Put(cacheKey, checksum) +} + +// HasDirChecksumChanged computes the md5 checksum of the provided paths (directories or files) +// and compares it with the current cached checksum. +// Return true if the checksum doesn't exist yet. 
+// paths are relative to workdir, if workdir is empty string paths are absolute. +func HasDirChecksumChanged(checksumCache cache.Cache[[]byte], cacheKey string, workdir string, paths ...string) (bool, error) { + savedChecksum, err := checksumCache.Get(cacheKey) + if errors.Is(err, cache.ErrorNotFound) { + return true, nil + } + if err != nil { + return false, err + } + + // Compute checksum + checksum, err := ChecksumFromPaths(workdir, paths...) + if errors.Is(err, ErrNoFile) { + // Checksum cannot be saved with no file + // Therefore if no file are found, this means these have been deleted, then the directory has been changed + return true, nil + } else if err != nil { + return false, err + } + + // Compare checksums + if bytes.Equal(checksum, savedChecksum) { + return false, nil + } + + // The checksum has changed + return true, nil +} + +// ChecksumFromPaths computes the md5 checksum from the provided paths. +// Relative paths to the workdir are used. If workdir is empty, string paths are absolute. 
+func ChecksumFromPaths(workdir string, paths ...string) ([]byte, error) { + hash := sha256.New() + + // Can't compute hash if no file present + noFile := true + + // read files + for _, path := range paths { + if !filepath.IsAbs(path) { + path = filepath.Join(workdir, path) + } + + // non-existent paths are ignored + if _, err := os.Stat(path); os.IsNotExist(err) { + continue + } else if err != nil { + return []byte{}, err + } + + err := filepath.Walk(path, func(subPath string, info os.FileInfo, err error) error { + if err != nil { + return err + } + + // ignore directory + if info.IsDir() { + return nil + } + + noFile = false + + // write file content + content, err := os.ReadFile(subPath) + if err != nil { + return err + } + _, err = hash.Write(content) + if err != nil { + return err + } + + return nil + }) + if err != nil { + return []byte{}, err + } + } + + if noFile { + return []byte{}, ErrNoFile + } + + // compute checksum + return hash.Sum(nil), nil +} diff --git a/ignite/pkg/dirchange/dirchange_test.go b/ignite/pkg/dirchange/dirchange_test.go new file mode 100644 index 0000000..d0f4ae8 --- /dev/null +++ b/ignite/pkg/dirchange/dirchange_test.go @@ -0,0 +1,161 @@ +package dirchange_test + +import ( + "crypto/rand" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/dirchange" +) + +const ( + TmpPattern = "starport-dirchange" + ChecksumKey = "checksum" +) + +func randomBytes(t *testing.T, n int) []byte { + t.Helper() + bytes := make([]byte, n) + _, err := rand.Read(bytes) + require.NoError(t, err) + + return bytes +} + +func TestHasDirChecksumChanged(t *testing.T) { + tempDir := os.TempDir() + cacheDir := os.TempDir() + + cacheStorage, err := cache.NewStorage(filepath.Join(cacheDir, "testcache.db")) + require.NoError(t, err) + c := cache.New[[]byte](cacheStorage, "testnamespace") + + // Create directory tree + dir1 := filepath.Join(tempDir, 
"foo1") + err = os.MkdirAll(dir1, 0o700) + require.NoError(t, err) + defer os.RemoveAll(dir1) + dir2 := filepath.Join(tempDir, "foo2") + err = os.MkdirAll(dir2, 0o700) + require.NoError(t, err) + defer os.RemoveAll(dir2) + dir3 := filepath.Join(tempDir, "foo3") + err = os.MkdirAll(dir3, 0o700) + require.NoError(t, err) + defer os.RemoveAll(dir3) + + dir11, err := os.MkdirTemp(dir1, TmpPattern) + require.NoError(t, err) + dir12, err := os.MkdirTemp(dir1, TmpPattern) + require.NoError(t, err) + dir21, err := os.MkdirTemp(dir2, TmpPattern) + require.NoError(t, err) + + // Create files + err = os.WriteFile(filepath.Join(dir1, "foo"), []byte("some bytes"), 0o644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(dir11, "foo"), randomBytes(t, 15), 0o644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(dir12, "foo"), randomBytes(t, 20), 0o644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(dir21, "foo"), randomBytes(t, 20), 0o644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(dir3, "foo1"), randomBytes(t, 10), 0o644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(dir3, "foo2"), randomBytes(t, 10), 0o644) + require.NoError(t, err) + + // Check checksum + paths := []string{dir1, dir2, dir3} + checksum, err := dirchange.ChecksumFromPaths("", paths...) + require.NoError(t, err) + // sha256 checksum is 32 bytes + require.Len(t, checksum, 32) + + // Checksum remains the same if a file is deleted and recreated with the same content + err = os.Remove(filepath.Join(dir1, "foo")) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(dir1, "foo"), []byte("some bytes"), 0o644) + require.NoError(t, err) + tmpChecksum, err := dirchange.ChecksumFromPaths("", paths...) + require.NoError(t, err) + require.Equal(t, checksum, tmpChecksum) + + // Can compute the checksum from a specific workdir + pathNames := []string{"foo1", "foo2", "foo3"} + tmpChecksum, err = dirchange.ChecksumFromPaths(tempDir, pathNames...) 
+ require.NoError(t, err) + require.Equal(t, checksum, tmpChecksum) + + // Ignore non existent dir + pathNames = append(pathNames, "nonexistent") + tmpChecksum, err = dirchange.ChecksumFromPaths(tempDir, pathNames...) + require.NoError(t, err) + require.Equal(t, checksum, tmpChecksum) + + // Checksum from a subdir is different + tmpChecksum, err = dirchange.ChecksumFromPaths("", dir1, dir2) + require.NoError(t, err) + require.NotEqual(t, checksum, tmpChecksum) + + // Checksum changes if a file is modified + err = os.WriteFile(filepath.Join(dir3, "foo1"), randomBytes(t, 10), 0o644) + require.NoError(t, err) + newChecksum, err := dirchange.ChecksumFromPaths("", paths...) + require.NoError(t, err) + require.NotEqual(t, checksum, newChecksum) + + // Error if no files in the specified dirs + empty1 := filepath.Join(tempDir, "empty1") + err = os.MkdirAll(empty1, 0o700) + require.NoError(t, err) + defer os.RemoveAll(empty1) + empty2 := filepath.Join(tempDir, "empty2") + err = os.MkdirAll(empty2, 0o700) + require.NoError(t, err) + defer os.RemoveAll(empty2) + _, err = dirchange.ChecksumFromPaths("", empty1, empty2) + require.Error(t, err) + + // SaveDirChecksum saves the checksum in the cache + saveDir, err := os.MkdirTemp(tempDir, TmpPattern) + require.NoError(t, err) + defer os.RemoveAll(saveDir) + err = dirchange.SaveDirChecksum(c, ChecksumKey, "", paths...) + require.NoError(t, err) + savedChecksum, err := c.Get(ChecksumKey) + require.NoError(t, err) + require.Equal(t, newChecksum, savedChecksum) + + // Error if the paths contains no file + err = dirchange.SaveDirChecksum(c, ChecksumKey, "", empty1, empty2) + require.Error(t, err) + + // HasDirChecksumChanged returns false if the directory has not changed + changed, err := dirchange.HasDirChecksumChanged(c, ChecksumKey, "", paths...) 
+ require.NoError(t, err) + require.False(t, changed) + + // Return true if cache entry doesn't exist + err = c.Delete(ChecksumKey) + require.NoError(t, err) + changed, err = dirchange.HasDirChecksumChanged(c, ChecksumKey, "", paths...) + require.NoError(t, err) + require.True(t, changed) + + // Return true if the paths contains no file + changed, err = dirchange.HasDirChecksumChanged(c, ChecksumKey, "", empty1, empty2) + require.NoError(t, err) + require.True(t, changed) + + // Return true if it has been changed + err = os.WriteFile(filepath.Join(dir21, "bar"), randomBytes(t, 20), 0o644) + require.NoError(t, err) + changed, err = dirchange.HasDirChecksumChanged(c, ChecksumKey, "", paths...) + require.NoError(t, err) + require.True(t, changed) +} diff --git a/ignite/pkg/env/env.go b/ignite/pkg/env/env.go new file mode 100644 index 0000000..ca024f7 --- /dev/null +++ b/ignite/pkg/env/env.go @@ -0,0 +1,45 @@ +package env + +import ( + "fmt" + "os" + "path" + + "github.com/ignite/cli/v29/ignite/pkg/xfilepath" +) + +const ( + DebugEnvVar = "IGNT_DEBUG" + ConfigDirEnvVar = "IGNT_CONFIG_DIR" +) + +// SetDebug sets the debug environment variable to "1". +// This is used to enable debug mode in the application. +func SetDebug() { + _ = os.Setenv(DebugEnvVar, "1") +} + +// IsDebug checks if the debug environment variable is set to "1". +// This is used to determine if the application is running in debug mode. 
+func IsDebug() bool { + return os.Getenv(DebugEnvVar) == "1" +} + +func ConfigDir() xfilepath.PathRetriever { + return func() (string, error) { + if dir := os.Getenv(ConfigDirEnvVar); dir != "" { + if !path.IsAbs(dir) { + panic(fmt.Sprintf("%s must be an absolute path", ConfigDirEnvVar)) + } + return dir, nil + } + return xfilepath.JoinFromHome(xfilepath.Path(".ignite"))() + } +} + +func SetConfigDir(dir string) { + err := os.Setenv(ConfigDirEnvVar, dir) + if err != nil { + panic(fmt.Sprintf("set config dir env: %v", err)) + } +} diff --git a/ignite/pkg/env/env_test.go b/ignite/pkg/env/env_test.go new file mode 100644 index 0000000..4ee7aee --- /dev/null +++ b/ignite/pkg/env/env_test.go @@ -0,0 +1,39 @@ +package env + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestSetDebugAndIsDebug(t *testing.T) { + t.Setenv(DebugEnvVar, "") + require.False(t, IsDebug()) + + SetDebug() + require.True(t, IsDebug()) +} + +func TestConfigDirFromEnv(t *testing.T) { + dir := filepath.Join(t.TempDir(), "ignite-config") + t.Setenv(ConfigDirEnvVar, dir) + + got, err := ConfigDir()() + require.NoError(t, err) + require.Equal(t, dir, got) +} + +func TestConfigDirPanicsWithRelativePath(t *testing.T) { + t.Setenv(ConfigDirEnvVar, "relative/path") + require.Panics(t, func() { + _, _ = ConfigDir()() + }) +} + +func TestSetConfigDir(t *testing.T) { + dir := filepath.Join(t.TempDir(), "cfg") + SetConfigDir(dir) + require.Equal(t, dir, os.Getenv(ConfigDirEnvVar)) +} diff --git a/ignite/pkg/errors/errors.go b/ignite/pkg/errors/errors.go new file mode 100644 index 0000000..a67895f --- /dev/null +++ b/ignite/pkg/errors/errors.go @@ -0,0 +1,7 @@ +package errors + +// ValidationError must be implemented by errors that provide validation info. 
+type ValidationError interface { + error + ValidationInfo() string +} diff --git a/ignite/pkg/errors/xerrors.go b/ignite/pkg/errors/xerrors.go new file mode 100644 index 0000000..16a1c17 --- /dev/null +++ b/ignite/pkg/errors/xerrors.go @@ -0,0 +1,88 @@ +// Package errors provides helpers for error creation, avoiding +// using different packages for errors. +// +// e.g.: +// +// import "github.com/ignite/cli/v29/ignite/pkg/errors" +// +// func main() { +// err1 := errors.New("error new") +// err2 := errors.Errorf("%s: error", foo) +// err3 := errors.Wrap(errFoo, errBar) +// } +package errors + +import ( + "context" + + "github.com/cockroachdb/errors" + "github.com/getsentry/sentry-go" +) + +// New creates an error with a simple error message. +// A stack trace is retained. +func New(msg string) error { + err := errors.New(msg) + captureException(err) + return err +} + +// Errorf aliases Newf(). +func Errorf(format string, args ...any) error { + err := errors.Errorf(format, args...) + captureException(err) + return err +} + +// WithStack annotates err with a stack trace at the point WithStack was called. +func WithStack(err error) error { + errWithStack := errors.WithStack(err) + captureException(errWithStack) + return errWithStack +} + +// Wrap wraps an error with a message prefix. A stack trace is retained. +func Wrap(err error, msg string) error { + errWrap := errors.Wrap(err, msg) + captureException(errWrap) + return errWrap +} + +// Wrapf wraps an error with a formatted message prefix. A stack +// trace is also retained. If the format is empty, no prefix is added, +// but the extra arguments are still processed for reportable strings. +func Wrapf(err error, format string, args ...any) error { + errWrap := errors.Wrapf(err, format, args...) 
+ captureException(errWrap) + return errWrap +} + +func captureException(err error) { + if shouldCaptureException(err) { + sentry.CaptureException(err) + } +} + +func shouldCaptureException(err error) bool { + return err != nil && !Is(err, context.Canceled) +} + +// Unwrap accesses the direct cause of the error if any, otherwise +// returns nil. +func Unwrap(err error) error { return errors.Unwrap(err) } + +// Join returns an error that wraps the given errors. +// Any nil error values are discarded. +// Join returns nil if errs contains no non-nil values. +func Join(errs ...error) error { return errors.Join(errs...) } + +// Is determines whether one of the causes of the given error or any +// of its causes is equivalent to some reference error. +func Is(err, reference error) bool { return errors.Is(err, reference) } + +// As finds the first error in err's chain that matches the type to which target +// points, and if so, sets the target to its value and returns true. An error +// matches a type if it is assignable to the target type, or if it has a method +// As(interface{}) bool such that As(target) returns true. As will panic if target +// is not a non-nil pointer to a type which implements error or is of interface type. 
+func As(err error, target any) bool { return errors.As(err, target) } diff --git a/ignite/pkg/errors/xerrors_test.go b/ignite/pkg/errors/xerrors_test.go new file mode 100644 index 0000000..6d456de --- /dev/null +++ b/ignite/pkg/errors/xerrors_test.go @@ -0,0 +1,71 @@ +package errors + +import ( + "context" + stdErrors "errors" + "testing" + + "github.com/stretchr/testify/require" +) + +type customErr struct { + msg string +} + +func (e customErr) Error() string { return e.msg } + +func TestBasicHelpers(t *testing.T) { + err := New("boom") + require.EqualError(t, err, "boom") + + err = Errorf("value: %d", 10) + require.EqualError(t, err, "value: 10") +} + +func TestWrapHelpers(t *testing.T) { + base := stdErrors.New("base") + + require.Nil(t, Wrap(nil, "prefix")) + require.Nil(t, Wrapf(nil, "prefix %s", "x")) + + wrapped := Wrap(base, "prefix") + require.Error(t, wrapped) + require.True(t, Is(wrapped, base)) + + wrapped = Wrapf(base, "prefix %s", "x") + require.Error(t, wrapped) + require.True(t, Is(wrapped, base)) +} + +func TestJoinUnwrapAs(t *testing.T) { + e1 := customErr{msg: "one"} + e2 := stdErrors.New("two") + j := Join(e1, e2) + require.Error(t, j) + require.True(t, Is(j, e2)) + + var target customErr + require.True(t, As(j, &target)) + require.Equal(t, "one", target.msg) + + wrapped := Wrap(e2, "prefix") + require.True(t, Is(Unwrap(wrapped), e2)) +} + +func TestWithStack(t *testing.T) { + base := stdErrors.New("base") + require.True(t, Is(WithStack(base), base)) +} + +func TestShouldCaptureException(t *testing.T) { + t.Run("canceled errors are ignored", func(t *testing.T) { + require.False(t, shouldCaptureException(context.Canceled)) + require.False(t, shouldCaptureException(Wrap(context.Canceled, "prefix"))) + require.False(t, shouldCaptureException(Wrapf(context.Canceled, "prefix %s", "x"))) + require.False(t, shouldCaptureException(WithStack(context.Canceled))) + }) + + t.Run("other errors are reported", func(t *testing.T) { + require.True(t, 
shouldCaptureException(stdErrors.New("boom"))) + }) +} diff --git a/ignite/pkg/events/bus.go b/ignite/pkg/events/bus.go new file mode 100644 index 0000000..742592a --- /dev/null +++ b/ignite/pkg/events/bus.go @@ -0,0 +1,97 @@ +package events + +import ( + "fmt" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" +) + +// DefaultBufferSize defines the default maximum number +// of events that the bus can cache before they are handled. +const DefaultBufferSize = 50 + +// Provider defines an interface for event providers. +type Provider interface { + // Events returns a read only channel to read the events. + Events() <-chan Event +} + +type ( + // Bus defines a bus to send and receive events. + Bus struct { + evChan chan Event + stopped bool + } + + // BusOption configures the Bus. + BusOption func(*Bus) +) + +// WithBufferSize assigns the size of the buffer to use for buffering events. +func WithBufferSize(size int) BusOption { + return func(bus *Bus) { + bus.evChan = make(chan Event, size) + } +} + +// NewBus creates a new event bus. +func NewBus(options ...BusOption) Bus { + bus := Bus{ + evChan: make(chan Event, DefaultBufferSize), + } + + for _, apply := range options { + apply(&bus) + } + + return bus +} + +// Send sends a new event to bus. +// This method will block if the event bus buffer is full. +func (b Bus) Send(message string, options ...Option) { + if b.evChan == nil || b.stopped { + return + } + + b.evChan <- New(message, options...) +} + +// Sendf sends a new event with a formatted message to bus. +func (b Bus) Sendf(format string, a ...any) { + b.Send(fmt.Sprintf(format, a...)) +} + +// SendInfo sends an info event to the bus. +func (b Bus) SendInfo(message string, options ...Option) { + b.Send(colors.Info(message), options...) +} + +// SendError sends an error event to the bus. +func (b Bus) SendError(err error, options ...Option) { + b.Send(colors.Error(err.Error()), options...) +} + +// SendView sends a new event for a view to the bus. 
+// Views are types that implement the `fmt.Stringer` interface +// which allow events with complex message formats. +func (b Bus) SendView(s fmt.Stringer, options ...Option) { + b.Send(s.String(), options...) +} + +// Events returns a read only channel to read the events. +func (b Bus) Events() <-chan Event { + return b.evChan +} + +// Stop stops the event bus. +// All new events are ignored once the event bus is stopped. +func (b *Bus) Stop() { + if b.evChan == nil { + return + } + + b.stopped = true + + close(b.evChan) +} diff --git a/ignite/pkg/events/bus_test.go b/ignite/pkg/events/bus_test.go new file mode 100644 index 0000000..db5cf80 --- /dev/null +++ b/ignite/pkg/events/bus_test.go @@ -0,0 +1,211 @@ +package events_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/events" +) + +func TestBusSend(t *testing.T) { + cases := []struct { + name, message string + options []events.Option + progress events.ProgressIndication + }{ + { + name: "without options", + message: "test", + progress: events.IndicationNone, + }, + { + name: "with options", + message: "test", + options: []events.Option{events.ProgressStart()}, + progress: events.IndicationStart, + }, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + // Arrange + bus := events.NewBus() + defer bus.Stop() + + // Act + bus.Send(tt.message, tt.options...) 
+ + // Assert + select { + case e := <-bus.Events(): + require.Equal(t, tt.message, e.Message) + require.Equal(t, tt.progress, e.ProgressIndication) + default: + t.Error("expected an event to be received") + } + }) + } +} + +func TestBusSendf(t *testing.T) { + // Arrange + bus := events.NewBus() + defer bus.Stop() + + want := "foo 42" + + // Act + bus.Sendf("%s %d", "foo", 42) + + // Assert + select { + case e := <-bus.Events(): + require.Equal(t, want, e.Message) + default: + t.Error("expected an event to be received") + } +} + +func TestBusSendInfo(t *testing.T) { + cases := []struct { + name, message string + options []events.Option + progress events.ProgressIndication + }{ + { + name: "without options", + message: "test", + progress: events.IndicationNone, + }, + { + name: "with options", + message: "test", + options: []events.Option{events.ProgressStart()}, + progress: events.IndicationStart, + }, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + // Arrange + bus := events.NewBus() + defer bus.Stop() + + // Act + bus.SendInfo(tt.message, tt.options...) + + // Assert + select { + case e := <-bus.Events(): + require.Equal(t, colors.Info(tt.message), e.Message) + require.Equal(t, tt.progress, e.ProgressIndication) + default: + t.Error("expected an event to be received") + } + }) + } +} + +func TestBusSendError(t *testing.T) { + cases := []struct { + name, message string + options []events.Option + progress events.ProgressIndication + }{ + { + name: "without options", + message: "test", + progress: events.IndicationNone, + }, + { + name: "with options", + message: "test", + options: []events.Option{events.ProgressStart()}, + progress: events.IndicationStart, + }, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + // Arrange + bus := events.NewBus() + defer bus.Stop() + + err := errors.New(tt.message) + + // Act + bus.SendError(err, tt.options...) 
+ + // Assert + select { + case e := <-bus.Events(): + require.Equal(t, colors.Error(tt.message), e.Message) + require.Equal(t, tt.progress, e.ProgressIndication) + default: + t.Error("expected an event to be received") + } + }) + } +} + +type testEventView struct { + message string +} + +func (v testEventView) String() string { + return v.message +} + +func TestBusSendView(t *testing.T) { + cases := []struct { + name, message string + options []events.Option + progress events.ProgressIndication + }{ + { + name: "without options", + message: "test", + progress: events.IndicationNone, + }, + { + name: "with options", + message: "test", + options: []events.Option{events.ProgressStart()}, + progress: events.IndicationStart, + }, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + // Arrange + bus := events.NewBus() + defer bus.Stop() + + view := testEventView{tt.message} + + // Act + bus.SendView(view, tt.options...) + + // Assert + select { + case e := <-bus.Events(): + require.Equal(t, tt.message, e.Message) + require.Equal(t, tt.progress, e.ProgressIndication) + default: + t.Error("expected an event to be received") + } + }) + } +} + +func TestBusStop(t *testing.T) { + // Arrange + bus := events.NewBus() + + // Act + bus.Stop() + bus.Send("ignored message") + _, ok := <-bus.Events() + + // Assert + require.False(t, ok, "expected no events after bus stopped") +} diff --git a/ignite/pkg/events/events.go b/ignite/pkg/events/events.go new file mode 100644 index 0000000..6ffc374 --- /dev/null +++ b/ignite/pkg/events/events.go @@ -0,0 +1,116 @@ +// Package events provides functionalities for packages to log their states as events +// for others to consume and display to end users in meaningful ways. +package events + +import ( + "fmt" + + "github.com/muesli/reflow/indent" +) + +// ProgressIndication enumerates possible states of progress indication for an Event. 
+type ProgressIndication uint8 + +const ( + GroupError = "error" +) + +const ( + IndicationNone ProgressIndication = iota + IndicationStart + IndicationUpdate + IndicationFinish +) + +type ( + // Event represents a state. + Event struct { + ProgressIndication ProgressIndication + Icon string + Indent uint + Message string + Verbose bool + Group string + } + + // Option event options. + Option func(*Event) +) + +// ProgressStart indicates that a status event starts the progress indicator. +func ProgressStart() Option { + return func(e *Event) { + e.ProgressIndication = IndicationStart + } +} + +// ProgressUpdate indicates that a status event updated the current progress. +func ProgressUpdate() Option { + return func(e *Event) { + e.ProgressIndication = IndicationUpdate + } +} + +// ProgressFinish indicates that a status event finished the ongoing task. +func ProgressFinish() Option { + return func(e *Event) { + e.ProgressIndication = IndicationFinish + } +} + +// Verbose sets high verbosity for the Event. +func Verbose() Option { + return func(e *Event) { + e.Verbose = true + } +} + +// Icon sets the text icon prefix. +func Icon(icon string) Option { + return func(e *Event) { + e.Icon = icon + } +} + +// Indent sets the text indentation. +func Indent(indent uint) Option { + return func(e *Event) { + e.Indent = indent + } +} + +// Group sets a group name for the event. +func Group(name string) Option { + return func(e *Event) { + e.Group = name + } +} + +// New creates a new event with given config. +func New(message string, options ...Option) Event { + ev := Event{Message: message} + + for _, applyOption := range options { + applyOption(&ev) + } + + return ev +} + +func (e Event) String() string { + s := e.Message + if e.Icon != "" { + s = fmt.Sprintf("%s %s", e.Icon, s) + } + + if e.Indent > 0 { + s = indent.String(s, e.Indent) + } + + return s +} + +// InProgress returns true when the event is in progress. 
+func (e Event) InProgress() bool { + return e.ProgressIndication == IndicationStart || e.ProgressIndication == IndicationUpdate +} diff --git a/ignite/pkg/events/events_test.go b/ignite/pkg/events/events_test.go new file mode 100644 index 0000000..b4da312 --- /dev/null +++ b/ignite/pkg/events/events_test.go @@ -0,0 +1,54 @@ +package events_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/events" +) + +func TestNew(t *testing.T) { + msg := "message" + cases := []struct { + name, message string + inProgress, hasIcon bool + options []events.Option + event events.Event + }{ + { + name: "event", + event: events.Event{}, + }, + { + name: "event start", + message: msg, + inProgress: true, + options: []events.Option{events.ProgressStart()}, + event: events.New(msg, events.ProgressStart()), + }, + { + name: "event update", + message: msg, + inProgress: true, + options: []events.Option{events.ProgressUpdate()}, + event: events.New(msg, events.ProgressUpdate()), + }, + { + name: "event finish", + message: msg, + options: []events.Option{events.ProgressFinish()}, + event: events.New(msg, events.ProgressFinish()), + }, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + // Act + e := events.New(tt.message, tt.options...) 
+ + // Assert + require.Equal(t, tt.event, e) + require.Equal(t, tt.inProgress, e.InProgress()) + }) + } +} diff --git a/ignite/pkg/goanalysis/goanalysis.go b/ignite/pkg/goanalysis/goanalysis.go new file mode 100644 index 0000000..4728675 --- /dev/null +++ b/ignite/pkg/goanalysis/goanalysis.go @@ -0,0 +1,359 @@ +// Package goanalysis provides a toolset for statically analysing Go applications +package goanalysis + +import ( + "go/ast" + "go/format" + "go/parser" + "go/token" + "io" + "os" + "path/filepath" + "slices" + "strings" + + "golang.org/x/mod/modfile" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +const ( + mainPackage = "main" + goFileExtension = ".go" + toolsBuildTag = "//go:build tools\n\n" +) + +// ErrMultipleMainPackagesFound is returned when multiple main packages found while expecting only one. +var ErrMultipleMainPackagesFound = errors.New("multiple main packages found") + +// DiscoverMain finds main Go packages under path. +func DiscoverMain(path string) (pkgPaths []string, err error) { + uniquePaths := make(map[string]struct{}) + + err = filepath.Walk(path, func(filePath string, f os.FileInfo, err error) error { + if f.IsDir() || !strings.HasSuffix(filePath, goFileExtension) { + return err + } + + parsed, err := parser.ParseFile(token.NewFileSet(), filePath, nil, parser.PackageClauseOnly) + if err != nil { + return err + } + + if mainPackage == parsed.Name.Name { + dir := filepath.Dir(filePath) + uniquePaths[dir] = struct{}{} + } + + return nil + }) + if err != nil { + return nil, err + } + + for path := range uniquePaths { + pkgPaths = append(pkgPaths, path) + } + + return pkgPaths, nil +} + +// DiscoverOneMain tries to find only one main Go package under path. 
+func DiscoverOneMain(path string) (pkgPath string, err error) { + pkgPaths, err := DiscoverMain(path) + if err != nil { + return "", err + } + + count := len(pkgPaths) + if count == 0 { + return "", errors.New("main package cannot be found") + } + if count > 1 { + return "", ErrMultipleMainPackagesFound + } + + return pkgPaths[0], nil +} + +// FuncVarExists finds a genesis variable goImport into the go file. +func FuncVarExists(f *ast.File, goImport, methodSignature string) bool { + var ( + importAlias = "" + imports = FormatImports(f) + ) + for alias, imp := range imports { + if imp == goImport { + importAlias = alias + } + } + if importAlias == "" { + return false + } + methodDecl := importAlias + "." + methodSignature + + for _, d := range f.Decls { + if declVarExists(d, methodDecl) { + return true + } + } + return false +} + +// declVarExists find a variable declaration into a ast.Decl. +func declVarExists(decl ast.Decl, methodDecl string) bool { + switch d := decl.(type) { + case *ast.FuncDecl: + for _, stmt := range d.Body.List { + switch v := stmt.(type) { + case *ast.DeclStmt: + if declVarExists(v.Decl, methodDecl) { + return true + } + case *ast.AssignStmt: + if len(v.Rhs) == 0 { + continue + } + decl, err := getCallExprName(v.Rhs[0]) + if err != nil { + continue + } + if decl == methodDecl { + return true + } + case *ast.IfStmt: + stmt, ok := v.Init.(*ast.AssignStmt) + if !ok || len(stmt.Rhs) == 0 { + continue + } + decl, err := getCallExprName(stmt.Rhs[0]) + if err != nil { + continue + } + if decl == methodDecl { + return true + } + } + } + case *ast.GenDecl: + decls, err := getGenDeclNames(d) + if err != nil { + return false + } + if slices.Contains(decls, methodDecl) { + return true + } + } + return false +} + +// getGenDeclNames returns a list of the method declaration inside the ast.GenDecl. 
+func getGenDeclNames(genDecl *ast.GenDecl) ([]string, error) { + if genDecl.Tok != token.VAR { + return nil, errors.Errorf("genDecl is not a var token: %v", genDecl.Tok) + } + var decls []string + for _, spec := range genDecl.Specs { + valueDecl, ok := spec.(*ast.ValueSpec) + if !ok { + continue + } + for _, id := range valueDecl.Names { + vSpec, ok := id.Obj.Decl.(*ast.ValueSpec) + if !ok || len(vSpec.Values) == 0 { + continue + } + + cursorDecl, err := getCallExprName(vSpec.Values[0]) + if err != nil { + continue + } + decls = append(decls, cursorDecl) + } + } + if len(decls) == 0 { + return nil, errors.Errorf("empty method declarations") + } + return decls, nil +} + +// getCallExprName returns the method declaration inside the ast.Expr. +func getCallExprName(expr ast.Expr) (string, error) { + call, ok := expr.(*ast.CallExpr) + if !ok { + return "", errors.Errorf("expression is not a *ast.CallExpr: %v", expr) + } + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return "", errors.Errorf("expression function is not a *ast.SelectorExpr: %v", call.Fun) + } + + x, ok := sel.X.(*ast.Ident) + if !ok { + return "", errors.Errorf("selector expression function is not a *ast.Ident: %v", sel.X) + } + + return x.String() + "." + sel.Sel.String(), nil +} + +// FindBlankImports find all blank imports ('_') into a file. +func FindBlankImports(node *ast.File) []string { + // Iterate through the import declarations and find the blank imports. + blankImports := make([]string, 0) + for _, imp := range node.Imports { + if imp.Name != nil && imp.Name.Name == "_" { + blankImports = append(blankImports, strings.ReplaceAll(imp.Path.Value, `"`, "")) + } + } + return blankImports +} + +// FormatImports translate f.Imports into a map where name -> package. +// Name is the alias if declared, or the last element of the package path. 
+func FormatImports(f *ast.File) map[string]string { + m := make(map[string]string) // name -> import + for _, imp := range f.Imports { + var importName string + if imp.Name != nil && imp.Name.Name != "_" && imp.Name.Name != "." { + importName = imp.Name.Name + } else { + importParts := strings.Split(imp.Path.Value, "/") + importName = importParts[len(importParts)-1] + } + + name := strings.Trim(importName, "\"") + m[name] = strings.Trim(imp.Path.Value, "\"") + } + return m +} + +// AddOrRemoveTools helper function to add or remove tools from the go.mod file. +func AddOrRemoveTools(f *modfile.File, writer io.Writer, importsToAdd, importsToRemove []string) error { + for _, imp := range importsToAdd { + _ = f.AddTool(imp) + } + + for _, imp := range importsToRemove { + _ = f.DropTool(imp) + } + + data, err := f.Format() + if err != nil { + return errors.Errorf("failed to format go.mod file: %w", err) + } + + _, err = writer.Write(data) + return err +} + +// ReplaceCode replace a function implementation into a package path. The method will find +// the method signature and re-write the method implementation based in the new function. +func ReplaceCode(pkgPath, oldFunctionName, newFunction string) (err error) { + absPath, err := filepath.Abs(pkgPath) + if err != nil { + return err + } + + fileSet := token.NewFileSet() + all, err := parser.ParseDir(fileSet, absPath, func(os.FileInfo) bool { return true }, parser.ParseComments) + if err != nil { + return err + } + + for _, pkg := range all { + for _, f := range pkg.Files { + found := false + ast.Inspect(f, func(n ast.Node) bool { + if funcDecl, ok := n.(*ast.FuncDecl); ok { + // Check if the function has the name you want to replace. + if funcDecl.Name.Name == oldFunctionName { + // Replace the function body with the replacement code. 
+ replacementExpr, err := parser.ParseExpr(newFunction) + if err != nil { + return false + } + funcDecl.Body = &ast.BlockStmt{List: []ast.Stmt{ + &ast.ExprStmt{X: replacementExpr}, + }} + found = true + return false + } + } + return true + }) + if !found { + continue + } + filePath := fileSet.Position(f.Package).Filename + outFile, err := os.Create(filePath) + if err != nil { + return err + } + + // Format and write the modified AST to the output file. + if err := format.Node(outFile, fileSet, f); err != nil { + return err + } + if err := outFile.Close(); err != nil { + return err + } + } + } + return nil +} + +// HasAnyStructFieldsInPkg finds the struct within a package folder and checks +// if any of the fields are defined in the struct. +func HasAnyStructFieldsInPkg(pkgPath, structName string, fields []string) (bool, error) { + absPath, err := filepath.Abs(pkgPath) + if err != nil { + return false, err + } + fileSet := token.NewFileSet() + all, err := parser.ParseDir(fileSet, absPath, nil, parser.ParseComments) + if err != nil { + return false, err + } + + fieldsNames := make(map[string]struct{}) + for _, field := range fields { + fieldsNames[strings.ToLower(field)] = struct{}{} + } + + exist := false + for _, pkg := range all { + for _, f := range pkg.Files { + ast.Inspect(f, func(x ast.Node) bool { + typeSpec, ok := x.(*ast.TypeSpec) + if !ok { + return true + } + + if _, ok := typeSpec.Type.(*ast.StructType); !ok || + typeSpec.Name.Name != structName || + typeSpec.Type == nil { + return true + } + + // Check if the struct has fields. + structType, ok := typeSpec.Type.(*ast.StructType) + if !ok { + return true + } + + // Iterate through the fields of the struct. 
+ for _, field := range structType.Fields.List { + for _, fieldName := range field.Names { + if _, ok := fieldsNames[strings.ToLower(fieldName.Name)]; !ok { + continue + } + exist = true + return false + } + } + return true + }) + } + } + return exist, nil +} diff --git a/ignite/pkg/goanalysis/goanalysis_test.go b/ignite/pkg/goanalysis/goanalysis_test.go new file mode 100644 index 0000000..a08c637 --- /dev/null +++ b/ignite/pkg/goanalysis/goanalysis_test.go @@ -0,0 +1,515 @@ +package goanalysis_test + +import ( + "go/ast" + "go/token" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/goanalysis" + "github.com/ignite/cli/v29/ignite/pkg/xast" +) + +var MainFile = []byte(`package main`) + +func TestDiscoverMain(t *testing.T) { + tests := []struct { + name string + mainFiles []string + expectFind bool + }{ + { + name: "single main", + mainFiles: []string{"main.go"}, + expectFind: true, + }, + { + name: "no mains", + mainFiles: []string{}, + expectFind: false, + }, + { + name: "single main in sub-folder", + mainFiles: []string{"sub/main.go"}, + expectFind: true, + }, + { + name: "single main with different name", + mainFiles: []string{"sub/somethingelse.go"}, + expectFind: true, + }, + { + name: "multiple mains", + mainFiles: []string{ + "main.go", + "sub/main.go", + "diffSub/alsomain.go", + }, + expectFind: true, + }, + { + name: "single main with wrong extension", + mainFiles: []string{"main.ogg"}, + expectFind: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tmpDir := t.TempDir() + want, err := createMainFiles(tmpDir, tt.mainFiles) + require.NoError(t, err) + + actual, err := goanalysis.DiscoverMain(tmpDir) + require.NoError(t, err) + if !tt.expectFind { + want = []string{} + } + require.ElementsMatch(t, actual, want) + }) + } +} + +func TestDiscoverOneMain(t *testing.T) { + tests := []struct { + name string + 
mainFiles []string + err error + }{ + { + name: "single main", + mainFiles: []string{"main.go"}, + err: nil, + }, + { + name: "multiple mains", + mainFiles: []string{ + "main.go", + "sub/main.go", + }, + err: goanalysis.ErrMultipleMainPackagesFound, + }, + { + name: "no mains", + mainFiles: []string{}, + err: errors.New("main package cannot be found"), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tmpDir := t.TempDir() + want, err := createMainFiles(tmpDir, tt.mainFiles) + require.NoError(t, err) + + actual, err := goanalysis.DiscoverOneMain(tmpDir) + if tt.err != nil { + require.Error(t, err) + require.True(t, errors.Is(tt.err, err)) + return + } + require.NoError(t, err) + require.Equal(t, 1, len(want)) + require.Equal(t, want[0], actual) + }) + } +} + +func createMainFiles(tmpDir string, mainFiles []string) (pathsWithMain []string, err error) { + for _, mf := range mainFiles { + mainFile := filepath.Join(tmpDir, mf) + dir := filepath.Dir(mainFile) + + if err = os.MkdirAll(dir, 0o770); err != nil { + return nil, err + } + + if err = os.WriteFile(mainFile, MainFile, 0o644); err != nil { + return nil, err + } + + pathsWithMain = append(pathsWithMain, dir) + } + + return pathsWithMain, nil +} + +func TestFuncVarExists(t *testing.T) { + tests := []struct { + name string + testFile string + goImport string + methodSignature string + want bool + }{ + { + name: "test a declaration inside a method success", + testFile: "testdata/varexist", + methodSignature: "Background", + goImport: "context", + want: true, + }, + { + name: "test global declaration success", + testFile: "testdata/varexist", + methodSignature: "Join", + goImport: "path/filepath", + want: true, + }, + { + name: "test a declaration inside an if and inside a method success", + testFile: "testdata/varexist", + methodSignature: "SplitList", + goImport: "path/filepath", + want: true, + }, + { + name: "test global variable success assign", + testFile: "testdata/varexist", + 
methodSignature: "New", + goImport: "errors", + want: true, + }, + { + name: "test invalid import", + testFile: "testdata/varexist", + methodSignature: "Join", + goImport: "errors", + want: false, + }, + { + name: "test invalid case sensitive assign", + testFile: "testdata/varexist", + methodSignature: "join", + goImport: "context", + want: false, + }, + { + name: "test invalid struct assign", + testFile: "testdata/varexist", + methodSignature: "fooStruct", + goImport: "context", + want: false, + }, + { + name: "test invalid method signature", + testFile: "testdata/varexist", + methodSignature: "fooMethod", + goImport: "context", + want: false, + }, + { + name: "test not found name", + testFile: "testdata/varexist", + methodSignature: "Invalid", + goImport: "context", + want: false, + }, + { + name: "test invalid assign with wrong", + testFile: "testdata/varexist", + methodSignature: "invalid.New", + goImport: "context", + want: false, + }, + { + name: "test invalid assign with wrong", + testFile: "testdata/varexist", + methodSignature: "SplitList", + goImport: "path/filepath", + want: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + appPkg, _, err := xast.ParseFile(tt.testFile) + require.NoError(t, err) + + got := goanalysis.FuncVarExists(appPkg, tt.goImport, tt.methodSignature) + require.Equal(t, tt.want, got) + }) + } +} + +func TestFindBlankImports(t *testing.T) { + tests := []struct { + name string + testfile string + want []string + }{ + { + name: "test a declaration inside a method success", + testfile: "testdata/varexist", + want: []string{"embed", "mvdan.cc/gofumpt"}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + appPkg, _, err := xast.ParseFile(tt.testfile) + require.NoError(t, err) + + got := goanalysis.FindBlankImports(appPkg) + require.Equal(t, tt.want, got) + }) + } +} + +func TestFormatImports(t *testing.T) { + tests := []struct { + name string + input *ast.File + want 
map[string]string + }{ + { + name: "Test one import", + input: &ast.File{ + Imports: []*ast.ImportSpec{ + { + Path: &ast.BasicLit{ + Kind: token.STRING, + Value: "\"fmt\"", + }, + }, + }, + }, + want: map[string]string{ + "fmt": "fmt", + }, + }, + { + name: "Test underscore import", + input: &ast.File{ + Imports: []*ast.ImportSpec{ + { + Path: &ast.BasicLit{ + Kind: token.STRING, + Value: "\"net/http\"", + }, + }, + { + Name: &ast.Ident{ + Name: "_", + }, + Path: &ast.BasicLit{ + Kind: token.STRING, + Value: "\"github.com/example/pkg\"", + }, + }, + }, + }, + want: map[string]string{ + "http": "net/http", + "pkg": "github.com/example/pkg", + }, + }, + { + name: "Test dot import", + input: &ast.File{ + Imports: []*ast.ImportSpec{ + { + Path: &ast.BasicLit{ + Kind: token.STRING, + Value: "\"net/http\"", + }, + }, + { + Name: &ast.Ident{ + Name: ".", + }, + Path: &ast.BasicLit{ + Kind: token.STRING, + Value: "\"github.com/example/pkg\"", + }, + }, + { + Path: &ast.BasicLit{ + Kind: token.STRING, + Value: "\"fmt\"", + }, + }, + }, + }, + want: map[string]string{ + "http": "net/http", + "pkg": "github.com/example/pkg", + "fmt": "fmt", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + require.Equal(t, tt.want, goanalysis.FormatImports(tt.input)) + }) + } +} + +func TestReplaceCode(t *testing.T) { + var ( + newFunction = `package test +func NewMethod1() { + n := "test new method" + bla := fmt.Sprintf("test new - %s", n) + fmt.Println(bla) +}` + rollback = `package test +func NewMethod1() { + foo := 100 + bar := fmt.Sprintf("test - %d", foo) + fmt.Println(bar) +}` + ) + + type args struct { + path string + oldFunctionName string + newFunction string + } + tests := []struct { + name string + args args + err error + }{ + { + name: "function fooTest", + args: args{ + path: "testdata", + oldFunctionName: "fooTest", + newFunction: newFunction, + }, + }, + { + name: "function BazTest", + args: args{ + path: "testdata", + oldFunctionName: 
"BazTest", + newFunction: newFunction, + }, + }, + { + name: "function invalidFunction", + args: args{ + path: "testdata", + oldFunctionName: "invalidFunction", + newFunction: newFunction, + }, + }, + { + name: "invalid path", + args: args{ + path: "invalid_path", + oldFunctionName: "invalidPath", + newFunction: newFunction, + }, + err: os.ErrNotExist, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := goanalysis.ReplaceCode(tt.args.path, tt.args.oldFunctionName, tt.args.newFunction) + if tt.err != nil { + require.Error(t, err) + require.ErrorIs(t, err, tt.err) + return + } + require.NoError(t, err) + require.NoError(t, goanalysis.ReplaceCode(tt.args.path, tt.args.oldFunctionName, rollback)) + }) + } +} + +func TestHasStructFieldsInPkg(t *testing.T) { + tests := []struct { + name string + path string + structName string + fields []string + err error + want bool + }{ + { + name: "test a value with an empty struct", + path: "testdata", + structName: "emptyStruct", + fields: []string{"name"}, + want: false, + }, + { + name: "test no value with an empty struct", + path: "testdata", + structName: "emptyStruct", + fields: []string{""}, + want: false, + }, + { + name: "test a valid field into single field struct", + path: "testdata", + structName: "fooStruct", + fields: []string{"name"}, + want: true, + }, + { + name: "test a not valid field into single field struct", + path: "testdata", + structName: "fooStruct", + fields: []string{"baz"}, + want: false, + }, + { + name: "test a not valid field into struct", + path: "testdata", + structName: "bazStruct", + fields: []string{"baz"}, + want: false, + }, + { + name: "test a valid field into struct", + path: "testdata", + structName: "bazStruct", + fields: []string{"name"}, + want: true, + }, + { + name: "test two valid fields into struct", + path: "testdata", + structName: "bazStruct", + fields: []string{"name", "title"}, + want: true, + }, + { + name: "test a valid and a not valid fields 
into struct", + path: "testdata", + structName: "bazStruct", + fields: []string{"foo", "title"}, + want: true, + }, + { + name: "test three not valid fields into struct", + path: "testdata", + structName: "bazStruct", + fields: []string{"foo", "baz", "bla"}, + want: false, + }, + { + name: "invalid path", + path: "invalid_path", + err: os.ErrNotExist, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := goanalysis.HasAnyStructFieldsInPkg(tt.path, tt.structName, tt.fields) + if tt.err != nil { + require.Error(t, err) + require.ErrorIs(t, err, tt.err) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} diff --git a/ignite/pkg/goanalysis/testdata/fieldexist.go b/ignite/pkg/goanalysis/testdata/fieldexist.go new file mode 100644 index 0000000..8a0f979 --- /dev/null +++ b/ignite/pkg/goanalysis/testdata/fieldexist.go @@ -0,0 +1,13 @@ +package goanalysis + +type ( + emptyStruct struct{} + fooStruct struct { + name string + } + bazStruct struct { + name string + title string + description string + } +) diff --git a/ignite/pkg/goanalysis/testdata/replace.go b/ignite/pkg/goanalysis/testdata/replace.go new file mode 100644 index 0000000..7aec27c --- /dev/null +++ b/ignite/pkg/goanalysis/testdata/replace.go @@ -0,0 +1,16 @@ +package testdata + +import "fmt" + +func fooTest() { + n := "test new method" + bla := fmt.Sprintf("test new - %s", n) + fmt. 
+ Println(bla) +} + +func BazTest() { + foo := 100 + bar := fmt.Sprintf("test - %d", foo) + fmt.Println(bar) +} diff --git a/ignite/pkg/goanalysis/testdata/varexist b/ignite/pkg/goanalysis/testdata/varexist new file mode 100644 index 0000000..8b8aa51 --- /dev/null +++ b/ignite/pkg/goanalysis/testdata/varexist @@ -0,0 +1,46 @@ +package goanalysis + +import ( + "context" + "errors" + "path/filepath" + + _ "embed" + _ "mvdan.cc/gofumpt" +) + +const ( + fooConst = "foo" +) + +type ( + fooStruct struct { + name string + } +) + +var ( + fooVar = filepath.Join("test", "join") + fooStructVar = fooStruct{} +) + +var ( + errorFooVar = errors.New("error foo") + bazStructVar = fooStruct{} + errorBarVar = errors.New("error bar") +) + +func fooMethod(foo string) error { + return nil +} + +func barMethod(foo string) context.Context { + contextVar := context.Background() + return contextVar +} + +func bazMethod(foo string) { + if list := filepath.SplitList("list"); list == nil { + return errors.New("error baz") + } +} \ No newline at end of file diff --git a/ignite/pkg/gocmd/gocmd.go b/ignite/pkg/gocmd/gocmd.go new file mode 100644 index 0000000..75a2aa2 --- /dev/null +++ b/ignite/pkg/gocmd/gocmd.go @@ -0,0 +1,251 @@ +package gocmd + +import ( + "bytes" + "context" + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/exec" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/goenv" +) + +const ( + // CommandInstall represents go "install" command. + CommandInstall = "install" + + // CommandGet represents go "get" command. + CommandGet = "get" + + // CommandBuild represents go "build" command. + CommandBuild = "build" + + // CommandMod represents go "mod" command. + CommandMod = "mod" + + // CommandModTidy represents go mod "tidy" command. + CommandModTidy = "tidy" + + // CommandModVerify represents go mod "verify" command. 
+ CommandModVerify = "verify" + + // CommandModDownload represents go mod "download" command. + CommandModDownload = "download" + + // CommandFmt represents go "fmt" command. + CommandFmt = "fmt" + + // CommandEnv represents go "env" command. + CommandEnv = "env" + + // CommandList represents go "list" command. + CommandList = "list" + + // CommandTest represents go "test" command. + CommandTest = "test" + + // EnvGOARCH represents GOARCH variable. + EnvGOARCH = "GOARCH" + // EnvGOMOD represents GOMOD variable. + EnvGOMOD = "GOMOD" + // EnvGOOS represents GOOS variable. + EnvGOOS = "GOOS" + + // FlagGcflags represents gcflags go flag. + FlagGcflags = "-gcflags" + // FlagGcflagsValueDebug represents debug go flags. + FlagGcflagsValueDebug = "all=-N -l" + // FlagLdflags represents ldflags go flag. + FlagLdflags = "-ldflags" + // FlagTags represents tags go flag. + FlagTags = "-tags" + // FlagMod represents mod go flag. + FlagMod = "-mod" + // FlagModValueReadOnly represents readonly go flag. + FlagModValueReadOnly = "readonly" + // FlagOut represents out go flag. + FlagOut = "-o" +) + +// Env returns the value of `go env name`. +func Env(name string) (string, error) { + var b bytes.Buffer + err := exec.Exec(context.Background(), []string{Name(), CommandEnv, name}, exec.StepOption(step.Stdout(&b))) + return b.String(), err +} + +// Name returns the name of Go binary to use. +func Name() string { + custom := os.Getenv("GONAME") + if custom != "" { + return custom + } + return "go" +} + +// Fmt runs go fmt on path. +func Fmt(ctx context.Context, path string, options ...exec.Option) error { + return exec.Exec(ctx, []string{Name(), CommandFmt, "./..."}, append(options, exec.StepOption(step.Workdir(path)))...) +} + +// ModTidy runs go mod tidy on path with options. 
+func ModTidy(ctx context.Context, path string, options ...exec.Option) error { + return exec.Exec(ctx, []string{Name(), CommandMod, CommandModTidy}, + append(options, + exec.StepOption(step.Workdir(path)), + // FIXME(tb) untagged version of ignite/cli triggers a 404 not found when go + // mod tidy requests the sumdb, until we understand why, we disable sumdb. + // related issue: https://github.com/golang/go/issues/56174 + // Also disable Go toolchain download because it doesn't work without a valid + // GOSUMDB value: https://go.dev/doc/toolchain#download + exec.StepOption(step.Env("GOSUMDB=off", "GOTOOLCHAIN=local+path")), + )...) +} + +// ModVerify runs go mod verify on path with options. +func ModVerify(ctx context.Context, path string, options ...exec.Option) error { + return exec.Exec(ctx, []string{Name(), CommandMod, CommandModVerify}, append(options, exec.StepOption(step.Workdir(path)))...) +} + +// ModDownload runs go mod download on a path with options. +func ModDownload(ctx context.Context, path string, json bool, options ...exec.Option) error { + command := []string{Name(), CommandMod, CommandModDownload} + if json { + command = append(command, "-json") + } + return exec.Exec(ctx, command, append(options, exec.StepOption(step.Workdir(path)))...) +} + +// BuildPath runs go install on cmd folder with options. +func BuildPath(ctx context.Context, output, binary, path string, flags []string, options ...exec.Option) error { + binaryOutput, err := binaryPath(output, binary) + if err != nil { + return err + } + command := []string{ + Name(), + CommandBuild, + FlagOut, binaryOutput, + } + command = append(command, flags...) + command = append(command, ".") + return exec.Exec(ctx, command, append(options, exec.StepOption(step.Workdir(path)))...) +} + +// Build runs go build on path with options. 
+func Build(ctx context.Context, out, path string, flags []string, options ...exec.Option) error { + command := []string{ + Name(), + CommandBuild, + FlagOut, out, + } + command = append(command, flags...) + return exec.Exec(ctx, command, append(options, exec.StepOption(step.Workdir(path)))...) +} + +// InstallAll runs go install ./... on path with options. +func InstallAll(ctx context.Context, path string, flags []string, options ...exec.Option) error { + command := []string{ + Name(), + CommandInstall, + } + command = append(command, flags...) + command = append(command, "./...") + return exec.Exec(ctx, command, append(options, exec.StepOption(step.Workdir(path)))...) +} + +// Install runs go install pkgs on path with options. +func Install(ctx context.Context, path string, pkgs []string, options ...exec.Option) error { + command := []string{ + Name(), + CommandInstall, + } + command = append(command, pkgs...) + return exec.Exec(ctx, command, append(options, exec.StepOption(step.Workdir(path)))...) +} + +// Get runs go get pkgs on path with options. +func Get(ctx context.Context, path string, pkgs []string, options ...exec.Option) error { + command := []string{ + Name(), + CommandGet, + } + command = append(command, pkgs...) + return exec.Exec(ctx, command, append(options, exec.StepOption(step.Workdir(path)))...) +} + +// List returns the list of packages in path. +func List(ctx context.Context, path string, flags []string, options ...exec.Option) ([]string, error) { + command := []string{ + Name(), + CommandList, + } + command = append(command, flags...) + var b bytes.Buffer + err := exec.Exec(ctx, command, + append(options, exec.StepOption(step.Workdir(path)), exec.StepOption(step.Stdout(&b)))...) + if err != nil { + return nil, err + } + return strings.Fields(b.String()), nil +} + +func Test(ctx context.Context, path string, flags []string, options ...exec.Option) error { + command := []string{ + Name(), + CommandTest, + } + command = append(command, flags...) 
+	return exec.Exec(ctx, command, append(options, exec.StepOption(step.Workdir(path)))...)
+}
+
+// Ldflags returns a combined ldflags set from flags.
+func Ldflags(flags ...string) string {
+	return strings.Join(flags, " ")
+}
+
+// Tags returns a combined tags set from flags.
+func Tags(tags ...string) string {
+	return strings.Join(tags, " ")
+}
+
+// BuildTarget builds a GOOS:GOARCH pair.
+func BuildTarget(goos, goarch string) string {
+	return fmt.Sprintf("%s:%s", goos, goarch)
+}
+
+// ParseTarget parses a GOOS:GOARCH pair.
+// It returns an error when t is not in the "GOOS:GOARCH" format.
+func ParseTarget(t string) (goos, goarch string, err error) {
+	parsed := strings.Split(t, ":")
+	if len(parsed) != 2 {
+		return "", "", errors.New("invalid Go target, expected in GOOS:GOARCH format")
+	}
+
+	return parsed[0], parsed[1], nil
+}
+
+// PackageLiteral returns the string representation of package part of go get [package].
+func PackageLiteral(path, version string) string {
+	return fmt.Sprintf("%s@%s", path, version)
+}
+
+// GoImports runs `goimports -w` on path, rewriting the file in place.
+// Unlike the other helpers in this package it takes no exec options and
+// does not set a working directory.
+// NOTE(review): goimports is invoked through `go tool`, which requires the
+// tool to be declared in the module's go.mod — confirm callers guarantee that.
+func GoImports(ctx context.Context, path string) error {
+	return exec.Exec(ctx, []string{"go", "tool", "golang.org/x/tools/cmd/goimports", "-w", path})
+}
+
+// binaryPath determines the path where binary will be located at.
+func binaryPath(output, binary string) (string, error) { + if output != "" { + outputAbs, err := filepath.Abs(output) + if err != nil { + return "", err + } + return filepath.Join(outputAbs, binary), nil + } + return filepath.Join(goenv.Bin(), binary), nil +} diff --git a/ignite/pkg/gocmd/gocmd_test.go b/ignite/pkg/gocmd/gocmd_test.go new file mode 100644 index 0000000..1abb78f --- /dev/null +++ b/ignite/pkg/gocmd/gocmd_test.go @@ -0,0 +1,22 @@ +package gocmd_test + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/ignite/cli/v29/ignite/pkg/gocmd" +) + +func TestList(t *testing.T) { + wd, err := os.Getwd() + assert.NoError(t, err) + + ctx := context.Background() + packages, err := gocmd.List(ctx, wd, []string{"-m", "-f={{.Path}}", "github.com/ignite/cli/v29"}) + assert.NoError(t, err) + + assert.Contains(t, packages, "github.com/ignite/cli/v29") +} diff --git a/ignite/pkg/goenv/goenv.go b/ignite/pkg/goenv/goenv.go new file mode 100644 index 0000000..09ea53d --- /dev/null +++ b/ignite/pkg/goenv/goenv.go @@ -0,0 +1,62 @@ +// Package goenv defines env variables known by Go and some utilities around it. +package goenv + +import ( + "fmt" + "go/build" + "os" + "path/filepath" +) + +const ( + // GOBIN is the env var for GOBIN. + GOBIN = "GOBIN" + + // GOPATH is the env var for GOPATH. + GOPATH = "GOPATH" + + // GOMODCACHE is the env var for GOMODCACHE. + GOMODCACHE = "GOMODCACHE" +) + +const ( + binDir = "bin" + modDir = "pkg/mod" +) + +// Bin returns the path of where Go binaries are installed. +func Bin() string { + if binPath := os.Getenv(GOBIN); binPath != "" { + return binPath + } + if goPath := os.Getenv(GOPATH); goPath != "" { + return filepath.Join(goPath, binDir) + } + return filepath.Join(build.Default.GOPATH, binDir) +} + +// Path returns $PATH with correct go bin configuration set. 
+func Path() string { + return os.ExpandEnv(fmt.Sprintf("$PATH:%s", Bin())) +} + +// ConfigurePath configures the env with correct $PATH that has go bin setup. +func ConfigurePath() error { + return os.Setenv("PATH", Path()) +} + +// GoModCache returns the path to Go's module cache. +func GoModCache() string { + if path := os.Getenv(GOMODCACHE); path != "" { + return path + } + if path := os.Getenv(GOPATH); path != "" { + return filepath.Join(path, modDir) + } + return filepath.Join(build.Default.GOPATH, modDir) +} + +// GoPath returns the go path. +func GoPath() string { + return os.Getenv(GOPATH) +} diff --git a/ignite/pkg/goenv/goenv_test.go b/ignite/pkg/goenv/goenv_test.go new file mode 100644 index 0000000..77b1964 --- /dev/null +++ b/ignite/pkg/goenv/goenv_test.go @@ -0,0 +1,49 @@ +package goenv_test + +import ( + "go/build" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/goenv" +) + +func TestGoModCache(t *testing.T) { + cases := []struct { + name, envKey, envValue, want string + }{ + { + name: "from go module cache", + envKey: "GOMODCACHE", + envValue: "/foo/cache/pkg/mod", + want: "/foo/cache/pkg/mod", + }, + { + name: "from go path", + envKey: "GOPATH", + envValue: "/foo/go", + want: "/foo/go/pkg/mod", + }, + { + name: "from default path", + want: filepath.Join(build.Default.GOPATH, "pkg/mod"), + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + // Arrange + if tt.envKey != "" { + t.Setenv(tt.envKey, tt.envValue) + } + + // Act + path := goenv.GoModCache() + + // Assert + require.Equal(t, tt.want, path) + }) + } +} diff --git a/ignite/pkg/gomodule/gomodule.go b/ignite/pkg/gomodule/gomodule.go new file mode 100644 index 0000000..aa0379b --- /dev/null +++ b/ignite/pkg/gomodule/gomodule.go @@ -0,0 +1,202 @@ +package gomodule + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "io/fs" + "os" + "path/filepath" + "slices" + "strings" + + 
"golang.org/x/mod/modfile" + "golang.org/x/mod/module" + + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/exec" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gocmd" +) + +const pathCacheNamespace = "gomodule.path" + +var ( + // ErrGoModNotFound returned when go.mod file cannot be found for an app. + ErrGoModNotFound = errors.New("go.mod not found") + + // ErrModuleNotFound is returned when a Go module is not found. + ErrModuleNotFound = errors.New("module not found") +) + +// Version is an alias to the module version type. +type Version = module.Version + +// Module contains Go module info. +type Module struct { + // Path is the Go module path. + Path string + + // Version is the module version. + Version string + + // Dir is the absolute path to the Go module. + Dir string +} + +// ParseAt finds and parses go.mod at app's path. +func ParseAt(path string) (*modfile.File, error) { + gomod, err := os.ReadFile(filepath.Join(path, "go.mod")) + if err != nil { + if errors.Is(err, fs.ErrNotExist) { + return nil, ErrGoModNotFound + } + return nil, err + } + return modfile.Parse("", gomod, nil) +} + +// FilterVersions filters dependencies under require section by their paths. +func FilterVersions(dependencies []Version, paths ...string) []Version { + var filtered []Version + + for _, dep := range dependencies { + if slices.Contains(paths, dep.Path) { + filtered = append(filtered, dep) + } + } + + return filtered +} + +// ResolveDependencies resolves dependencies from go.mod file. +// It replaces direct dependencies with their replacements. 
+// The error result is currently always nil; it is kept so the signature can
+// report parse/resolution failures without breaking callers.
+func ResolveDependencies(f *modfile.File, includeIndirect bool) ([]Version, error) {
+	var versions []Version
+
+	// isReplacementAdded appends the replacement module (rep.New) instead of
+	// the required one when a matching `replace` directive exists.
+	isReplacementAdded := func(rv Version) bool {
+		for _, rep := range f.Replace {
+			if rv.Path == rep.Old.Path {
+				versions = append(versions, rep.New)
+
+				return true
+			}
+		}
+
+		return false
+	}
+
+	for _, req := range f.Require {
+		if req.Indirect && !includeIndirect {
+			continue
+		}
+		if !isReplacementAdded(req.Mod) {
+			versions = append(versions, req.Mod)
+		}
+	}
+
+	return versions, nil
+}
+
+// LocatePath locates pkg's absolute path managed by 'go mod' on the local filesystem.
+// Resolved paths for hosted packages are memoized in cacheStorage keyed by
+// (path, version), so the module download only happens on a cache miss.
+func LocatePath(ctx context.Context, cacheStorage cache.Storage, src string, pkg Version) (path string, err error) {
+	// can be a local package.
+	if pkg.Version == "" { // indicates that this is a local package.
+		if filepath.IsAbs(pkg.Path) {
+			return pkg.Path, nil
+		}
+		return filepath.Join(src, pkg.Path), nil
+	}
+
+	pathCache := cache.New[string](cacheStorage, pathCacheNamespace)
+	cacheKey := cache.Key(pkg.Path, pkg.Version)
+	path, err = pathCache.Get(cacheKey)
+	if err != nil && !errors.Is(err, cache.ErrorNotFound) {
+		return "", err
+	}
+	if !errors.Is(err, cache.ErrorNotFound) {
+		// Cache hit: return the previously resolved directory.
+		return path, nil
+	}
+
+	// otherwise, it is hosted.
+	m, err := FindModule(ctx, src, pkg.String())
+	if err != nil {
+		return "", err
+	}
+
+	if err = pathCache.Put(cacheKey, m.Dir); err != nil {
+		return "", err
+	}
+	return m.Dir, nil
+}
+
+// SplitPath splits a Go import path into a URI path and version.
+// Version is an empty string when the path doesn't contain a version suffix.
+// Versioned paths use the "path@version" format.
+func SplitPath(path string) (string, string) {
+	// A leading '@' means there is no path component, so the input is invalid.
+	if len(path) == 0 || path[0] == '@' {
+		return "", ""
+	}
+
+	parts := strings.SplitN(path, "@", 2)
+	if len(parts) == 2 {
+		return parts[0], parts[1]
+	}
+	return parts[0], ""
+}
+
+// JoinPath joins a Go import path URI to a version.
+// The resulting path has the "path@version" format.
+func JoinPath(path, version string) string { + if path == "" { + return "" + } + + if version == "" { + return path + } + + return fmt.Sprintf("%s@%s", path, version) +} + +// FindModule returns the Go module info for an import path. +// The module is searched within the dependencies of the module defined in root dir. +// If a local module path is passed, it returns the local module info. +func FindModule(ctx context.Context, rootDir, path string) (Module, error) { + // can be a local module. + if filepath.IsAbs(path) || strings.HasPrefix(path, ".") { // indicates that this is a local module. + return Module{ + Path: path, + Version: "", + Dir: path, + }, nil + } + + var stdout bytes.Buffer + err := gocmd.ModDownload(ctx, rootDir, true, exec.StepOption(step.Stdout(&stdout))) + if err != nil { + return Module{}, err + } + + dec := json.NewDecoder(&stdout) + p, version := SplitPath(path) + + for dec.More() { + var m Module + if err := dec.Decode(&m); err != nil { + if errors.Is(err, io.EOF) { + break + } + return Module{}, err + } + + if m.Path == p && (version == "" || version == m.Version) { + return m, nil + } + } + + return Module{}, errors.Errorf("%w: %s", ErrModuleNotFound, path) +} diff --git a/ignite/pkg/gomodule/gomodule_test.go b/ignite/pkg/gomodule/gomodule_test.go new file mode 100644 index 0000000..4197a4b --- /dev/null +++ b/ignite/pkg/gomodule/gomodule_test.go @@ -0,0 +1,103 @@ +package gomodule_test + +import ( + "context" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/gomodule" +) + +func TestSplitPath(t *testing.T) { + cases := []struct { + name string + path string + wantPath string + wantVersion string + }{ + { + name: "path with version", + path: "foo@v0.1.0", + wantPath: "foo", + wantVersion: "v0.1.0", + }, + { + name: "path without version", + path: "foo", + wantPath: "foo", + }, + { + name: "invalid path", + path: "@v0.1.0", + }, + { + name: "empty path", + }, + } + + for _, tt := 
range cases { + t.Run(tt.name, func(t *testing.T) { + // Act + p, v := gomodule.SplitPath(tt.path) + + // Assert + require.Equal(t, tt.wantPath, p) + require.Equal(t, tt.wantVersion, v) + }) + } +} + +func TestJoinPath(t *testing.T) { + require.Equal(t, "foo@v0.1.0", gomodule.JoinPath("foo", "v0.1.0")) + require.Equal(t, "", gomodule.JoinPath("", "v0.1.0")) + require.Equal(t, "foo", gomodule.JoinPath("foo", "")) +} + +func TestFindModule(t *testing.T) { + cases := []struct { + name string + importPath string + version string + wantErr error + }{ + { + name: "module exists", + importPath: "github.com/gorilla/mux", + version: "v1.8.0", + }, + { + name: "module exists with local replace", + importPath: "../local-module-fork", + version: "", + }, + { + name: "module missing", + importPath: "github.com/foo/bar", + version: "v0.1.0", + wantErr: gomodule.ErrModuleNotFound, + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + // Arrange + ctx := context.Background() + path := gomodule.JoinPath(tt.importPath, tt.version) + + // Act + m, err := gomodule.FindModule(ctx, "testdata/module", path) + + // Assert + if tt.wantErr != nil { + require.ErrorIs(t, err, tt.wantErr) + } else { + require.NoError(t, err) + require.Equal(t, tt.importPath, m.Path) + require.Equal(t, tt.version, m.Version) + require.True(t, strings.HasSuffix(m.Dir, path)) + } + }) + } +} diff --git a/ignite/pkg/gomodule/testdata/local-module-fork/go.mod b/ignite/pkg/gomodule/testdata/local-module-fork/go.mod new file mode 100644 index 0000000..a9bc883 --- /dev/null +++ b/ignite/pkg/gomodule/testdata/local-module-fork/go.mod @@ -0,0 +1 @@ +module github.com/ignite/modules diff --git a/ignite/pkg/gomodule/testdata/module/go.mod b/ignite/pkg/gomodule/testdata/module/go.mod new file mode 100644 index 0000000..c11252d --- /dev/null +++ b/ignite/pkg/gomodule/testdata/module/go.mod @@ -0,0 +1,11 @@ +module github.com/ignite/cli/ignite/pkg/gomodule + +go 1.23 + +require ( + 
	github.com/gorilla/mux v1.8.0
+	github.com/stretchr/testify v1.8.2
+	github.com/ignite/modules v1.0.0
+)
+
+replace github.com/ignite/modules => ../local-module-fork
diff --git a/ignite/pkg/gomodulepath/gomodulepath.go b/ignite/pkg/gomodulepath/gomodulepath.go
new file mode 100644
index 0000000..e07212c
--- /dev/null
+++ b/ignite/pkg/gomodulepath/gomodulepath.go
@@ -0,0 +1,159 @@
+// Package gomodulepath implements functions for the manipulation of Go module paths.
+// Paths are typically defined as a domain name and a path containing the user and
+// repository names, e.g. "github.com/username/reponame", but Go also allows other module
+// names like "domain.com/name", "name", "namespace/name", or similar variants.
+package gomodulepath
+
+import (
+	"fmt"
+	"go/parser"
+	"go/token"
+	"path/filepath"
+	"regexp"
+	"strings"
+
+	"golang.org/x/mod/module"
+	"golang.org/x/mod/semver"
+
+	"github.com/ignite/cli/v29/ignite/pkg/errors"
+	"github.com/ignite/cli/v29/ignite/pkg/gomodule"
+)
+
+// Path represents a Go module's path.
+type Path struct {
+	// RawPath is the Go module's full path.
+	// e.g.: github.com/ignite/cli.
+	RawPath string
+
+	// Root is the root directory name of the Go module.
+	// e.g.: cli for github.com/ignite/cli.
+	Root string
+
+	// Package is the default package name for the Go module that can be used
+	// to host the main functionality of the module.
+	// e.g.: cli for github.com/ignite/cli.
+	Package string
+}
+
+// Parse parses rawpath into a module Path.
+// The raw path is validated first; the package name is derived from the root
+// directory name with any characters invalid in a Go identifier removed.
+func Parse(rawpath string) (Path, error) {
+	if err := validateRawPath(rawpath); err != nil {
+		return Path{}, err
+	}
+	rootName := root(rawpath)
+	// A package name cannot contain "-" (or other non-alphanumerics),
+	// so gracefully remove them if they are present.
+ name = sp[len(sp)-2] + } + return name +} + +func stripNonAlphaNumeric(name string) string { + reg := regexp.MustCompile(`[^a-zA-Z0-9_]+`) + return strings.ToLower(reg.ReplaceAllString(name, "")) +} diff --git a/ignite/pkg/gomodulepath/gomodulepath_test.go b/ignite/pkg/gomodulepath/gomodulepath_test.go new file mode 100644 index 0000000..bcc3482 --- /dev/null +++ b/ignite/pkg/gomodulepath/gomodulepath_test.go @@ -0,0 +1,182 @@ +package gomodulepath + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func TestParse(t *testing.T) { + cases := []struct { + name string + rawpath string + path Path + err error + }{ + { + name: "standard", + rawpath: "github.com/a/b", + path: Path{RawPath: "github.com/a/b", Root: "b", Package: "b"}, + }, + { + name: "with dash", + rawpath: "github.com/a/b-c", + path: Path{RawPath: "github.com/a/b-c", Root: "b-c", Package: "bc"}, + }, + { + name: "short", + rawpath: "github.com/a", + path: Path{RawPath: "github.com/a", Root: "a", Package: "a"}, + }, + { + name: "short with dash", + rawpath: "github.com/a-c", + path: Path{RawPath: "github.com/a-c", Root: "a-c", Package: "ac"}, + }, + { + name: "short with version", + rawpath: "github.com/a/v2", + path: Path{RawPath: "github.com/a/v2", Root: "a", Package: "a"}, + }, + { + name: "long", + rawpath: "github.com/a/b/c", + path: Path{RawPath: "github.com/a/b/c", Root: "c", Package: "c"}, + }, + { + name: "invalid as go.mod module name", + rawpath: "github.com/a/b/c@", + err: errors.New(`app name is an invalid go module name: malformed module path "github.com/a/b/c@": invalid char '@'`), + }, + { + name: "name starting with the letter v", + rawpath: "github.com/a/vote", + path: Path{RawPath: "github.com/a/vote", Root: "vote", Package: "vote"}, + }, + { + name: "with version", + rawpath: "github.com/a/b/v2", + path: Path{RawPath: "github.com/a/b/v2", Root: "b", Package: "b"}, + }, + { + name: "with underscore", + rawpath: 
"github.com/a/b_c", + path: Path{RawPath: "github.com/a/b_c", Root: "b_c", Package: "b_c"}, + }, + { + name: "with mixed case", + rawpath: "github.com/a/bC", + path: Path{RawPath: "github.com/a/bC", Root: "bC", Package: "bc"}, + }, + { + name: "with a name", + rawpath: "a", + path: Path{RawPath: "a", Root: "a", Package: "a"}, + }, + { + name: "with a name containing underscore", + rawpath: "a_b", + path: Path{RawPath: "a_b", Root: "a_b", Package: "a_b"}, + }, + { + name: "with a name containing dash", + rawpath: "a-b", + path: Path{RawPath: "a-b", Root: "a-b", Package: "ab"}, + }, + { + name: "with a path", + rawpath: "a/b/c", + path: Path{RawPath: "a/b/c", Root: "c", Package: "c"}, + }, + { + name: "with a path containing underscore", + rawpath: "a/b_c", + path: Path{RawPath: "a/b_c", Root: "b_c", Package: "b_c"}, + }, + { + name: "with a path containing dash", + rawpath: "a/b-c", + path: Path{RawPath: "a/b-c", Root: "b-c", Package: "bc"}, + }, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + path, err := Parse(tt.rawpath) + if err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.path, path) + }) + } +} + +func TestExtractAppPath(t *testing.T) { + cases := []struct { + name string + path string + want string + }{ + { + name: "github uri", + path: "github.com/ignite/cli", + want: "ignite/cli", + }, + { + name: "short uri", + path: "domain.com/ignite", + want: "ignite", + }, + { + name: "long uri", + path: "domain.com/a/b/c/ignite/cli", + want: "ignite/cli", + }, + { + name: "name", + path: "cli", + want: "cli", + }, + { + name: "path", + path: "ignite/cli", + want: "ignite/cli", + }, + { + name: "long path", + path: "a/b/c/ignite/cli", + want: "ignite/cli", + }, + { + name: "empty", + path: "", + want: "", + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + require.Equal(t, tt.want, ExtractAppPath(tt.path)) + }) + } +} + 
+func TestValidateURIPath(t *testing.T) {
+	require.NoError(t, validateURIPath("github.com/ignite/cli/v29"))
+}
+
+func TestValidateURIPathWithInvalidPath(t *testing.T) {
+	// No dot in the first path element, so module.CheckPath rejects it.
+	require.Error(t, validateURIPath("github/ignite/cli"))
+}
+
+func TestValidateNamePath(t *testing.T) {
+	require.NoError(t, validateNamePath("cli"))
+}
+
+func TestValidateNamePathWithInvalidPath(t *testing.T) {
+	// '@' is not a valid import path character.
+	require.Error(t, validateNamePath("cli@"))
+}
diff --git a/ignite/pkg/httpstatuschecker/httpstatuschecker.go b/ignite/pkg/httpstatuschecker/httpstatuschecker.go
new file mode 100644
index 0000000..3d751b8
--- /dev/null
+++ b/ignite/pkg/httpstatuschecker/httpstatuschecker.go
@@ -0,0 +1,60 @@
+// Package httpstatuschecker is a tool to check the health of HTTP pages.
+package httpstatuschecker
+
+import (
+	"context"
+	"net/http"
+)
+
+// checker holds the configuration used to perform a single health check.
+type checker struct {
+	c      *http.Client
+	addr   string
+	method string
+}
+
+// Option is used to customize the checker.
+type Option func(*checker)
+
+// Method configures the HTTP method used for the check (default: GET).
+func Method(name string) Option {
+	return func(cr *checker) {
+		cr.method = name
+	}
+}
+
+// Client configures the HTTP client. A nil client is ignored and the
+// default client is kept.
+func Client(client *http.Client) Option {
+	return func(cr *checker) {
+		if client != nil {
+			cr.c = client
+		}
+	}
+}
+
+// Check reports whether the given HTTP addr is alive by applying options.
+// A request that fails at the transport level (e.g. connection refused)
+// is reported as not available with a nil error.
+func Check(ctx context.Context, addr string, options ...Option) (isAvailable bool, err error) {
+	cr := &checker{
+		c:      http.DefaultClient,
+		addr:   addr,
+		method: http.MethodGet,
+	}
+	for _, o := range options {
+		o(cr)
+	}
+	return cr.check(ctx)
+}
+
+// check performs the request and maps the response status to availability.
+func (c *checker) check(ctx context.Context) (bool, error) {
+	req, err := http.NewRequestWithContext(ctx, c.method, c.addr, nil)
+	if err != nil {
+		return false, err
+	}
+	res, err := c.c.Do(req)
+	if err != nil {
+		// ignore transport errors like "connect: connection refused";
+		// they mean "not available", not a checker failure.
+		return false, nil
+	}
+	defer res.Body.Close()
+	// Only 2xx statuses (200 <= code < 300) count as available.
+	isOKStatus := res.StatusCode >= http.StatusOK && res.StatusCode < http.StatusMultipleChoices
+	return isOKStatus, nil
+}
diff --git a/ignite/pkg/httpstatuschecker/httpstatuschecker_test.go b/ignite/pkg/httpstatuschecker/httpstatuschecker_test.go
new file mode 100644
index 0000000..c22536a
--- /dev/null
+++ b/ignite/pkg/httpstatuschecker/httpstatuschecker_test.go
@@ -0,0 +1,60 @@
+package httpstatuschecker
+
+import (
+	"bytes"
+	"context"
+	"errors"
+	"io"
+	"net/http"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+// roundTripperFunc adapts a plain function into an http.RoundTripper so
+// tests can stub responses without a real server.
+type roundTripperFunc func(*http.Request) (*http.Response, error)
+
+func (f roundTripperFunc) RoundTrip(req *http.Request) (*http.Response, error) {
+	return f(req)
+}
+
+// newTestClient builds an *http.Client whose transport is the given stub.
+func newTestClient(fn roundTripperFunc) *http.Client {
+	return &http.Client{Transport: fn}
+}
+
+func TestCheckStatus(t *testing.T) {
+	cases := []struct {
+		name           string
+		returnedStatus int
+		// NOTE(review): field name has a typo ("isAvaiable"); rename to
+		// "isAvailable" together with its use below in a follow-up.
+		isAvaiable bool
+	}{
+		{"200 OK", 200, true},
+		// NOTE(review): the trailing space in "202 Accepted " looks unintentional.
+		{"202 Accepted ", 202, true},
+		{"404 Not Found", 404, false},
+	}
+	for _, tt := range cases {
+		t.Run(tt.name, func(t *testing.T) {
+			client := newTestClient(func(req *http.Request) (*http.Response, error) {
+				return &http.Response{
+					StatusCode:    tt.returnedStatus,
+					Body:          io.NopCloser(bytes.NewReader(nil)),
+					Header:        make(http.Header),
+					ContentLength: 0,
+					Request:       req,
+				}, nil
+			})
+
+			isAvailable, err := Check(context.Background(), "http://example.com",
Client(client)) + require.NoError(t, err) + require.Equal(t, tt.isAvaiable, isAvailable) + }) + } +} + +func TestCheckServerUnreachable(t *testing.T) { + client := newTestClient(func(*http.Request) (*http.Response, error) { + return nil, errors.New("dial tcp: connection refused") + }) + isAvailable, err := Check(context.Background(), "http://example.com", Client(client)) + require.NoError(t, err) + require.False(t, isAvailable) +} diff --git a/ignite/pkg/jsonfile/jsonfile.go b/ignite/pkg/jsonfile/jsonfile.go new file mode 100644 index 0000000..e65617a --- /dev/null +++ b/ignite/pkg/jsonfile/jsonfile.go @@ -0,0 +1,362 @@ +package jsonfile + +import ( + "bufio" + "bytes" + "context" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "io" + "net/http" + "os" + "strconv" + "strings" + "time" + + "github.com/buger/jsonparser" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/safeconverter" + "github.com/ignite/cli/v29/ignite/pkg/tarball" +) + +const ( + keySeparator = "." +) + +var ( + // ErrFieldNotFound parameter not found into json. + ErrFieldNotFound = errors.New("JSON field not found") + + // ErrInvalidValueType invalid value type. + ErrInvalidValueType = errors.New("invalid value type") + + // ErrInvalidURL invalid file URL. + ErrInvalidURL = errors.New("invalid file URL") +) + +type ( + // JSONFile represents the JSON file and also implements the io.write interface, + // saving directly to the file. + JSONFile struct { + file ReadWriteSeeker + tarballPath string + url string + updates map[string][]byte + cache []byte + } + + // UpdateFileOption configures file update function with key and value. + UpdateFileOption func(map[string][]byte) +) + +type ( + // writeTruncate represents the truncate method from io.WriteSeeker interface. + writeTruncate interface { + Truncate(size int64) error + } + + // ReadWriteSeeker represents the owns ReadWriteSeeker interface inherit from io.ReadWriteSeeker. 
+ ReadWriteSeeker interface { + io.ReadWriteSeeker + Close() error + Sync() error + } +) + +// New creates a new JSONFile. +func New(file ReadWriteSeeker) *JSONFile { + return &JSONFile{ + updates: make(map[string][]byte), + file: file, + } +} + +// FromPath parses a JSONFile object from path. +func FromPath(path string) (*JSONFile, error) { + file, err := os.OpenFile(path, os.O_RDWR|os.O_APPEND, 0o600) + if err != nil { + return nil, errors.Wrap(err, "cannot open the file") + } + return New(file), nil +} + +// FromURL fetches the file from the given URL and returns its content. +// If tarballFileName is not empty, the URL is interpreted as a tarball file, +// tarballFileName is extracted from it and is returned instead of the URL +// content. +func FromURL(ctx context.Context, url, destPath, tarballFileName string) (*JSONFile, error) { + return fromURL(ctx, url, destPath, tarballFileName, http.DefaultClient) +} + +// FromURLWithClient fetches the file using the provided HTTP client. +// If client is nil, http.DefaultClient is used. 
+func FromURLWithClient(ctx context.Context, url, destPath, tarballFileName string, client *http.Client) (*JSONFile, error) { + if client == nil { + client = http.DefaultClient + } + return fromURL(ctx, url, destPath, tarballFileName, client) +} + +func fromURL(ctx context.Context, url, destPath, tarballFileName string, client *http.Client) (*JSONFile, error) { + // TODO create a cache system to avoid download genesis with the same hash again + + // Download the file from URL + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, err + } + + resp, err := client.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode == http.StatusNotFound { + return nil, ErrInvalidURL + } + + // Remove the old file if exists and create a new one + if err := os.RemoveAll(destPath); err != nil { + return nil, err + } + file, err := os.OpenFile(destPath, os.O_RDWR|os.O_APPEND|os.O_CREATE, 0o600) + if err != nil { + return nil, errors.Wrap(err, "cannot create the file") + } + + // Copy the downloaded file to buffer and the opened file + var buf bytes.Buffer + if _, err := io.Copy(file, io.TeeReader(resp.Body, &buf)); err != nil { + return nil, err + } + + // Check if the downloaded file is a tarball and extract only the necessary JSON file + var ext bytes.Buffer + tarballPath, err := tarball.ExtractFile(&buf, &ext, tarballFileName) + if err != nil && !errors.Is(err, tarball.ErrNotGzipType) && !errors.Is(err, tarball.ErrInvalidFileName) { + return nil, err + } else if err == nil { + // Erase the tarball bite code from the file and copy the correct one + if err := truncate(file, 0); err != nil { + return nil, err + } + if _, err := io.Copy(file, &ext); err != nil { + return nil, err + } + } + + return &JSONFile{ + updates: make(map[string][]byte), + file: file, + url: url, + tarballPath: tarballPath, + }, nil +} + +// Bytes returns the jsonfile byte array. 
+func (f *JSONFile) Bytes() ([]byte, error) { + file := f.cache + if file != nil { + return file, nil + } + if err := f.Reset(); err != nil { + return nil, err + } + scanner := bufio.NewScanner(f.file) + for scanner.Scan() { + file = append(file, scanner.Bytes()...) + } + if err := scanner.Err(); err != nil { + return nil, err + } + f.cache = file + return file, nil +} + +// Field returns the param by key and the position into byte slice from the file reader. +// Key can be a path to a nested parameters eg: app_state.staking.accounts. +func (f *JSONFile) Field(key string, param interface{}) error { + file, err := f.Bytes() + if err != nil { + return err + } + + value, dataType, _, err := jsonparser.Get(file, strings.Split(key, keySeparator)...) + if errors.Is(err, jsonparser.KeyPathNotFoundError) { + return ErrFieldNotFound + } else if err != nil { + return err + } + + switch dataType { + case jsonparser.Boolean, jsonparser.Array, jsonparser.Number, jsonparser.Object: + err := json.Unmarshal(value, param) + var unmarshalTypeError *json.UnmarshalTypeError + if errors.As(err, &unmarshalTypeError) { + return ErrInvalidValueType + } + case jsonparser.String: + result, err := jsonparser.ParseString(value) + if err != nil { + return err + } + paramStr, ok := param.(*string) + if ok { + *paramStr = result + break + } + var ( + unmarshalTypeError *json.UnmarshalTypeError + syntaxTypeError *json.SyntaxError + ) + if err := json.Unmarshal(value, param); errors.As(err, &unmarshalTypeError) || + errors.As(err, &syntaxTypeError) { + return ErrInvalidValueType + } + case jsonparser.NotExist: + case jsonparser.Null: + case jsonparser.Unknown: + default: + return ErrInvalidValueType + } + return nil +} + +// WithKeyValue updates a file value object by key. +func WithKeyValue(key string, value string) UpdateFileOption { + return func(update map[string][]byte) { + update[key] = []byte(`"` + value + `"`) + } +} + +// WithKeyValueByte updates a file byte value object by key. 
+func WithKeyValueByte(key string, value []byte) UpdateFileOption { + return func(update map[string][]byte) { + update[key] = value + } +} + +// WithKeyValueTimestamp updates a time value. +func WithKeyValueTimestamp(key string, t int64) UpdateFileOption { + return func(update map[string][]byte) { + formatted := time.Unix(t, 0).UTC().Format(time.RFC3339Nano) + update[key] = []byte(`"` + formatted + `"`) + } +} + +// WithKeyValueInt updates a file int value object by key. +func WithKeyValueInt(key string, value int64) UpdateFileOption { + return func(update map[string][]byte) { + update[key] = []byte(strconv.FormatInt(value, 10)) + } +} + +// WithKeyValueUint updates a file uint value object by key. +func WithKeyValueUint(key string, value uint64) UpdateFileOption { + return WithKeyValueInt(key, safeconverter.ToInt64[uint64](value)) +} + +// Update updates the file with the new parameters by key. +func (f *JSONFile) Update(opts ...UpdateFileOption) error { + for _, opt := range opts { + opt(f.updates) + } + if err := f.Reset(); err != nil { + return err + } + _, err := io.Copy(f, f.file) + return err +} + +// Write implement the write method for io.Writer interface. +func (f *JSONFile) Write(p []byte) (int, error) { + var err error + length := len(p) + for key, value := range f.updates { + p, err = jsonparser.Set(p, value, strings.Split(key, keySeparator)...) + if err != nil { + return 0, err + } + delete(f.updates, key) + } + f.cache = p + + err = truncate(f.file, 0) + if err != nil { + return 0, err + } + + if err := f.Reset(); err != nil { + return 0, err + } + n, err := f.file.Write(p) + if err != nil { + return n, err + } + + if n != len(p) { + return n, io.ErrShortWrite + } + + return length, nil +} + +// truncate removes the current file content. +func truncate(rws io.WriteSeeker, size int) error { + t, ok := rws.(writeTruncate) + if !ok { + return errors.New("truncate: unable to truncate") + } + + return t.Truncate(int64(size)) +} + +// Close the file. 
+func (f *JSONFile) Close() error { + return f.file.Close() +} + +// URL returns the genesis URL. +func (f *JSONFile) URL() string { + return f.url +} + +// TarballPath returns the tarball path. +func (f *JSONFile) TarballPath() string { + return f.tarballPath +} + +// Hash returns the hash of the file. +func (f *JSONFile) Hash() (string, error) { + if err := f.Reset(); err != nil { + return "", err + } + + h := sha256.New() + if _, err := io.Copy(h, f.file); err != nil { + return "", err + } + + return hex.EncodeToString(h.Sum(nil)), nil +} + +// String returns the file string. +func (f *JSONFile) String() (string, error) { + if err := f.Reset(); err != nil { + return "", err + } + + data, err := io.ReadAll(f.file) + return string(data), err +} + +// Reset sets the offset for the next Read or Write to 0. +func (f *JSONFile) Reset() error { + // TODO find a better way to reset or create a + // read of copy the writer with io.TeeReader + _, err := f.file.Seek(0, 0) + return err +} diff --git a/ignite/pkg/jsonfile/jsonfile_test.go b/ignite/pkg/jsonfile/jsonfile_test.go new file mode 100644 index 0000000..9eec6f3 --- /dev/null +++ b/ignite/pkg/jsonfile/jsonfile_test.go @@ -0,0 +1,413 @@ +package jsonfile + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "os" + "reflect" + "testing" + + "cosmossdk.io/math" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/tarball" +) + +type roundTripperFunc func(*http.Request) (*http.Response, error) + +func (f roundTripperFunc) RoundTrip(req *http.Request) (*http.Response, error) { + return f(req) +} + +func newTestClient(statusCode int, body []byte) *http.Client { + return &http.Client{ + Transport: roundTripperFunc(func(req *http.Request) (*http.Response, error) { + return &http.Response{ + StatusCode: statusCode, + Body: io.NopCloser(bytes.NewReader(body)), + Header: 
make(http.Header), + ContentLength: int64(len(body)), + Request: req, + }, nil + }), + } +} + +func TestJSONFile_Field(t *testing.T) { + type ( + invalidStruct struct { + name string + number int + } + evidence struct { + MaxAgeDuration string `json:"max_age_duration"` + MaxAgeNumBlocks string `json:"max_age_num_blocks"` + MaxBytes int64 `json:"max_bytes"` + } + ) + + tests := []struct { + name string + filepath string + key string + want interface{} + err error + }{ + { + name: "get string parameter", + filepath: "testdata/jsonfile.json", + key: "consensus_params.block.max_bytes", + want: "22020096", + }, + { + name: "get boolean parameter", + filepath: "testdata/jsonfile.json", + key: "launched", + want: true, + }, + { + name: "get array parameter", + filepath: "testdata/jsonfile.json", + key: "consensus_params.block.best_blocks", + want: []int{100, 20, 11, 4, 2}, + }, + { + name: "get number parameter", + filepath: "testdata/jsonfile.json", + key: "consensus_params.block.time_iota_ms", + want: 1000, + }, + { + name: "get coins parameter", + filepath: "testdata/jsonfile.json", + key: "app_state.bank.balances.[0].coins", + want: sdk.Coins{sdk.NewCoin("stake", math.NewInt(95000000))}, + }, + { + name: "get custom parameter", + filepath: "testdata/jsonfile.json", + key: "consensus_params.evidence", + want: evidence{ + MaxAgeDuration: "172800000000000", + MaxAgeNumBlocks: "100000", + MaxBytes: 1048576, + }, + }, + { + name: "invalid coins parameter", + filepath: "testdata/jsonfile.json", + key: "app_state.bank.balances.[0].coins", + want: invalidStruct{name: "invalid", number: 110}, + err: ErrInvalidValueType, + }, + { + name: "invalid path", + filepath: "testdata/jsonfile.json", + key: "invalid.field.path", + want: invalidStruct{name: "invalid", number: 110}, + err: ErrFieldNotFound, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + f, err := FromPath(tt.filepath) + require.NoError(t, err) + t.Cleanup(func() { + err = f.Close() + 
require.NoError(t, err) + }) + out := reflect.New(reflect.TypeOf(tt.want)) + err = f.Field(tt.key, out.Interface()) + if tt.err != nil { + require.Error(t, err) + require.ErrorIs(t, err, tt.err) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, out.Elem().Interface()) + }) + } +} + +func TestJSONFile_Update(t *testing.T) { + coins := sdk.NewCoin("bar", math.NewInt(500)) + jsonCoins, err := json.Marshal(coins) + require.NoError(t, err) + + tests := []struct { + name string + filepath string + opts []UpdateFileOption + want []interface{} + err error + }{ + { + name: "update string field", + filepath: "testdata/jsonfile.json", + opts: []UpdateFileOption{ + WithKeyValue( + "consensus_params.block.max_bytes", + "22020096", + ), + }, + want: []interface{}{float64(22020096)}, + }, + { + name: "update string field to number", + filepath: "testdata/jsonfile.json", + opts: []UpdateFileOption{ + WithKeyValueInt( + "consensus_params.block.max_bytes", + 22020096, + ), + }, + want: []interface{}{float64(22020096)}, + }, + { + name: "update number field", + filepath: "testdata/jsonfile.json", + opts: []UpdateFileOption{ + WithKeyValueInt( + "consensus_params.block.time_iota_ms", + 1000, + ), + }, + want: []interface{}{float64(1000)}, + }, + { + name: "update timestamp field", + filepath: "testdata/jsonfile.json", + opts: []UpdateFileOption{ + WithKeyValueTimestamp( + "genesis_time", + 10000000, + ), + }, + want: nil, // TODO find a way to test timestamp values + }, + { + name: "update two values type", + filepath: "testdata/jsonfile.json", + opts: []UpdateFileOption{ + WithKeyValue( + "consensus_params.block.max_bytes", + "3000000", + ), + WithKeyValueInt( + "consensus_params.block.time_iota_ms", + 1000, + ), + }, + want: []interface{}{float64(3000000), float64(1000)}, + }, + { + name: "update coin field", + filepath: "testdata/jsonfile.json", + opts: []UpdateFileOption{ + WithKeyValueByte( + "app_state.crisis.params.constant_fee", + jsonCoins, + ), + }, + want: 
[]interface{}{map[string]interface{}{ + "denom": coins.Denom, + "amount": coins.Amount.String(), + }}, + }, + { + name: "add non-existing field", + filepath: "testdata/jsonfile.json", + opts: []UpdateFileOption{ + WithKeyValue( + "app_state.auth.params.sig_verify_cost_ed25519", + "111", + ), + }, + want: []interface{}{float64(111)}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + f, err := FromPath(tt.filepath) + require.NoError(t, err) + + // Rollback files after change + b, err := f.Bytes() + require.NoError(t, err) + t.Cleanup(func() { + var prettyJSON bytes.Buffer + err := json.Indent(&prettyJSON, b, "", " ") + require.NoError(t, err) + + err = truncate(f.file, 0) + require.NoError(t, err) + err = f.Reset() + require.NoError(t, err) + _, err = f.file.Write(prettyJSON.Bytes()) + require.NoError(t, err) + err = f.Close() + require.NoError(t, err) + }) + + err = f.Update(tt.opts...) + if tt.err != nil { + require.Error(t, err) + require.ErrorIs(t, err, tt.err) + return + } + require.NoError(t, err) + + updates := make(map[string][]byte) + for _, opt := range tt.opts { + opt(updates) + } + if tt.want != nil { + got := make([]interface{}, 0) + for key := range updates { + var newValue interface{} + err := f.Field(key, &newValue) + require.NoError(t, err) + got = append(got, newValue) + } + require.ElementsMatch(t, tt.want, got) + } + }) + } +} + +func TestJSONFile_Hash(t *testing.T) { + tests := []struct { + name string + filepath string + want string + err error + }{ + { + name: "file hash", + filepath: "testdata/jsonfile.json", + want: "036dbc0020f4ab5604f46a8e5a05c368e4cba41f48fcac2864641902c1dfcad5", + }, + { + name: "not found file", + filepath: "testdata/genesis_not_found.json", + err: errors.New( + "cannot open the file: open testdata/genesis_not_found.json: no such file or directory", + ), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + f, err := FromPath(tt.filepath) + if tt.err != nil { + 
require.Error(t, err) + require.Equal(t, err.Error(), tt.err.Error()) + return + } + require.NoError(t, err) + t.Cleanup(func() { + err = f.Close() + require.NoError(t, err) + }) + got, err := f.Hash() + if tt.err != nil { + require.Error(t, err) + require.ErrorIs(t, err, tt.err) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestFromURL(t *testing.T) { + type args struct { + url string + filepath string + tarballFileName string + } + tests := []struct { + name string + args args + verifyField string + wantField string + err error + }{ + { + name: "JSON URL", + args: args{ + filepath: "testdata/jsonfile.json", + }, + verifyField: "chain_id", + wantField: "earth-1", + }, + { + name: "tarball URL", + args: args{ + filepath: "testdata/example.tar.gz", + tarballFileName: "example.json", + }, + verifyField: "chain_id", + wantField: "gaia-1", + }, + { + name: "invalid tarball file name", + args: args{ + filepath: "testdata/example.tar.gz", + tarballFileName: "invalid.json", + }, + err: tarball.ErrGzipFileNotFound, + }, + { + name: "invalid link", + args: args{ + url: "https://google.com/invalid_example.json", + }, + err: ErrInvalidURL, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + url := tt.args.url + if url == "" { + url = "https://example.com/testdata" + } + + var body []byte + if tt.args.filepath != "" { + var err error + body, err = os.ReadFile(tt.args.filepath) + require.NoError(t, err) + } + + statusCode := http.StatusOK + if tt.err == ErrInvalidURL { + statusCode = http.StatusNotFound + } + client := newTestClient(statusCode, body) + + filepath := fmt.Sprintf("%s/jsonfile.json", t.TempDir()) + got, err := FromURLWithClient(context.TODO(), url, filepath, tt.args.tarballFileName, client) + if tt.err != nil { + require.Error(t, err) + require.ErrorIs(t, err, tt.err) + return + } + require.NoError(t, err) + t.Cleanup(func() { + err = got.Close() + require.NoError(t, err) + }) + var 
verificationField string + err = got.Field(tt.verifyField, &verificationField) + require.NoError(t, err) + require.Equal(t, tt.wantField, verificationField) + }) + } +} diff --git a/ignite/pkg/jsonfile/testdata/example.tar.gz b/ignite/pkg/jsonfile/testdata/example.tar.gz new file mode 100644 index 0000000..e383942 Binary files /dev/null and b/ignite/pkg/jsonfile/testdata/example.tar.gz differ diff --git a/ignite/pkg/jsonfile/testdata/jsonfile.json b/ignite/pkg/jsonfile/testdata/jsonfile.json new file mode 100644 index 0000000..8e6c3f3 --- /dev/null +++ b/ignite/pkg/jsonfile/testdata/jsonfile.json @@ -0,0 +1,40 @@ +{ + "app_state": { + "bank": { + "balances": [ + { + "address": "cosmos1dd246yq6z5vzjz9gh8cff46pll75yyl8ygndsj", + "coins": [ + { + "amount": "95000000", + "denom": "stake" + } + ] + } + ] + } + }, + "chain_id": "earth-1", + "consensus_params": { + "block": { + "max_bytes": "22020096", + "max_gas": "-1", + "time_iota_ms": 1000, + "best_blocks": [ + 100, + 20, + 11, + 4, + 2 + ] + }, + "evidence": { + "max_age_duration": "172800000000000", + "max_age_num_blocks": "100000", + "max_bytes": 1048576 + } + }, + "genesis_time": "2021-11-12T02:08:12.522572Z", + "initial_height": "1", + "launched": true +} \ No newline at end of file diff --git a/ignite/pkg/localfs/reset.go b/ignite/pkg/localfs/reset.go new file mode 100644 index 0000000..f4ee5db --- /dev/null +++ b/ignite/pkg/localfs/reset.go @@ -0,0 +1,14 @@ +package localfs + +import ( + "io/fs" + "os" +) + +// MkdirAllReset is same as os.MkdirAll except it deletes path before creating it. 
+func MkdirAllReset(path string, perm fs.FileMode) error {
+	// Remove any existing content first so the resulting directory is empty.
+	if err := os.RemoveAll(path); err != nil {
+		return err
+	}
+	return os.MkdirAll(path, perm)
+}
diff --git a/ignite/pkg/localfs/save.go b/ignite/pkg/localfs/save.go
new file mode 100644
index 0000000..06a2ea7
--- /dev/null
+++ b/ignite/pkg/localfs/save.go
@@ -0,0 +1,76 @@
+package localfs
+
+import (
+	"io/fs"
+	"os"
+	"path/filepath"
+)
+
+// SaveTemp saves file system f to a temporary path in the local file system
+// and returns that path. The returned cleanup removes the directory; it is
+// invoked automatically when saving fails.
+func SaveTemp(f fs.FS) (path string, cleanup func(), err error) {
+	path, err = os.MkdirTemp("", "")
+	if err != nil {
+		return
+	}
+
+	cleanup = func() { os.RemoveAll(path) }
+
+	// On any later error, undo the partially-written directory.
+	defer func() {
+		if err != nil {
+			cleanup()
+		}
+	}()
+
+	err = Save(f, path)
+
+	return
+}
+
+// SaveBytesTemp saves data bytes to a temporary file location at path.
+// The file is created with the given prefix and chmod-ed to perm; cleanup
+// removes it and is invoked automatically when saving fails.
+func SaveBytesTemp(data []byte, prefix string, perm os.FileMode) (path string, cleanup func(), err error) {
+	f, err := os.CreateTemp("", prefix)
+	if err != nil {
+		return
+	}
+	defer f.Close()
+
+	path = f.Name()
+	cleanup = func() { os.Remove(path) }
+
+	defer func() {
+		if err != nil {
+			cleanup()
+		}
+	}()
+
+	if _, err = f.Write(data); err != nil {
+		return
+	}
+
+	// Chmod by path; the deferred f.Close runs afterwards, which is fine.
+	err = os.Chmod(path, perm)
+
+	return
+}
+
+// Save saves file system f to path in the local file system.
diff continues below.
+func Save(f fs.FS, path string) error {
+	return fs.WalkDir(f, ".", func(wpath string, d fs.DirEntry, err error) error {
+		if err != nil {
+			return err
+		}
+
+		out := filepath.Join(path, wpath)
+
+		// NOTE(review): dirs are created 0o744 and files 0o600 — confirm
+		// these defaults are intended (no group/other read on files).
+		if d.IsDir() {
+			return os.MkdirAll(out, 0o744)
+		}
+
+		content, err := fs.ReadFile(f, wpath)
+		if err != nil {
+			return err
+		}
+
+		return os.WriteFile(out, content, 0o600)
+	})
+}
diff --git a/ignite/pkg/localfs/search.go b/ignite/pkg/localfs/search.go
new file mode 100644
index 0000000..1f25c11
--- /dev/null
+++ b/ignite/pkg/localfs/search.go
@@ -0,0 +1,46 @@
+package localfs
+
+import (
+	"os"
+	"path/filepath"
+	"sort"
+	"strings"
+)
+
+// Search searches for files in the fs with the given glob pattern, ensuring
+// that returned file paths are sorted. A non-existent path yields an empty
+// slice with a nil error.
+func Search(path, pattern string) ([]string, error) {
+	files := make([]string, 0)
+	if _, err := os.Stat(path); err != nil {
+		if os.IsNotExist(err) {
+			return files, nil
+		}
+		return nil, err
+	}
+
+	err := filepath.Walk(path, func(path string, f os.FileInfo, err error) error {
+		if err != nil {
+			return err
+		}
+		base := filepath.Base(path)
+		// skip hidden (dot) folders entirely
+		if f.IsDir() && strings.HasPrefix(base, ".") {
+			return filepath.SkipDir
+		}
+		// skip directories themselves; only files are matched
+		if f.IsDir() {
+			return nil
+		}
+		// match the base file name against the glob pattern
+		matched, err := filepath.Match(pattern, base)
+		if err != nil {
+			return err
+		}
+		if matched {
+			files = append(files, path)
+		}
+		return nil
+	})
+	// Sort whatever was collected, then propagate any walk error.
+	sort.Strings(files)
+	return files, err
+}
diff --git a/ignite/pkg/localfs/search_test.go b/ignite/pkg/localfs/search_test.go
new file mode 100644
index 0000000..f786b18
--- /dev/null
+++ b/ignite/pkg/localfs/search_test.go
@@ -0,0 +1,106 @@
+package localfs
+
+import (
+	"os"
+	"path/filepath"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+// setupGlobTests creates the given relative file paths (as empty files)
+// under a fresh temp dir and returns that dir.
+func setupGlobTests(t *testing.T, files []string) string {
+	t.Helper()
+	tmpdir := t.TempDir()
+
+	for _, file := range files {
+		fileDir := filepath.Dir(file)
+
fileDir = filepath.Join(tmpdir, fileDir) + err := os.MkdirAll(fileDir, 0o755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(tmpdir, file), []byte{}, 0o644) + require.NoError(t, err) + } + return tmpdir +} + +func TestSearch(t *testing.T) { + files := []string{ + "foo/file.proto", + "foo/bar/file1.proto", + "foo/bar/file2.proto", + "foo/baz/file.proto", + "foo/file", + "foo/baz/file", + } + tmpdir := setupGlobTests(t, files) + type args struct { + path string + pattern string + } + tests := []struct { + name string + args args + want []string + err error + }{ + { + name: "get all proto files by pattern", + args: args{ + path: tmpdir, + pattern: "*.proto", + }, + want: []string{ + filepath.Join(tmpdir, "foo/bar/file1.proto"), + filepath.Join(tmpdir, "foo/bar/file2.proto"), + filepath.Join(tmpdir, "foo/baz/file.proto"), + filepath.Join(tmpdir, "foo/file.proto"), + }, + }, { + name: "get only one proto file by name", + args: args{ + path: tmpdir, + pattern: "file1.proto", + }, + want: []string{filepath.Join(tmpdir, "foo/bar/file1.proto")}, + }, { + name: "get two proto files by name", + args: args{ + path: tmpdir, + pattern: "file.proto", + }, + want: []string{ + filepath.Join(tmpdir, "foo/baz/file.proto"), + filepath.Join(tmpdir, "foo/file.proto"), + }, + }, { + name: "get a specific file by name", + args: args{ + path: tmpdir, + pattern: "file", + }, + want: []string{ + filepath.Join(tmpdir, "foo/baz/file"), + filepath.Join(tmpdir, "foo/file"), + }, + }, { + name: "not found directory", + args: args{ + path: "no-directory", + pattern: "file", + }, + want: []string{}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := Search(tt.args.path, tt.args.pattern) + if tt.err != nil { + require.Error(t, err) + require.EqualValues(t, tt.err, err) + return + } + require.NoError(t, err) + require.EqualValues(t, tt.want, got) + }) + } +} diff --git a/ignite/pkg/localfs/watcher.go b/ignite/pkg/localfs/watcher.go new file mode 
100644 index 0000000..8718402 --- /dev/null +++ b/ignite/pkg/localfs/watcher.go @@ -0,0 +1,157 @@ +package localfs + +import ( + "context" + "os" + "path/filepath" + "strings" + "sync" + "time" + + wt "github.com/radovskyb/watcher" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +type watcher struct { + wt *wt.Watcher + workdir string + ignoreHidden bool + ignoreFolders bool + ignoreExts []string + onChange func() + interval time.Duration + ctx context.Context + done *sync.WaitGroup +} + +// WatcherOption used to configure watcher. +type WatcherOption func(*watcher) + +// WatcherWorkdir to set as a root to paths needs to be watched. +func WatcherWorkdir(path string) WatcherOption { + return func(w *watcher) { + w.workdir = path + } +} + +// WatcherOnChange sets a hook that executed on every change on filesystem. +func WatcherOnChange(hook func()) WatcherOption { + return func(w *watcher) { + w.onChange = hook + } +} + +// WatcherPollingInterval overwrites default polling interval to check filesystem changes. +func WatcherPollingInterval(d time.Duration) WatcherOption { + return func(w *watcher) { + w.interval = d + } +} + +// WatcherIgnoreHidden ignores hidden(dot) files. +func WatcherIgnoreHidden() WatcherOption { + return func(w *watcher) { + w.ignoreHidden = true + } +} + +func WatcherIgnoreFolders() WatcherOption { + return func(w *watcher) { + w.ignoreFolders = true + } +} + +// WatcherIgnoreExt ignores files with matching file extensions. +func WatcherIgnoreExt(exts ...string) WatcherOption { + return func(w *watcher) { + w.ignoreExts = exts + } +} + +// Watch starts watching changes on the paths. options are used to configure the +// behaviour of watch operation. 
+func Watch(ctx context.Context, paths []string, options ...WatcherOption) error { + w := &watcher{ + wt: wt.New(), + onChange: func() {}, + interval: time.Millisecond * 300, + done: &sync.WaitGroup{}, + ctx: ctx, + } + w.wt.SetMaxEvents(1) + + for _, o := range options { + o(w) + } + + w.wt.AddFilterHook(func(info os.FileInfo, fullPath string) error { + if info.IsDir() && w.ignoreFolders { + return wt.ErrSkip + } + if w.isFileIgnored(fullPath) { + return wt.ErrSkip + } + + return nil + }) + + // ignore hidden paths. + w.wt.IgnoreHiddenFiles(w.ignoreHidden) + + // add paths to watch + if err := w.addPaths(paths...); err != nil { + return err + } + + // start watching. + w.done.Add(1) + go w.listen() + if err := w.wt.Start(w.interval); err != nil { + return err + } + w.done.Wait() + return nil +} + +func (w *watcher) listen() { + defer w.done.Done() + for { + select { + case <-w.wt.Event: + w.onChange() + case <-w.wt.Closed: + return + case <-w.ctx.Done(): + w.wt.Close() + } + } +} + +func (w *watcher) addPaths(paths ...string) error { + for _, path := range paths { + if !filepath.IsAbs(path) { + path = filepath.Join(w.workdir, path) + } + + // Ignoring paths that don't exist + if _, err := os.Stat(path); errors.Is(err, os.ErrNotExist) { + continue + } + + if err := w.wt.AddRecursive(path); err != nil { + return err + } + } + + return nil +} + +func (w *watcher) isFileIgnored(path string) bool { + for _, ext := range w.ignoreExts { + if strings.HasSuffix(path, ext) { + return true + } + } + return false +} diff --git a/ignite/pkg/markdownviewer/markdownviewer.go b/ignite/pkg/markdownviewer/markdownviewer.go new file mode 100644 index 0000000..1c39b73 --- /dev/null +++ b/ignite/pkg/markdownviewer/markdownviewer.go @@ -0,0 +1,50 @@ +package markdownviewer + +import ( + "os" + + "github.com/charmbracelet/glow/ui" + "golang.org/x/term" + + "github.com/ignite/cli/v29/ignite/pkg/safeconverter" +) + +// View starts the Markdown viewer at path that .md files are located at. 
+func View(path string) error { + conf, err := config(path) + if err != nil { + return err + } + + // TODO: Enable bubbletea WithAltScreen and WithMouseCellMotion options when glow supports them + p := ui.NewProgram(conf) + + _, err = p.Run() + return err +} + +func config(path string) (ui.Config, error) { + var width uint + + fd := safeconverter.ToInt(os.Stdout.Fd()) + w, _, err := term.GetSize(fd) + if err != nil { + return ui.Config{}, err + } + + width = min(uint(w), 120) //nolint:gosec,nolintlint // conversion is fine + + docTypes := ui.NewDocTypeSet() + docTypes.Add(ui.LocalDoc) + + conf := ui.Config{ + WorkingDirectory: path, + DocumentTypes: docTypes, + GlamourStyle: "auto", + HighPerformancePager: true, + GlamourEnabled: true, + GlamourMaxWidth: width, + } + + return conf, nil +} diff --git a/ignite/pkg/markdownviewer/markdownviewer_test.go b/ignite/pkg/markdownviewer/markdownviewer_test.go new file mode 100644 index 0000000..49fcc54 --- /dev/null +++ b/ignite/pkg/markdownviewer/markdownviewer_test.go @@ -0,0 +1,41 @@ +package markdownviewer + +import ( + "os" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestConfigReturnsErrorWhenStdoutIsNotTTY(t *testing.T) { + tempStdout := setNonTTYStdout(t) + + _, err := config(t.TempDir()) + require.Error(t, err) + + require.NoError(t, tempStdout.Close()) +} + +func TestViewReturnsConfigErrorWhenStdoutIsNotTTY(t *testing.T) { + tempStdout := setNonTTYStdout(t) + + err := View(t.TempDir()) + require.Error(t, err) + + require.NoError(t, tempStdout.Close()) +} + +func setNonTTYStdout(t *testing.T) *os.File { + t.Helper() + + file, err := os.CreateTemp(t.TempDir(), "stdout-*") + require.NoError(t, err) + + originalStdout := os.Stdout + os.Stdout = file + t.Cleanup(func() { + os.Stdout = originalStdout + }) + + return file +} diff --git a/ignite/pkg/multiformatname/multiformatname.go b/ignite/pkg/multiformatname/multiformatname.go new file mode 100644 index 0000000..6aea40c --- /dev/null +++ 
b/ignite/pkg/multiformatname/multiformatname.go @@ -0,0 +1,90 @@ +// Package multiformatname provides names automatically converted into multiple naming convention +package multiformatname + +import ( + "github.com/iancoleman/strcase" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xstrcase" +) + +// Name represents a name with multiple naming convention representations. +// Supported naming convention are: camel, pascal, and kebab cases. +type Name struct { + Original string + LowerCamel string + UpperCamel string + PascalCase string + LowerCase string + UpperCase string + Kebab string + Snake string +} + +type Checker func(name string) error + +// MustNewName returns a new multi-format name from a name. +func MustNewName(name string, additionalChecks ...Checker) Name { + n, err := NewName(name, additionalChecks...) + if err != nil { + panic(err) + } + return n +} + +// NewName returns a new multi-format name from a name. +func NewName(name string, additionalChecks ...Checker) (Name, error) { + checks := append([]Checker{basicCheckName}, additionalChecks...) + + for _, check := range checks { + if err := check(name); err != nil { + return Name{}, err + } + } + + return Name{ + Original: name, + LowerCamel: strcase.ToLowerCamel(name), + UpperCamel: xstrcase.UpperCamel(name), + PascalCase: strcase.ToCamel(name), + LowerCase: xstrcase.Lowercase(name), + UpperCase: xstrcase.Uppercase(name), + Kebab: strcase.ToKebab(name), + Snake: strcase.ToSnake(name), + }, nil +} + +// NoNumber prevents using number in a name. +func NoNumber(name string) error { + for _, c := range name { + if '0' <= c && c <= '9' { + return errors.New("name cannot contain number") + } + } + + return nil +} + +// basicCheckName performs basic checks common for all names. 
+func basicCheckName(name string) error { + if name == "" { + return errors.New("name cannot be empty") + } + + // check characters + c := name[0] + authorized := ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') + if !authorized { + return errors.Errorf("name cannot contain %v as first character", string(c)) + } + + for _, c := range name[1:] { + // A name can contain letter, hyphen or underscore + authorized := ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || ('0' <= c && c <= '9') || c == '-' || c == '_' + if !authorized { + return errors.Errorf("name cannot contain %v", string(c)) + } + } + + return nil +} diff --git a/ignite/pkg/multiformatname/multiformatname_test.go b/ignite/pkg/multiformatname/multiformatname_test.go new file mode 100644 index 0000000..b8ef5bf --- /dev/null +++ b/ignite/pkg/multiformatname/multiformatname_test.go @@ -0,0 +1,215 @@ +package multiformatname_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" +) + +func TestNewName(t *testing.T) { + tests := []struct { + name string + arg string + want multiformatname.Name + err error + }{ + { + name: "simple lowercase name", + arg: "foo", + want: multiformatname.Name{ + Original: "foo", + LowerCamel: "foo", + UpperCamel: "Foo", + PascalCase: "Foo", + LowerCase: "foo", + UpperCase: "FOO", + Kebab: "foo", + Snake: "foo", + }, + }, + { + name: "camelCase name", + arg: "fooBar", + want: multiformatname.Name{ + Original: "fooBar", + LowerCamel: "fooBar", + UpperCamel: "FooBar", + PascalCase: "FooBar", + LowerCase: "foobar", + UpperCase: "FOOBAR", + Kebab: "foo-bar", + Snake: "foo_bar", + }, + }, + { + name: "kebab-case name", + arg: "foo-bar", + want: multiformatname.Name{ + Original: "foo-bar", + LowerCamel: "fooBar", + UpperCamel: "FooBar", + PascalCase: "FooBar", + LowerCase: "foobar", + UpperCase: "FOOBAR", + Kebab: "foo-bar", + Snake: "foo_bar", + }, + }, + { + name: 
"snake_case name", + arg: "foo_bar", + want: multiformatname.Name{ + Original: "foo_bar", + LowerCamel: "fooBar", + UpperCamel: "FooBar", + PascalCase: "FooBar", + LowerCase: "foobar", + UpperCase: "FOOBAR", + Kebab: "foo-bar", + Snake: "foo_bar", + }, + }, + { + name: "mixed snake_case and camelCase name", + arg: "foo_barFoobar", + want: multiformatname.Name{ + Original: "foo_barFoobar", + LowerCamel: "fooBarFoobar", + UpperCamel: "FooBarFoobar", + PascalCase: "FooBarFoobar", + LowerCase: "foobarfoobar", + UpperCase: "FOOBARFOOBAR", + Kebab: "foo-bar-foobar", + Snake: "foo_bar_foobar", + }, + }, + { + name: "mixed underscores and dashes", + arg: "foo_-_bar", + want: multiformatname.Name{ + Original: "foo_-_bar", + LowerCamel: "fooBar", + UpperCamel: "Foo__Bar", + PascalCase: "FooBar", + LowerCase: "foobar", + UpperCase: "FOOBAR", + Kebab: "foo---bar", + Snake: "foo___bar", + }, + }, + { + name: "mixed underscores, dashes, and numbers", + arg: "foo_-_Bar1", + want: multiformatname.Name{ + Original: "foo_-_Bar1", + LowerCamel: "fooBar1", + UpperCamel: "Foo__Bar_1", + PascalCase: "FooBar1", + LowerCase: "foobar1", + UpperCase: "FOOBAR1", + Kebab: "foo---bar-1", + Snake: "foo___bar_1", + }, + }, + { + name: "uppercase variant in simple name", + arg: "fooBAR", + want: multiformatname.Name{ + Original: "fooBAR", + LowerCamel: "fooBar", + UpperCamel: "FooBar", + PascalCase: "FooBar", + LowerCase: "foobar", + UpperCase: "FOOBAR", + Kebab: "foo-bar", + Snake: "foo_bar", + }, + }, + { + name: "uppercase variant with starting capital", + arg: "FooBAR", + want: multiformatname.Name{ + Original: "FooBAR", + LowerCamel: "fooBar", + UpperCamel: "FooBar", + PascalCase: "FooBar", + LowerCase: "foobar", + UpperCase: "FOOBAR", + Kebab: "foo-bar", + Snake: "foo_bar", + }, + }, + { + name: "camelCase name with numbers", + arg: "fooBar123", + want: multiformatname.Name{ + Original: "fooBar123", + LowerCamel: "fooBar123", + UpperCamel: "FooBar_123", + PascalCase: "FooBar123", + 
LowerCase: "foobar123", + UpperCase: "FOOBAR123", + Kebab: "foo-bar-123", + Snake: "foo_bar_123", + }, + }, + { + name: "multiple numbers in name", + arg: "para_2_m_s_43_tr_1", + want: multiformatname.Name{ + Original: "para_2_m_s_43_tr_1", + LowerCamel: "para2MS43Tr1", + UpperCamel: "Para_2MS_43Tr_1", + PascalCase: "Para2MS43Tr1", + LowerCase: "para2ms43tr1", + UpperCase: "PARA2MS43TR1", + Kebab: "para-2-m-s-43-tr-1", + Snake: "para_2_m_s_43_tr_1", + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := multiformatname.NewName(tt.arg) + if tt.err != nil { + require.ErrorIs(t, err, tt.err) + return + } + require.NoError(t, err) + require.EqualValues(t, tt.want, got) + }) + } +} + +func TestNewMultiFormatName2(t *testing.T) { + // Test basic forbidden names + cases := []string{ + "", + "foo bar", + "1foo", + "-foo", + "_foo", + "@foo", + } + for _, testCase := range cases { + _, err := multiformatname.NewName(testCase) + require.Error(t, err) + } + + // Test custom check + alwaysWrong := func(string) error { return errors.New("always wrong") } + _, err := multiformatname.NewName("foo", alwaysWrong) + require.Error(t, err) + + alwaysGood := func(string) error { return nil } + _, err = multiformatname.NewName("foo", alwaysGood) + require.NoError(t, err) +} + +func TestNoNumber(t *testing.T) { + require.NoError(t, multiformatname.NoNumber("foo")) + require.Error(t, multiformatname.NoNumber("foo1")) +} diff --git a/ignite/pkg/openapiconsole/console.go b/ignite/pkg/openapiconsole/console.go new file mode 100644 index 0000000..1e77cac --- /dev/null +++ b/ignite/pkg/openapiconsole/console.go @@ -0,0 +1,25 @@ +package openapiconsole + +import ( + "embed" + "html/template" + "net/http" +) + +//go:embed index.tpl +var index embed.FS + +// Handler returns an http handler that servers OpenAPI console for an OpenAPI spec at specURL. 
+func Handler(title, specURL string) http.HandlerFunc { + t, _ := template.ParseFS(index, "index.tpl") + + return func(w http.ResponseWriter, _ *http.Request) { + _ = t.Execute(w, struct { + Title string + URL string + }{ + title, + specURL, + }) + } +} diff --git a/ignite/pkg/openapiconsole/console_test.go b/ignite/pkg/openapiconsole/console_test.go new file mode 100644 index 0000000..5681575 --- /dev/null +++ b/ignite/pkg/openapiconsole/console_test.go @@ -0,0 +1,22 @@ +package openapiconsole + +import ( + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestHandlerRendersTitleAndSpecURL(t *testing.T) { + h := Handler("My API", "https://example.com/openapi.json") + + req := httptest.NewRequest(http.MethodGet, "/", nil) + rr := httptest.NewRecorder() + h.ServeHTTP(rr, req) + + require.Equal(t, http.StatusOK, rr.Code) + body := rr.Body.String() + require.Contains(t, body, "My API") + require.Contains(t, body, "https://example.com/openapi.json") +} diff --git a/ignite/pkg/openapiconsole/index.tpl b/ignite/pkg/openapiconsole/index.tpl new file mode 100644 index 0000000..367babb --- /dev/null +++ b/ignite/pkg/openapiconsole/index.tpl @@ -0,0 +1,25 @@ +<!DOCTYPE html> +<html lang="en"> + <head> + <meta charset="utf-8" /> + <title>{{ .Title }} + + + + +
+ + + + + diff --git a/ignite/pkg/protoanalysis/builder.go b/ignite/pkg/protoanalysis/builder.go new file mode 100644 index 0000000..e032742 --- /dev/null +++ b/ignite/pkg/protoanalysis/builder.go @@ -0,0 +1,306 @@ +package protoanalysis + +import ( + "fmt" + "regexp" + "slices" + "strings" + + "github.com/emicklei/proto" +) + +type builder struct { + p protoPackage +} + +// build turns a low level proto pkg into a high level Package. +func build(p protoPackage) Package { + br := builder{p} + + pk := Package{ + Name: p.name, + Path: p.dir, + Files: br.buildFiles(), + Messages: br.buildMessages(), + Services: br.toServices(p.services()), + } + + for _, option := range p.options() { + if option.Name == optionGoPkg { + pk.GoImportName = option.Constant.Source + break + } + } + + return pk +} + +func (b builder) buildFiles() (files []File) { + for _, f := range b.p.files { + files = append(files, File{f.path, f.imports}) + } + + return +} + +func (b builder) buildMessages() (messages []Message) { + for _, f := range b.p.files { + for _, message := range f.messages { + // Keep track of the message fields and types + fields := make(map[string]string) + + // Find the highest field number + var highestFieldNumber int + for _, elem := range message.Elements { + field, ok := elem.(*proto.NormalField) + if !ok { + continue + } + + if field.Sequence > highestFieldNumber { + highestFieldNumber = field.Sequence + } + + fields[field.Name] = field.Type + } + + // some proto messages might be defined inside another proto messages. + // to represents these types, an underscore is used. + // e.g. if C message inside B, and B inside A: A_B_C. 
+ var ( + name = message.Name + parent = message.Parent + ) + for { + if parent == nil { + break + } + + parentMessage, ok := parent.(*proto.Message) + if !ok { + break + } + + name = fmt.Sprintf("%s_%s", parentMessage.Name, name) + parent = parentMessage.Parent + } + + messages = append(messages, Message{ + Name: name, + Path: f.path, + HighestFieldNumber: highestFieldNumber, + Fields: fields, + }) + } + } + + return messages +} + +func (b builder) toServices(ps []*proto.Service) (services []Service) { + for _, service := range ps { + s := Service{ + Name: service.Name, + RPCFuncs: b.elementsToRPCFunc(service.Elements), + } + + services = append(services, s) + } + + return +} + +func (b builder) elementsToRPCFunc(elems []proto.Visitee) (rpcFuncs []RPCFunc) { + for _, el := range elems { + rpc, ok := el.(*proto.RPC) + if !ok { + continue + } + + requestMessage := findProtoMessageByTypeName(b.p.name, b.p.messages(), rpc.RequestType) + if requestMessage == nil { + continue + } + + rf := RPCFunc{ + Name: rpc.Name, + RequestType: rpc.RequestType, + ReturnsType: rpc.ReturnsType, + HTTPRules: b.elementsToHTTPRules(requestMessage, rpc.Elements), + } + + rpcFuncs = append(rpcFuncs, rf) + } + + return rpcFuncs +} + +func (b builder) elementsToHTTPRules(requestMessage *proto.Message, elems []proto.Visitee) (httpRules []HTTPRule) { + for _, el := range elems { + option, ok := el.(*proto.Option) + if !ok { + continue + } + if !strings.Contains(option.Name, "google.api.http") { + continue + } + + httpRules = append(httpRules, b.constantToHTTPRules(requestMessage, option.Constant)...) 
+ } + + return +} + +func findProtoMessageByTypeName(pkgName string, messages []*proto.Message, typeName string) *proto.Message { + var exactMatch *proto.Message + + canonicalTypeName := canonicalMessageName(pkgName, typeName) + for _, message := range messages { + if message.Name == typeName { + exactMatch = message + } + + if flattenProtoMessageName(message) == canonicalTypeName { + return message + } + } + + return exactMatch +} + +func flattenProtoMessageName(message *proto.Message) string { + name := message.Name + for parent := message.Parent; parent != nil; { + parentMessage, ok := parent.(*proto.Message) + if !ok { + break + } + + name = fmt.Sprintf("%s_%s", parentMessage.Name, name) + parent = parentMessage.Parent + } + + return name +} + +// Regexp to extract HTTP rule URL parameter names. +// The expression extracts parameter names defined within "{}". +// Extra parameter arguments are ignored. These arguments are normally +// defined after an "=", for example as "{param=**}". +var urlParamRe = regexp.MustCompile(`(?m){([^=]+?)(?:=.+?)?}`) + +func (b builder) constantToHTTPRules(requestMessage *proto.Message, constant proto.Literal) (httpRules []HTTPRule) { + // find out the endpoint template. + endpoint := constant.Source + + if endpoint == "" { + for _, each := range constant.OrderedMap { + switch each.Name { + case + "get", + "post", + "put", + "patch", + "delete": + endpoint = each.Source + } + if endpoint != "" { + break + } + } + } + + // find out url params. + var params []string + + match := urlParamRe.FindAllStringSubmatch(endpoint, -1) + for _, item := range match { + params = append(params, item[1]) + } + + // calculate url params, query params and body fields counts. + var ( + messageFields, messageFieldsCount = b.messageFieldsCount(requestMessage) + paramsCount = len(params) + bodyFieldsCount int + ) + + if body, ok := constant.OrderedMap.Get("body"); ok { // check if body is specified. 
+ if body.Source == "*" { // means there should be no query params per the spec. + bodyFieldsCount = messageFieldsCount - paramsCount + } else if body.Source != "" { + bodyFieldsCount = 1 // means body fields are grouped under a single top-level field. + } + } + + queryParamsCount := messageFieldsCount - paramsCount - bodyFieldsCount + + var ( + queryFields map[string]string + bodyFields map[string]string + ) + for name, t := range messageFields { + if slices.Contains(params, name) { + // this is a URL parameter, skip it + continue + } + + // If there are body fields, we need to add them to the bodyFields map. + // There are no known post requests that contain body fields and query params + if bodyFieldsCount > 0 { + if len(bodyFields) == 0 { + bodyFields = make(map[string]string) + } + bodyFields[name] = t + } else { + if len(queryFields) == 0 { + queryFields = make(map[string]string) + } + + queryFields[name] = t + } + } + + // create and add the HTTP rule to the list. + httpRule := HTTPRule{ + Endpoint: endpoint, + Params: params, + HasQuery: queryParamsCount > 0, + QueryFields: queryFields, + HasBody: bodyFieldsCount > 0, + BodyFields: bodyFields, + } + + httpRules = append(httpRules, httpRule) + + // search for nested HTTP rules. + if constant, ok := constant.OrderedMap.Get("additional_bindings"); ok { + httpRules = append(httpRules, b.constantToHTTPRules(requestMessage, *constant)...) 
+ } + + return httpRules +} + +func (b builder) messageFieldsCount(message *proto.Message) (messageFields map[string]string, count int) { + messageFields = make(map[string]string) + + for _, el := range message.Elements { + switch el := el.(type) { + case *proto.NormalField: + count++ + if el.Repeated { + messageFields[el.Name] = fmt.Sprintf("repeated %s", el.Type) + } else { + messageFields[el.Name] = el.Type + } + case *proto.MapField: + count++ + messageFields[el.Name] = fmt.Sprintf("map<%s, %s>", el.KeyType, el.Type) + case *proto.OneOfField: + count++ + messageFields[el.Name] = el.Type + } + } + + return +} diff --git a/ignite/pkg/protoanalysis/cache.go b/ignite/pkg/protoanalysis/cache.go new file mode 100644 index 0000000..7d2a5a3 --- /dev/null +++ b/ignite/pkg/protoanalysis/cache.go @@ -0,0 +1,27 @@ +package protoanalysis + +import "sync" + +type Cache struct { + mu sync.RWMutex + pkgs map[string]Packages // proto dir path-proto packages pair. +} + +func NewCache() *Cache { + return &Cache{ + pkgs: make(map[string]Packages), + } +} + +func (c *Cache) Get(key string) (Packages, bool) { + c.mu.RLock() + pkgs, ok := c.pkgs[key] + c.mu.RUnlock() + return pkgs, ok +} + +func (c *Cache) Add(key string, value Packages) { + c.mu.Lock() + c.pkgs[key] = value + c.mu.Unlock() +} diff --git a/ignite/pkg/protoanalysis/package.go b/ignite/pkg/protoanalysis/package.go new file mode 100644 index 0000000..f3d3fae --- /dev/null +++ b/ignite/pkg/protoanalysis/package.go @@ -0,0 +1,244 @@ +package protoanalysis + +import ( + "regexp" + "strings" + + "golang.org/x/mod/semver" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +type ( + // Packages represents slice of Package. + Packages []Package + + PkgName string + + // Package represents a proto pkg. + Package struct { + // Name of the proto pkg. + Name string `json:"name,omitempty"` + + // Path of the package in the fs. + Path string `json:"path,omitempty"` + + // Files is a list of .proto files in the package. 
+ Files Files `json:"files,omitempty"` + + // GoImportName is the go package name of proto package. + GoImportName string `json:"go_import_name,omitempty"` + + // Messages is a list of proto messages defined in the package. + Messages []Message `json:"messages,omitempty"` + + // Services is a list of RPC services. + Services []Service `json:"services,omitempty"` + } +) + +var regexBetaVersion = regexp.MustCompile("^v[0-9]+(beta|alpha)[0-9]+") + +// ErrMessageNotFound is returned when a proto message cannot be found in a package. +var ErrMessageNotFound = errors.New("no message found") + +// ModuleName retrieves the single module name of the package. +func (p Package) ModuleName() (name string) { + names := strings.Split(p.Name, ".") + for i := len(names) - 1; i >= 0; i-- { + name = names[i] + if !semver.IsValid(name) && !regexBetaVersion.MatchString(name) { + break + } + } + return +} + +// MessageByName finds a message by its name inside Package. +func (p Package) MessageByName(name string) (Message, error) { + message, ok := p.FindMessageByName(name) + if !ok { + return Message{}, ErrMessageNotFound + } + + return message, nil +} + +// FindMessageByName finds a message by its name inside Package. +// It accepts plain message names, current-package qualified names and nested message names. +func (p Package) FindMessageByName(name string) (Message, bool) { + for _, candidate := range candidateMessageNames(p.Name, name) { + for _, message := range p.Messages { + if message.Name == candidate { + return message, true + } + } + } + + if !strings.Contains(strings.TrimPrefix(name, "."), ".") { + leafName := leafMessageName(name) + + var leafMatches []Message + for _, message := range p.Messages { + if leafMessageName(message.Name) == leafName { + leafMatches = append(leafMatches, message) + } + } + + if len(leafMatches) == 1 { + return leafMatches[0], true + } + } + + return Message{}, false +} + +// GoImportPath retrieves the Go import path. 
+func (p Package) GoImportPath() string { + return strings.Split(p.GoImportName, ";")[0] +} + +// Files retrieves the files from the package list. +func (p Packages) Files() Files { + var files []File + for _, pkg := range p { + files = append(files, pkg.Files...) + } + return files +} + +type ( + Files []File + + File struct { + // Path of the file. + Path string `json:"path,omitempty"` + + // Dependencies is a list of imported proto packages. + Dependencies []string `json:"dependencies,omitempty"` + } +) + +func candidateMessageNames(pkgName, name string) []string { + candidates := []string{name} + + canonical := canonicalMessageName(pkgName, name) + if canonical != "" && canonical != name { + candidates = append(candidates, canonical) + } + + return candidates +} + +func canonicalMessageName(pkgName, name string) string { + name = strings.TrimPrefix(name, ".") + if pkgName != "" { + name = strings.TrimPrefix(name, pkgName+".") + } + + return strings.ReplaceAll(name, ".", "_") +} + +func leafMessageName(name string) string { + if index := strings.LastIndex(name, "_"); index >= 0 { + return name[index+1:] + } + + if index := strings.LastIndex(name, "."); index >= 0 { + return name[index+1:] + } + + return name +} + +// Paths retrieves the list of paths from the files. +func (f Files) Paths() []string { + var paths []string + for _, ff := range f { + paths = append(paths, ff.Path) + } + return paths +} + +type ( + // Message represents a proto message. + Message struct { + // Name of the message. + Name string `json:"name,omitempty"` + + // Path of the proto file where the message is defined. + Path string `json:"path,omitempty"` + + // HighestFieldNumber is the highest field number among fields of the message. + // This allows to determine new field number when writing to proto message. + HighestFieldNumber int `json:"highest_field_number,omitempty"` + + // Fields contains message's field names and types. 
+ Fields map[string]string `json:"fields,omitempty"` + } + + // Service is an RPC service. + Service struct { + // Name of the services. + Name string `json:"name,omitempty"` + + // RPCFuncs is a list of RPC funcs of the service. + RPCFuncs []RPCFunc `json:"functions,omitempty"` + } + + // RPCFunc is an RPC func. + RPCFunc struct { + // Name of the RPC func. + Name string `json:"name,omitempty"` + + // RequestType is the request type of RPC func. + RequestType string `json:"request_type,omitempty"` + + // ReturnsType is the response type of RPC func. + ReturnsType string `json:"return_type,omitempty"` + + // HTTPRules keeps info about http rules of an RPC func. + // spec: + // https://github.com/googleapis/googleapis/blob/master/google/api/http.proto. + HTTPRules []HTTPRule `json:"http_rules,omitempty"` + } + + // HTTPRule keeps info about a configured http rule of an RPC func. + HTTPRule struct { + // Endpoint is the HTTP endpoint path pattern. + Endpoint string `json:"endpoint,omitempty"` + + // Params is a list of parameters defined in the HTTP endpoint itself. + Params []string `json:"params,omitempty"` + + // HasQuery indicates if there is a request query. + HasQuery bool `json:"has_query,omitempty"` + + // QueryFields is a list of query fields defined in the HTTP endpoint. + QueryFields map[string]string `json:"query_fields,omitempty"` + + // HasBody indicates if there is a request payload. + HasBody bool `json:"has_body,omitempty"` + + // BodyFields is a list of body fields defined in the HTTP endpoint. + BodyFields map[string]string `json:"body_fields,omitempty"` + } +) + +// IsPaginated checks if the HTTPRule is paginated based on its QueryFields. 
+func (hr HTTPRule) IsPaginated() bool { + if len(hr.QueryFields) == 0 { + return false + } + + for _, fieldType := range hr.QueryFields { + // Message field type suffix check to match common pagination types: + // cosmos.base.query.v1beta1.PageRequest + // cosmos.base.query.v1beta1.PageResponse + if strings.HasSuffix(fieldType, "PageRequest") || strings.HasSuffix(fieldType, "PageResponse") { + return true + } + } + + return false +} diff --git a/ignite/pkg/protoanalysis/package_test.go b/ignite/pkg/protoanalysis/package_test.go new file mode 100644 index 0000000..e896a29 --- /dev/null +++ b/ignite/pkg/protoanalysis/package_test.go @@ -0,0 +1,138 @@ +package protoanalysis + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestPackage_ModuleName(t *testing.T) { + tests := []struct { + name string + p Package + want string + }{ + { + name: "test single name", + p: Package{Name: "staking"}, + want: "staking", + }, + { + name: "test two names", + p: Package{Name: "cosmos.staking"}, + want: "staking", + }, + { + name: "test three name", + p: Package{Name: "cosmos.ignite.staking"}, + want: "staking", + }, + { + name: "test with the version 1", + p: Package{Name: "cosmos.staking.v1"}, + want: "staking", + }, + { + name: "test with the version 2", + p: Package{Name: "cosmos.staking.v2"}, + want: "staking", + }, + { + name: "test with the version 10", + p: Package{Name: "cosmos.staking.v10"}, + want: "staking", + }, + { + name: "test with the version 1 beta 1", + p: Package{Name: "cosmos.staking.v1beta1"}, + want: "staking", + }, + { + name: "test with the version 1 beta 2", + p: Package{Name: "cosmos.staking.v1beta2"}, + want: "staking", + }, + { + name: "test with the version 2 beta 1", + p: Package{Name: "cosmos.staking.v2beta1"}, + want: "staking", + }, + { + name: "test with the version 2 beta 2", + p: Package{Name: "cosmos.staking.v2beta2"}, + want: "staking", + }, + { + name: "test with the version 3 alpha 5", + p: Package{Name: 
"cosmos.staking.v3alpha5"}, + want: "staking", + }, + { + name: "test with the wrong version", + p: Package{Name: "cosmos.staking.v3bank5"}, + want: "v3bank5", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := tt.p.ModuleName() + require.Equal(t, tt.want, got) + }) + } +} + +func TestPackage_MessageByName(t *testing.T) { + pkg := Package{ + Name: "foo.bar", + Messages: []Message{ + {Name: "Request"}, + {Name: "Outer_Inner"}, + }, + } + + tests := []struct { + name string + messageName string + want string + wantErr error + }{ + { + name: "plain name", + messageName: "Request", + want: "Request", + }, + { + name: "qualified name", + messageName: ".foo.bar.Request", + want: "Request", + }, + { + name: "nested qualified name", + messageName: ".foo.bar.Outer.Inner", + want: "Outer_Inner", + }, + { + name: "nested leaf name", + messageName: "Inner", + want: "Outer_Inner", + }, + { + name: "missing name", + messageName: "Missing", + wantErr: ErrMessageNotFound, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + message, err := pkg.MessageByName(tt.messageName) + if tt.wantErr != nil { + require.ErrorIs(t, err, tt.wantErr) + return + } + + require.NoError(t, err) + require.Equal(t, tt.want, message.Name) + }) + } +} diff --git a/ignite/pkg/protoanalysis/parser.go b/ignite/pkg/protoanalysis/parser.go new file mode 100644 index 0000000..657948e --- /dev/null +++ b/ignite/pkg/protoanalysis/parser.go @@ -0,0 +1,142 @@ +package protoanalysis + +import ( + "context" + "os" + "path/filepath" + + "github.com/emicklei/proto" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/localfs" +) + +const optionGoPkg = "go_package" + +// parser parses proto packages. +type parser struct { + packages []*protoPackage +} + +// parse parses proto files in the fs that matches with pattern and returns +// the low level representations of proto packages. 
+func parse(ctx context.Context, path, pattern string) ([]*protoPackage, error) { + pr := &parser{} + + paths, err := localfs.Search(path, pattern) + if err != nil { + return nil, err + } + + for _, path := range paths { + if ctx.Err() != nil { + return nil, ctx.Err() + } + if err := pr.parseFile(path); err != nil { + return nil, errors.Wrapf(err, "file: %s", path) + } + } + + return pr.packages, nil +} + +// protoPackage represents a proto package. +type protoPackage struct { + // name of the proto package. + name string + + // directory of the proto package in the fs. + dir string + + // files is a list of proto files that construct a proto package. + files []file +} + +// file represents a parsed proto file. +type file struct { + // path of the proto file in the fs. + path string + + // parsed data. + pkg *proto.Package + imports []string // imported protos. + options []*proto.Option + messages []*proto.Message + services []*proto.Service +} + +func (p *protoPackage) options() (o []*proto.Option) { + for _, f := range p.files { + o = append(o, f.options...) + } + + return +} + +func (p *protoPackage) messages() (m []*proto.Message) { + for _, f := range p.files { + m = append(m, f.messages...) + } + + return +} + +func (p *protoPackage) services() (s []*proto.Service) { + for _, f := range p.files { + s = append(s, f.services...) 
+ } + + return +} + +func (p *parser) parseFile(path string) error { + f, err := os.Open(path) + if err != nil { + return err + } + defer f.Close() + + def, err := proto.NewParser(f).Parse() + if err != nil { + return err + } + + var pkgName string + + proto.Walk( + def, + proto.WithPackage(func(p *proto.Package) { pkgName = p.Name }), + ) + + var pp *protoPackage + for _, v := range p.packages { + if pkgName == v.name { + pp = v + break + } + } + if pp == nil { + pp = &protoPackage{ + name: pkgName, + dir: filepath.Dir(path), + } + p.packages = append(p.packages, pp) + } + + pf := file{ + path: path, + } + + proto.Walk( + def, + proto.WithPackage(func(p *proto.Package) { pf.pkg = p }), + proto.WithImport(func(s *proto.Import) { pf.imports = append(pf.imports, s.Filename) }), + proto.WithOption(func(o *proto.Option) { pf.options = append(pf.options, o) }), + proto.WithMessage(func(m *proto.Message) { pf.messages = append(pf.messages, m) }), + proto.WithService(func(s *proto.Service) { pf.services = append(pf.services, s) }), + ) + + pp.files = append(pp.files, pf) + + return nil +} diff --git a/ignite/pkg/protoanalysis/protoanalysis.go b/ignite/pkg/protoanalysis/protoanalysis.go new file mode 100644 index 0000000..ddd3e12 --- /dev/null +++ b/ignite/pkg/protoanalysis/protoanalysis.go @@ -0,0 +1,106 @@ +// Package protoanalysis provides a toolset for analyzing proto files and packages. +package protoanalysis + +import ( + "context" + "fmt" + "slices" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// ErrImportNotFound returned when proto file import cannot be found. +var ErrImportNotFound = errors.New("proto import not found") + +const ( + protoFilePattern = "*.proto" + internalPath = "/internal" +) + +// Parse parses proto packages by finding them with given glob pattern. 
+func Parse(ctx context.Context, cache *Cache, path string) (Packages, error) { + if cache != nil { + if packages, ok := cache.Get(path); ok { + return packages, nil + } + } + + parsed, err := parse(ctx, path, protoFilePattern) + if err != nil { + return nil, err + } + + var packages Packages + + for _, pp := range parsed { + if strings.Contains(pp.dir, internalPath) { // skip internal protos (mainly testing protos, etc.) + continue + } + + packages = append(packages, build(*pp)) + } + + if cache != nil { + cache.Add(path, packages) + } + + return packages, nil +} + +// ParseFile parses a proto file at path. +func ParseFile(path string) (File, error) { + packages, err := Parse(context.Background(), nil, path) + if err != nil { + return File{}, err + } + files := packages.Files() + if len(files) != 1 { + return File{}, errors.New("path does not point to single file or it cannot be found") + } + return files[0], nil +} + +// HasMessages checks if the proto package under path contains messages with given names. +func HasMessages(ctx context.Context, path string, names ...string) error { + pkgs, err := Parse(ctx, NewCache(), path) + if err != nil { + return err + } + + hasName := func(name string) error { + for _, pkg := range pkgs { + for _, msg := range pkg.Messages { + if msg.Name == name { + return nil + } + } + } + return errors.Errorf("invalid proto message name %s", name) + } + + for _, name := range names { + if err := hasName(name); err != nil { + return err + } + } + return nil +} + +// IsImported checks if the proto package under path imports list of dependencies. 
+func IsImported(path string, dependencies ...string) error { + f, err := ParseFile(path) + if err != nil { + return err + } + + for _, wantDep := range dependencies { + found := slices.Contains(f.Dependencies, wantDep) + if !found { + return errors.Wrap(ErrImportNotFound, fmt.Sprintf( + "invalid proto dependency %s for file %s", wantDep, path), + ) + } + } + return nil +} diff --git a/ignite/pkg/protoanalysis/protoanalysis_test.go b/ignite/pkg/protoanalysis/protoanalysis_test.go new file mode 100644 index 0000000..6e618fe --- /dev/null +++ b/ignite/pkg/protoanalysis/protoanalysis_test.go @@ -0,0 +1,552 @@ +package protoanalysis + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestNestedMessages(t *testing.T) { + packages, err := Parse(context.Background(), nil, "testdata/nested_messages") + require.NoError(t, err) + + pkg := packages[0] + require.Equal(t, "A", pkg.Messages[0].Name) + require.Equal(t, "A_B", pkg.Messages[1].Name) + require.Equal(t, "A_B_C", pkg.Messages[2].Name) +} + +func TestQualifiedServiceTypes(t *testing.T) { + packages, err := Parse(context.Background(), nil, "testdata/qualified_service") + require.NoError(t, err) + + require.Len(t, packages, 1) + require.Len(t, packages[0].Services, 1) + require.Len(t, packages[0].Services[0].RPCFuncs, 2) + require.Equal(t, ".qualified_service.PingRequest", packages[0].Services[0].RPCFuncs[0].RequestType) + require.Equal(t, ".qualified_service.Outer.NestedRequest", packages[0].Services[0].RPCFuncs[1].RequestType) + + message, err := packages[0].MessageByName(".qualified_service.Outer.NestedRequest") + require.NoError(t, err) + require.Equal(t, "Outer_NestedRequest", message.Name) +} + +func TestLiquidity(t *testing.T) { + packages, err := Parse(context.Background(), nil, "testdata/liquidity") + require.NoError(t, err) + + expected := Packages{ + { + Name: "tendermint.liquidity", + Path: "testdata/liquidity", + Files: Files{ + { + Path: 
"testdata/liquidity/genesis.proto", + Dependencies: []string{"liquidity.proto", "gogoproto/gogo.proto"}, + }, + { + Path: "testdata/liquidity/liquidity.proto", + Dependencies: []string{"tx.proto", "gogoproto/gogo.proto", "cosmos_proto/coin.proto", "protoc-gen-openapiv2/options/annotations.proto"}, + }, + { + Path: "testdata/liquidity/msg.proto", + Dependencies: []string{"google/api/annotations.proto", "protoc-gen-openapiv2/options/annotations.proto", "tx.proto"}, + }, + { + Path: "testdata/liquidity/query.proto", + Dependencies: []string{"gogoproto/gogo.proto", "liquidity.proto", "google/api/annotations.proto", "cosmos_proto/pagination.proto", "protoc-gen-openapiv2/options/annotations.proto"}, + }, + { + Path: "testdata/liquidity/tx.proto", + Dependencies: []string{"gogoproto/gogo.proto", "cosmos_proto/coin.proto", "protoc-gen-openapiv2/options/annotations.proto"}, + }, + }, + GoImportName: "github.com/tendermint/liquidity/x/liquidity/types", + Messages: []Message{ + {Name: "PoolRecord", Path: "testdata/liquidity/genesis.proto", HighestFieldNumber: 6, Fields: map[string]string{ + "deposit_msg_states": "DepositMsgState", + "pool": "Pool", + "pool_batch": "PoolBatch", + "pool_metadata": "PoolMetadata", + "swap_msg_states": "SwapMsgState", + "withdraw_msg_states": "WithdrawMsgState", + }}, + {Name: "GenesisState", Path: "testdata/liquidity/genesis.proto", HighestFieldNumber: 2, Fields: map[string]string{ + "params": "Params", + "pool_records": "PoolRecord", + }}, + {Name: "PoolType", Path: "testdata/liquidity/liquidity.proto", HighestFieldNumber: 5, Fields: map[string]string{ + "description": "string", + "id": "uint32", + "max_reserve_coin_num": "uint32", + "min_reserve_coin_num": "uint32", + "name": "string", + }}, + {Name: "Params", Path: "testdata/liquidity/liquidity.proto", HighestFieldNumber: 9, Fields: map[string]string{ + "init_pool_coin_mint_amount": "string", + "max_order_amount_ratio": "bytes", + "max_reserve_coin_amount": "string", + 
"min_init_deposit_amount": "string", + "pool_creation_fee": "cosmos.base.v1beta1.Coin", + "pool_types": "PoolType", + "swap_fee_rate": "bytes", + "unit_batch_height": "uint32", + "withdraw_fee_rate": "bytes", + }}, + {Name: "Pool", Path: "testdata/liquidity/liquidity.proto", HighestFieldNumber: 5, Fields: map[string]string{ + "id": "uint64", + "pool_coin_denom": "string", + "reserve_account_address": "string", + "reserve_coin_denoms": "string", + "type_id": "uint32", + }}, + {Name: "PoolMetadata", Path: "testdata/liquidity/liquidity.proto", HighestFieldNumber: 3, Fields: map[string]string{ + "pool_coin_total_supply": "cosmos.base.v1beta1.Coin", + "pool_id": "uint64", + "reserve_coins": "cosmos.base.v1beta1.Coin", + }}, + {Name: "PoolMetadataResponse", Path: "testdata/liquidity/liquidity.proto", HighestFieldNumber: 2, Fields: map[string]string{ + "pool_coin_total_supply": "cosmos.base.v1beta1.Coin", + "reserve_coins": "cosmos.base.v1beta1.Coin", + }}, + {Name: "PoolBatch", Path: "testdata/liquidity/liquidity.proto", HighestFieldNumber: 7, Fields: map[string]string{ + "begin_height": "int64", + "deposit_msg_index": "uint64", + "executed": "bool", + "index": "uint64", + "pool_id": "uint64", + "swap_msg_index": "uint64", + "withdraw_msg_index": "uint64", + }}, + {Name: "PoolBatchResponse", Path: "testdata/liquidity/liquidity.proto", HighestFieldNumber: 6, Fields: map[string]string{ + "begin_height": "int64", + "deposit_msg_index": "uint64", + "executed": "bool", + "index": "uint64", + "swap_msg_index": "uint64", + "withdraw_msg_index": "uint64", + }}, + {Name: "DepositMsgState", Path: "testdata/liquidity/liquidity.proto", HighestFieldNumber: 6, Fields: map[string]string{ + "executed": "bool", + "msg": "MsgDepositWithinBatch", + "msg_height": "int64", + "msg_index": "uint64", + "succeeded": "bool", + "to_be_deleted": "bool", + }}, + {Name: "WithdrawMsgState", Path: "testdata/liquidity/liquidity.proto", HighestFieldNumber: 6, Fields: map[string]string{ + "executed": 
"bool", + "msg": "MsgWithdrawWithinBatch", + "msg_height": "int64", + "msg_index": "uint64", + "succeeded": "bool", + "to_be_deleted": "bool", + }}, + {Name: "SwapMsgState", Path: "testdata/liquidity/liquidity.proto", HighestFieldNumber: 10, Fields: map[string]string{ + "exchanged_offer_coin": "cosmos.base.v1beta1.Coin", + "executed": "bool", + "msg": "MsgSwapWithinBatch", + "msg_height": "int64", + "msg_index": "uint64", + "order_expiry_height": "int64", + "remaining_offer_coin": "cosmos.base.v1beta1.Coin", + "reserved_offer_coin_fee": "cosmos.base.v1beta1.Coin", + "succeeded": "bool", + "to_be_deleted": "bool", + }}, + {Name: "QueryLiquidityPoolRequest", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 1, Fields: map[string]string{ + "pool_id": "uint64", + }}, + {Name: "QueryLiquidityPoolResponse", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 1, Fields: map[string]string{ + "pool": "Pool", + }}, + {Name: "QueryLiquidityPoolBatchRequest", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 1, Fields: map[string]string{ + "pool_id": "uint64", + }}, + {Name: "QueryLiquidityPoolBatchResponse", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 1, Fields: map[string]string{ + "batch": "PoolBatch", + }}, + {Name: "QueryLiquidityPoolsRequest", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 1, Fields: map[string]string{ + "pagination": "cosmos.base.query.v1beta1.PageRequest", + }}, + {Name: "QueryLiquidityPoolsResponse", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 2, Fields: map[string]string{ + "pagination": "cosmos.base.query.v1beta1.PageResponse", + "pools": "Pool", + }}, + {Name: "QueryParamsRequest", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 0, Fields: map[string]string{}}, + {Name: "QueryParamsResponse", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 1, Fields: map[string]string{ + "params": "Params", + }}, + {Name: "QueryPoolBatchSwapMsgsRequest", Path: 
"testdata/liquidity/query.proto", HighestFieldNumber: 2, Fields: map[string]string{ + "pagination": "cosmos.base.query.v1beta1.PageRequest", + "pool_id": "uint64", + }}, + {Name: "QueryPoolBatchSwapMsgRequest", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 2, Fields: map[string]string{ + "msg_index": "uint64", + "pool_id": "uint64", + }}, + {Name: "QueryPoolBatchSwapMsgsResponse", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 2, Fields: map[string]string{ + "pagination": "cosmos.base.query.v1beta1.PageResponse", + "swaps": "SwapMsgState", + }}, + {Name: "QueryPoolBatchSwapMsgResponse", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 1, Fields: map[string]string{ + "swap": "SwapMsgState", + }}, + {Name: "QueryPoolBatchDepositMsgsRequest", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 2, Fields: map[string]string{ + "pagination": "cosmos.base.query.v1beta1.PageRequest", + "pool_id": "uint64", + }}, + {Name: "QueryPoolBatchDepositMsgRequest", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 2, Fields: map[string]string{ + "msg_index": "uint64", + "pool_id": "uint64", + }}, + {Name: "QueryPoolBatchDepositMsgsResponse", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 2, Fields: map[string]string{ + "deposits": "DepositMsgState", + "pagination": "cosmos.base.query.v1beta1.PageResponse", + }}, + {Name: "QueryPoolBatchDepositMsgResponse", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 1, Fields: map[string]string{ + "deposit": "DepositMsgState", + }}, + {Name: "QueryPoolBatchWithdrawMsgsRequest", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 2, Fields: map[string]string{ + "pagination": "cosmos.base.query.v1beta1.PageRequest", + "pool_id": "uint64", + }}, + {Name: "QueryPoolBatchWithdrawMsgRequest", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 2, Fields: map[string]string{ + "msg_index": "uint64", + "pool_id": "uint64", + }}, + {Name: 
"QueryPoolBatchWithdrawMsgsResponse", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 2, Fields: map[string]string{ + "pagination": "cosmos.base.query.v1beta1.PageResponse", + "withdraws": "WithdrawMsgState", + }}, + {Name: "QueryPoolBatchWithdrawMsgResponse", Path: "testdata/liquidity/query.proto", HighestFieldNumber: 1, Fields: map[string]string{ + "withdraw": "WithdrawMsgState", + }}, + {Name: "MsgCreatePool", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 4, Fields: map[string]string{ + "deposit_coins": "cosmos.base.v1beta1.Coin", + "pool_creator_address": "string", + "pool_type_id": "uint32", + }}, + {Name: "MsgCreatePoolRequest", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 2, Fields: map[string]string{ + "base_req": "BaseReq", + "msg": "MsgCreatePool", + }}, + {Name: "MsgCreatePoolResponse", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 1, Fields: map[string]string{ + "std_tx": "StdTx", + }}, + {Name: "MsgDepositWithinBatch", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 3, Fields: map[string]string{ + "deposit_coins": "cosmos.base.v1beta1.Coin", + "depositor_address": "string", + "pool_id": "uint64", + }}, + {Name: "MsgDepositWithinBatchRequest", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 3, Fields: map[string]string{ + "base_req": "BaseReq", + "msg": "MsgDepositWithinBatch", + "pool_id": "uint64", + }}, + {Name: "MsgDepositWithinBatchResponse", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 1, Fields: map[string]string{ + "std_tx": "StdTx", + }}, + {Name: "MsgWithdrawWithinBatch", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 3, Fields: map[string]string{ + "pool_coin": "cosmos.base.v1beta1.Coin", + "pool_id": "uint64", + "withdrawer_address": "string", + }}, + {Name: "MsgWithdrawWithinBatchRequest", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 3, Fields: map[string]string{ + "base_req": "BaseReq", + "msg": "MsgWithdrawWithinBatch", + "pool_id": "uint64", 
+ }}, + {Name: "MsgWithdrawWithinBatchResponse", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 1, Fields: map[string]string{ + "std_tx": "StdTx", + }}, + {Name: "MsgSwapWithinBatch", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 7, Fields: map[string]string{ + "demand_coin_denom": "string", + "offer_coin": "cosmos.base.v1beta1.Coin", + "offer_coin_fee": "cosmos.base.v1beta1.Coin", + "order_price": "bytes", + "pool_id": "uint64", + "swap_requester_address": "string", + "swap_type_id": "uint32", + }}, + {Name: "MsgSwapWithinBatchRequest", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 3, Fields: map[string]string{ + "base_req": "BaseReq", + "msg": "MsgSwapWithinBatch", + "pool_id": "uint64", + }}, + {Name: "MsgSwapWithinBatchResponse", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 1, Fields: map[string]string{ + "std_tx": "StdTx", + }}, + {Name: "BaseReq", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 11, Fields: map[string]string{ + "account_number": "uint64", + "chain_id": "string", + "fees": "cosmos.base.v1beta1.Coin", + "from": "string", + "gas": "uint64", + "gas_adjustment": "string", + "gas_prices": "cosmos.base.v1beta1.DecCoin", + "memo": "string", + "sequence": "uint64", + "simulate": "bool", + "timeout_height": "uint64", + }}, + {Name: "Fee", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 2, Fields: map[string]string{ + "amount": "cosmos.base.v1beta1.Coin", + "gas": "uint64", + }}, + {Name: "PubKey", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 2, Fields: map[string]string{ + "type": "string", + "value": "string", + }}, + {Name: "Signature", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 4, Fields: map[string]string{ + "account_number": "uint64", + "pub_key": "PubKey", + "sequence": "uint64", + "signature": "string", + }}, + {Name: "StdTx", Path: "testdata/liquidity/tx.proto", HighestFieldNumber: 4, Fields: map[string]string{ + "fee": "Fee", + "memo": "string", + "msg": 
"string", + "signature": "Signature", + }}, + }, + Services: []Service{ + { + Name: "MsgApi", + RPCFuncs: []RPCFunc{ + { + Name: "CreatePoolApi", + RequestType: "MsgCreatePoolRequest", + ReturnsType: "MsgCreatePoolResponse", + HTTPRules: []HTTPRule{ + { + Endpoint: "/liquidity/pools/{test}", + Params: []string{"test"}, + HasBody: true, + BodyFields: map[string]string{ + "base_req": "BaseReq", + "msg": "MsgCreatePool", + }, + }, + }, + }, + { + Name: "DepositWithinBatchApi", + RequestType: "MsgDepositWithinBatchRequest", + ReturnsType: "MsgDepositWithinBatchResponse", + HTTPRules: []HTTPRule{ + { + Endpoint: "/liquidity/pools/{pool_id}/batch/deposits", + Params: []string{"pool_id"}, + HasBody: true, + BodyFields: map[string]string{ + "base_req": "BaseReq", + "msg": "MsgDepositWithinBatch", + }, + }, + }, + }, + { + Name: "WithdrawWithinBatchApi", + RequestType: "MsgWithdrawWithinBatchRequest", + ReturnsType: "MsgWithdrawWithinBatchResponse", + HTTPRules: []HTTPRule{ + { + Endpoint: "/liquidity/pools/{pool_id}/batch/withdraws", + Params: []string{"pool_id"}, + HasBody: true, + BodyFields: map[string]string{ + "base_req": "BaseReq", + "msg": "MsgWithdrawWithinBatch", + }, + }, + }, + }, + { + Name: "SwapApi", + RequestType: "MsgSwapWithinBatchRequest", + ReturnsType: "MsgSwapWithinBatchResponse", + HTTPRules: []HTTPRule{ + { + Endpoint: "/liquidity/pools/{pool_id}/batch/swaps", + Params: []string{"pool_id"}, + HasQuery: true, // NOTE: this should never happen in this case, but this was done to test protoanalysis detection in SwapApi proto definition. 
+ HasBody: true, + BodyFields: map[string]string{ + "base_req": "BaseReq", + "msg": "MsgSwapWithinBatch", + }, + }, + }, + }, + }, + }, + { + Name: "Query", + RPCFuncs: []RPCFunc{ + { + Name: "LiquidityPools", + RequestType: "QueryLiquidityPoolsRequest", + ReturnsType: "QueryLiquidityPoolsResponse", + HTTPRules: []HTTPRule{ + { + Endpoint: "/liquidity/pools", + HasQuery: true, + QueryFields: map[string]string{ + "pagination": "cosmos.base.query.v1beta1.PageRequest", + }, + }, + }, + }, + { + Name: "LiquidityPool", + RequestType: "QueryLiquidityPoolRequest", + ReturnsType: "QueryLiquidityPoolResponse", + HTTPRules: []HTTPRule{ + { + Endpoint: "/liquidity/pools/{pool_id}", + Params: []string{"pool_id"}, + }, + }, + }, + { + Name: "LiquidityPoolBatch", + RequestType: "QueryLiquidityPoolBatchRequest", + ReturnsType: "QueryLiquidityPoolBatchResponse", + HTTPRules: []HTTPRule{ + { + Endpoint: "/liquidity/pools/{pool_id}/batch", + Params: []string{"pool_id"}, + }, + }, + }, + { + Name: "PoolBatchSwapMsgs", + RequestType: "QueryPoolBatchSwapMsgsRequest", + ReturnsType: "QueryPoolBatchSwapMsgsResponse", + HTTPRules: []HTTPRule{ + { + Endpoint: "/liquidity/pools/{pool_id}/batch/swaps", + Params: []string{"pool_id"}, + HasQuery: true, + QueryFields: map[string]string{ + "pagination": "cosmos.base.query.v1beta1.PageRequest", + }, + }, + }, + }, + { + Name: "PoolBatchSwapMsg", + RequestType: "QueryPoolBatchSwapMsgRequest", + ReturnsType: "QueryPoolBatchSwapMsgResponse", + HTTPRules: []HTTPRule{ + { + Endpoint: "/liquidity/pools/{pool_id}/batch/swaps/{msg_index}", + Params: []string{"pool_id", "msg_index"}, + }, + }, + }, + { + Name: "PoolBatchDepositMsgs", + RequestType: "QueryPoolBatchDepositMsgsRequest", + ReturnsType: "QueryPoolBatchDepositMsgsResponse", + HTTPRules: []HTTPRule{ + { + Endpoint: "/liquidity/pools/{pool_id}/batch/deposits", + Params: []string{"pool_id"}, + HasQuery: true, + QueryFields: map[string]string{ + "pagination": 
"cosmos.base.query.v1beta1.PageRequest", + }, + }, + }, + }, + { + Name: "PoolBatchDepositMsg", + RequestType: "QueryPoolBatchDepositMsgRequest", + ReturnsType: "QueryPoolBatchDepositMsgResponse", + HTTPRules: []HTTPRule{ + { + Endpoint: "/liquidity/pools/{pool_id}/batch/deposits/{msg_index}", + Params: []string{"pool_id", "msg_index"}, + }, + }, + }, + { + Name: "PoolBatchWithdrawMsgs", + RequestType: "QueryPoolBatchWithdrawMsgsRequest", + ReturnsType: "QueryPoolBatchWithdrawMsgsResponse", + HTTPRules: []HTTPRule{ + { + Endpoint: "/liquidity/pools/{pool_id}/batch/withdraws", + Params: []string{"pool_id"}, + HasQuery: true, + QueryFields: map[string]string{ + "pagination": "cosmos.base.query.v1beta1.PageRequest", + }, + }, + }, + }, + { + Name: "PoolBatchWithdrawMsg", + RequestType: "QueryPoolBatchWithdrawMsgRequest", + ReturnsType: "QueryPoolBatchWithdrawMsgResponse", + HTTPRules: []HTTPRule{ + { + Endpoint: "/liquidity/pools/{pool_id}/batch/withdraws/{msg_index}", + Params: []string{"pool_id", "msg_index"}, + }, + }, + }, + { + Name: "Params", + RequestType: "QueryParamsRequest", + ReturnsType: "QueryParamsResponse", + HTTPRules: []HTTPRule{ + { + Endpoint: "/liquidity/params", + }, + }, + }, + }, + }, + { + Name: "Msg", + RPCFuncs: []RPCFunc{ + { + Name: "CreatePool", + RequestType: "MsgCreatePool", + ReturnsType: "MsgCreatePoolResponse", + }, + { + Name: "DepositWithinBatch", + RequestType: "MsgDepositWithinBatch", + ReturnsType: "MsgDepositWithinBatchResponse", + }, + { + Name: "WithdrawWithinBatch", + RequestType: "MsgWithdrawWithinBatch", + ReturnsType: "MsgWithdrawWithinBatchResponse", + }, + { + Name: "Swap", + RequestType: "MsgSwapWithinBatch", + ReturnsType: "MsgSwapWithinBatchResponse", + }, + }, + }, + }, + }, + } + + require.Equal(t, expected, packages) +} + +func TestSkipInternalPath(t *testing.T) { + packages, err := Parse(context.Background(), nil, "testdata/internal") + require.NoError(t, err) + + require.Len(t, packages, 0) +} diff --git 
a/ignite/pkg/protoanalysis/protoutil/creator.go b/ignite/pkg/protoanalysis/protoutil/creator.go new file mode 100644 index 0000000..7678dc0 --- /dev/null +++ b/ignite/pkg/protoanalysis/protoutil/creator.go @@ -0,0 +1,670 @@ +// Package protoutil wraps proto structs to allow easier creation, protobuf lang is small enough +// to easily allow this. +package protoutil + +import ( + "fmt" + "strconv" + + "github.com/emicklei/proto" +) + +// TODO: Can also support comments/inline comments? -- Probably, formatting is currently +// flaky with how it prints them, though. + +// Values for the kind of import. +const ( + KindWeak = "weak" + KindPublic = "public" +) + +// NewLiteral creates a new Literal: +// +// // true +// l := NewLiteral("true") +// +// // 1 +// l := NewLiteral("1") +// +// // "foo" +// l := NewLiteral("foo") +// +// Currently doesn't support creating compound literals (arrays/maps). +func NewLiteral(lit string) *proto.Literal { + return &proto.Literal{ + Source: lit, + IsString: isString(lit), + } +} + +// ImportSpec holds information relevant to the import statement. +type ImportSpec struct { + path string + kind string +} + +// ImportSpecOptions is a type alias for a callable accepting an ImportSpec. +type ImportSpecOptions func(i *ImportSpec) + +// Weak allows you to set the kind of the import statement to 'weak'. +func Weak() ImportSpecOptions { + return func(i *ImportSpec) { + i.kind = KindWeak + } +} + +// Public allows you to set the kind of the import statement to 'public'. 
+func Public() ImportSpecOptions { + return func(i *ImportSpec) { + i.kind = KindPublic + } +} + +// NewImport creates a new import statement node: +// +// // import "myproto.proto"; +// imp := NewImport("myproto.proto") +// +// By default, no kind is assigned to it, by using Weak or Public, this can be specified: +// +// // import weak "myproto.proto"; +// imp := NewImport("myproto.proto", Weak()) +func NewImport(path string, opts ...ImportSpecOptions) *proto.Import { + i := ImportSpec{path: path} + for _, opt := range opts { + opt(&i) + } + + return &proto.Import{ + Filename: i.path, + Kind: i.kind, + } +} + +// NewPackage creates a new package statement node: +// +// // package foo.bar; +// pkg := NewPackage("foo.bar") +func NewPackage(path string) *proto.Package { + return &proto.Package{ + Name: path, + } +} + +// OptionSpec holds information relevant to the option statement. +type OptionSpec struct { + name string + setter string + constant string + custom bool +} + +// OptionSpecOptions is a function that accepts an OptionSpec. +type OptionSpecOptions func(o *OptionSpec) + +// Custom denotes the option as being a custom option. +func Custom() OptionSpecOptions { + return func(f *OptionSpec) { + f.custom = true + } +} + +// SetField allows setting specific fields for a given option +// that denotes a type with fields. 
+// +// // option (my_opt).field = "Value"; +// opt := NewOption("my_opt", "Value", Custom(), Setter("field")) +func SetField(name string) OptionSpecOptions { + return func(f *OptionSpec) { + f.setter = name + } +} + +// NewOption creates a new option statement node: +// +// // option foo = 1; +// opt := NewOption("foo", "1") +// +// Custom options can be marked as such by using Custom, this wraps the option name +// in parenthesis: +// +// // option (foo) = 1; +// opt := NewOption("foo", "1", Custom()) +// +// Since option constants can accept a number of types, strings that require quotation +// should be passed as raw strings: +// +// // option foo = "bar"; +// opt := NewOption("foo", `bar`) +func NewOption(name, constant string, opts ...OptionSpecOptions) *proto.Option { + o := OptionSpec{name: name, constant: constant} + for _, opt := range opts { + opt(&o) + } + if o.custom { + o.name = fmt.Sprintf("(%s)", o.name) + } + // add the field we are setting outside the parentheses. + if o.setter != "" { + o.name = fmt.Sprintf("%s.%s", o.name, o.setter) + } + return &proto.Option{ + Name: o.name, + Constant: *NewLiteral(o.constant), + } +} + +/// Service + PRC + +// RPCSpec holds information relevant to the rpc statement. +type RPCSpec struct { + name, inputType, outputType string + streamsReq, streamsResp bool + options []*proto.Option +} + +// RPCSpecOptions is a type alias for a callable accepting an RPCSpec. +type RPCSpecOptions func(i *RPCSpec) + +// StreamRequest marks request as streaming. +func StreamRequest() RPCSpecOptions { + return func(r *RPCSpec) { + r.streamsReq = true + } +} + +// StreamResponse marks response as streaming. +func StreamResponse() RPCSpecOptions { + return func(r *RPCSpec) { + r.streamsResp = true + } +} + +// WithRPCOptions adds options to the RPC. +func WithRPCOptions(option ...*proto.Option) RPCSpecOptions { + return func(o *RPCSpec) { + o.options = append(o.options, option...) 
+ } +} + +// NewRPC creates a new RPC statement node: +// +// // rpc Foo(Bar) returns(Bar) {} +// rpc := NewRPC("Foo", "Bar", "Bar") +// +// No options are attached by default, use WithRPCOptions to add options as required: +// +// // rpc Foo(Bar) returns(Bar) { +// // option (foo) = 1; +// // } +// rpc := NewRPC("Foo", "Bar", "Bar", WithRPCOptions(NewOption("foo", "1"))) +func NewRPC(name, inputType, outputType string, opts ...RPCSpecOptions) *proto.RPC { + r := RPCSpec{name: name, inputType: inputType, outputType: outputType} + for _, opt := range opts { + opt(&r) + } + + rpc := &proto.RPC{ + Name: r.name, + Comment: defaultComment(r.name, "RPC"), + RequestType: r.inputType, + ReturnsType: r.outputType, + StreamsRequest: r.streamsReq, + StreamsReturns: r.streamsResp, + } + if len(r.options) > 0 { + for _, opt := range r.options { + rpc.Elements = append(rpc.Elements, opt) + } + } + return rpc +} + +// ServiceSpec holds information relevant to the service statement. +type ServiceSpec struct { + name string + rpcs []*proto.RPC + opts []*proto.Option +} + +// ServiceSpecOptions is a type alias for a callable accepting a ServiceSpec. +type ServiceSpecOptions func(i *ServiceSpec) + +// WithRPCs adds rpcs to the service. +func WithRPCs(rpcs ...*proto.RPC) ServiceSpecOptions { + return func(s *ServiceSpec) { + s.rpcs = append(s.rpcs, rpcs...) + } +} + +// WithServiceOptions adds options to the service. +func WithServiceOptions(options ...*proto.Option) ServiceSpecOptions { + return func(s *ServiceSpec) { + s.opts = append(s.opts, options...) 
+ } +} + +// NewService creates a new service statement node: +// +// // service Foo {} +// service := NewService("Foo") +// +// No rpcs/options are attached by default, use WithRPCs and +// WithServiceOptions to add them as required: +// +// // service Foo { +// // option (foo) = 1; +// // rpc Bar(Bar) returns (Bar) {} +// // } +// opt := NewOption("foo", "1") +// rpc := NewRPC("Bar", "Bar", "Bar") +// service := NewService("Foo", WithServiceOptions(opt), WithRPCs(rpc)) +// +// By default, options are added first and then the rpcs. +func NewService(name string, opts ...ServiceSpecOptions) *proto.Service { + s := ServiceSpec{name: name} + for _, opt := range opts { + opt(&s) + } + service := &proto.Service{ + Name: s.name, + Comment: defaultComment(s.name, "service"), + } + for _, opt := range s.opts { + service.Elements = append(service.Elements, opt) + } + for _, rpc := range s.rpcs { + service.Elements = append(service.Elements, rpc) + } + return service +} + +/// Message + NormalField + +// FieldSpec holds information relevant to the field statement. +type FieldSpec struct { + name, typename string + sequence int + repeated, optional, required bool + options []*proto.Option +} + +// FieldSpecOptions is a type alias for a callable accepting a FieldSpec. +type FieldSpecOptions func(f *FieldSpec) + +// Repeated marks the field as repeated. +func Repeated() FieldSpecOptions { + return func(f *FieldSpec) { + f.repeated = true + } +} + +// Optional marks the field as optional. +func Optional() FieldSpecOptions { + return func(f *FieldSpec) { + f.optional = true + } +} + +// Required marks the field as required. +func Required() FieldSpecOptions { + return func(f *FieldSpec) { + f.required = true + } +} + +// WithFieldOptions adds options to the field. +func WithFieldOptions(options ...*proto.Option) FieldSpecOptions { + return func(f *FieldSpec) { + f.options = append(f.options, options...) 
+ } +} + +// NewField creates a new field statement node: +// +// // int64 Foo = 1; +// field := NewField("Foo", "int64", 1) +// +// Fields aren't marked as repeated, required or optional. Use Repeated, Optional +// and Required to mark the field as such. +// +// // repeated int64 Foo = 1; +// field := NewField("Foo", "int64", 1, Repeated()) +func NewField(name, typename string, sequence int, opts ...FieldSpecOptions) *proto.NormalField { + f := FieldSpec{name: name, typename: typename, sequence: sequence} + for _, opt := range opts { + opt(&f) + } + + // Check qualifiers? Though protoc will shout if we do stupid things. + field := &proto.NormalField{ + Field: &proto.Field{ + Name: f.name, + Sequence: f.sequence, + Type: f.typename, + Options: []*proto.Option{}, + }, + Repeated: f.repeated, + Required: f.required, + Optional: f.optional, + } + if len(f.options) > 0 { + field.Options = append(field.Options, f.options...) + } + return field +} + +// MessageSpec holds information relevant to the message statement. +type MessageSpec struct { + name string + fields []*proto.NormalField + enums []*proto.Enum + options []*proto.Option + isExtend bool +} + +// MessageSpecOptions is a type alias for a callable accepting a MessageSpec. +type MessageSpecOptions func(i *MessageSpec) + +// WithMessageOptions adds options to the message. +func WithMessageOptions(options ...*proto.Option) MessageSpecOptions { + return func(m *MessageSpec) { + m.options = append(m.options, options...) + } +} + +// WithFields adds fields to the message. +func WithFields(fields ...*proto.NormalField) MessageSpecOptions { + return func(m *MessageSpec) { + m.fields = append(m.fields, fields...) + } +} + +// WithEnums adds enums to the message. +func WithEnums(enum ...*proto.Enum) MessageSpecOptions { + return func(m *MessageSpec) { + m.enums = append(m.enums, enum...) 
+ } +} + +func Extend() MessageSpecOptions { + return func(m *MessageSpec) { + m.isExtend = true + } +} + +// NewMessage creates a new message statement node: +// +// // message Foo {} +// message := NewMessage("Foo") +// +// No fields/enums/options are attached by default, use WithMessageFields, WithEnums, +// and WithMessageOptions to add them as required: +// +// // message Foo { +// // option (foo) = 1; +// // int64 Bar = 1; +// // } +// opt := NewOption("foo", "1") +// field := NewField("int64", "Bar", 1) +// message := NewMessage("Foo", WithMessageOptions(opt), WithFields(field)) +// +// By default, options are added first, then fields and then enums. +func NewMessage(name string, opts ...MessageSpecOptions) *proto.Message { + m := MessageSpec{name: name} + for _, opt := range opts { + opt(&m) + } + message := &proto.Message{ + Name: m.name, + Comment: defaultComment(name, "message"), + IsExtend: m.isExtend, + } + for _, opt := range m.options { + message.Elements = append(message.Elements, opt) + } + + // Verify that fields have unique sequence? Though, again, protoc will shout if + // it isn't the case. + for _, field := range m.fields { + message.Elements = append(message.Elements, field) + } + for _, enum := range m.enums { + message.Elements = append(message.Elements, enum) + } + return message +} + +// EnumFieldSpec holds information relevant to the enum field statement. +type EnumFieldSpec struct { + name string + value int + options []*proto.Option +} + +// EnumFieldSpecOptions is a type alias for a callable accepting an EnumFieldSpec. +type EnumFieldSpecOptions func(f *EnumFieldSpec) + +// WithEnumFieldOptions adds options to the enum field. +func WithEnumFieldOptions(options ...*proto.Option) EnumFieldSpecOptions { + return func(f *EnumFieldSpec) { + f.options = append(f.options, options...) 
+ } +} + +// NewEnumField creates a new enum field statement node: +// +// // BAR = 1; +// field := NewEnumField("BAR", 1) +// +// No options are attached by default, use WithEnumFieldOptions to add them as +// required: +// +// // BAR = 1 [option (foo) = 1]; +// field := NewEnumField("BAR", 1, WithEnumFieldOptions(NewOption("foo", "1"))) +func NewEnumField(name string, value int, opts ...EnumFieldSpecOptions) *proto.EnumField { + f := EnumFieldSpec{name: name, value: value} + for _, opt := range opts { + opt(&f) + } + + field := &proto.EnumField{ + Name: f.name, + Integer: f.value, + } + for _, opt := range f.options { + field.Elements = append(field.Elements, opt) + } + return field +} + +// EnumSpec holds information relevant to the enum statement. +type EnumSpec struct { + name string + fields []*proto.EnumField + options []*proto.Option +} + +// EnumSpecOpts is a type alias for a callable accepting an EnumSpec. +type EnumSpecOpts func(i *EnumSpec) + +// WithEnumOptions adds options to the enum. +func WithEnumOptions(options ...*proto.Option) EnumSpecOpts { + return func(e *EnumSpec) { + e.options = append(e.options, options...) + } +} + +// WithEnumFields adds fields to the enum. +func WithEnumFields(fields ...*proto.EnumField) EnumSpecOpts { + return func(e *EnumSpec) { + e.fields = append(e.fields, fields...) + } +} + +// NewEnum creates a new enum statement node: +// +// // enum Foo { +// // BAR = 1; +// // } +// enum := NewEnum("Foo", WithEnumFields(NewEnumField("BAR", 1))) +// +// No options are attached by default, use WithEnumOptions to add them as +// required: +// +// // enum Foo { +// // BAR = 1 [option (foo) = 1]; +// // } +// enum := NewEnum("Foo", WithEnumOptions(NewOption("foo", "1")), WithEnumFields(NewEnumField("BAR", 1))) +// +// By default, options are added first, then fields. 
+func NewEnum(name string, opts ...EnumSpecOpts) *proto.Enum { + e := EnumSpec{name: name} + for _, opt := range opts { + opt(&e) + } + enum := &proto.Enum{ + Name: e.name, + Comment: defaultComment(name, "enum"), + } + for _, opt := range e.options { + enum.Elements = append(enum.Elements, opt) + } + for _, field := range e.fields { + enum.Elements = append(enum.Elements, field) + } + return enum +} + +// OneofFieldSpec holds information relevant to the oneof field statement. +type OneofFieldSpec struct { + name, typename string + sequence int + options []*proto.Option +} + +// OneofFieldOptions is a type alias for a callable accepting a OneOfField. +type OneofFieldOptions func(f *OneofFieldSpec) + +// WithOneofFieldOptions adds options to the oneof field. +func WithOneofFieldOptions(options ...*proto.Option) OneofFieldOptions { + return func(f *OneofFieldSpec) { + f.options = append(f.options, options...) + } +} + +// NewOneofField creates a new oneof field statement node: +// +// // Needs to placed in oneof block. +// // int32 Foo = 1; +// field := NewOneofField("Foo", "int32", 1) +// +// Additional options can be created and attached to the field to the field via +// WithOneOfFieldOptions: +// +// // int32 Foo = 1 [option (foo) = 1]; +// field := NewOneofField("Foo", "int32", 1, WithOneOfFieldOptions(NewOption("foo", "1"))) +func NewOneofField(name, typename string, sequence int, opts ...OneofFieldOptions) *proto.OneOfField { + f := OneofFieldSpec{name: name, typename: typename, sequence: sequence} + for _, opt := range opts { + opt(&f) + } + field := &proto.OneOfField{ + Field: &proto.Field{ + Name: f.name, + Sequence: f.sequence, + Type: f.typename, + Options: []*proto.Option{}, + }, + } + field.Options = append(field.Options, f.options...) + return field +} + +// OneofSpec holds information relevant to the enum statement. 
+type OneofSpec struct { + name string + options []*proto.Option + fields []*proto.OneOfField +} + +// OneofSpecOptions is a type alias for a callable accepting a OneOfSpec. +type OneofSpecOptions func(o *OneofSpec) + +// WithOneofOptions adds options to the oneof. +func WithOneofOptions(options ...*proto.Option) OneofSpecOptions { + return func(o *OneofSpec) { + o.options = append(o.options, options...) + } +} + +// WithOneofFields adds fields to the oneof. +func WithOneofFields(fields ...*proto.OneOfField) OneofSpecOptions { + return func(o *OneofSpec) { + o.fields = append(o.fields, fields...) + } +} + +// NewOneof creates a new oneof statement node: +// +// // oneof Foo { +// // int32 Foo = 1; +// // } +// oneof := NewOneof("Foo", WithOneOfFields(NewOneOfField("Foo", "int32", 1))) +// +// No options are attached by default, use WithOneOfOptions to add them as required. +func NewOneof(name string, opts ...OneofSpecOptions) *proto.Oneof { + o := OneofSpec{name: name} + for _, opt := range opts { + opt(&o) + } + oneof := &proto.Oneof{ + Name: o.name, + } + for _, opt := range o.options { + oneof.Elements = append(oneof.Elements, opt) + } + for _, field := range o.fields { + oneof.Elements = append(oneof.Elements, field) + } + return oneof +} + +// AttachComment attaches a comment top level nodes. Currently only supports Messages, RPC's +// and Services. Silently ignores other nodes though they can easily be added by just appending +// a new case to the switch statement. +func AttachComment(n proto.Visitee, comment string) { + c := &proto.Comment{ + // Attach a starting space here, i.e // text and not //text + Lines: []string{" " + comment}, + } + switch n := n.(type) { + case *proto.Message: + n.Comment = c + case *proto.RPC: + n.Comment = c + case *proto.Service: + n.Comment = c + } +} + +// Check if s is a string, exclude special cases of "false" and "true". 
+func isString(s string) bool { + if s == "true" || s == "false" { + return false + } + if _, err := strconv.ParseFloat(s, 64); err == nil { + return false + } + return true +} + +// defaultComment creates a new default proto comment with name and type. +func defaultComment(name, protoType string) *proto.Comment { + return newComment(fmt.Sprintf(" %[1]v defines the %[1]v %[2]v.", name, protoType)) +} + +// newComment creates a new proto comment. +func newComment(text string) *proto.Comment { + return &proto.Comment{Lines: []string{text}} +} diff --git a/ignite/pkg/protoanalysis/protoutil/creator_test.go b/ignite/pkg/protoanalysis/protoutil/creator_test.go new file mode 100644 index 0000000..db5e103 --- /dev/null +++ b/ignite/pkg/protoanalysis/protoutil/creator_test.go @@ -0,0 +1,560 @@ +package protoutil_test + +import ( + "testing" + + "github.com/emicklei/proto" + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" +) + +// Imports. +func TestCreateImport(t *testing.T) { + cases := []struct { + name, path, kind string + out *proto.Import + }{ + { + name: "simple import", + path: "github.com/emicklei/proto.proto", + kind: "weak", + out: &proto.Import{ + Filename: "github.com/emicklei/proto.proto", + Kind: "weak", + }, + }, + { + name: "simple import", + path: "github.com/emicklei/proto.proto", + kind: "public", + out: &proto.Import{ + Filename: "github.com/emicklei/proto.proto", + Kind: "public", + }, + }, + } + + for _, test := range cases { + var spec []protoutil.ImportSpecOptions + switch test.kind { + case "weak": + spec = append(spec, protoutil.Weak()) + case "public": + spec = append(spec, protoutil.Public()) + } + imp := protoutil.NewImport(test.path, spec...) + require.Equal(t, test.out, imp, "expected %v, got %v", test.out, imp) + } +} + +// Packages. 
+func TestCreatePackage(t *testing.T) { + cases := []struct { + name string + out *proto.Package + }{ + { + name: "org.foo.hack", + out: &proto.Package{ + Name: "org.foo.hack", + }, + }, + { + name: "simple.package", + out: &proto.Package{ + Name: "simple.package", + }, + }, + } + + for _, test := range cases { + p := protoutil.NewPackage(test.name) + require.Equal(t, test.out, p, "expected %v, got %v", test.out, p) + } +} + +// Options. +func TestCreateOption(t *testing.T) { + cases := []struct { + name, constant, setField string + isCustom bool + out *proto.Option + }{ + { + name: "my_option", + constant: "5", + out: &proto.Option{ + Name: "my_option", + Constant: *protoutil.NewLiteral("5"), + }, + }, + { + name: "my_option", + constant: "false", + isCustom: true, + out: &proto.Option{ + Name: "(my_option)", + Constant: *protoutil.NewLiteral("false"), + }, + }, + { + name: "my_option", + constant: "2.341", + setField: "my_field", + isCustom: true, + out: &proto.Option{ + Name: "(my_option).my_field", + Constant: *protoutil.NewLiteral("2.341"), + }, + }, + } + + for _, test := range cases { + var opts []protoutil.OptionSpecOptions + if test.isCustom { + opts = []protoutil.OptionSpecOptions{protoutil.Custom()} + } + if test.setField != "" { + opts = append(opts, protoutil.SetField(test.setField)) + } + opt := protoutil.NewOption(test.name, test.constant, opts...) + require.Equal(t, test.out, opt, "expected %v, got %v", test.out, opt) + } +} + +// RPCs. 
+func TestCreateRPC(t *testing.T) { + cases := []struct { + name, inputType, outputType string + streamsReq, streamsResp bool + options []*proto.Option + }{ + { + name: "my_rpc", + inputType: "my_input_type", + outputType: "my_output_type", + }, + { + name: "my_rpc", + inputType: "my_input_type", + outputType: "my_output_type", + streamsReq: true, + streamsResp: true, + }, + { + name: "my_rpc", + inputType: "my_input_type", + outputType: "my_output_type", + options: []*proto.Option{ + protoutil.NewOption("my_option", "5"), + protoutil.NewOption("gogoproto.nullable", "false", protoutil.Custom(), protoutil.SetField("set")), + }, + }, + } + + for _, test := range cases { + var opts []protoutil.RPCSpecOptions + if test.streamsReq { + opts = append(opts, protoutil.StreamRequest()) + } + if test.streamsResp { + opts = append(opts, protoutil.StreamResponse()) + } + if len(test.options) > 0 { + opts = append(opts, protoutil.WithRPCOptions(test.options...)) + } + rpc := protoutil.NewRPC(test.name, test.inputType, test.outputType, opts...) + + require.Equal(t, test.name, rpc.Name, "expected %v, got %v", test.name, rpc.Name) + require.Equal(t, test.inputType, rpc.RequestType, "expected %v, got %v", test.inputType, rpc.ReturnsType) + require.Equal(t, test.outputType, rpc.ReturnsType, "expected %v, got %v", test.outputType, rpc.ReturnsType) + require.Equal(t, test.streamsReq, rpc.StreamsRequest, "expected %v, got %v", test.streamsReq, rpc.StreamsRequest) + require.Equal(t, test.streamsResp, rpc.StreamsReturns, "expected %v, got %v", test.streamsResp, rpc.StreamsReturns) + for i, opt := range rpc.Elements { + opt, ok := opt.(*proto.Option) + require.True(t, ok, "expected option, got %T", opt) + require.Equal(t, test.options[i], opt, "expected %v, got %v", test.options[i], opt) + } + require.Equal(t, len(test.options), len(rpc.Elements), "expected %v, got %v", len(test.options), len(rpc.Elements)) + } +} + +// Services. 
+func TestCreateService(t *testing.T) { + cases := []struct { + name string + rpcs []*proto.RPC + options []*proto.Option + }{ + { + name: "my_service", + rpcs: []*proto.RPC{ + protoutil.NewRPC("my_rpc", "my_input_type", "my_output_type"), + protoutil.NewRPC("my_other_rpc", "my_other_input_type", "my_other_output_type", protoutil.StreamRequest(), protoutil.StreamResponse()), + }, + options: []*proto.Option{protoutil.NewOption("my_option", "with a great value")}, + }, + } + + for _, test := range cases { + var opts []protoutil.ServiceSpecOptions + opts = append(opts, protoutil.WithRPCs(test.rpcs...)) + opts = append(opts, protoutil.WithServiceOptions(test.options...)) + rpc := protoutil.NewService(test.name, opts...) + + require.Equal(t, test.name, rpc.Name, "expected %v, got %v", test.name, rpc.Name) + // careful, options come first, then rpcs. + lenOpts, lenRPCs := len(test.options), len(test.rpcs) + require.True(t, len(rpc.Elements) == lenOpts+lenRPCs, "expected %v, got %v", lenOpts+lenRPCs, len(rpc.Elements)) + for i, opt := range rpc.Elements { + if i < lenOpts { + opt, ok := opt.(*proto.Option) + require.True(t, ok, "expected option, got %T", opt) + require.Equal(t, test.options[i], opt, "expected %v, got %v", test.options[i], opt) + } else { + rpc, ok := opt.(*proto.RPC) + require.True(t, ok, "expected rpc, got %T", opt) + require.Equal(t, test.rpcs[i-lenOpts], rpc, "expected %v, got %v", test.rpcs[i-lenOpts], rpc) + } + } + } +} + +// Fields. 
+func TestCreateField(t *testing.T) { + cases := []struct { + name, typeName string + sequence int + repeated, optional, required bool + options []*proto.Option + }{ + { + name: "my_field", + typeName: "my_type", + sequence: 1, + repeated: true, + }, + { + name: "my_field", + typeName: "my_type", + sequence: 2, + optional: true, + }, + { + name: "my_field", + typeName: "my_type", + sequence: 3, + required: true, + }, + { + name: "my_field", + typeName: "my_type", + sequence: 4, + options: []*proto.Option{ + protoutil.NewOption("my_option", "5"), + protoutil.NewOption("gogoproto.nullable", "false", protoutil.Custom(), protoutil.SetField("set")), + }, + }, + } + + for _, test := range cases { + var opts []protoutil.FieldSpecOptions + if test.repeated { + opts = append(opts, protoutil.Repeated()) + } + if test.optional { + opts = append(opts, protoutil.Optional()) + } + if test.required { + opts = append(opts, protoutil.Required()) + } + opts = append(opts, protoutil.WithFieldOptions(test.options...)) + field := protoutil.NewField(test.name, test.typeName, test.sequence, opts...) + + require.Equal(t, test.name, field.Name, "expected %v, got %v", test.name, field.Name) + require.Equal(t, test.typeName, field.Type, "expected %v, got %v", test.typeName, field.Type) + require.Equal(t, test.sequence, field.Sequence, "expected %v, got %v", test.sequence, field.Sequence) + require.Equal(t, test.repeated, field.Repeated, "expected %v, got %v", test.repeated, field.Repeated) + require.Equal(t, test.optional, field.Optional, "expected %v, got %v", test.optional, field.Optional) + require.Equal(t, test.required, field.Required, "expected %v, got %v", test.required, field.Required) + for i, opt := range field.Options { + require.Equal(t, test.options[i], opt, "expected %v, got %v", test.options[i], opt) + } + require.Equal(t, len(test.options), len(field.Options), "expected %v, got %v", len(test.options), len(field.Options)) + } +} + +// Messages. 
+func TestCreateMessage(t *testing.T) { + cases := []struct { + name string + fields []*proto.NormalField + enums []*proto.Enum + options []*proto.Option + isExtend bool + }{ + { + name: "my_message", + fields: []*proto.NormalField{ + protoutil.NewField("my_field", "my_type", 1), + protoutil.NewField("my_other_field", "my_other_type", 2), + }, + }, + { + name: "my_message", + fields: []*proto.NormalField{ + protoutil.NewField("my_field", "my_type", 1), + protoutil.NewField("my_other_field", "my_other_type", 2), + }, + enums: []*proto.Enum{protoutil.NewEnum("my_enum")}, + options: []*proto.Option{ + protoutil.NewOption("my_option", "with a great value"), + protoutil.NewOption("gogoproto.nullable", "false", protoutil.Custom(), protoutil.SetField("set")), + }, + isExtend: true, + }, + } + + for _, test := range cases { + var opts []protoutil.MessageSpecOptions + opts = append(opts, protoutil.WithFields(test.fields...)) + opts = append(opts, protoutil.WithEnums(test.enums...)) + opts = append(opts, protoutil.WithMessageOptions(test.options...)) + if test.isExtend { + opts = append(opts, protoutil.Extend()) + } + message := protoutil.NewMessage(test.name, opts...) + + require.Equal(t, test.name, message.Name, "expected %v, got %v", test.name, message.Name) + require.Equal(t, test.isExtend, message.IsExtend, "expected %v, got %v", test.isExtend, message.IsExtend) + + // options added first, then fields and then enums. 
+ lenOpts, lenFields, lenEnums := len(test.options), len(test.fields), len(test.enums) + for i, field := range message.Elements { + switch { + case i < lenOpts: + opt, ok := field.(*proto.Option) + require.True(t, ok, "expected option, got %T", field) + require.Equal(t, test.options[i], opt, "expected %v, got %v", test.options[i], opt) + case i < lenOpts+lenFields: + field, ok := field.(*proto.NormalField) + require.True(t, ok, "expected field, got %T", field) + require.Equal(t, test.fields[i-lenOpts], field, "expected %v, got %v", test.fields[i-lenOpts], field) + default: + enum, ok := field.(*proto.Enum) + require.True(t, ok, "expected enum, got %T", field) + require.Equal(t, test.enums[i-lenOpts-lenFields], enum, "expected %v, got %v", test.enums[i-lenOpts-lenFields], enum) + } + } + require.True(t, lenOpts+lenFields+lenEnums == len(message.Elements), "expected %v, got %v", lenOpts+lenFields+lenEnums, len(message.Elements)) + } +} + +// Enum fields. +func TestCreateEnumField(t *testing.T) { + cases := []struct { + name string + value int + options []*proto.Option + }{ + { + name: "my_field", + value: 1, + }, + { + name: "my_field", + value: 2, + options: []*proto.Option{ + protoutil.NewOption("my_option", "with a great value"), + protoutil.NewOption("gogoproto.nullable", "false", protoutil.Custom(), protoutil.SetField("set")), + }, + }, + } + + for _, test := range cases { + var opts []protoutil.EnumFieldSpecOptions + opts = append(opts, protoutil.WithEnumFieldOptions(test.options...)) + field := protoutil.NewEnumField(test.name, test.value, opts...) 
+ + require.Equal(t, test.name, field.Name, "expected %v, got %v", test.name, field.Name) + require.Equal(t, test.value, field.Integer, "expected %v, got %v", test.value, field.Integer) + for i, opt := range field.Elements { + opt, ok := opt.(*proto.Option) + require.True(t, ok, "expected option, got %T", opt) + require.Equal(t, test.options[i], opt, "expected %v, got %v", test.options[i], opt) + } + require.Equal(t, len(test.options), len(field.Elements), "expected %v, got %v", len(test.options), len(field.Elements)) + } +} + +// Enums:. +func TestCreateEnum(t *testing.T) { + cases := []struct { + name string + options []*proto.Option + values []*proto.EnumField + }{ + { + name: "my_enum", + values: []*proto.EnumField{ + protoutil.NewEnumField("my_value", 1), + protoutil.NewEnumField("my_other_value", 2), + }, + }, + { + name: "my_enum", + values: []*proto.EnumField{ + protoutil.NewEnumField("my_value", 1), + protoutil.NewEnumField("my_other_value", 2), + }, + options: []*proto.Option{ + protoutil.NewOption("my_option", "with a great value"), + }, + }, + } + + for _, test := range cases { + var opts []protoutil.EnumSpecOpts + opts = append(opts, protoutil.WithEnumFields(test.values...)) + opts = append(opts, protoutil.WithEnumOptions(test.options...)) + enum := protoutil.NewEnum(test.name, opts...) 
+ + require.Equal(t, test.name, enum.Name, "expected %v, got %v", test.name, enum.Name) + lenFields, lenOptions := len(test.values), len(test.options) + for i, opt := range enum.Elements[:lenOptions] { + opt, ok := opt.(*proto.Option) + require.True(t, ok, "expected option, got %T", opt) + require.Equal(t, test.options[i], opt, "expected %v, got %v", test.options[i], opt) + } + for i, value := range enum.Elements[lenOptions:] { + value, ok := value.(*proto.EnumField) + require.True(t, ok, "expected enum field, got %T", value) + require.Equal(t, test.values[i], value, "expected %v, got %v", test.values[i], value) + } + require.Equal(t, lenOptions+lenFields, len(enum.Elements), "expected %v, got %v", lenOptions+lenFields, len(enum.Elements)) + } +} + +// OneOf fields:. +func TestCreateOneofField(t *testing.T) { + cases := []struct { + name, typeName string + sequence int + options []*proto.Option + }{ + { + name: "my_field", + typeName: "my_type", + sequence: 1, + }, + { + name: "my_field", + typeName: "my_type", + sequence: 4, + options: []*proto.Option{ + protoutil.NewOption("my_option", "5"), + protoutil.NewOption("gogoproto.nullable", "false", protoutil.Custom(), protoutil.SetField("set")), + }, + }, + } + + for _, test := range cases { + opts := []protoutil.OneofFieldOptions{protoutil.WithOneofFieldOptions(test.options...)} + field := protoutil.NewOneofField(test.name, test.typeName, test.sequence, opts...) + + require.Equal(t, test.name, field.Name, "expected %v, got %v", test.name, field.Name) + require.Equal(t, test.typeName, field.Type, "expected %v, got %v", test.typeName, field.Type) + require.Equal(t, test.sequence, field.Sequence, "expected %v, got %v", test.sequence, field.Sequence) + + for i, opt := range field.Options { + require.Equal(t, test.options[i], opt, "expected %v, got %v", test.options[i], opt) + } + require.Equal(t, len(test.options), len(field.Options), "expected %v, got %v", len(test.options), len(field.Options)) + } +} + +// Oneof:. 
+func TestCreateOneof(t *testing.T) { + cases := []struct { + name string + options []*proto.Option + values []*proto.OneOfField + }{ + { + name: "oneof_this", + values: []*proto.OneOfField{ + protoutil.NewOneofField("my_value", "my_type", 1), + protoutil.NewOneofField("my_other_value", "my_type", 2), + }, + }, + { + name: "oneof_that", + values: []*proto.OneOfField{ + protoutil.NewOneofField("my_value", "my_type", 1), + }, + options: []*proto.Option{ + protoutil.NewOption("my_option", "with a great value"), + }, + }, + } + + for _, test := range cases { + var opts []protoutil.OneofSpecOptions + opts = append(opts, protoutil.WithOneofFields(test.values...)) + opts = append(opts, protoutil.WithOneofOptions(test.options...)) + oneof := protoutil.NewOneof(test.name, opts...) + + require.Equal(t, test.name, oneof.Name, "expected %v, got %v", test.name, oneof.Name) + lenFields, lenOptions := len(test.values), len(test.options) + for i, opt := range oneof.Elements[:lenOptions] { + opt, ok := opt.(*proto.Option) + require.True(t, ok, "expected option, got %T", opt) + require.Equal(t, test.options[i], opt, "expected %v, got %v", test.options[i], opt) + } + for i, value := range oneof.Elements[lenOptions:] { + value, ok := value.(*proto.OneOfField) + require.True(t, ok, "expected oneof field, got %T", value) + require.Equal(t, test.values[i], value, "expected %v, got %v", test.values[i], value) + } + require.Equal(t, lenOptions+lenFields, len(oneof.Elements), "expected %v, got %v", lenOptions+lenFields, len(oneof.Elements)) + } +} + +func TestAttachComment(t *testing.T) { + // Attach comment to message + msg := protoutil.NewMessage("my_message") + protoutil.AttachComment(msg, "my comment") + require.Equal(t, " my comment", msg.Comment.Lines[0], "expected %v, got %v", "my comment", msg.Comment.Lines[0]) + + // Attach comment to rpc call + rpc := protoutil.NewRPC("my_rpc", "my_request", "my_response") + protoutil.AttachComment(rpc, "my comment") + require.Equal(t, " my 
comment", rpc.Comment.Lines[0], "expected %v, got %v", "my comment", rpc.Comment.Lines[0]) + + // Attach comment to a service + svc := protoutil.NewService("my_service") + protoutil.AttachComment(svc, "my comment") + require.Equal(t, " my comment", svc.Comment.Lines[0], "expected %v, got %v", "my comment", svc.Comment.Lines[0]) +} + +// Test literal creation (indirectly tests the isString function.) +func TestIsString(t *testing.T) { + require.True(t, protoutil.NewLiteral("string").IsString) + require.True(t, protoutil.NewLiteral("THIS/PATH/IS/STRING").IsString) + + // Don't report "true" and "false" as strings + require.False(t, protoutil.NewLiteral("true").IsString) + require.False(t, protoutil.NewLiteral("false").IsString) + + // Don't report numbers as strings + require.False(t, protoutil.NewLiteral("1").IsString) + require.False(t, protoutil.NewLiteral("1.0").IsString) + require.False(t, protoutil.NewLiteral("1.0e-10").IsString) + require.False(t, protoutil.NewLiteral("1.0e+10").IsString) + require.False(t, protoutil.NewLiteral("1.0e10").IsString) + require.False(t, protoutil.NewLiteral("3.1929348317293483e-10").IsString) + + // A single numbers means not a string, parser would fail with that either way. + require.True(t, protoutil.NewLiteral("isthisastringohnoitactuallyisn't1.0").IsString) +} diff --git a/ignite/pkg/protoanalysis/protoutil/cursor.go b/ignite/pkg/protoanalysis/protoutil/cursor.go new file mode 100644 index 0000000..e09075c --- /dev/null +++ b/ignite/pkg/protoanalysis/protoutil/cursor.go @@ -0,0 +1,317 @@ +package protoutil + +import ( + "fmt" + "reflect" + + "github.com/emicklei/proto" +) + +// Note: The traversing can also be done with proto.Walk but there's some reasons +// why I chose the cursor instead: +// +// 1. We can abort traversing deeper in the tree at any point. (a post +// ApplyFunc returning false) +// 2. We keep track of the parent to have finer grained control over where we +// are in the tree. +// 3. 
We can use pre/post handling. + +// Modeled heavily after the Apply/Cursor logic in astutil, using proto.Visitee as +// the common interface, abilities for reflection aren't as rich but can still +// manage in order to get the job done. +// Cursor has been augmented to add a couple more methods that can make +// our life easier. + +// An ApplyFunc is invoked by Apply for each Visitee n, even if n is nil, +// before and/or after the node's children, using a Cursor describing +// the current node and providing operations on it. +// +// The return value of ApplyFunc controls the syntax tree traversal. +// See Apply for details. +type ApplyFunc func(*Cursor) bool + +// Apply traverses a syntax tree recursively, starting with root, +// and calling pre and post for each node as described below. +// Apply returns the syntax tree, possibly modified. +// +// If pre is not nil, it is called for each node before the node's +// children are traversed (pre-order). If pre returns false, no +// children are traversed, and post is not called for that node. +// +// If post is not nil, and a prior call of pre didn't return false, +// post is called for each node after its children are traversed +// (post-order). If post returns false, traversal is terminated and +// Apply returns immediately. +func Apply(root proto.Visitee, pre, post ApplyFunc) (result proto.Visitee) { + parent := &struct{ proto.Visitee }{root} + defer func() { + if r := recover(); r != nil && r != abort { + panic(r) + } + result = parent.Visitee + }() + a := &application{pre: pre, post: post} + a.apply(parent, "Visitee", nil, root) + return +} + +var abort = new(int) // singleton, to signal termination of Apply + +// A Cursor describes a node encountered during Apply. +// Information about the node and its parent is available +// from the Node, Parent, Name, and Index methods. 
type Cursor struct {
	parent proto.Visitee // parent (containing a []proto.Visitee slice)
	name   string        // name of the parent field that holds the current node
	iter   *iterator     // slice-iteration state; nil when node is not part of a slice
	node   proto.Visitee // current node we're applying over
}

// iterator tracks the position inside a slice being traversed; step is
// bumped by InsertAfter so a newly inserted sibling is skipped by the walk.
type iterator struct {
	index, step int
}

// Index reports the index >= 0 of the current Visitee in the slice of Visitees that
// contains it, or a value < 0 if the current Visitee is not part of a slice.
// The index of the current node changes if InsertBefore is called while
// processing the current node.
func (c *Cursor) Index() int {
	if c.iter != nil {
		return c.iter.index
	}
	return -1
}

// field returns the current node's parent field value.
// NOTE(review): if c.name does not match an exported field of the parent,
// this returns the zero reflect.Value and callers panic on first use.
func (c *Cursor) field() reflect.Value {
	return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name)
}

// Node returns the current Node.
func (c *Cursor) Node() proto.Visitee { return c.node }

// Parent returns the parent of the current Node.
func (c *Cursor) Parent() proto.Visitee { return c.parent }

// Name returns the name of the parent Node field that contains the current Node
// (e.g. "Elements" while walking the children of a *proto.Proto or a
// *proto.Message).
func (c *Cursor) Name() string { return c.name }

// IsLast returns if the current node being traversed is the final node in the
// slice of nodes. Can be used to determine if a node is the last one.
// Panics if the current node is not contained in a slice.
func (c *Cursor) IsLast() bool {
	i := c.Index()
	if i < 0 {
		panic("IsLast node not contained in slice")
	}
	v := c.field()
	return i == v.Len()-1
}

// Next returns the next Visitee. Can be used to check the next value
// before deciding to continue.
// Panics if the current node is not contained in a slice; reports
// (nil, false) when the current node is the last element.
func (c *Cursor) Next() (proto.Visitee, bool) {
	i := c.Index()
	if i < 0 {
		panic("Next node not contained in slice")
	}
	v := c.field()
	if i == v.Len()-1 {
		return nil, false
	}
	var x proto.Visitee
	if e := v.Index(i + 1); e.IsValid() {
		x = e.Interface().(proto.Visitee)
	}
	return x, true
}

// Replace replaces the current Node with n.
// The replacement node is not walked by Apply.
func (c *Cursor) Replace(n proto.Visitee) {
	v := c.field()
	if i := c.Index(); i >= 0 {
		v = v.Index(i)
	}
	v.Set(reflect.ValueOf(n))
}

// InsertAfter inserts n after the current Node in its containing slice.
// If the current Node is not part of a slice, InsertAfter panics.
// Apply does not walk n.
func (c *Cursor) InsertAfter(n proto.Visitee) {
	i := c.Index()
	if i < 0 {
		panic("InsertAfter node not contained in slice")
	}
	v := c.field()
	// Grow the slice by one, shift the tail right, then drop n into
	// the gap at i+1.
	v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
	l := v.Len()
	reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l))
	v.Index(i + 1).Set(reflect.ValueOf(n))
	// Advance the traversal past the inserted element so Apply skips it.
	c.iter.step++
}

// InsertBefore inserts n before the current Node in its containing slice.
// If the current Node is not part of a slice, InsertBefore panics.
// Apply will not walk n.
func (c *Cursor) InsertBefore(n proto.Visitee) {
	i := c.Index()
	if i < 0 {
		panic("InsertBefore node not contained in slice")
	}
	v := c.field()
	// Grow the slice by one, shift the tail (current node included) right,
	// then drop n into the freed slot at i.
	v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
	l := v.Len()
	reflect.Copy(v.Slice(i+1, l), v.Slice(i, l))
	v.Index(i).Set(reflect.ValueOf(n))
	// Keep the cursor on the current node, which just moved one slot right.
	c.iter.index++
}

// application carries all the shared data, so we can pass it around cheaply.
+type application struct { + pre, post ApplyFunc + cursor Cursor + iter iterator +} + +func (a *application) apply(parent proto.Visitee, name string, iter *iterator, n proto.Visitee) { + // don't walk into nil's + if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() { + return + } + + // avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead + saved := a.cursor + a.cursor.parent = parent + a.cursor.name = name + a.cursor.iter = iter + a.cursor.node = n + + if a.pre != nil && !a.pre(&a.cursor) { + a.cursor = saved + return + } + + // Walk the children. + // This is the issue with proto. Structure isn't really here in order to be able to + // visit every component using a distinct interface. They are all Visitee's. + // Ideally, we could wrap the proto nodes into interfaces that enforce a structure, + // i.e. Nodes, MessageNodes, ServiceNodes, ProtoNodes, etc. + // this way, inserting into a slice would be guarded (by error-ing) by the type of the slice. + // + // An alternative would be to reflect in the insertion methods for cursor and only allow + // specific elements per type. 
+ switch n := n.(type) { + case *proto.Proto: + a.applyList(n, "Elements") + case *proto.Service: + a.apply(n, "Comment", nil, n.Comment) + a.applyList(n, "Elements") + case *proto.RPC: + a.apply(n, "Comment", nil, n.Comment) + a.apply(n, "Inline Comment", nil, n.InlineComment) + a.applyList(n, "Elements") + case *proto.Message: + a.apply(n, "Comment", nil, n.Comment) + a.applyList(n, "Elements") + case *proto.NormalField: + a.apply(n, "Comment", nil, n.Comment) + a.apply(n, "Inline Comment", nil, n.InlineComment) + a.applyList(n, "Options") + case *proto.Oneof: + a.apply(n, "Comment", nil, n.Comment) + a.applyList(n, "Elements") + case *proto.OneOfField: + a.apply(n, "Comment", nil, n.Comment) + a.apply(n, "Inline Comment", nil, n.InlineComment) + a.applyList(n, "Options") + case *proto.Enum: + a.apply(n, "Comment", nil, n.Comment) + a.applyList(n, "Elements") + case *proto.EnumField: + a.apply(n, "Comment", nil, n.Comment) + a.apply(n, "Inline Comment", nil, n.InlineComment) + a.applyList(n, "Elements") + case *proto.Import: + a.apply(n, "Comment", nil, n.Comment) + a.apply(n, "Inline Comment", nil, n.InlineComment) + case *proto.Option: + a.apply(n, "Comment", nil, n.Comment) + a.apply(n, "Inline Comment", nil, n.InlineComment) + case *proto.Package: + a.apply(n, "Comment", nil, n.Comment) + a.apply(n, "Inline Comment", nil, n.InlineComment) + default: + // Probably a comment, ignore it. + } + + if a.post != nil && !a.post(&a.cursor) { + panic(abort) + } + a.cursor = saved +} + +// applyList calls apply on each of the elements of the Visitee. 
+func (a *application) applyList(parent proto.Visitee, name string) { + // avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead + saved := a.iter + a.iter.index = 0 + for { + // must reload parent.name each time, since cursor modifications might change it + v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name) + if a.iter.index >= v.Len() { + break + } + + // element x may be nil in a bad AST - be cautious + var x proto.Visitee + if e := v.Index(a.iter.index); e.IsValid() { + x = e.Interface().(proto.Visitee) + } + + // reset step on each iteration. + a.iter.step = 1 + a.apply(parent, name, &a.iter, x) + a.iter.index += a.iter.step + } + a.iter = saved +} + +// Append appends the elements provided to the node `n`. `n` must be +// a node that can accept elements, such as a proto.File or a proto.Message. +// +// Append panics if `n` is not a node that can accept elements or if the type +// of the elements provided is not compatible with the type of the elements +// contained by `n`. (Basically, this applies to NormalFields and OneOfFields,). +func Append(n proto.Visitee, elems ...proto.Visitee) { + // return early if the slice is empty. + if len(elems) == 0 { + return + } + switch n.(type) { + case *proto.Proto, *proto.Message, *proto.Enum, *proto.Oneof, + *proto.Service, *proto.EnumField, *proto.RPC: + // Can just append directly. + v := reflect.Indirect(reflect.ValueOf(n)).FieldByName("Elements") + v.Set(reflect.AppendSlice(v, reflect.ValueOf(elems))) + case *proto.NormalField, *proto.OneOfField: + // Make into options, panic on failure of one of the objects to do + // so. 
+ var elements []*proto.Option + for _, e := range elems { + o, ok := e.(*proto.Option) + if !ok { + panic(fmt.Sprintf("Tried to append %T to a slice of Options", e)) + } + elements = append(elements, o) + } + // append + v := reflect.Indirect(reflect.ValueOf(n)).FieldByName("Options") + v.Set(reflect.AppendSlice(v, reflect.ValueOf(elements))) + return + default: + panic("Append: node not a slice") + } +} diff --git a/ignite/pkg/protoanalysis/protoutil/cursor_test.go b/ignite/pkg/protoanalysis/protoutil/cursor_test.go new file mode 100644 index 0000000..62e04eb --- /dev/null +++ b/ignite/pkg/protoanalysis/protoutil/cursor_test.go @@ -0,0 +1,563 @@ +package protoutil + +import ( + "testing" + + "github.com/emicklei/proto" + "github.com/stretchr/testify/require" +) + +const ( + world = "World" + elements = "Elements" + kirby = "Kirby" +) + +// Make a simple replacement of package -> import. +func TestSimpleReplacement(t *testing.T) { + f, err := parseStringProto(`package "package"`) + require.NoError(t, err) + Apply(f, nil, func(c *Cursor) bool { + n := c.Node() + if _, ok := n.(*proto.Package); ok { + imp := NewImport("that") + c.Replace(imp) + } + + return true + }) + require.True(t, containsElement(f, NewImport("that"))) + require.False(t, containsElement(f, NewPackage("package"))) +} + +func TestSimpleInsertAfter(t *testing.T) { + f, err := parseStringProto(`syntax = "proto3" + + message Hello { + message World {} + } + `) + require.NoError(t, err) + + // keep ref for checking containment. 
+ var msg *proto.Message + Apply(f, nil, func(c *Cursor) bool { + n := c.Node() + if n, ok := n.(*proto.Message); ok { + if n.Name == world { + msg = NewMessage("WeComeInPeace") + c.InsertAfter(msg) + } + } + return true + }) + require.True(t, containsElement(f, msg)) + // check that it is inserted after "World" + Apply(f, nil, func(c *Cursor) bool { + n := c.Node() + if n, ok := n.(*proto.Message); ok { + if n.Name == world { + next, ok := c.Next() + require.True(t, ok) + require.True(t, next.(*proto.Message).Name == "WeComeInPeace") + } + } + return true + }) +} + +// Can really only panic with comments since +// other elements in nodes aren't Visitees. +// +//nolint:dupword +func TestInsertAfterPanic(t *testing.T) { + f, err := parseStringProto(`syntax = "proto3" + + // my import + import "this"; + `) + require.NoError(t, err) + + // Try calling insertAfter when c is a Comment + require.Panics(t, func() { + Apply(f, nil, func(c *Cursor) bool { + n := c.Node() + if _, ok := n.(*proto.Comment); ok { + c.InsertAfter(NewImport("that")) + } + return true + }) + }) +} + +func TestSimpleInsertBefore(t *testing.T) { + f, err := parseStringProto(`syntax = "proto3" + + message Say {} + message World {} + `) + require.NoError(t, err) + + // keep ref for checking containment. + var msg *proto.Message + Apply(f, nil, func(c *Cursor) bool { + n := c.Node() + if n, ok := n.(*proto.Message); ok { + if n.Name == world { + // add hello between say and world + msg = NewMessage("Hello") + c.InsertBefore(msg) + } + } + return true + }) + require.True(t, containsElement(f, msg)) + + // check that it is inserted after "Say" + Apply(f, nil, func(c *Cursor) bool { + n := c.Node() + if n, ok := n.(*proto.Message); ok { + if n.Name == "Say" { + next, ok := c.Next() + require.True(t, ok) + require.True(t, next.(*proto.Message).Name == "Hello") + } + } + return true + }) +} + +// Can really only panic with comments since +// other elements in nodes aren't Visitees. 
+// +//nolint:dupword +func TestInsertBeforePanic(t *testing.T) { + f, err := parseStringProto(`syntax = "proto3" + + // my import + import "this"; + `) + require.NoError(t, err) + + // Try calling insertAfter when c is a Comment + require.Panics(t, func() { + Apply(f, nil, func(c *Cursor) bool { + n := c.Node() + if _, ok := n.(*proto.Comment); ok { + c.InsertBefore(NewImport("that")) + } + return true + }) + }) +} + +// Build a skeleton of a file by continuous appends on the file. +func TestAppendFile(t *testing.T) { + f, err := parseStringProto(`syntax = "proto3"`) + require.NoError(t, err) + + i := NewImport("importpath") + Append(f, i) + require.True(t, containsElement(f, i)) + + p := NewPackage("package") + Append(f, p) + require.True(t, containsElement(f, p)) + + o := NewOption("this", "that") + Append(f, o) + require.True(t, containsElement(f, o)) + + oneofF := NewOneofField("this", "string", 2) + // Can directly append an option if required: + opt := NewOption("this", "that") + Append(oneofF, opt) + require.True(t, containsElement(oneofF, opt)) + + oneof := NewOneof("myoneof") + Append(oneof, oneofF) + require.True(t, containsElement(oneof, oneofF)) + + normalfield := NewField("that", "string", 3) + + m := NewMessage("Hello") + Append(m, oneof) + require.True(t, containsElement(m, oneof)) + Append(m, normalfield) + require.True(t, containsElement(m, normalfield)) + + Append(f, m) + require.True(t, containsElement(f, m)) + + // Append an empty service + s := NewService("Hey") + Append(f, s) + require.True(t, containsElement(f, s)) + + // An empty enum + e := NewEnum("Hey") + // Add an enum field to it: + ef := NewEnumField("HEY", 1) + Append(e, ef) + require.True(t, containsElement(e, ef)) + + Append(f, e) + require.True(t, containsElement(f, e)) +} + +// Append to a node w/o elements panics. +func TestAppendEdges(t *testing.T) { + f, err := parseStringProto(`syntax = "proto3"`) + require.NoError(t, err) + + // Can't append to a Syntax node, panic. 
+ require.Panics(t, func() { + Apply(f, nil, func(c *Cursor) bool { + n := c.Node() + if n, ok := n.(*proto.Syntax); ok { + Append(n, NewImport("that")) + } + return true + }) + }) + + // Empty append does nothing. + elems := len(f.Elements) + Append(f) + require.True(t, len(f.Elements) == elems) + + // Appending a non-option to NormalField/OneOfField panics. + require.Panics(t, func() { + f := NewField("that", "string", 3) + Append(f, NewImport("that")) + }) +} + +func TestCursorOps(t *testing.T) { + f, err := parseStringProto(`syntax = "proto3" + + message Hello {} + message World { + message Hey {} + enum E {} + } + `) + require.NoError(t, err) + + Apply(f, nil, func(c *Cursor) bool { + n := c.Node() + if n, ok := n.(*proto.Message); ok { + if n.Name == "Hello" { + require.False(t, c.IsLast()) + n, ok := c.Next() + require.True(t, ok) + require.NotNil(t, n) + + parent, ok := c.Parent().(*proto.Proto) + require.True(t, ok) + require.True(t, parent.Filename == "") + // currently useless. + require.True(t, c.Name() == elements) + } + if n.Name == world { + require.True(t, c.IsLast()) + n, ok := c.Next() + require.False(t, ok) + require.Nil(t, n) + + parent, ok := c.Parent().(*proto.Proto) + require.True(t, ok) + require.True(t, parent.Filename == "") + // currently useless. + require.True(t, c.Name() == elements) + } + + if n.Name == "Hey" { + require.False(t, c.IsLast()) + n, ok := c.Next() + require.True(t, ok) + require.NotNil(t, n) + + // parent is the message + parent, ok := c.Parent().(*proto.Message) + require.True(t, ok) + require.True(t, parent.Name == "World") + // currently useless. + require.True(t, c.Name() == elements) + } + } + + if _, ok := n.(*proto.Enum); ok { + require.True(t, c.IsLast()) + n, ok := c.Next() + require.False(t, ok) + require.Nil(t, n) + + // parent is the message + parent, ok := c.Parent().(*proto.Message) + require.True(t, ok) + require.True(t, parent.Name == "World") + // currently useless. 
+ require.True(t, c.Name() == elements) + } + + // Don't make sense for elements not contained in a slice (currently + // proto.Proto or comments) + if _, ok := n.(*proto.Proto); ok { + require.Panics(t, func() { c.IsLast() }) + require.Panics(t, func() { c.Next() }) + } + return true + }) +} + +// Also test the utilities here. + +func TestAddImports(t *testing.T) { + f, err := parseStringProto(`syntax = "proto3"`) + require.NoError(t, err) + + // Add an import + err = AddImports(f, true, NewImport("this.proto")) + require.NoError(t, err) + require.True(t, HasImport(f, "this.proto")) + // Note: added in reverse order. + err = AddImports(f, true, + NewImport("that.proto"), + NewImport("the.other.proto"), + NewImport("and.another.proto"), + ) + require.NoError(t, err) + require.True(t, HasImport(f, "that.proto")) + require.True(t, HasImport(f, "the.other.proto")) + require.True(t, HasImport(f, "and.another.proto")) + + // Empty import is no-op. + require.NoError(t, AddImports(f, true)) + // Importing on empty file is currently an error. 
+ require.Error(t, AddImports( + &proto.Proto{}, + true, + NewImport("this.proto"), + )) + + // Exercise the recursive case: + f, err = parseStringProto(`syntax = "proto3"`) + require.NoError(t, err) + err = AddImports(f, true, + NewImport("this.proto"), + NewImport("that.proto"), + ) + require.NoError(t, err) + require.True(t, HasImport(f, "this.proto")) + require.True(t, HasImport(f, "that.proto")) + + f, err = parseStringProto(`syntax = "proto3"; +package cosmonaut.chainname.chainname; + +import "gogoproto/gogo.proto"; +import "google/api/annotations.proto"; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "chainname/params.proto"; +`) + require.NoError(t, err) + err = AddImports(f, true, NewImport("chainname/bleep.proto")) + require.NoError(t, err) + // Add dupes: + err = AddImports(f, true, NewImport("chainname/bleep.proto")) + require.NoError(t, err) + err = AddImports(f, true, NewImport("chainname/bleep.proto")) + require.NoError(t, err) + err = AddImports(f, true, NewImport("chainname/params.proto")) + require.NoError(t, err) + // just checking that is added last. + // fmt.Print(Printer(f)) + + // Check that adding duplicates does nothing. + f, err = parseStringProto(`syntax = "proto3"; +package cosmonaut.chainname.chainname; + +import "gogoproto/gogo.proto"; +import "google/api/annotations.proto"; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "chainname/params.proto"; +`) + require.NoError(t, err) + imports := []*proto.Import{ + NewImport("chainname/params.proto"), + NewImport("gogoproto/gogo.proto"), + } + err = AddImports(f, true, imports...) + require.NoError(t, err) + require.Equal(t, len(f.Elements), 6, "The number of elements shouldn't have changed") + + // Pass an empty import list. + f, err = parseStringProto(`syntax = "proto3";`) + require.NoError(t, err) + ret := AddImports(f, true) + require.Nil(t, ret) + + // No imports, no fallback. 
+ f, err = parseStringProto(`syntax = "proto3";`) + require.NoError(t, err) + err = AddImports(f, false, NewImport("this.proto")) + require.Error(t, err) +} + +func TestHasImport(t *testing.T) { + f, err := parseStringProto(`syntax = "proto3" + + import "this.proto"; + import "that.proto"; + import "the.other.proto" + `) + require.NoError(t, err) + require.True(t, HasImport(f, "this.proto")) + require.True(t, HasImport(f, "that.proto")) + require.True(t, HasImport(f, "the.other.proto")) + require.False(t, HasImport(f, "this.proto.proto")) +} + +func TestGetMessage(t *testing.T) { + f, err := parseStringProto(`syntax = "proto3" + + message Hello { + message World { + message WeComeInPeace { + message TheAnswerToLifeTheUniverseAndEverything { + message IsActuallyFortyTwo {} + } + } + } + } + `) + require.NoError(t, err) + m, err := GetMessageByName(f, "Hello") + require.NoError(t, err) + require.Equal(t, "Hello", m.Name) + + m, err = GetMessageByName(f, "World") + require.NoError(t, err) + require.Equal(t, "World", m.Name) + + m, err = GetMessageByName(f, "WeComeInPeace") + require.NoError(t, err) + require.Equal(t, "WeComeInPeace", m.Name) + + m, err = GetMessageByName(f, "TheAnswerToLifeTheUniverseAndEverything") + require.NoError(t, err) + require.Equal(t, "TheAnswerToLifeTheUniverseAndEverything", m.Name) + + m, err = GetMessageByName(f, "IsActuallyFortyTwo") + require.NoError(t, err) + require.Equal(t, "IsActuallyFortyTwo", m.Name) + + _, err = GetMessageByName(f, "DoesNotExist") + require.Error(t, err) +} + +func TestHasMessage(t *testing.T) { + f, err := parseStringProto(`syntax = "proto3" + + message Hello { + message World { + message WeComeInPeace { + message TheAnswerToLifeTheUniverseAndEverything { + message IsActuallyFortyTwo {} + } + } + } + } + `) + require.NoError(t, err) + require.True(t, HasMessage(f, "Hello")) + require.True(t, HasMessage(f, "World")) + require.True(t, HasMessage(f, "WeComeInPeace")) + require.True(t, HasMessage(f, 
"TheAnswerToLifeTheUniverseAndEverything")) + require.True(t, HasMessage(f, "IsActuallyFortyTwo")) + require.False(t, HasMessage(f, "DoesNotExist")) + require.False(t, HasMessage(f, "Hello.World")) +} + +func TestGetService(t *testing.T) { + f, err := parseStringProto(`syntax = "proto3" + + service Msg { + } + service AnotherMsg {} + service YetAnotherMsg { + rpc Foo(Bar) returns (Bar) {} + } + `) + require.NoError(t, err) + s, err := GetServiceByName(f, "Msg") + require.NoError(t, err) + require.Equal(t, "Msg", s.Name) + + s, err = GetServiceByName(f, "AnotherMsg") + require.NoError(t, err) + require.Equal(t, "AnotherMsg", s.Name) + + s, err = GetServiceByName(f, "YetAnotherMsg") + require.NoError(t, err) + require.Equal(t, "YetAnotherMsg", s.Name) + + _, err = GetServiceByName(f, "DoesNotExist") + require.Error(t, err) +} + +func TestHasService(t *testing.T) { + f, err := parseStringProto(`syntax = "proto3" + + service Msg {} + service AnotherMsg {} + service YetAnotherMsg {} + `) + require.NoError(t, err) + require.True(t, HasService(f, "Msg")) + require.True(t, HasService(f, "AnotherMsg")) + require.True(t, HasService(f, "YetAnotherMsg")) + require.False(t, HasService(f, "DoesNotExist")) +} + +func TestGetNextId(t *testing.T) { + f, err := parseStringProto(`syntax = "proto3" + + message Hello { + string g = 1; + message World { + message WeComeInPeace { + message TheAnswerToLifeTheUniverseAndEverything { + message IsActuallyFortyTwo { + string foo = 1; + int32 bar = 2; + int64 baz = 3; + } + } + } + } + } + `) + require.NoError(t, err) + + m, err := GetMessageByName(f, "IsActuallyFortyTwo") + require.NoError(t, err) + require.Equal(t, 4, NextUniqueID(m)) + + m, err = GetMessageByName(f, "Hello") + require.NoError(t, err) + require.Equal(t, 2, NextUniqueID(m)) + + f, err = parseStringProto(`syntax = "proto3" + + message Hello { + string g = 1; + string foo = 2; + int32 bar = 3; + int64 baz = 5; + }`) + require.NoError(t, err) + m, err = GetMessageByName(f, 
"Hello") + require.NoError(t, err) + require.Equal(t, 6, NextUniqueID(m)) +} diff --git a/ignite/pkg/protoanalysis/protoutil/helpers.go b/ignite/pkg/protoanalysis/protoutil/helpers.go new file mode 100644 index 0000000..495d19d --- /dev/null +++ b/ignite/pkg/protoanalysis/protoutil/helpers.go @@ -0,0 +1,343 @@ +package protoutil + +import ( + "github.com/emicklei/proto" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// AddAfterSyntax tries to add the given Visitee after the 'syntax' statement. +// If no syntax statement is found, returns an error. +func AddAfterSyntax(f *proto.Proto, v proto.Visitee) error { + // return false to immediately stop + inserted := false + Apply(f, nil, func(c *Cursor) bool { + if _, ok := c.Node().(*proto.Syntax); ok { + c.InsertAfter(v) + inserted = true + return false + } + // Continue until we insert. + return true + }) + if inserted { + return nil + } + return errors.New("could not find syntax statement") +} + +// AddAfterPackage tries to add the given Visitee after the 'package' statement. +// If no package statement is found, returns an error. +func AddAfterPackage(f *proto.Proto, v proto.Visitee) error { + inserted := false + Apply(f, nil, func(c *Cursor) bool { + if _, ok := c.Node().(*proto.Package); ok { + c.InsertAfter(v) + inserted = true + return false + } + // Continue until we insert. + return true + }) + if inserted { + return nil + } + return errors.New("could not find proto package statement") +} + +// Fallback logic, try and use import after a package and if that fails +// attempts to use it after a syntax statement. +// If that fails, returns an error. +func importFallback(f *proto.Proto, imp *proto.Import) error { + if err := AddAfterPackage(f, imp); err != nil { + if err = AddAfterSyntax(f, imp); err != nil { + return err + } + } + return nil +} + +// AddImports attempts to add the given import *after* any other imports +// in the file. 
+// +// If fallback is supplied, attempts to add it after the 'package' +// statement and then the 'syntax' statement are made. +// +// If none of the attempts are successful, returns an error. +func AddImports(f *proto.Proto, fallback bool, imports ...*proto.Import) (err error) { + // No effect. + if len(imports) == 0 { + return nil + } + importMap, inserted := make(map[string]*proto.Import), false + for _, i := range imports { + importMap[i.Filename] = i + } + + Apply(f, nil, func(c *Cursor) bool { + if i, ok := c.Node().(*proto.Import); ok { + delete(importMap, i.Filename) + if next, ok := c.Next(); ok { + if _, ok := next.(*proto.Import); ok { + return true + } + for _, imp := range importMap { + c.InsertAfter(imp) + } + inserted = true + return false + } + // We're at the end (no Next()) + for _, imp := range importMap { + c.InsertAfter(imp) + } + inserted = true + return false + } + return true + }) + // return if inserted. + if inserted { + return nil + } + // else fallback if defined. + if fallback { + // if the number of imports is > 1, we can try and insert the first after + // the package/syntax and then recurse into AddImport with the rest (which we'll) + // know that we can insert after an import since we just added it. + imports = []*proto.Import{} + for _, imp := range importMap { + imports = append(imports, imp) + } + if len(imports) == 0 { + return nil + } + if err := importFallback(f, imports[0]); err != nil { + return err + } + // recurse with the rest. (might be empty) + return AddImports(f, false, imports[1:]...) + } + return errors.New("unable to add proto import, no import statements found") +} + +// NextUniqueID goes through the fields of the given Message and returns +// an id > max(fieldIds). It does not try to 'plug the holes' by selecting the +// least available id. 
+// +// // In 'example.proto' file +// syntax = "proto3" +// +// message Hello { +// string g = 1; +// string foo = 2; +// int32 bar = 3; +// int64 baz = 5; +// } +// f := ParseProtoPath("example.proto") +// m := GetMessageByName(f, "Hello") +// NextUniqueID(m) // 6 +func NextUniqueID(m *proto.Message) int { + // Best to recurse through elements directly here since + // messages can embed other messages and the Apply could get + // hairy. + // if no elements exist => 1. + maximum := 0 + for _, el := range m.Elements { + if f, ok := el.(*proto.NormalField); ok { + if f.Sequence > maximum { + maximum = f.Sequence + } + } + } + return maximum + 1 +} + +// GetMessageByName returns the message with the given name or nil if not found. +// Only traverses in proto.Proto and proto.Message since they are the only nodes +// that contain messages: +// +// f, _ := ParseProtoPath("foo.proto") +// m := GetMessageByName(f, "Foo") +// m.Name // "Foo" +func GetMessageByName(f *proto.Proto, name string) (node *proto.Message, err error) { + node, err = nil, nil + found := false + Apply(f, + func(c *Cursor) bool { + if m, ok := c.Node().(*proto.Message); ok { + if m.Name == name { + found = true + node = m + return false + } + // keep looking if we're in a Message + return true + } + // keep looking while we're in a proto.Proto. + _, ok := c.Node().(*proto.Proto) + return ok + }, + // return immediately iff found. + func(*Cursor) bool { return !found }) + + if found { + return + } + return nil, errors.Errorf("proto message %s not found", name) +} + +// GetServiceByName returns the service with the given name or nil if not found. 
+// Only traverses in proto.Proto since it is the only node that contain services: +// +// f, _ := ParseProtoPath("foo.proto") +// s := GetServiceByName(f, "FooSrv") +// s.Name // "FooSrv" +func GetServiceByName(f *proto.Proto, name string) (*proto.Service, error) { + var ( + node *proto.Service + err error + ) + + found := false + Apply(f, + func(c *Cursor) bool { + if s, ok := c.Node().(*proto.Service); ok { + if s.Name == name { + found = true + node = s + } + // No nested services + return false + } + // keep looking while we're in a proto.Proto. + _, ok := c.Node().(*proto.Proto) + return ok + }, + + // return immediately iff found. + func(*Cursor) bool { return !found }, + ) + if found { + return node, err + } + + return nil, errors.Errorf("proto service %s not found", name) +} + +// GetImportByPath returns the import with the given path or nil if not found. +// Only traverses in proto.Proto since it is the only node that contain imports: +// +// f, _ := ParseProtoPath("foo.proto") +// s := GetImportByPath(f, "other.proto") +// s.FileName // "other.proto" +func GetImportByPath(f *proto.Proto, path string) (*proto.Import, error) { + var ( + node *proto.Import + err error + ) + + found := false + Apply(f, + func(c *Cursor) bool { + if i, ok := c.Node().(*proto.Import); ok { + if i.Filename == path { + found = true + node = i + } + // No nested imports + return false + } + // keep looking while we're in a proto.Proto. + _, ok := c.Node().(*proto.Proto) + return ok + }, + + // return immediately iff found. + func(*Cursor) bool { return !found }, + ) + if found { + return node, err + } + + return nil, errors.Errorf("proto import %s not found", path) +} + +// GetFieldByName returns the field with the given name or nil if not found within a message. 
+// Only traverses in proto.Message since they are the only nodes that contain fields: +// +// f, _ := ParseProtoPath("foo.proto") +// m := GetMessageByName(f, "Foo") +// f := GetFieldByName(m, "Bar") +// f.Name // "Bar" +func GetFieldByName(f *proto.Message, name string) (*proto.NormalField, error) { + var ( + node *proto.NormalField + err error + ) + + found := false + Apply(f, + func(c *Cursor) bool { + if m, ok := c.Node().(*proto.NormalField); ok { + if m.Name == name { + found = true + node = m + return false + } + // keep looking if we're in a Message + return true + } + // keep looking while we're in a proto.Message. + _, ok := c.Node().(*proto.Message) + return ok + }, + // return immediately iff found. + func(*Cursor) bool { return !found }, + ) + if found { + return node, err + } + + return nil, errors.Errorf("proto field %s not found", name) +} + +// HasMessage returns true if the given message is found in the given file. +// +// f, _ := ParseProtoPath("foo.proto") +// // true if 'foo.proto' contains message Foo { ... } +// r := HasMessage(f, "Foo") +func HasMessage(f *proto.Proto, name string) bool { + _, err := GetMessageByName(f, name) + return err == nil +} + +// HasService returns true if the given service is found in the given file. +// +// f, _ := ParseProtoPath("foo.proto") +// // true if 'foo.proto' contains service FooSrv { ... } +// r := HasService(f, "FooSrv") +func HasService(f *proto.Proto, name string) bool { + _, err := GetServiceByName(f, name) + return err == nil +} + +// HasImport returns true if the given import (by path) is found in the given file. 
+// +// f, _ := ParseProtoPath("foo.proto") +// // true if 'foo.proto' contains import "path.to.other.proto" +// r := HasImport(f, "path.to.other.proto") +func HasImport(f *proto.Proto, path string) bool { + _, err := GetImportByPath(f, path) + return err == nil +} + +func HasField(f *proto.Proto, messageName, field string) bool { + msg, err := GetMessageByName(f, messageName) + if err != nil { + return false + } + + _, err = GetFieldByName(msg, field) + return err == nil +} diff --git a/ignite/pkg/protoanalysis/protoutil/parser.go b/ignite/pkg/protoanalysis/protoutil/parser.go new file mode 100644 index 0000000..afa64ee --- /dev/null +++ b/ignite/pkg/protoanalysis/protoutil/parser.go @@ -0,0 +1,36 @@ +package protoutil + +import ( + "io" + "os" + "strings" + + "github.com/emicklei/proto" + "github.com/emicklei/proto-contrib/pkg/protofmt" +) + +// ParseProtoPath opens the file denoted by path and parses it +// into a proto file. +func ParseProtoPath(path string) (pf *proto.Proto, err error) { + f, err := os.Open(path) + if err != nil { + return nil, err + } + defer f.Close() + + return proto.NewParser(f).Parse() +} + +// ParseProtoFile parses the given file. +func ParseProtoFile(r io.Reader) (*proto.Proto, error) { + return proto.NewParser(r).Parse() +} + +// Print formats the proto file using proto-contrib/pkg/protofmt. +// This does have certain opinions on how formatting is done. 
+func Print(pf *proto.Proto) string { + output := new(strings.Builder) + protofmt.NewFormatter(output, " ").Format(pf) // 2 spaces + + return output.String() +} diff --git a/ignite/pkg/protoanalysis/protoutil/parser_test.go b/ignite/pkg/protoanalysis/protoutil/parser_test.go new file mode 100644 index 0000000..a70284f --- /dev/null +++ b/ignite/pkg/protoanalysis/protoutil/parser_test.go @@ -0,0 +1,67 @@ +package protoutil + +import ( + "fmt" + "os" + "testing" + + "github.com/stretchr/testify/require" +) + +const ( + protoPath = "../testdata/liquidity" +) + +// Sanity check: Ensure that parsing works fine. +func TestParseSuccess(t *testing.T) { + files := []string{"genesis", "liquidity", "msg", "query", "tx"} + for _, file := range files { + file = fmt.Sprintf(`../testdata/liquidity/%[1]v.proto`, file) + _, err := ParseProtoPath(file) + require.NoError(t, err) + } + + // Cover the error case 1) -- non existent file: + _, err := ParseProtoPath("p.proto") + require.Error(t, err) + // Cover the error case 2) -- invalid file type + _, err = ParseProtoPath("parser.go") + require.Error(t, err) +} + +// Sanity check: Ensure that parsing works fine with a string input. +func TestParseString(t *testing.T) { + _, err := parseStringProto(`syntax = "proto3"; + + package test; + import "github.com/cosmos/cosmos-sdk/codec"; + import "github.com/cosmos/cosmos-sdk/codec/types"; + + message Msg { + string name = 1; + string description = 2; + }`) + require.NoError(t, err) + + // Cover the error case. 
+ _, err = parseStringProto(`var b = "go"`)
+ require.Error(t, err)
+}
+
+func TestParseProtoFiles(t *testing.T) {
+ files := []string{"genesis", "liquidity", "msg", "query", "tx"}
+ for _, f := range files {
+ f = fmt.Sprintf(`%[1]v/%[2]v.proto`, protoPath, f)
+ fp, err := os.Open(f)
+ require.NoError(t, err)
+
+ nodes, err := ParseProtoFile(fp)
+ require.NoError(t, err)
+
+ // Pass through printer and check that it still parses
+ // afterwards:
+ out := Print(nodes)
+ _, err = parseStringProto(out)
+ require.NoError(t, err)
+ }
+} diff --git a/ignite/pkg/protoanalysis/protoutil/proto_test.go b/ignite/pkg/protoanalysis/protoutil/proto_test.go new file mode 100644 index 0000000..8c725c1 --- /dev/null +++ b/ignite/pkg/protoanalysis/protoutil/proto_test.go @@ -0,0 +1,231 @@ +package protoutil
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+ "testing"
+
+ "github.com/emicklei/proto"
+ "github.com/stretchr/testify/require"
+)
+
+// Helpers:
+// Only checks containment, not positioning.
+func containsElement(f proto.Visitee, v proto.Visitee) bool {
+ contains := false
+ Apply(f, nil, func(c *Cursor) bool {
+ if reflect.DeepEqual(c.Node(), v) {
+ contains = true
+ return false
+ }
+ return true
+ })
+ return contains
+}
+
+// parseStringProto parses the given string and returns the resulting *proto.Proto.
+// Nodes can then be created easily by wrapping them correctly (e.g. a field in a message).
+func parseStringProto(s string) (*proto.Proto, error) {
+ p, err := proto.NewParser(strings.NewReader(s)).Parse()
+ if err != nil {
+ return nil, err
+ }
+
+ return p, nil
+}
+
+// Test data: proto file contents used by the "scaffold list" tests below.
+// Relatively old files, but they still exercise some paths of the code. 
+ +var ( + genesisProto = `syntax = "proto3"; +package cosmonaut.chainname.chainname; + +import "gogoproto/gogo.proto"; +import "chainname/params.proto"; + +option go_package = "github.com/cosmonaut/chainname/x/chainname/types"; + +// GenesisState defines the houhah module's genesis state. +message GenesisState { + Params params = 1 [(gogoproto.nullable) = false]; +} +` + queryProto = `syntax = "proto3"; +package cosmonaut.chainname.chainname; + +import "gogoproto/gogo.proto"; +import "google/api/annotations.proto"; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "chainname/params.proto"; + +option go_package = "github.com/cosmonaut/chainname/x/houhah/types"; + +// Query defines the gRPC querier service. +service Query { + // Parameters queries the parameters of the module. + rpc Params(QueryParamsRequest) returns (QueryParamsResponse) { + option (google.api.http).get = "/cosmonaut/chainname/chainname/params"; + } +} + +// QueryParamsRequest is request type for the Query/Params RPC method. +message QueryParamsRequest {} + +// QueryParamsResponse is response type for the Query/Params RPC method. +message QueryParamsResponse { + // params holds all the parameters of this module. + Params params = 1 [(gogoproto.nullable) = false]; +}` + txProto = `syntax = "proto3"; +package cosmonautchainname.chainname; + +option go_package = "github.com/cosmonaut/chainname/x/chainname/types"; + +// Msg defines the Msg service. +service Msg {} +` +) + +// Test that the changes from adding a list with starport scaffold list +// are applied correctly to tx.proto. 
+func TestAddEmptyList_tx(t *testing.T) { + typename, modname := kirby, "chainname" + f, err := parseStringProto(txProto) + require.NoError(t, err) + + // 1) Add import for the new type (module/lowercase_typ) + imp := NewImport(fmt.Sprintf("%s/%s.proto", modname, strings.ToLower(typename))) + err = AddImports(f, true, imp) + require.NoError(t, err) + require.True(t, containsElement(f, imp)) + + // 2) Add rpcs + var rpcs []*proto.RPC + for _, op := range []string{"Create", "Update", "Delete"} { + rpc := NewRPC(op+typename, "Msg"+op+typename, "Msg"+op+typename+"Response") + rpcs = append(rpcs, rpc) + } + Apply(f, nil, func(c *Cursor) bool { + // Find the specific service and append. + if m, ok := c.Node().(*proto.Service); ok { + if m.Name == "Msg" { + for _, rpc := range rpcs { + Append(m, rpc) + } + return false // stop + } + } + // Msg will be traversed first. + // If it was empty, we just stop traversing. + return true + }) + for _, rpc := range rpcs { + require.True(t, containsElement(f, rpc)) + } + // Add the messages after service Msgs at the end of f. 
+ createtyp := NewMessage("MsgCreateKirby", + WithFields(NewField("creator", "string", 1))) + resp := NewMessage("MsgCreateKirbyResponse", + WithFields( + NewField("id", "uint64", 1), + ), + ) + Append(f, createtyp, resp) + require.True(t, containsElement(f, createtyp)) + require.True(t, containsElement(f, resp)) + + updatetyp := NewMessage("MsgUpdateKirby", + WithFields( + NewField("creator", "string", 1), + NewField("id", "uint64", 2), + ), + ) + updateResp := NewMessage("MsgUpdateKirbyResponse") + Append(f, updatetyp, updateResp) + require.True(t, containsElement(f, updatetyp)) + require.True(t, containsElement(f, updateResp)) + + deltyp := NewMessage("MsgDeleteKirby", + WithFields( + NewField("creator", "string", 1), + NewField("id", "uint64", 2), + ), + ) + delResp := NewMessage("MsgDeleteResponse") + Append(f, deltyp, delResp) + require.True(t, containsElement(f, deltyp)) + require.True(t, containsElement(f, delResp)) +} + +// Test that the changes from adding a list with starport scaffold list +// are applied correctly to genesis.proto. +func TestAddEmptyList_genesis(t *testing.T) { + typename, modname := "Kirby", "mod" + f, err := parseStringProto(genesisProto) + require.NoError(t, err) + + // 1) Add import for the new type (module/lowercase_typ) + imp := NewImport(fmt.Sprintf("%s/%s.proto", modname, strings.ToLower(typename))) + err = AddImports(f, true, imp) + require.NoError(t, err) + require.True(t, containsElement(f, imp)) + + // 2) Add fields to GenesisState. Append. 
+ Apply(f, nil, func(c *Cursor) bool { + if m, ok := c.Node().(*proto.Message); ok { + if m.Name == "GenesisState" { + lst := NewField(typename+"_list", typename, 2, + WithFieldOptions(NewOption("gogoproto.nullable", "false", Custom())), + Repeated(), + ) + field := NewField(typename+"Count", typename, 3) + Append(m, lst, field) + require.True(t, containsElement(f, lst)) + require.True(t, containsElement(f, field)) + return false + } + } + return true + }) +} + +func TestAddEmptyList_query(t *testing.T) { + typename, modname := "Kirby", "mod" + f, err := parseStringProto(queryProto) + require.NoError(t, err) + + // 1) Add import for the new type (module/lowercase_typ) + imp := NewImport(fmt.Sprintf("%s/%s.proto", modname, strings.ToLower(typename))) + err = AddImports(f, true, imp) + require.NoError(t, err) + require.True(t, containsElement(f, imp)) + + q, err := GetServiceByName(f, "Query") + require.NoError(t, err) + // Add the rpcs + single := NewRPC(typename, "QueryGet"+typename+"Request", "QueryGet"+typename+"Response", + WithRPCOptions( + NewOption( + "google.api.http", + "/cosmonaut/chainname/chainname/"+typename+"/{id}", + Custom(), + SetField("get"), + ), + ), + ) + all := NewRPC(typename+"All", "QueryAll"+typename+"Request", "QueryAll"+typename+"Response", + WithRPCOptions( + NewOption( + "google.api.http", + "/cosmonaut/chainname/chainname/"+typename, + Custom(), + SetField("get"), + ), + ), + ) + Append(q, single, all) + require.True(t, containsElement(f, single)) + require.True(t, containsElement(f, all)) +} diff --git a/ignite/pkg/protoanalysis/testdata/internal/foo.proto b/ignite/pkg/protoanalysis/testdata/internal/foo.proto new file mode 100644 index 0000000..6659ad1 --- /dev/null +++ b/ignite/pkg/protoanalysis/testdata/internal/foo.proto @@ -0,0 +1 @@ +syntax = "proto3"; diff --git a/ignite/pkg/protoanalysis/testdata/liquidity/genesis.proto b/ignite/pkg/protoanalysis/testdata/liquidity/genesis.proto new file mode 100644 index 0000000..52257d7 --- 
/dev/null +++ b/ignite/pkg/protoanalysis/testdata/liquidity/genesis.proto @@ -0,0 +1,26 @@ +syntax = "proto3"; +package tendermint.liquidity; + +import "liquidity.proto"; +import "gogoproto/gogo.proto"; + +option go_package = "github.com/tendermint/liquidity/x/liquidity/types"; + +message PoolRecord { + Pool pool = 1 [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"pool\""]; + PoolMetadata pool_metadata = 2 [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"pool_metadata\""]; + PoolBatch pool_batch = 3 [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"pool_batch\""]; + repeated DepositMsgState deposit_msg_states = 4 [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"deposit_msg_states\""]; + repeated WithdrawMsgState withdraw_msg_states = 5 [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"withdraw_msg_states\""]; + repeated SwapMsgState swap_msg_states = 6 [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"swap_msg_states\""]; +} + +// GenesisState defines the liquidity module's genesis state. +message GenesisState { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // params defines all the parameters of related to liquidity. 
+ Params params = 1 [(gogoproto.nullable) = false]; + repeated PoolRecord pool_records = 2 [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"pools\""]; +} diff --git a/ignite/pkg/protoanalysis/testdata/liquidity/liquidity.proto b/ignite/pkg/protoanalysis/testdata/liquidity/liquidity.proto new file mode 100644 index 0000000..dec4f57 --- /dev/null +++ b/ignite/pkg/protoanalysis/testdata/liquidity/liquidity.proto @@ -0,0 +1,485 @@ +syntax = "proto3"; +package tendermint.liquidity; + +import "tx.proto"; +import "gogoproto/gogo.proto"; +import "cosmos_proto/coin.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; + +option go_package = "github.com/tendermint/liquidity/x/liquidity/types"; +option (gogoproto.goproto_getters_all) = false; + +message PoolType { + option (gogoproto.equal) = true; + + // id of target pool type, only 1 is allowed on this version. + uint32 id = 1 [(gogoproto.moretags) = "yaml:\"id\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint32" + }]; + + // name of the pool type + string name = 2 [(gogoproto.moretags) = "yaml:\"name\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"ConstantProductLiquidityPool\"", + }]; + + // min number of reserveCoins for LiquidityPoolType only 2 is allowed on this spec + uint32 min_reserve_coin_num = 3 [(gogoproto.moretags) = "yaml:\"min_reserve_coin_num\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"2\"", + format: "uint32" + }]; + + // max number of reserveCoins for LiquidityPoolType only 2 is allowed on this spec + uint32 max_reserve_coin_num = 4 [(gogoproto.moretags) = "yaml:\"max_reserve_coin_num\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"2\"", + format: "uint32" + }]; + + // description of the pool type + string description = 5 [(gogoproto.moretags) = "yaml:\"description\""]; +} + +message Params { + option 
(gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = false; + + // list of available pool types + repeated PoolType pool_types = 1 [ + (gogoproto.moretags) = "yaml:\"pool_types\"", + (gogoproto.nullable) = false + ]; + + // Minimum number of coins to be deposited to the liquidity pool upon pool creation + string min_init_deposit_amount = 2 [ + (gogoproto.moretags) = "yaml:\"min_init_deposit_amount\"", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", + (gogoproto.nullable) = false, + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1000000\"", + format: "sdk.Int" + }]; + + // Initial mint amount of pool coin upon pool creation + string init_pool_coin_mint_amount = 3 [ + (gogoproto.moretags) = "yaml:\"init_pool_coin_mint_amount\"", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", + (gogoproto.nullable) = false, + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1000000\"", + format: "sdk.Int" + }]; + + // Limit the size of each liquidity pool in the beginning phase of Liquidity Module adoption to minimize risk, 0 means no limit + string max_reserve_coin_amount = 4 [ + (gogoproto.moretags) = "yaml:\"max_reserve_coin_amount\"", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", + (gogoproto.nullable) = false, + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1000000000000\"", + format: "sdk.Int" + }]; + + // Fee paid for new Liquidity Pool creation to prevent spamming + repeated cosmos.base.v1beta1.Coin pool_creation_fee = 5 [ + (gogoproto.moretags) = "yaml:\"pool_creation_fee\"", + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "[{\"denom\": \"uatom\", \"amount\": \"100000000\"}]", + format: "sdk.Coins" + } + ]; + + // Swap fee rate for every executed swap + bytes swap_fee_rate = 6 
[ + (gogoproto.moretags) = "yaml:\"swap_fee_rate\"", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false, + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"0.003\"", + format: "sdk.Dec" + }]; + + // Reserve coin withdrawal with less proportion by withdrawFeeRate + bytes withdraw_fee_rate = 7 [ + (gogoproto.moretags) = "yaml:\"withdraw_fee_rate\"", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false, + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"0.003\"", + format: "sdk.Dec" + }]; + + // Maximum ratio of reserve coins that can be ordered at a swap order + bytes max_order_amount_ratio = 8 [ + (gogoproto.moretags) = "yaml:\"max_order_amount_ratio\"", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false, + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"0.003\"", + format: "sdk.Dec" + }]; + + // The smallest unit batch height for every liquidity pool + uint32 unit_batch_height = 9 [ + (gogoproto.moretags) = "yaml:\"unit_batch_height\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint32" + }]; + + // // fees paid for each batch messages, to be added v2 + // repeated cosmos.base.v1beta1.Coin liquidity_msg_fee = 5 [ + // (gogoproto.moretags) = "yaml:\"liquidity_msg_fee\"", + // (gogoproto.nullable) = false, + // (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins", + // (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + // example: "[{\"denom\": \"uatom\", \"amount\": \"50000\"}]", + // format: "sdk.Coins" + // }]; +} + +message Pool { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = true; + + // id of the pool + uint64 id = 1 [(gogoproto.moretags) = "yaml:\"id\"", (gogoproto.jsontag) = "id", + 
(grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // id of the pool type + uint32 type_id = 2 [(gogoproto.moretags) = "yaml:\"type_id\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint32" + }]; + + // denoms of reserve coin pair of the pool + repeated string reserve_coin_denoms = 3 [(gogoproto.moretags) = "yaml:\"reserve_coin_denoms\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "[\"denomX\",\"denomY\"]" + }]; + + // reserve account address of the pool + string reserve_account_address = 4 [(gogoproto.moretags) = "yaml:\"reserve_account_address\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"cosmos16ddqestwukv0jzcyfn3fdfq9h2wrs83cr4rfm3\"", + format: "sdk.AccAddress" + }]; + + // denom of pool coin of the pool + string pool_coin_denom = 5 [(gogoproto.moretags) = "yaml:\"pool_coin_denom\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"poolD35A0CC16EE598F90B044CE296A405BA9C381E38837599D96F2F70C2F02A23A4\"", + }]; +} + +message PoolMetadata { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = true; + + // id of the pool + uint64 pool_id = 1 [(gogoproto.moretags) = "yaml:\"pool_id\"", (gogoproto.jsontag) = "pool_id", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // pool coin issued at the pool + cosmos.base.v1beta1.Coin pool_coin_total_supply = 2 [ + (gogoproto.moretags) = "yaml:\"pool_coin_total_supply\"", + (gogoproto.nullable) = false, + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "{\"denom\": \"poolD35A0CC16EE598F90B044CE296A405BA9C381E38837599D96F2F70C2F02A23A4\", \"amount\": \"1000000\"}", + format: "sdk.Coin" + }]; + + // reserve coins deposited in the pool + repeated cosmos.base.v1beta1.Coin reserve_coins = 3 [ + 
(gogoproto.moretags) = "yaml:\"reserve_coins\"", + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "[{\"denom\": \"denomX\", \"amount\": \"1000000\"}, {\"denom\": \"denomY\", \"amount\": \"2000000\"}]", + format: "sdk.Coins" + }]; +} + +message PoolMetadataResponse { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = true; + + // pool coin issued at the pool + cosmos.base.v1beta1.Coin pool_coin_total_supply = 1 [ + (gogoproto.moretags) = "yaml:\"pool_coin_total_supply\"", + (gogoproto.nullable) = false, + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "{\"denom\": \"poolD35A0CC16EE598F90B044CE296A405BA9C381E38837599D96F2F70C2F02A23A4\", \"amount\": \"1000000\"}", + format: "sdk.Coin" + }]; + + // reserve coins deposited in the pool + repeated cosmos.base.v1beta1.Coin reserve_coins = 2 [ + (gogoproto.moretags) = "yaml:\"reserve_coins\"", + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "[{\"denom\": \"denomX\", \"amount\": \"1000000\"}, {\"denom\": \"denomY\", \"amount\": \"2000000\"}]", + format: "sdk.Coins" + }]; +} + +message PoolBatch { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = true; + + // id of the pool + uint64 pool_id = 1 [(gogoproto.moretags) = "yaml:\"pool_id\"", (gogoproto.jsontag) = "pool_id", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // index of this batch + uint64 index = 2 [(gogoproto.moretags) = "yaml:\"index\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // height where this batch is begun + int64 begin_height = 3 [(gogoproto.moretags) = "yaml:\"begin_height\"", + 
(grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1000\"", + format: "int64" + }]; + + // last index of DepositMsgStates + uint64 deposit_msg_index = 4 [(gogoproto.moretags) = "yaml:\"deposit_msg_index\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // last index of WithdrawMsgStates + uint64 withdraw_msg_index = 5 [(gogoproto.moretags) = "yaml:\"withdraw_msg_index\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // last index of SwapMsgStates + uint64 swap_msg_index = 6 [(gogoproto.moretags) = "yaml:\"swap_msg_index\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // true if executed, false if not executed yet + bool executed = 7 [(gogoproto.moretags) = "yaml:\"executed\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "true", + }]; +} + +message PoolBatchResponse { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = true; + + // index of this batch + uint64 index = 1 [(gogoproto.moretags) = "yaml:\"index\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // height where this batch is begun + int64 begin_height = 2 [(gogoproto.moretags) = "yaml:\"begin_height\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1000\"", + format: "int64" + }]; + + // last index of DepositMsgStates + uint64 deposit_msg_index = 3 [(gogoproto.moretags) = "yaml:\"deposit_msg_index\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // last index of WithdrawMsgStates + uint64 withdraw_msg_index = 4 [(gogoproto.moretags) = "yaml:\"withdraw_msg_index\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: 
"\"1\"", + format: "uint64" + }]; + + // last index of SwapMsgStates + uint64 swap_msg_index = 5 [(gogoproto.moretags) = "yaml:\"swap_msg_index\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // true if executed, false if not executed yet + bool executed = 6 [(gogoproto.moretags) = "yaml:\"executed\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "true", + }]; +} + +message DepositMsgState { + + // height where this message is appended to the batch + int64 msg_height = 1 [(gogoproto.moretags) = "yaml:\"msg_height\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1000\"", + format: "int64" + }]; + + // index of this deposit message in this liquidity pool + uint64 msg_index = 2 [(gogoproto.moretags) = "yaml:\"msg_index\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // true if executed on this batch, false if not executed yet + bool executed = 3 [(gogoproto.moretags) = "yaml:\"executed\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "true", + }]; + + // true if executed successfully on this batch, false if failed + bool succeeded = 4 [(gogoproto.moretags) = "yaml:\"succeeded\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "true", + }]; + + // true if ready to be deleted on kvstore, false if not ready to be deleted + bool to_be_deleted = 5 [(gogoproto.moretags) = "yaml:\"to_be_deleted\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "true", + }]; + + // MsgDepositWithinBatch + MsgDepositWithinBatch msg = 6 [(gogoproto.moretags) = "yaml:\"msg\""]; +} + +message WithdrawMsgState { + + // height where this message is appended to the batch + int64 msg_height = 1 [(gogoproto.moretags) = "yaml:\"msg_height\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = 
{ + example: "\"1000\"", + format: "int64" + }]; + + // index of this withdraw message in this liquidity pool + uint64 msg_index = 2 [(gogoproto.moretags) = "yaml:\"msg_index\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // true if executed on this batch, false if not executed yet + bool executed = 3 [(gogoproto.moretags) = "yaml:\"executed\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "true", + }]; + + // true if executed successfully on this batch, false if failed + bool succeeded = 4 [(gogoproto.moretags) = "yaml:\"succeeded\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "true", + }]; + + // true if ready to be deleted on kvstore, false if not ready to be deleted + bool to_be_deleted = 5 [(gogoproto.moretags) = "yaml:\"to_be_deleted\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "true", + }]; + + // MsgWithdrawWithinBatch + MsgWithdrawWithinBatch msg = 6 [(gogoproto.moretags) = "yaml:\"msg\""]; +} + +message SwapMsgState { + + // height where this message is appended to the batch + int64 msg_height = 1 [(gogoproto.moretags) = "yaml:\"msg_height\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1000\"", + format: "int64" + }]; + + // index of this swap message in this liquidity pool + uint64 msg_index = 2 [(gogoproto.moretags) = "yaml:\"msg_index\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // true if executed on this batch, false if not executed yet + bool executed = 3 [(gogoproto.moretags) = "yaml:\"executed\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "true", + }]; + + // true if executed successfully on this batch, false if failed + bool succeeded = 4 [(gogoproto.moretags) = "yaml:\"succeeded\"", + 
(grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "true", + }]; + + // true if ready to be deleted on kvstore, false if not ready to be deleted + bool to_be_deleted = 5 [(gogoproto.moretags) = "yaml:\"to_be_deleted\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "true", + }]; + + // swap orders are cancelled when current height is equal or higher than ExpiryHeight + int64 order_expiry_height = 6 [(gogoproto.moretags) = "yaml:\"order_expiry_height\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1000\"", + format: "int64" + }]; + + // offer coin exchanged until now + cosmos.base.v1beta1.Coin exchanged_offer_coin = 7 [ + (gogoproto.nullable) = false, + (gogoproto.moretags) = "yaml:\"exchanged_offer_coin\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "{\"denom\": \"denomX\", \"amount\": \"600000\"}", + format: "sdk.Coin" + }]; + + // offer coin currently remaining to be exchanged + cosmos.base.v1beta1.Coin remaining_offer_coin = 8 [ + (gogoproto.nullable) = false, + (gogoproto.moretags) = "yaml:\"remaining_offer_coin\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "{\"denom\": \"denomX\", \"amount\": \"400000\"}", + format: "sdk.Coin" + }]; + + // reserve fee for pays fee in half offer coin + cosmos.base.v1beta1.Coin reserved_offer_coin_fee = 9 [ + (gogoproto.nullable) = false, + (gogoproto.moretags) = "yaml:\"reserved_offer_coin_fee\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "{\"denom\": \"denomX\", \"amount\": \"5000\"}", + format: "sdk.Coin" + } + ]; + + // MsgSwapWithinBatch + MsgSwapWithinBatch msg = 10 [(gogoproto.moretags) = "yaml:\"msg\""]; +} diff --git a/ignite/pkg/protoanalysis/testdata/liquidity/msg.proto b/ignite/pkg/protoanalysis/testdata/liquidity/msg.proto new file mode 100644 index 0000000..4148771 --- /dev/null +++ 
b/ignite/pkg/protoanalysis/testdata/liquidity/msg.proto @@ -0,0 +1,93 @@ +syntax = "proto3"; +package tendermint.liquidity; + +import "google/api/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; +import "tx.proto"; + +option go_package = "github.com/tendermint/liquidity/x/liquidity/types"; +//option (gogoproto.goproto_getters_all) = false; + +//option (gogoproto.goproto_stringer_all) = false; +//option (gogoproto.stringer_all) = false; + +// Msg defines the staking Msg service. +service MsgApi { + // Submit create liquidity pool message. + rpc CreatePoolApi(MsgCreatePoolRequest) returns (MsgCreatePoolResponse) { + option (google.api.http) = { + post: "/liquidity/pools/{test}" + body: "msg" // WARNING(protoanalysis): changed from * to msg to test: param + body + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "unsigned tx can be generated upon request through this POST endpoint, you can broadcast adding signature to the generated tx, through endpoint such as /txs or rpc, After broadcast, the result can be subscribe through the event and please refer to the spec." 
+ tags: "Tx"; + external_docs: { + // url: "https://github.com/tendermint/liquidity/tree/develop/x/liquidity/spec"; + // description: "Find out more message spec and response events here"; + url: "https://github.com/cosmos/cosmos-sdk/blob/main/docs/docs/migrations/00-intro.md"; + description: "According to migrating-to-new-rest-endpoints, the POST endpoints of the New gGPC-gateway REST are N/A and guided directly to use Protobuf, need to use cli or localhost:1317/cosmos/tx/v1beta1/txs for broadcast txs temporarily"; + } +// responses: { +// key: "500"; +// value: { +// description: "Server internal error"; +// } +// } +// responses: { +// key: "400"; +// value: { +// description: "Invalid request"; +// } +// } + }; + }; + + // Submit deposit to the liquidity pool batch + rpc DepositWithinBatchApi(MsgDepositWithinBatchRequest) returns (MsgDepositWithinBatchResponse) { + option (google.api.http) = { + post: "/liquidity/pools/{pool_id}/batch/deposits" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "unsigned tx can be generated upon request through this POST endpoint, you can broadcast adding signature to the generated tx, through endpoint such as /txs or rpc, After broadcast, the result can be subscribe through the event and please refer to the spec." 
+ tags: "Tx"; + external_docs: { + url: "https://github.com/cosmos/cosmos-sdk/blob/main/docs/docs/migrations/00-intro.md"; + description: "According to migrating-to-new-rest-endpoints, the POST endpoints of the New gGPC-gateway REST are N/A and guided directly to use Protobuf, need to use cli or localhost:1317/cosmos/tx/v1beta1/txs for broadcast txs temporarily"; + } + }; + }; + + // Submit withdraw from to the liquidity pool batch + rpc WithdrawWithinBatchApi(MsgWithdrawWithinBatchRequest) returns (MsgWithdrawWithinBatchResponse) { + option (google.api.http) = { + post: "/liquidity/pools/{pool_id}/batch/withdraws" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "unsigned tx can be generated upon request through this POST endpoint, you can broadcast adding signature to the generated tx, through endpoint such as /txs or rpc, After broadcast, the result can be subscribe through the event and please refer to the spec." + tags: "Tx"; + external_docs: { + url: "https://github.com/cosmos/cosmos-sdk/blob/main/docs/docs/migrations/00-intro.md"; + description: "According to migrating-to-new-rest-endpoints, the POST endpoints of the New gGPC-gateway REST are N/A and guided directly to use Protobuf, need to use cli or localhost:1317/cosmos/tx/v1beta1/txs for broadcast txs temporarily"; + } + }; + }; + + // Submit swap to the liquidity pool batch + rpc SwapApi(MsgSwapWithinBatchRequest) returns (MsgSwapWithinBatchResponse) { + option (google.api.http) = { + post: "/liquidity/pools/{pool_id}/batch/swaps" + body: "msg" // WARNING(protoanalysis): changed from * to msg to test: param + query + body + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "unsigned tx can be generated upon request through this POST endpoint, you can broadcast adding signature to the generated tx, through endpoint such as /txs or rpc, After broadcast, the result can be subscribe through the event and 
please refer to the spec." + tags: "Tx"; + external_docs: { + url: "https://github.com/cosmos/cosmos-sdk/blob/main/docs/docs/migrations/00-intro.md"; + description: "According to migrating-to-new-rest-endpoints, the POST endpoints of the New gGPC-gateway REST are N/A and guided directly to use Protobuf, need to use cli or localhost:1317/cosmos/tx/v1beta1/txs for broadcast txs temporarily"; + } + }; + }; +} diff --git a/ignite/pkg/protoanalysis/testdata/liquidity/query.proto b/ignite/pkg/protoanalysis/testdata/liquidity/query.proto new file mode 100644 index 0000000..5886a7d --- /dev/null +++ b/ignite/pkg/protoanalysis/testdata/liquidity/query.proto @@ -0,0 +1,465 @@ +syntax = "proto3"; +package tendermint.liquidity; + +import "gogoproto/gogo.proto"; +import "liquidity.proto"; +import "google/api/annotations.proto"; +import "cosmos_proto/pagination.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; + +option go_package = "github.com/tendermint/liquidity/x/liquidity/types"; + +// Query defines the gRPC querier service for liquidity module. +service Query { + // Get existing liquidity pools. + rpc LiquidityPools (QueryLiquidityPoolsRequest) returns (QueryLiquidityPoolsResponse) { + option (google.api.http).get = "/liquidity/pools"; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "It returns list of all liquidity pools with pagination result."; + external_docs: { + url: "https://github.com/tendermint/liquidity/blob/develop/doc/client.md"; + description: "Find out more about the query and error codes"; + } + responses: { + key: "500" + value: { + description: "Internal Server Error" + examples: { + key: "application/json" + value: '{"code":2,"message":"rpc error: code = NotFound desc = There are no pools present.: key not found","details":[]}' + } + } + } + }; + } + + // Get specific liquidity pool. 
+ rpc LiquidityPool (QueryLiquidityPoolRequest) returns (QueryLiquidityPoolResponse) { + option (google.api.http).get = "/liquidity/pools/{pool_id}"; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "It returns the liquidity pool corresponding to the pool_id."; + external_docs: { + url: "https://github.com/tendermint/liquidity/blob/develop/doc/client.md"; + description: "Find out more about the query and error codes"; + } + responses: { + key: "500" + value: { + description: "Internal Server Error" + examples: { + key: "application/json" + value: '{"code":2,"message":"rpc error: code = NotFound desc = liquidity pool 3 doesn\'t exist: key not found","details":[]}' + } + } + } + responses: { + key: "400" + value: { + description: "Bad Request" + examples: { + key: "application/json" + value: '{"code":3,"message":"type mismatch, parameter: pool_id, error: strconv.ParseUint: parsing *: invalid syntax","details":[]}' + } + } + } + }; + } + + // Get the pool's current batch. 
+ rpc LiquidityPoolBatch (QueryLiquidityPoolBatchRequest) returns (QueryLiquidityPoolBatchResponse) { + option (google.api.http).get = "/liquidity/pools/{pool_id}/batch"; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "It returns the current batch of the pool corresponding to the pool_id."; + external_docs: { + url: "https://github.com/tendermint/liquidity/blob/develop/doc/client.md"; + description: "Find out more about the query and error codes"; + } + responses: { + key: "500" + value: { + description: "Internal Server Error" + examples: { + key: "application/json" + value: '{"code":2,"message":"rpc error: code = NotFound desc = liquidity pool 3 doesn\'t exist: key not found","details":[]}' + } + } + } + responses: { + key: "400" + value: { + description: "Bad Request" + examples: { + key: "application/json" + value: '{"code":3,"message":"type mismatch, parameter: pool_id, error: strconv.ParseUint: parsing *: invalid syntax","details":[]}' + } + } + } + }; + } + + // Get all swap messages in the pool's current batch. 
+ rpc PoolBatchSwapMsgs(QueryPoolBatchSwapMsgsRequest) returns (QueryPoolBatchSwapMsgsResponse) { + option (google.api.http).get = "/liquidity/pools/{pool_id}/batch/swaps"; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "It returns list of all swap messages in the current batch of the pool with pagination result."; + external_docs: { + url: "https://github.com/tendermint/liquidity/blob/develop/doc/client.md"; + description: "Find out more about the query and error codes"; + } + responses: { + key: "500" + value: { + description: "Internal Server Error" + examples: { + key: "application/json" + value: '{"code":2,"message":"rpc error: code = NotFound desc = liquidity pool 3 doesn\'t exist: key not found","details":[]}' + } + } + } + responses: { + key: "400" + value: { + description: "Bad Request" + examples: { + key: "application/json" + value: '{"code":3,"message":"type mismatch, parameter: pool_id, error: strconv.ParseUint: parsing *: invalid syntax","details":[]}' + } + } + } + }; + } + + // Get specific swap message in the pool's current batch. 
+ rpc PoolBatchSwapMsg(QueryPoolBatchSwapMsgRequest) returns (QueryPoolBatchSwapMsgResponse) { + option (google.api.http).get = "/liquidity/pools/{pool_id}/batch/swaps/{msg_index}"; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "It returns the swap message corresponding to the msg_index in the pool's current batch"; + external_docs: { + url: "https://github.com/tendermint/liquidity/blob/develop/doc/client.md"; + description: "Find out more about the query and error codes"; + } + responses: { + key: "500" + value: { + description: "Internal Server Error" + examples: { + key: "application/json" + value: '{"root":{"code":2,"details":[],"message":"rpc error: code = NotFound desc = the msg given msg_index 1 doesn\'t exist or deleted: key not found"}}' + } + } + } + responses: { + key: "400" + value: { + description: "Bad Request" + examples: { + key: "application/json" + value: '{"code":3,"message":"type mismatch, parameter: msg_index, error: strconv.ParseUint: parsing *: invalid syntax","details":[]}' + } + } + } + }; + } + + // Get all deposit messages in the pool's current batch. 
+ rpc PoolBatchDepositMsgs(QueryPoolBatchDepositMsgsRequest) returns (QueryPoolBatchDepositMsgsResponse) { + option (google.api.http).get = "/liquidity/pools/{pool_id}/batch/deposits"; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "It returns list of all deposit messages in the current batch of the pool with pagination result."; + external_docs: { + url: "https://github.com/tendermint/liquidity/blob/develop/doc/client.md"; + description: "Find out more about the query and error codes"; + } + responses: { + key: "500" + value: { + description: "Internal Server Error" + examples: { + key: "application/json" + value: '{"code":2,"message":"rpc error: code = NotFound desc = liquidity pool 3 doesn\'t exist: key not found","details":[]}' + } + } + } + responses: { + key: "400" + value: { + description: "Bad Request" + examples: { + key: "application/json" + value: '{"code":3,"message":"type mismatch, parameter: pool_id, error: strconv.ParseUint: parsing *: invalid syntax","details":[]}' + } + } + } + + }; + } + + // Get specific deposit message in the pool's current batch. 
+ rpc PoolBatchDepositMsg(QueryPoolBatchDepositMsgRequest) returns (QueryPoolBatchDepositMsgResponse) { + option (google.api.http).get = "/liquidity/pools/{pool_id}/batch/deposits/{msg_index}"; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "It returns the deposit message corresponding to the msg_index in the pool's current batch."; + external_docs: { + url: "https://github.com/tendermint/liquidity/blob/develop/doc/client.md"; + description: "Find out more about the query and error codes"; + } + responses: { + key: "500" + value: { + description: "Internal Server Error" + examples: { + key: "application/json" + value: '{"root":{"code":2,"details":[],"message":"rpc error: code = NotFound desc = the msg given msg_index 1 doesn\'t exist or deleted: key not found"}}' + } + } + } + responses: { + key: "400" + value: { + description: "Bad Request" + examples: { + key: "application/json" + value: '{"code":3,"message":"type mismatch, parameter: msg_index, error: strconv.ParseUint: parsing *: invalid syntax","details":[]}' + } + } + } + }; + } + + // Get all withdraw messages in the pool's current batch. 
+ rpc PoolBatchWithdrawMsgs(QueryPoolBatchWithdrawMsgsRequest) returns (QueryPoolBatchWithdrawMsgsResponse) { + option (google.api.http).get = "/liquidity/pools/{pool_id}/batch/withdraws"; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "It returns list of all withdraw messages in the current batch of the pool with pagination result."; + external_docs: { + url: "https://github.com/tendermint/liquidity/blob/develop/doc/client.md"; + description: "Find out more about the query and error codes"; + } + responses: { + key: "500" + value: { + description: "Internal Server Error" + examples: { + key: "application/json" + value: '{"code":2,"message":"rpc error: code = NotFound desc = liquidity pool 3 doesn\'t exist: key not found","details":[]}' + } + } + } + responses: { + key: "400" + value: { + description: "Bad Request" + examples: { + key: "application/json" + value: '{"code":3,"message":"type mismatch, parameter: pool_id, error: strconv.ParseUint: parsing *: invalid syntax","details":[]}' + } + } + } + + }; + } + + // Get specific withdraw message in the pool's current batch. 
+ rpc PoolBatchWithdrawMsg(QueryPoolBatchWithdrawMsgRequest) returns (QueryPoolBatchWithdrawMsgResponse) { + option (google.api.http).get = "/liquidity/pools/{pool_id}/batch/withdraws/{msg_index}"; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "It returns the withdraw message corresponding to the msg_index in the pool's current batch."; + external_docs: { + url: "https://github.com/tendermint/liquidity/blob/develop/doc/client.md"; + description: "Find out more about the query and error codes"; + } + responses: { + key: "500" + value: { + description: "Internal Server Error" + examples: { + key: "application/json" + value: '{"root":{"code":2,"details":[],"message":"rpc error: code = NotFound desc = the msg given msg_index 1 doesn\'t exist or deleted: key not found"}}' + } + } + } + responses: { + key: "400" + value: { + description: "Bad Request" + examples: { + key: "application/json" + value: '{"code":3,"message":"type mismatch, parameter: msg_index, error: strconv.ParseUint: parsing *: invalid syntax","details":[]}' + } + } + } + }; + } + + // Get all parameters of the liquidity module. + rpc Params(QueryParamsRequest) returns (QueryParamsResponse) { + option (google.api.http).get = "/liquidity/params"; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "It returns all parameters of the liquidity module."; + external_docs: { + url: "https://github.com/tendermint/liquidity/blob/develop/x/liquidity/spec/08_params.md"; + description: "Find out more about the params"; + } + }; + } +} + +// the request type for the QueryLiquidityPool RPC method. requestable specified pool_id. +message QueryLiquidityPoolRequest { + uint64 pool_id = 1; +} + +// the response type for the QueryLiquidityPoolResponse RPC method. It returns the liquidity pool corresponding to the requested pool_id. 
+message QueryLiquidityPoolResponse { + Pool pool = 1 [(gogoproto.nullable) = false]; +// // id of the pool +// uint64 id = 1 [(gogoproto.moretags) = "yaml:\"id\"", (gogoproto.jsontag) = "id", +// (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { +// example: "\"1\"", +// format: "uint64" +// }]; +// +// // id of the pool type +// uint32 type_id = 2 [(gogoproto.moretags) = "yaml:\"type_id\"", +// (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { +// example: "\"1\"", +// format: "uint32" +// }]; +// +// // denoms of reserve coin pair of the pool +// repeated string reserve_coin_denoms = 3 [(gogoproto.moretags) = "yaml:\"reserve_coin_denoms\"", +// (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { +// example: "[\"denomX\",\"denomY\"]" +// }]; +// +// // reserve account address of the pool +// string reserve_account_address = 4 [(gogoproto.moretags) = "yaml:\"reserve_account_address\"", +// (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { +// example: "\"cosmos16ddqestwukv0jzcyfn3fdfq9h2wrs83cr4rfm3\"", +// format: "sdk.AccAddress" +// }]; +// +// // denom of pool coin of the pool +// string pool_coin_denom = 5 [(gogoproto.moretags) = "yaml:\"pool_coin_denom\"", +// (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { +// example: "\"poolD35A0CC16EE598F90B044CE296A405BA9C381E38837599D96F2F70C2F02A23A4\"", +// }]; +} + +// the request type for the QueryLiquidityPoolBatch RPC method. requestable including specified pool_id. +message QueryLiquidityPoolBatchRequest { + // id of the target pool for query + uint64 pool_id = 1; +} + +// the response type for the QueryLiquidityPoolBatchResponse RPC method. It returns the liquidity pool batch corresponding to the requested pool_id. +message QueryLiquidityPoolBatchResponse { + PoolBatch batch = 1 [(gogoproto.nullable) = false]; +} + +// the request type for the QueryLiquidityPools RPC method. requestable including pagination offset, limit, key. 
+message QueryLiquidityPoolsRequest { + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 1; +} + +// the response type for the QueryLiquidityPoolsResponse RPC method. This includes list of all liquidity pools currently existed and paging results containing next_key and total count. +message QueryLiquidityPoolsResponse { + repeated Pool pools = 1 [(gogoproto.nullable) = false]; + // pagination defines the pagination in the response. not working on this version. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryParamsRequest is request type for the QueryParams RPC method. +message QueryParamsRequest {} + +// the response type for the QueryParamsResponse RPC method. This includes current parameter of the liquidity module. +message QueryParamsResponse { + // params holds all the parameters of this module. + Params params = 1 [(gogoproto.nullable) = false]; +} + +// the request type for the QueryPoolBatchSwapMsgs RPC method. requestable including specified pool_id and pagination offset, limit, key. +message QueryPoolBatchSwapMsgsRequest { + // id of the target pool for query + uint64 pool_id = 1; + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// the request type for the QueryPoolBatchSwap RPC method. requestable including specified pool_id and msg_index +message QueryPoolBatchSwapMsgRequest { + // id of the target pool for query + uint64 pool_id = 1; + // target msg_index of the pool + uint64 msg_index = 2; +} + +// the response type for the QueryPoolBatchSwapMsgs RPC method. This includes list of all currently existing swap messages of the batch and paging results containing next_key and total count. +message QueryPoolBatchSwapMsgsResponse { + repeated SwapMsgState swaps = 1 [(gogoproto.nullable) = false]; + // pagination defines the pagination in the response. not working on this version. 
+ cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// the response type for the QueryPoolBatchSwapMsg RPC method. This includes a batch swap message of the batch +message QueryPoolBatchSwapMsgResponse { + SwapMsgState swap = 1 [(gogoproto.nullable) = false]; +} + +// the request type for the QueryPoolBatchDeposit RPC method. requestable including specified pool_id and pagination offset, limit, key. +message QueryPoolBatchDepositMsgsRequest { + // id of the target pool for query + uint64 pool_id = 1; + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// the request type for the QueryPoolBatchDeposit RPC method. requestable including specified pool_id and msg_index +message QueryPoolBatchDepositMsgRequest { + // id of the target pool for query + uint64 pool_id = 1; + // target msg_index of the pool + uint64 msg_index = 2; +} + +// the response type for the QueryPoolBatchDeposit RPC method. This includes a list of all currently existing deposit messages of the batch and paging results containing next_key and total count. +message QueryPoolBatchDepositMsgsResponse { + repeated DepositMsgState deposits = 1 [(gogoproto.nullable) = false]; + // pagination defines the pagination in the response. not working on this version. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// the response type for the QueryPoolBatchDepositMsg RPC method. This includes a batch swap message of the batch +message QueryPoolBatchDepositMsgResponse { + DepositMsgState deposit = 1 [(gogoproto.nullable) = false]; +} + + +// the request type for the QueryPoolBatchWithdraw RPC method. requestable including specified pool_id and pagination offset, limit, key. +message QueryPoolBatchWithdrawMsgsRequest { + // id of the target pool for query + uint64 pool_id = 1; + // pagination defines an optional pagination for the request. 
+ cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// the request type for the QueryPoolBatchWithdraw RPC method. requestable including specified pool_id and msg_index +message QueryPoolBatchWithdrawMsgRequest { + // id of the target pool for query + uint64 pool_id = 1; + // target msg_index of the pool + uint64 msg_index = 2; +} + +// the response type for the QueryPoolBatchWithdraw RPC method. This includes a list of all currently existing withdraw messages of the batch and paging results containing next_key and total count. +message QueryPoolBatchWithdrawMsgsResponse { + repeated WithdrawMsgState withdraws = 1 [(gogoproto.nullable) = false]; + // pagination defines the pagination in the response. not working on this version. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// the response type for the QueryPoolBatchWithdrawMsg RPC method. This includes a batch swap message of the batch +message QueryPoolBatchWithdrawMsgResponse { + WithdrawMsgState withdraw = 1 [(gogoproto.nullable) = false]; +} diff --git a/ignite/pkg/protoanalysis/testdata/liquidity/tx.proto b/ignite/pkg/protoanalysis/testdata/liquidity/tx.proto new file mode 100644 index 0000000..b452aec --- /dev/null +++ b/ignite/pkg/protoanalysis/testdata/liquidity/tx.proto @@ -0,0 +1,436 @@ +syntax = "proto3"; +package tendermint.liquidity; + +import "gogoproto/gogo.proto"; +import "cosmos_proto/coin.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; + +option go_package = "github.com/tendermint/liquidity/x/liquidity/types"; +//option (gogoproto.goproto_getters_all) = false; + +//option (gogoproto.goproto_stringer_all) = false; +//option (gogoproto.stringer_all) = false; + +// Msg defines the liquidity Msg service. +service Msg { + + // Submit create liquidity pool message. + rpc CreatePool(MsgCreatePool) returns (MsgCreatePoolResponse); + + // Submit deposit to the liquidity pool batch. 
+ rpc DepositWithinBatch(MsgDepositWithinBatch) returns (MsgDepositWithinBatchResponse); + + // Submit withdraw from to the liquidity pool batch. + rpc WithdrawWithinBatch(MsgWithdrawWithinBatch) returns (MsgWithdrawWithinBatchResponse); + + // Submit swap to the liquidity pool batch. + rpc Swap(MsgSwapWithinBatch) returns (MsgSwapWithinBatchResponse); +} + +// MsgCreatePool defines an sdk.Msg type that supports submitting create liquidity pool +message MsgCreatePool { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string pool_creator_address = 1 [(gogoproto.moretags) = "yaml:\"pool_creator_address\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + description: "account address of the origin of this message", + example: "\"cosmos1e35y69rhrt7y4yce5l5u73sjnxu0l33wvznyun\"", + format: "sdk.AccAddress" + }]; + + // id of target pool type, only 1 is allowed on this version, Must match the value in the pool. + uint32 pool_type_id = 2 [(gogoproto.moretags) = "yaml:\"pool_type_id\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint32" + }]; + + // reserve coin pair of the pool to deposit + repeated cosmos.base.v1beta1.Coin deposit_coins = 4 [(gogoproto.nullable) = false, + (gogoproto.moretags) = "yaml:\"deposit_coins\"", + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "[{\"denom\": \"denomX\", \"amount\": \"1000000\"}, {\"denom\": \"denomY\", \"amount\": \"2000000\"}]", + format: "sdk.Coins" + }]; +} + +// MsgCreatePoolRequest is the request type for the Msg/MsgCreatePoolRequest RPC method. +message MsgCreatePoolRequest { + BaseReq base_req = 1; + + // MsgCreatePool + MsgCreatePool msg = 2; +} + +// MsgCreatePoolResponse defines the Msg/CreatePool response type. 
+message MsgCreatePoolResponse { + StdTx std_tx = 1 [(gogoproto.moretags) = "yaml:\"std_tx\""]; +} + +// `MsgDepositWithinBatch defines` an `sdk.Msg` type that supports submitting deposit request to the batch of the liquidity pool +// Deposit submit to the batch of the Liquidity pool with the specified `pool_id`, deposit_coins for reserve +// this requests are stacked in the batch of the liquidity pool, not immediately processed and +// processed in the `endblock` at once with other requests. +// +// See: https://github.com/tendermint/liquidity/blob/develop/x/liquidity/spec/04_messages.md +message MsgDepositWithinBatch { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // The publisher in which to create the book. + // + // Format: `publishers/{publisher}` + // + // Example: `publishers/1257894000000000000` + string depositor_address = 1 [(gogoproto.moretags) = "yaml:\"depositor_address\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + description: "account address of the origin of this message", + example: "\"cosmos1e35y69rhrt7y4yce5l5u73sjnxu0l33wvznyun\"", + format: "sdk.AccAddress" + }]; + // id of the target pool + uint64 pool_id = 2 [(gogoproto.moretags) = "yaml:\"pool_id\"", (gogoproto.jsontag) = "pool_id", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // reserve coin pair of the pool to deposit + repeated cosmos.base.v1beta1.Coin deposit_coins = 3 [ + (gogoproto.nullable) = false, + (gogoproto.moretags) = "yaml:\"deposit_coins\"", + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "[{\"denom\": \"denomX\", \"amount\": \"1000000\"}, {\"denom\": \"denomY\", \"amount\": \"2000000\"}]", + format: "sdk.Coins" + }]; + +} + +// MsgDepositWithinBatchRequest is the request type for the Msg/DepositWithinBatch RPC method. 
+message MsgDepositWithinBatchRequest { + BaseReq base_req = 1; + // id of the target pool + uint64 pool_id = 2 [(gogoproto.moretags) = "yaml:\"pool_id\"", (gogoproto.jsontag) = "pool_id", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // MsgDepositWithinBatch + MsgDepositWithinBatch msg = 3; +} + +// MsgDepositWithinBatchResponse defines the Msg/DepositWithinBatch response type. +message MsgDepositWithinBatchResponse { + StdTx std_tx = 1 [(gogoproto.moretags) = "yaml:\"std_tx\""]; +} + +// `MsgWithdrawWithinBatch` defines an `sdk.Msg` type that supports submitting withdraw request to the batch of the liquidity pool +// Withdraw submit to the batch from the Liquidity pool with the specified `pool_id`, `pool_coin` of the pool +// this requests are stacked in the batch of the liquidity pool, not immediately processed and +// processed in the `endblock` at once with other requests. +// +// See: https://github.com/tendermint/liquidity/blob/develop/x/liquidity/spec/04_messages.md +message MsgWithdrawWithinBatch { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string withdrawer_address = 1 [ (gogoproto.moretags) = "yaml:\"withdrawer_address\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + description: "account address of the origin of this message", + example: "\"cosmos1e35y69rhrt7y4yce5l5u73sjnxu0l33wvznyun\"", + format: "sdk.AccAddress" + }]; + // id of the target pool + uint64 pool_id = 2 [(gogoproto.moretags) = "yaml:\"pool_id\"", (gogoproto.jsontag) = "pool_id", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + cosmos.base.v1beta1.Coin pool_coin = 3 [ + (gogoproto.nullable) = false, + (gogoproto.moretags) = "yaml:\"pool_coin\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "{\"denom\": 
\"poolD35A0CC16EE598F90B044CE296A405BA9C381E38837599D96F2F70C2F02A23A4\", \"amount\": \"1000\"}", + format: "sdk.Coin" + }]; +} + +// MsgWithdrawWithinBatchRequest is the request type for the Query/WithdrawWithinBatch RPC method. +message MsgWithdrawWithinBatchRequest { + BaseReq base_req = 1; + // id of the target pool + uint64 pool_id = 2 [(gogoproto.moretags) = "yaml:\"pool_id\"", (gogoproto.jsontag) = "pool_id", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // MsgWithdrawWithinBatch + MsgWithdrawWithinBatch msg = 3; +} + +// MsgWithdrawWithinBatchResponse defines the Msg/WithdrawWithinBatch response type. +message MsgWithdrawWithinBatchResponse { + StdTx std_tx = 1 [(gogoproto.moretags) = "yaml:\"std_tx\""]; +} + +// `MsgSwapWithinBatch` defines an sdk.Msg type that supports submitting swap offer request to the batch of the liquidity pool +// Swap offer submit to the batch to the Liquidity pool with the specified the `pool_id`, `swap_type_id`, +// `demand_coin_denom` with the coin and the price you're offering and current `params.swap_fee_rate` +// this requests are stacked in the batch of the liquidity pool, not immediately processed and +// processed in the `endblock` at once with other requests +// You should request the same each field as the pool +// Currently, only the default `swap_type_id`1 is available on this version +// The detailed swap algorithm can be found here. 
+// +// See: https://github.com/tendermint/liquidity/tree/develop/doc +// https://github.com/tendermint/liquidity/blob/develop/x/liquidity/spec/04_messages.md +message MsgSwapWithinBatch { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + // address of swap requester + string swap_requester_address = 1 [(gogoproto.moretags) = "yaml:\"swap_requester_address\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + description: "account address of the origin of this message", + example: "\"cosmos1e35y69rhrt7y4yce5l5u73sjnxu0l33wvznyun\"", + format: "sdk.AccAddress" + }]; + // id of the target pool + uint64 pool_id = 2 [(gogoproto.moretags) = "yaml:\"pool_id\"", (gogoproto.jsontag) = "pool_id", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // id of swap type, only 1 is allowed on this version, Must match the value in the pool. + uint32 swap_type_id = 3 [(gogoproto.moretags) = "yaml:\"swap_type_id\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint32" + }]; + + // offer sdk.coin for the swap request, Must match the denom in the pool. + cosmos.base.v1beta1.Coin offer_coin = 4 [ + (gogoproto.nullable) = false, + (gogoproto.moretags) = "yaml:\"offer_coin\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "{\"denom\": \"denomX\", \"amount\": \"1000000\"}", + format: "sdk.Coin" + }]; + + // denom of demand coin to be exchanged on the swap request, Must match the denom in the pool. 
+ string demand_coin_denom = 5 [(gogoproto.moretags) = "yaml:\"demand_coin_denom\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"denomB\"", + }]; + + // offer coin fee for pay fees in half offer coin + cosmos.base.v1beta1.Coin offer_coin_fee = 6 [ + (gogoproto.nullable) = false, + (gogoproto.moretags) = "yaml:\"offer_coin_fee\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "{\"denom\": \"denomX\", \"amount\": \"5000\"}", + format: "sdk.Coin" + } + ]; + + // limit order price for this offer + bytes order_price = 7 [ + (gogoproto.moretags) = "yaml:\"order_price\"", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false, + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1.1\"", + format: "sdk.Dec" + }]; +} + +// MsgSwapWithinBatchRequest is the request type for the Query/Swap RPC method. +message MsgSwapWithinBatchRequest { + BaseReq base_req = 1; + // id of the target pool + uint64 pool_id = 2 [(gogoproto.moretags) = "yaml:\"pool_id\"", (gogoproto.jsontag) = "pool_id", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1\"", + format: "uint64" + }]; + + // MsgSwapWithinBatch + MsgSwapWithinBatch msg = 3; +} + +// MsgSwapWithinBatchResponse defines the Msg/Swap response type. 
+message MsgSwapWithinBatchResponse { + StdTx std_tx = 1 [(gogoproto.moretags) = "yaml:\"std_tx\""]; +} + +// Base Request struct for Post Tx, standard of tendermint/cosmos-sdk +message BaseReq { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // Sender address or Keybase name to generate a transaction + string from = 1 [(gogoproto.moretags) = "yaml:\"from\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"cosmos1qz38nymksetqd2d4qesrxpffzywuel82a4l0vs\"", + format: "sdk.AccAddress" + }]; + + // Memo to send along with transaction + string memo = 2 [(gogoproto.moretags) = "yaml:\"memo\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"Sent via Cosmos Voyager\"", + }]; + + // Name or address of private key with which to sign + string chain_id = 3 [(gogoproto.moretags) = "yaml:\"chain_id\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"Cosmos-Hub\"", + }]; + + // The account number of the signing account (offline mode only) + uint64 account_number = 4 [(gogoproto.moretags) = "yaml:\"account_number\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1421\"", + format: "uint64" + }]; + + // The sequence number of the signing account (offline mode only) + uint64 sequence = 5 [(gogoproto.moretags) = "yaml:\"sequence\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"13\"", + format: "uint64" + }]; + + // Set a block timeout height to prevent the tx from being committed past a certain height + uint64 timeout_height = 6 [(gogoproto.moretags) = "yaml:\"timeout_height\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"200\"", + format: "uint64" + }]; + + // Fees to pay along with transaction + repeated cosmos.base.v1beta1.Coin fees = 7 + [(gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins", + 
(grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "[{\"denom\": \"uatom\", \"amount\": \"10\"}]", + format: "sdk.Coins" + } + ]; + + // Gas prices in decimal format to determine the transaction fee + repeated cosmos.base.v1beta1.DecCoin gas_prices = 8 [ + (gogoproto.moretags) = "yaml:\"gas_prices\"", + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.DecCoins", + (gogoproto.nullable) = false, + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "[{\"denom\": \"uatom\", \"amount\": \"0.1\"}]", + format: "sdk.DecCoins" + } + ]; + + // Gas amount to determine the transaction fee + uint64 gas = 9 [(gogoproto.moretags) = "yaml:\"gas\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"200000\"", + format: "uint64" + }]; + + // adjustment factor to be multiplied against the estimate returned by the tx simulation; if the gas limit is set manually this flag is ignored + string gas_adjustment = 10 [(gogoproto.moretags) = "yaml:\"gas_adjustment\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1.2\"", + format: "sdk.Dec" + }]; + + // Estimate gas for a transaction (cannot be used in conjunction with generate_only) + bool simulate = 11 [(gogoproto.moretags) = "yaml:\"simulate\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "false", + }]; +} + +// Fee struct of cosmos-sdk +message Fee { + uint64 gas = 1 [(gogoproto.moretags) = "yaml:\"gas\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"200000\"", + format: "uint64" + }]; + + // amount is the amount of coins to be paid as a fee + repeated cosmos.base.v1beta1.Coin amount = 2 + [(gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "[{\"denom\": \"uatom\", \"amount\": \"10\"}]", + format: "sdk.Coins" + } + ]; +} 
+ +// PubKey struct of tendermint/cosmos-sdk +message PubKey { + // type of pubkey algorithm + string type = 1 [(gogoproto.moretags) = "yaml:\"type\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"tendermint/PubKeySecp256k1\"", + }]; + + // value of pubkey + string value = 2 [(gogoproto.moretags) = "yaml:\"value\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"Avz04VhtKJh8ACCVzlI8aTosGy0ikFXKIVHQ3jKMrosH\"", + }]; +} + +// signature struct of tendermint/cosmos-sdk +message Signature { + // signature base64 + string signature = 1 [(gogoproto.moretags) = "yaml:\"signature\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"MEUCIQD02fsDPra8MtbRsyB1w7bqTM55Wu138zQbFcWx4+CFyAIge5WNPfKIuvzBZ69MyqHsqD8S1IwiEp+iUb6VSdtlpgY=\"", + }]; + + // PubKey + PubKey pub_key = 2 [(gogoproto.moretags) = "yaml:\"pub_key\""]; + + // The account number of the signing account (offline mode only) + uint64 account_number = 3 [(gogoproto.moretags) = "yaml:\"account_number\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"1421\"", + format: "uint64" + }]; + + // The sequence number of the signing account (offline mode only) + uint64 sequence = 4 [(gogoproto.moretags) = "yaml:\"sequence\"", + (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { + example: "\"13\"", + format: "uint64" + }]; + +} + +// Base response struct of result of the requested Tx, standard of tendermint/cosmos-sdk +message StdTx { + // Msgs + repeated string msg = 1 [(gogoproto.moretags) = "yaml:\"msg\""]; +// repeated string msg = 1 [(gogoproto.moretags) = "yaml:\"msg\"", +// (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { +// example: "[\"string\"]" +// }]; + + // Fee + Fee fee = 2 [(gogoproto.moretags) = "yaml:\"fee\""]; + + // Memo of the transaction + string memo = 3 [(gogoproto.moretags) = "yaml:\"memo\""]; + + // Signature + Signature signature = 4 
+      [(gogoproto.moretags) = "yaml:\"signature\""];
+}
diff --git a/ignite/pkg/protoanalysis/testdata/nested_messages/nested_messages.proto b/ignite/pkg/protoanalysis/testdata/nested_messages/nested_messages.proto
new file mode 100644
index 0000000..a147820
--- /dev/null
+++ b/ignite/pkg/protoanalysis/testdata/nested_messages/nested_messages.proto
@@ -0,0 +1,11 @@
+syntax = "proto3";
+
+package nested_messages;
+
+message A {
+  message B {
+    message C {
+      string hello = 1;
+    }
+  }
+}
diff --git a/ignite/pkg/protoanalysis/testdata/qualified_service/service.proto b/ignite/pkg/protoanalysis/testdata/qualified_service/service.proto
new file mode 100644
index 0000000..7973292
--- /dev/null
+++ b/ignite/pkg/protoanalysis/testdata/qualified_service/service.proto
@@ -0,0 +1,22 @@
+syntax = "proto3";
+
+package qualified_service;
+
+option go_package = "github.com/ignite/qualified_service";
+
+service Query {
+  rpc Ping(.qualified_service.PingRequest) returns (.qualified_service.PingResponse);
+  rpc Nested(.qualified_service.Outer.NestedRequest) returns (.qualified_service.PingResponse);
+}
+
+message PingRequest {
+  string id = 1;
+}
+
+message PingResponse {}
+
+message Outer {
+  message NestedRequest {
+    string id = 1;
+  }
+}
diff --git a/ignite/pkg/randstr/randstr.go b/ignite/pkg/randstr/randstr.go
new file mode 100644
index 0000000..1cd63ea
--- /dev/null
+++ b/ignite/pkg/randstr/randstr.go
@@ -0,0 +1,18 @@
+package randstr
+
+import (
+	"crypto/rand"
+	"math/big"
+)
+
+var letterRunes = []rune("abcdefghijklmnopqrstuvwxyz")
+
+// Runes generates a random string with n length from runes.
+func Runes(n int) string {
+	b := make([]rune, n)
+	for i := range b {
+		num, err := rand.Int(rand.Reader, big.NewInt(int64(len(letterRunes))))
+		if err != nil {
+			// A failing crypto/rand reader is unrecoverable; the previously
+			// ignored error made num nil and caused a bare nil-pointer panic
+			// on num.Int64() below — fail loudly with context instead.
+			panic(err)
+		}
+		b[i] = letterRunes[num.Int64()]
+	}
+	return string(b)
+}
diff --git a/ignite/pkg/randstr/randstr_test.go b/ignite/pkg/randstr/randstr_test.go
new file mode 100644
index 0000000..aaa4b37
--- /dev/null
+++ b/ignite/pkg/randstr/randstr_test.go
@@ -0,0 +1,27 @@
+package randstr
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestRunesReturnsRequestedLength(t *testing.T) {
+	for _, n := range []int{0, 1, 16, 64} {
+		got := Runes(n)
+		require.Len(t, got, n)
+	}
+}
+
+func TestRunesUsesOnlyLowercaseLetters(t *testing.T) {
+	allowed := make(map[rune]struct{}, len(letterRunes))
+	for _, r := range letterRunes {
+		allowed[r] = struct{}{}
+	}
+
+	got := Runes(128)
+	for _, r := range got {
+		_, ok := allowed[r]
+		require.Truef(t, ok, "unexpected rune %q in %q", r, got)
+	}
+}
diff --git a/ignite/pkg/repoversion/repoversion.go b/ignite/pkg/repoversion/repoversion.go
new file mode 100644
index 0000000..3222e6b
--- /dev/null
+++ b/ignite/pkg/repoversion/repoversion.go
@@ -0,0 +1,118 @@
+package repoversion
+
+import (
+	"fmt"
+	"strings"
+
+	"github.com/go-git/go-git/v5"
+	"github.com/go-git/go-git/v5/plumbing"
+	"github.com/go-git/go-git/v5/plumbing/object"
+)
+
+type Version struct {
+	Tag  string
+	Hash string
+}
+
+func Determine(path string) (v Version, err error) {
+	repo, err := git.PlainOpenWithOptions(path, &git.PlainOpenOptions{DetectDotGit: true})
+	if err != nil {
+		return Version{}, err
+	}
+
+	tags, err := repo.Tags()
+	if err != nil {
+		return Version{}, err
+	}
+
+	head, err := repo.Head()
+	if err != nil {
+		return Version{}, err
+	}
+
+	cIter, err := repo.Log(&git.LogOptions{Order: git.LogOrderCommitterTime})
+	if err != nil {
+		return Version{}, err
+	}
+
+	var (
+		tag             string
+		tagHash         string
+		commitIndex     int
+		taggerTimestamp int64
+	)
+
+	idMap := make(map[string]int)
+
+	err =
cIter.ForEach(func(c *object.Commit) error { + idMap[c.Hash.String()] = commitIndex + commitIndex++ + + return nil + }) + if err != nil { + return Version{}, err + } + + err = tags.ForEach(func(t *plumbing.Reference) error { + obj, err := repo.TagObject(t.Hash()) + + if err == nil { + + _, exists := idMap[obj.Target.String()] + + if !exists { + return nil + } + + if taggerTimestamp < obj.Tagger.When.Unix() { + taggerTimestamp = obj.Tagger.When.Unix() + + tag = strings.TrimPrefix(obj.Name, "v") + tagHash = obj.Target.String() + } + } else { + _, exists := idMap[t.Hash().String()] + + if !exists { + return nil + } + + commit, err := repo.CommitObject(t.Hash()) + if err != nil { + return err + } + + if taggerTimestamp < commit.Committer.When.Unix() { + taggerTimestamp = commit.Committer.When.Unix() + + tag = strings.TrimPrefix(t.Name().Short(), "v") + tagHash = t.Hash().String() + } + } + + return nil + }) + if err != nil { + return Version{}, err + } + + const subHashLen int = 8 + + tagHashIndex := idMap[tagHash] + headHashText := head.Hash().String() + subHeadHash := headHashText + + if len(headHashText) > subHashLen { + subHeadHash = subHeadHash[:subHashLen] + } + + v.Tag = tag + v.Hash = headHashText + + if tagHashIndex > 0 { + v.Tag = fmt.Sprintf("%s-%s", tag, subHeadHash) + } + + return v, nil +} diff --git a/ignite/pkg/repoversion/repoversion_test.go b/ignite/pkg/repoversion/repoversion_test.go new file mode 100644 index 0000000..854f8c7 --- /dev/null +++ b/ignite/pkg/repoversion/repoversion_test.go @@ -0,0 +1,81 @@ +package repoversion + +import ( + "os" + "path/filepath" + "testing" + "time" + + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing/object" + "github.com/stretchr/testify/require" +) + +func commitFile(t *testing.T, repo *git.Repository, dir, name, content string, when time.Time) string { + t.Helper() + + path := filepath.Join(dir, name) + require.NoError(t, os.WriteFile(path, []byte(content), 0o600)) + + wt, err := 
repo.Worktree() + require.NoError(t, err) + _, err = wt.Add(name) + require.NoError(t, err) + + hash, err := wt.Commit(name, &git.CommitOptions{ + Author: &object.Signature{Name: "test", Email: "test@example.com", When: when}, + Committer: &object.Signature{Name: "test", Email: "test@example.com", When: when}, + }) + require.NoError(t, err) + + return hash.String() +} + +func TestDetermineWithoutTag(t *testing.T) { + dir := t.TempDir() + repo, err := git.PlainInit(dir, false) + require.NoError(t, err) + + headHash := commitFile(t, repo, dir, "a.txt", "a", time.Unix(100, 0)) + + v, err := Determine(dir) + require.NoError(t, err) + require.Empty(t, v.Tag) + require.Equal(t, headHash, v.Hash) +} + +func TestDetermineWithTagOnHead(t *testing.T) { + dir := t.TempDir() + repo, err := git.PlainInit(dir, false) + require.NoError(t, err) + + headHash := commitFile(t, repo, dir, "a.txt", "a", time.Unix(100, 0)) + head, err := repo.Head() + require.NoError(t, err) + _, err = repo.CreateTag("v1.2.3", head.Hash(), nil) + require.NoError(t, err) + + v, err := Determine(dir) + require.NoError(t, err) + require.Equal(t, "1.2.3", v.Tag) + require.Equal(t, headHash, v.Hash) +} + +func TestDetermineWithOlderTagUsesSuffix(t *testing.T) { + dir := t.TempDir() + repo, err := git.PlainInit(dir, false) + require.NoError(t, err) + + _ = commitFile(t, repo, dir, "a.txt", "a", time.Unix(100, 0)) + head, err := repo.Head() + require.NoError(t, err) + _, err = repo.CreateTag("v1.0.0", head.Hash(), nil) + require.NoError(t, err) + + headHash := commitFile(t, repo, dir, "b.txt", "b", time.Unix(200, 0)) + + v, err := Determine(dir) + require.NoError(t, err) + require.Equal(t, "1.0.0-"+headHash[:8], v.Tag) + require.Equal(t, headHash, v.Hash) +} diff --git a/ignite/pkg/safeconverter/safeconverter.go b/ignite/pkg/safeconverter/safeconverter.go new file mode 100644 index 0000000..48920d5 --- /dev/null +++ b/ignite/pkg/safeconverter/safeconverter.go @@ -0,0 +1,13 @@ +package safeconverter + +type 
SafeToConvertToInt interface {
+	uintptr | uint | uint32 | uint64 | int | int32 | int64
+}
+
+func ToInt[T SafeToConvertToInt](x T) int {
+	return int(x)
+}
+
+func ToInt64[T SafeToConvertToInt](x T) int64 {
+	return int64(x)
+}
diff --git a/ignite/pkg/safeconverter/safeconverter_test.go b/ignite/pkg/safeconverter/safeconverter_test.go
new file mode 100644
index 0000000..bfd3792
--- /dev/null
+++ b/ignite/pkg/safeconverter/safeconverter_test.go
@@ -0,0 +1,15 @@
+package safeconverter
+
+import "testing"
+
+func TestToInt(t *testing.T) {
+	if ToInt(uint64(12)) != 12 {
+		t.Fatalf("expected 12")
+	}
+}
+
+func TestToInt64(t *testing.T) {
+	if ToInt64(int32(34)) != 34 {
+		t.Fatalf("expected 34")
+	}
+}
diff --git a/ignite/pkg/swagger-combine/swagger-combine.go b/ignite/pkg/swagger-combine/swagger-combine.go
new file mode 100644
index 0000000..e55773c
--- /dev/null
+++ b/ignite/pkg/swagger-combine/swagger-combine.go
@@ -0,0 +1,121 @@
+package swaggercombine
+
+import (
+	"fmt"
+	"os"
+	"path/filepath"
+	"sort"
+	"strings"
+
+	"github.com/go-openapi/analysis"
+	"github.com/go-openapi/loads"
+	"github.com/go-openapi/spec"
+
+	"github.com/ignite/cli/v29/ignite/pkg/errors"
+)
+
+// Config represents the swagger-combine config: the main (combined) spec plus
+// the source specs queued for the mixin.
+type Config struct {
+	spec  *spec.Swagger
+	specs []*spec.Swagger
+}
+
+// New creates a new swagger combine config.
+func New(title, name string) *Config {
+	return &Config{
+		spec: &spec.Swagger{
+			SwaggerProps: spec.SwaggerProps{
+				ID:      name,
+				Swagger: "2.0",
+				Info: &spec.Info{
+					InfoProps: spec.InfoProps{
+						Description: fmt.Sprintf("Chain %s REST API", name),
+						Title:       title,
+						Contact:     &spec.ContactInfo{ContactInfoProps: spec.ContactInfoProps{Name: name}},
+					},
+				},
+				Definitions: make(spec.Definitions),
+			},
+		},
+		specs: make([]*spec.Swagger, 0),
+	}
+}
+
+// AddSpec adds a new OpenAPI spec to Config by path in the fs and unique id of spec.
+func (c *Config) AddSpec(id, path string, makeUnique bool) error {
+	baseDoc, err := loads.Spec(path)
+	if err != nil {
+		return errors.Wrapf(err, "failed to load spec from path %s", path)
+	}
+
+	// NOTE(review): this local name shadows the imported `spec` package for
+	// the rest of the function.
+	spec := baseDoc.Spec()
+	if makeUnique {
+		// Prefix GET/POST operation IDs with the spec id so operations from
+		// different modules cannot collide after the mixin.
+		// NOTE(review): other verbs (PUT/PATCH/DELETE) are left untouched —
+		// confirm upstream module specs only expose get/post operations.
+		for i, specPath := range spec.Paths.Paths {
+			if specPath.Get != nil {
+				specPath.Get.ID = id + specPath.Get.ID
+			}
+			if specPath.Post != nil {
+				specPath.Post.ID = id + specPath.Post.ID
+			}
+			spec.Paths.Paths[i] = specPath
+		}
+	}
+
+	c.specs = append(c.specs, c.mergeTags(c.mergeDefinitions(spec)))
+
+	return nil
+}
+
+// mergeDefinitions merges spec definitions into the main spec and erases them
+// from the source spec. On a name collision the main spec's definition wins.
+func (c *Config) mergeDefinitions(m *spec.Swagger) *spec.Swagger {
+	for k, v := range m.Definitions {
+		if _, exists := c.spec.Definitions[k]; exists {
+			continue
+		}
+		c.spec.Definitions[k] = v
+	}
+	m.Definitions = nil
+	return m
+}
+
+// mergeTags merges spec tags (deduplicated by name) into the main spec and
+// erases them from the source spec.
+func (c *Config) mergeTags(m *spec.Swagger) *spec.Swagger {
+	for _, v := range m.Tags {
+		found := false
+		for _, vv := range c.spec.Tags {
+			if v.Name == vv.Name {
+				found = true
+				break
+			}
+		}
+		if found {
+			continue
+		}
+		c.spec.Tags = append(c.spec.Tags, v)
+	}
+	m.Tags = nil
+	return m
+}
+
+// Combine combines openapi specs into one and saves to out path.
+func (c *Config) Combine(out string) error {
+	// Sort by spec ID so the mixin result is deterministic.
+	sort.Slice(c.specs, func(a, b int) bool { return c.specs[a].ID < c.specs[b].ID })
+
+	errs := analysis.Mixin(c.spec, c.specs...)
+	if len(errs) > 0 {
+		return errors.Errorf("invalid mix specs: %s", strings.Join(errs, ", "))
+	}
+	specJSON, err := c.spec.MarshalJSON()
+	if err != nil {
+		return err
+	}
+	// ensure out dir exists.
+	outDir := filepath.Dir(out)
+	// 0o755: the output directory should not be group/other writable
+	// (previously 0o766, which was almost certainly unintended).
+	if err := os.MkdirAll(outDir, 0o755); err != nil {
+		return err
+	}
+	if err = os.WriteFile(out, specJSON, 0o600); err != nil {
+		return errors.Wrapf(err, "failed to write combined spec to file %s", out)
+	}
+	return nil
+}
diff --git a/ignite/pkg/swagger-combine/swagger_combine_test.go b/ignite/pkg/swagger-combine/swagger_combine_test.go
new file mode 100644
index 0000000..8aa0186
--- /dev/null
+++ b/ignite/pkg/swagger-combine/swagger_combine_test.go
@@ -0,0 +1,77 @@
+package swaggercombine
+
+import (
+	"encoding/json"
+	"os"
+	"path/filepath"
+	"testing"
+
+	"github.com/go-openapi/spec"
+	"github.com/stretchr/testify/require"
+)
+
+func TestNew(t *testing.T) {
+	c := New("My API", "ignite")
+	require.NotNil(t, c.spec)
+	require.Equal(t, "ignite", c.spec.ID)
+	require.Equal(t, "2.0", c.spec.Swagger)
+	require.Equal(t, "My API", c.spec.Info.Title)
+}
+
+func TestMergeDefinitionsAndTags(t *testing.T) {
+	c := New("My API", "ignite")
+	in := &spec.Swagger{
+		SwaggerProps: spec.SwaggerProps{
+			Definitions: spec.Definitions{
+				"MyType": spec.Schema{
+					SchemaProps: spec.SchemaProps{
+						Type: spec.StringOrArray{"object"},
+					},
+				},
+			},
+			Tags: []spec.Tag{
+				{TagProps: spec.TagProps{Name: "tag-a"}},
+			},
+		},
+	}
+
+	out := c.mergeDefinitions(in)
+	require.Nil(t, out.Definitions)
+	require.Contains(t, c.spec.Definitions, "MyType")
+
+	out = c.mergeTags(out)
+	require.Nil(t, out.Tags)
+	require.Len(t, c.spec.Tags, 1)
+	require.Equal(t, "tag-a", c.spec.Tags[0].Name)
+}
+
+func TestAddSpecAndCombine(t *testing.T) {
+	dir := t.TempDir()
+	specPath := filepath.Join(dir, "openapi.json")
+	specJSON := `{
+  "swagger":"2.0",
+  "info":{"title":"A","version":"1.0"},
+  "paths":{
+    "/hello":{
+      "get":{"operationId":"GetHello","responses":{"200":{"description":"ok"}}}
+    }
+  }
+}`
+	require.NoError(t, os.WriteFile(specPath, []byte(specJSON), 0o600))
+
+	c := New("My API", "ignite")
+	require.NoError(t, c.AddSpec("mod1-", specPath, true))
+
+	outPath :=
filepath.Join(dir, "combined", "swagger.json") + require.NoError(t, c.Combine(outPath)) + + raw, err := os.ReadFile(outPath) + require.NoError(t, err) + + var out map[string]any + require.NoError(t, json.Unmarshal(raw, &out)) + require.Equal(t, "2.0", out["swagger"]) + paths, ok := out["paths"].(map[string]any) + require.True(t, ok) + require.Contains(t, paths, "/hello") +} diff --git a/ignite/pkg/tarball/tarball.go b/ignite/pkg/tarball/tarball.go new file mode 100644 index 0000000..d7d1b9e --- /dev/null +++ b/ignite/pkg/tarball/tarball.go @@ -0,0 +1,79 @@ +package tarball + +import ( + "archive/tar" + "compress/gzip" + "io" + "path/filepath" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +var ( + // ErrGzipFileNotFound the file not found in the gzip. + ErrGzipFileNotFound = errors.New("file not found in the gzip") + // ErrNotGzipType the file is not a gzip. + ErrNotGzipType = errors.New("file is not a gzip type") + // ErrInvalidFileName the file name is invalid. + ErrInvalidFileName = errors.New("invalid file name") + // ErrInvalidFilePath the file path is invalid. + ErrInvalidFilePath = errors.New("invalid file path") + // ErrFileTooLarge the file is too large to extract. + ErrFileTooLarge = errors.New("file too large to extract") +) + +// ExtractFile founds and reads a specific file into a gzip file and folders recursively. 
+func ExtractFile(reader io.Reader, out io.Writer, fileName string) (string, error) { + if fileName == "" { + return "", ErrInvalidFileName + } + archive, err := gzip.NewReader(reader) + // Verify if is a GZIP file + if errors.Is(err, io.EOF) || errors.Is(err, gzip.ErrHeader) { + return "", ErrNotGzipType + } else if err != nil { + return "", err + } + defer archive.Close() + + tarReader := tar.NewReader(archive) + // Read the tarball files and find only the necessary file + for { + header, err := tarReader.Next() + if errors.Is(err, io.EOF) { + return "", ErrGzipFileNotFound + } else if err != nil { + return header.Name, err + } + + // Validate the file path + if !isValidPath(header.Name) { + return "", ErrInvalidFilePath + } + + switch header.Typeflag { + case tar.TypeDir: + continue + case tar.TypeReg: + name := filepath.Base(header.Name) + if fileName == name { + // Limit the size of the file to extract + if header.Size > 100<<20 { // 100 MB limit + return "", ErrFileTooLarge + } + limitedReader := io.LimitReader(tarReader, 1000<<20) // 1000 MB limit + _, err := io.Copy(out, limitedReader) + return header.Name, err + } + default: + continue + } + } +} + +// isValidPath checks for directory traversal attacks. 
func isValidPath(filePath string) bool {
	cleanPath := filepath.Clean(filePath)
	// Absolute entry names could write outside any extraction root.
	if filepath.IsAbs(cleanPath) {
		return false
	}
	// After Clean, all remaining ".." segments are at the start of the path,
	// so checking the prefix is sufficient. The previous substring check
	// (Contains "..") also rejected legitimate names such as "file..txt".
	return cleanPath != ".." && !strings.HasPrefix(cleanPath, ".."+string(filepath.Separator))
}
err, tt.err) + return + } + require.NoError(t, err) + require.Equal(t, tt.wantPath, gotPath) + + require.NoError(t, err) + require.Equal(t, tt.want, buf.Bytes()) + }) + } +} diff --git a/ignite/pkg/tarball/testdata/example-empty.tar.gz b/ignite/pkg/tarball/testdata/example-empty.tar.gz new file mode 100644 index 0000000..b441148 Binary files /dev/null and b/ignite/pkg/tarball/testdata/example-empty.tar.gz differ diff --git a/ignite/pkg/tarball/testdata/example-root.tar.gz b/ignite/pkg/tarball/testdata/example-root.tar.gz new file mode 100644 index 0000000..e79dbcb Binary files /dev/null and b/ignite/pkg/tarball/testdata/example-root.tar.gz differ diff --git a/ignite/pkg/tarball/testdata/example-subfolder.tar.gz b/ignite/pkg/tarball/testdata/example-subfolder.tar.gz new file mode 100644 index 0000000..e383942 Binary files /dev/null and b/ignite/pkg/tarball/testdata/example-subfolder.tar.gz differ diff --git a/ignite/pkg/tarball/testdata/example.json b/ignite/pkg/tarball/testdata/example.json new file mode 100644 index 0000000..7e7db32 --- /dev/null +++ b/ignite/pkg/tarball/testdata/example.json @@ -0,0 +1,4 @@ +{ + "chain_id": "gaia-1", + "genesis_time": "2022-03-24T21:00:36.557659Z" +} \ No newline at end of file diff --git a/ignite/pkg/tarball/testdata/example.tar.gz b/ignite/pkg/tarball/testdata/example.tar.gz new file mode 100644 index 0000000..4c21731 Binary files /dev/null and b/ignite/pkg/tarball/testdata/example.tar.gz differ diff --git a/ignite/pkg/tarball/testdata/invalid_file b/ignite/pkg/tarball/testdata/invalid_file new file mode 100644 index 0000000..473a0f4 diff --git a/ignite/pkg/truncatedbuffer/truncatedbuffer.go b/ignite/pkg/truncatedbuffer/truncatedbuffer.go new file mode 100644 index 0000000..4abac2d --- /dev/null +++ b/ignite/pkg/truncatedbuffer/truncatedbuffer.go @@ -0,0 +1,49 @@ +package truncatedbuffer + +import ( + "bytes" +) + +// TruncatedBuffer contains a bytes buffer that has a limited capacity. 
+// The buffer is truncated on Write if the length reaches the maximum capacity.
+// Only the first bytes are preserved.
+type TruncatedBuffer struct {
+	buf *bytes.Buffer
+	cap int
+}
+
+// NewTruncatedBuffer returns a new TruncatedBuffer.
+// If the provided cap is 0, the truncated buffer has no limit for truncating.
+func NewTruncatedBuffer(c int) *TruncatedBuffer {
+	return &TruncatedBuffer{
+		buf: &bytes.Buffer{},
+		cap: c,
+	}
+}
+
+// GetBuffer returns the underlying buffer.
+func (b TruncatedBuffer) GetBuffer() *bytes.Buffer {
+	return b.buf
+}
+
+// GetCap returns the maximum capacity of the buffer.
+func (b TruncatedBuffer) GetCap() int {
+	return b.cap
+}
+
+// Write implements io.Writer. Note that it always reports the full length of
+// p as written, even when the stored content was truncated to the capacity,
+// so writers upstream never see a short-write error.
+func (b *TruncatedBuffer) Write(p []byte) (n int, err error) {
+	n, err = b.buf.Write(p)
+	if err != nil {
+		return n, err
+	}
+
+	// Check surplus bytes beyond the configured capacity.
+	surplus := b.buf.Len() - b.cap
+
+	if b.cap > 0 && surplus > 0 {
+		b.buf.Truncate(b.cap)
+	}
+
+	return n, nil
+}
diff --git a/ignite/pkg/truncatedbuffer/truncatedbuffer_test.go b/ignite/pkg/truncatedbuffer/truncatedbuffer_test.go
new file mode 100644
index 0000000..9129e7d
--- /dev/null
+++ b/ignite/pkg/truncatedbuffer/truncatedbuffer_test.go
@@ -0,0 +1,40 @@
+package truncatedbuffer
+
+import (
+	"crypto/rand"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestWriter(t *testing.T) {
+	ranBytes10 := make([]byte, 10)
+	_, err := rand.Read(ranBytes10)
+	require.NoError(t, err)
+
+	ranBytes1000 := make([]byte, 1000)
+	_, err = rand.Read(ranBytes1000)
+	require.NoError(t, err)
+
+	// TruncatedBuffer has a max capacity
+	b := NewTruncatedBuffer(100)
+
+	require.Equal(t, 100, b.GetCap())
+
+	n, err := b.Write(ranBytes10)
+	require.NoError(t, err)
+	require.Equal(t, 10, n)
+	require.Equal(t, ranBytes10, b.GetBuffer().Bytes())
+
+	n, err = b.Write(ranBytes1000)
+	require.NoError(t, err)
+	require.Equal(t, 1000, n)
+	require.Equal(t, append(ranBytes10, ranBytes1000[:90]...), b.GetBuffer().Bytes())
+
+	//
TruncatedBuffer has no max capacity + b = NewTruncatedBuffer(0) + n, err = b.Write(ranBytes1000) + require.NoError(t, err) + require.Equal(t, 1000, n) + require.Equal(t, ranBytes1000, b.GetBuffer().Bytes()) +} diff --git a/ignite/pkg/xast/function.go b/ignite/pkg/xast/function.go new file mode 100644 index 0000000..f8febf1 --- /dev/null +++ b/ignite/pkg/xast/function.go @@ -0,0 +1,1035 @@ +package xast + +import ( + "bytes" + "fmt" + "go/ast" + "go/format" + "go/parser" + "go/token" + "sort" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +type ( + // functionOpts represent the options for functions. + functionOpts struct { + newParams []functionParam // Parameters to add to the function. + body string // New function body content. + newLines []functionLine // Lines to insert at specific positions. + insideCall functionCalls // Function calls to modify. + insideStruct functionStructs // Struct literals to modify. + appendTestCase []string // Test cases to append. + appendCode []string // Code to append at the end. + returnVars []string // Return variables to modify. + appendSwitch functionSwitches // Switch cases to append. + removeCalls []string // Function calls to remove. + } + + // FunctionOptions configures code generation. + FunctionOptions func(*functionOpts) + + // functionStruct represents a struct literal to modify. + functionSwitch struct { + condition string // Condition to find. + switchCase string // Switch case to insert. + switchBody string // Code to insert. + } + + functionSwitches []functionSwitch + functionSwitchesMap map[string]functionSwitches + + // functionStruct represents a struct literal to modify. + functionStruct struct { + name string // Name of the struct type. + param string // Name of the struct field. + code string // Code to insert. + } + functionStructs []functionStruct + functionStructsMap map[string]functionStructs + + // functionCall represents a function call to modify. 
	functionCall struct {
		name  string // Name of the function.
		code  string // Code to insert.
		index int    // Position to insert at.
	}
	functionCalls    []functionCall
	functionCallsMap map[string]functionCalls

	// functionParam represents a parameter to add to a function.
	functionParam struct {
		name    string // Parameter name.
		varType string // Parameter type.
		index   int    // Position to insert at.
	}

	// functionLine represents a line of code to insert.
	functionLine struct {
		code   string // Code to insert.
		number uint64 // Line number to insert at.
	}
)

// Field creates an AST field node from the function parameter.
func (p functionParam) Field() *ast.Field {
	return &ast.Field{
		Names: []*ast.Ident{ast.NewIdent(p.name)},
		Type:  ast.NewIdent(p.varType),
	}
}

// Map converts a slice of functionStructs to a map keyed by struct name.
func (s functionStructs) Map() functionStructsMap {
	structMap := make(functionStructsMap)
	for _, c := range s {
		structs, ok := structMap[c.name]
		if !ok {
			structs = make(functionStructs, 0)
		}
		structMap[c.name] = append(structs, c)
	}
	return structMap
}

// Map converts a slice of functionSwitches to a map keyed by switch condition.
func (s functionSwitches) Map() functionSwitchesMap {
	switchesMap := make(functionSwitchesMap)
	for _, c := range s {
		switches, ok := switchesMap[c.condition]
		if !ok {
			switches = make(functionSwitches, 0)
		}
		switchesMap[c.condition] = append(switches, c)
	}
	return switchesMap
}

// Map converts a slice of functionCalls to a map keyed by function name.
+func (c functionCalls) Map() functionCallsMap { + callMap := make(functionCallsMap) + for _, c := range c { + calls, ok := callMap[c.name] + if !ok { + calls = make(functionCalls, 0) + } + callMap[c.name] = append(calls, c) + } + return callMap +} + +func cloneFunctionCallsMap(src functionCallsMap) functionCallsMap { + dst := make(functionCallsMap, len(src)) + for k, v := range src { + dst[k] = v + } + return dst +} + +func cloneFunctionStructsMap(src functionStructsMap) functionStructsMap { + dst := make(functionStructsMap, len(src)) + for k, v := range src { + dst[k] = v + } + return dst +} + +func cloneFunctionSwitchesMap(src functionSwitchesMap) functionSwitchesMap { + dst := make(functionSwitchesMap, len(src)) + for k, v := range src { + dst[k] = v + } + return dst +} + +// AppendFuncParams adds a new parameter to a function. +func AppendFuncParams(name, varType string, index int) FunctionOptions { + return func(c *functionOpts) { + c.newParams = append(c.newParams, functionParam{ + name: name, + varType: varType, + index: index, + }) + } +} + +// ReplaceFuncBody replaces the entire body of a function, the method will replace first and apply the other options after. +func ReplaceFuncBody(body string) FunctionOptions { + return func(c *functionOpts) { + c.body = body + } +} + +// AppendFuncTestCase adds a test case to a test function, if exists, of a function in Go source code content. +func AppendFuncTestCase(testCase string) FunctionOptions { + return func(c *functionOpts) { + c.appendTestCase = append(c.appendTestCase, testCase) + } +} + +// AppendFuncCode adds code to the end of a function, if exists, of a function in Go source code content. +func AppendFuncCode(code string) FunctionOptions { + return func(c *functionOpts) { + c.appendCode = append(c.appendCode, code) + } +} + +// AppendFuncCodeAtLine inserts code at a specific line number. +var AppendFuncCodeAtLine = AppendFuncAtLine + +// AppendFuncAtLine inserts code at a specific line number. 
+func AppendFuncAtLine(code string, lineNumber uint64) FunctionOptions { + return func(c *functionOpts) { + c.newLines = append(c.newLines, functionLine{ + code: code, + number: lineNumber, + }) + } +} + +// AppendInsideFuncCall adds an argument to a function call. For instances, the method have a parameter a +// // call 'New(param1, param2)' and we want to add the param3 the result will be 'New(param1, param2, param3)'. +// AppendInsideFuncCall appends code inside a function call. +// The callName parameter can be either: +// - Simple name: "NewKeeper" matches any call to NewKeeper regardless of package/receiver +// - Qualified name: "foo.NewKeeper" matches only calls to NewKeeper with foo as the package/receiver +// +// The code parameter is the argument to insert, and index specifies the position: +// - index >= 0: insert at the specified position +// - index == -1: append at the end +func AppendInsideFuncCall(callName, code string, index int) FunctionOptions { + return func(c *functionOpts) { + c.insideCall = append(c.insideCall, functionCall{ + name: callName, + code: code, + index: index, + }) + } +} + +// AppendFuncStruct adds a field to a struct literal. For instance, +// the struct has only one parameter 'Params{Param1: param1}' and we want to add +// the param2 the result will be 'Params{Param1: param1, Param2: param2}'. +// +// The name parameter can be either: +// - Simple name: "Keeper" matches any struct literal of type Keeper regardless of package +// - Qualified name: "keeper.Keeper" matches only struct literals with keeper as the package +func AppendFuncStruct(name, param, code string) FunctionOptions { + return func(c *functionOpts) { + c.insideStruct = append(c.insideStruct, functionStruct{ + name: name, + param: param, + code: code, + }) + } +} + +// NewFuncReturn replaces return statements in a function. 
func NewFuncReturn(returnVars ...string) FunctionOptions {
	return func(c *functionOpts) {
		// Each string is parsed later as an expression and substituted into
		// the function's final return statement.
		c.returnVars = append(c.returnVars, returnVars...)
	}
}

// AppendSwitchCase inserts a new case with the code at a specific switch condition statement.
func AppendSwitchCase(condition, switchCase, switchBody string) FunctionOptions {
	return func(c *functionOpts) {
		c.appendSwitch = append(c.appendSwitch, functionSwitch{
			condition:  condition,
			switchCase: switchCase,
			switchBody: switchBody,
		})
	}
}

// RemoveFuncCall removes function calls with the specified name from within a function.
// The callName can be either a simple function name like "doSomething" or a qualified
// name like "pkg.DoSomething".
func RemoveFuncCall(callName string) FunctionOptions {
	return func(c *functionOpts) {
		c.removeCalls = append(c.removeCalls, callName)
	}
}

// newFunctionOptions creates a new functionOpts with defaults.
// All slices are allocated empty (not nil) so option functions can append
// without nil checks.
func newFunctionOptions() functionOpts {
	return functionOpts{
		newParams:      make([]functionParam, 0),
		body:           "",
		newLines:       make([]functionLine, 0),
		insideCall:     make(functionCalls, 0),
		insideStruct:   make(functionStructs, 0),
		appendTestCase: make([]string, 0),
		appendCode:     make([]string, 0),
		returnVars:     make([]string, 0),
		removeCalls:    make([]string, 0),
	}
}

// ModifyFunction modifies a function in Go source code using functional options.
// It parses content, locates funcName, applies every collected option through
// applyFunctionOptions, and returns the re-formatted source. An error is
// returned if content does not parse, the function is missing, or any option
// cannot be applied.
func ModifyFunction(content string, funcName string, functions ...FunctionOptions) (string, error) {
	// Collect all function options.
	opts := newFunctionOptions()
	for _, fn := range functions {
		fn(&opts)
	}

	// Parse source into AST
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "", content, parser.ParseComments)
	if err != nil {
		return "", errors.Errorf("failed to parse file (%s): %w", funcName, err)
	}

	// Build the comment map before mutating the AST so comments can be
	// re-associated (and orphans dropped) after modification.
	cmap := ast.NewCommentMap(fset, file, file.Comments)

	// Find the target function.
	funcDecl := findFuncDecl(file, funcName)
	if funcDecl == nil {
		return "", errors.Errorf("function %q not found", funcName)
	}

	// Apply modifications.
	if err := applyFunctionOptions(fset, funcDecl, &opts); err != nil {
		return "", err
	}

	// Filter drops comments whose anchor nodes were removed or replaced.
	file.Comments = cmap.Filter(file).Comments()
	return formatNode(fset, file)
}

// findFuncDecl finds a top-level function declaration in an AST by name.
// Returns nil when no declaration matches.
func findFuncDecl(file *ast.File, name string) *ast.FuncDecl {
	for _, decl := range file.Decls {
		if fd, ok := decl.(*ast.FuncDecl); ok && fd.Name.Name == name {
			return fd
		}
	}
	return nil
}

// addCode converts string code snippets into AST statements.
// Each snippet may contain multiple statements; they are flattened into a
// single statement list in input order.
func addCode(fileSet *token.FileSet, appendCode []string) ([]ast.Stmt, error) {
	code := make([]ast.Stmt, 0, len(appendCode))
	for _, codeToInsert := range appendCode {
		body, err := codeToBlockStmt(fileSet, codeToInsert)
		if err != nil {
			return nil, err
		}
		code = append(code, body.List...)
	}
	return code, nil
}

// modifyReturnVars converts return variable strings into AST expressions,
// preserving input order.
func modifyReturnVars(fileSet *token.FileSet, returnVars []string) ([]ast.Expr, error) {
	stmts := make([]ast.Expr, 0, len(returnVars))
	for _, returnVar := range returnVars {
		newRetExpr, err := parser.ParseExprFrom(fileSet, "", []byte(returnVar), parser.ParseComments)
		if err != nil {
			return nil, err
		}
		stmts = append(stmts, newRetExpr)
	}
	return stmts, nil
}

// appendSwitchCase appends a new case to a switch statement.
+func appendSwitchCase(fileSet *token.FileSet, stmt ast.Node, fs functionSwitches) error { + for _, f := range fs { + // Parse the new case code + newRetExpr, err := parser.ParseExprFrom(fileSet, "", []byte(f.switchCase), parser.ParseComments) + if err != nil { + return err + } + + bodyStmt, err := codeToBlockStmt(fileSet, f.switchBody) + if err != nil { + return err + } + + // Create a new case clause + newCase := &ast.CaseClause{ + List: []ast.Expr{newRetExpr}, + Body: bodyStmt.List, + Case: token.NoPos, // Keep first item aligned with case keyword + Colon: token.NoPos, // Keep colon aligned with case keyword + } + + // Handle different types of switch statements + switch statement := stmt.(type) { + case *ast.TypeSwitchStmt: + statement.Body.List = appendCaseToList(statement.Body.List, newCase) + case *ast.SwitchStmt: + statement.Body.List = appendCaseToList(statement.Body.List, newCase) + default: + return errors.Errorf("unsupported switch statement type: %T", stmt) + } + } + return nil +} + +// appendCaseToList handles inserting a case clause into a list of statements, +// placing it before any default case if one exists. +func appendCaseToList(list []ast.Stmt, newCase *ast.CaseClause) []ast.Stmt { + if len(list) > 0 { + lastCase, isDefault := list[len(list)-1].(*ast.CaseClause) + if isDefault && len(lastCase.List) == 0 { + // Insert before default. + return append(list[:len(list)-1], newCase, list[len(list)-1]) + } + } + + return append(list, newCase) +} + +// addParams adds new parameters to a function declaration. 
+func addParams(funcDecl *ast.FuncDecl, newParams []functionParam) error { + for _, p := range newParams { + switch { + case p.index == -1: + // Append at end + funcDecl.Type.Params.List = append(funcDecl.Type.Params.List, p.Field()) + case p.index >= 0 && p.index <= len(funcDecl.Type.Params.List): + // Insert at index + funcDecl.Type.Params.List = append( + funcDecl.Type.Params.List[:p.index], + append([]*ast.Field{p.Field()}, funcDecl.Type.Params.List[p.index:]...)..., + ) + default: + return errors.Errorf("params index %d out of range", p.index) + } + } + return nil +} + +// addNewLine inserts code at specific line numbers in a function body. +func addNewLine(fileSet *token.FileSet, funcDecl *ast.FuncDecl, newLines []functionLine) error { + for _, newLine := range newLines { + maxLine := uint64(len(funcDecl.Body.List)) - 1 + // Validate line number + if newLine.number > maxLine { + return errors.Errorf("line number %d out of range (max %d)", newLine.number, maxLine) + } + + // Parse insertion code + insertionExpr, err := codeToBlockStmt(fileSet, newLine.code) + if err != nil { + return err + } + + // Insert code at the specified line number. + funcDecl.Body.List = append( + funcDecl.Body.List[:newLine.number], + append(insertionExpr.List, funcDecl.Body.List[newLine.number:]...)..., + ) + } + return nil +} + +// modifyReturn handles return statement modifications and code appending. +func modifyReturn(funcDecl *ast.FuncDecl, returnStmts []ast.Expr, appendCode []ast.Stmt) error { + if len(funcDecl.Body.List) > 0 { + lastStmt := funcDecl.Body.List[len(funcDecl.Body.List)-1] + switch stmt := lastStmt.(type) { + case *ast.ReturnStmt: + // Modify the return statement + if len(returnStmts) > 0 { + stmt.Results = returnStmts + } + if len(appendCode) > 0 { + // Insert before return + appendCode = append(appendCode, stmt) + funcDecl.Body.List = append(funcDecl.Body.List[:len(funcDecl.Body.List)-1], appendCode...) 
+ } + default: + if len(returnStmts) > 0 { + return errors.New("return statement not found") + } + // Append at end + if len(appendCode) > 0 { + funcDecl.Body.List = append(funcDecl.Body.List, appendCode...) + } + } + } else { + if len(returnStmts) > 0 { + return errors.New("return statement not found") + } + // Append to empty body + if len(appendCode) > 0 { + funcDecl.Body.List = append(funcDecl.Body.List, appendCode...) + } + } + return nil +} + +// addTestCase adds test cases to a test function. +func addTestCase(fSet *token.FileSet, funcDecl *ast.FuncDecl, testCase []string) error { + // Find tests variable + for _, stmt := range funcDecl.Body.List { + assignStmt, ok := stmt.(*ast.AssignStmt) + if !ok || len(assignStmt.Lhs) == 0 { + continue + } + + // Check for "tests" variable + ident, ok := assignStmt.Lhs[0].(*ast.Ident) + if !ok || ident.Name != "tests" { + continue + } + + // Get composite literal + compositeLit, ok := assignStmt.Rhs[0].(*ast.CompositeLit) + if !ok { + continue + } + + // Add test cases + for _, tc := range testCase { + testCaseStmt, err := structToBlockStmt(fSet, tc) + if err != nil { + return err + } + compositeLit.Elts = append(compositeLit.Elts, testCaseStmt) + } + } + return nil +} + +// structToBlockStmt parses struct literal code into AST expression. +func structToBlockStmt(fSet *token.FileSet, code string) (ast.Expr, error) { + newFuncContent := toStruct(code) + newContent, err := parser.ParseExprFrom(fSet, "", newFuncContent, parser.AllErrors) + if err != nil { + return nil, err + } + + newCompositeList, ok := newContent.(*ast.CompositeLit) + if !ok { + return nil, errors.New("not a composite literal") + } + + if len(newCompositeList.Elts) != 1 { + return nil, errors.New("composite literal has more than one element or zero") + } + + return newCompositeList.Elts[0], nil +} + +// toStruct wraps code in an anonymous struct literal for parsing. 
+func toStruct(code string) string { + code = strings.TrimSpace(code) + code = strings.ReplaceAll(code, "\n\t", "\n") + code = strings.ReplaceAll(code, "\n ", "\n") + return fmt.Sprintf("struct {}{ %s }", code) +} + +// exprName extracts the name from an AST expression. +func exprName(expr ast.Expr) (string, bool) { + switch exp := expr.(type) { + case *ast.Ident: + return exp.Name, true + case *ast.SelectorExpr: + // Check if X is an identifier to get the package name + if ident, ok := exp.X.(*ast.Ident); ok { + // Return qualified name: package.Function + return ident.Name + "." + exp.Sel.Name, true + } + // Fallback to just the selector name if X is not an identifier + return exp.Sel.Name, true + default: + return "", false + } +} + +// addFunctionCall modifies function call arguments. +func addFunctionCall(expr *ast.CallExpr, calls functionCalls) error { + for _, c := range calls { + newArg := ast.NewIdent(c.code) + newArg.NamePos = token.Pos(c.index) + + switch { + case c.index == -1: + // Append at end + expr.Args = append(expr.Args, newArg) + case c.index >= 0 && c.index <= len(expr.Args): + // Insert at index + expr.Args = append(expr.Args[:c.index], append([]ast.Expr{newArg}, expr.Args[c.index:]...)...) + default: + return errors.Errorf("function call index %d out of range", c.index) + } + } + return nil +} + +// addStructs modifies struct literal fields. 
// addStructs appends fields to a struct literal, synthesizing token positions
// past the literal's closing brace so the printer does not collide new nodes
// with existing ones. The position arithmetic below is deliberate; do not
// reorder these statements.
func addStructs(fileSet *token.FileSet, expr *ast.CompositeLit, structs functionStructs) {
	// Find the current max offset to avoid reused positions
	file := fileSet.File(expr.Pos())
	maxOffset := file.Offset(expr.Rbrace)
	for _, elt := range expr.Elts {
		if pos := elt.End(); pos.IsValid() {
			offset := file.Offset(pos)
			if offset > maxOffset {
				maxOffset = offset
			}
		}
	}

	for i, s := range structs {
		// Advance position by one per inserted field so each new node gets
		// a distinct offset.
		insertOffset := maxOffset + i
		insertPos := file.Pos(insertOffset)

		value := ast.NewIdent(s.code)
		value.NamePos = insertPos

		// Empty param means a positional element; otherwise build Key: Value.
		var newArg ast.Expr = value
		if s.param != "" {
			key := ast.NewIdent(s.param)
			key.NamePos = insertPos + token.Pos(i)

			newArg = &ast.KeyValueExpr{
				Key:   key,
				Value: value,
				Colon: insertPos,
			}
		}

		expr.Elts = append(expr.Elts, newArg)
		// Push the closing brace right so it stays after the new element.
		expr.Rbrace += token.Pos(i + 1)
	}

	// Ensure closing brace is on a new line
	if len(expr.Elts) > 0 {
		last := expr.Elts[len(expr.Elts)-1]
		if file.Line(expr.Rbrace) == file.Line(last.End()) {
			// Force a new line before Rbrace
			file.AddLine(file.Offset(expr.Rbrace))
		}
	}
}

// codeToBlockStmt parses a code string into an AST block statement by
// wrapping it in a throwaway function and returning that function's body.
func codeToBlockStmt(fileSet *token.FileSet, code string) (*ast.BlockStmt, error) {
	newFuncContent := toCode(code)
	newContent, err := parser.ParseFile(fileSet, "", newFuncContent, parser.ParseComments)
	if err != nil {
		return nil, err
	}
	// Decls[0] is the wrapper func generated by toCode.
	return newContent.Decls[0].(*ast.FuncDecl).Body, nil
}

// toCode wraps code in a function for parsing.
func toCode(code string) string {
	return fmt.Sprintf("package p; func _() { %s }", strings.TrimSpace(code))
}

// formatNode formats an AST node into Go source code.
// formatNode formats an AST node into Go source code, trimming surrounding
// whitespace.
func formatNode(fileSet *token.FileSet, n ast.Node) (string, error) {
	var buf bytes.Buffer
	if err := format.Node(&buf, fileSet, n); err != nil {
		return "", err
	}

	node := strings.TrimSpace(buf.String())
	return node, nil
}

// applyFunctionOptions applies all modifications to a function.
// The application order is significant: call removal, then params, body
// replacement, line insertion, return/append handling, switch cases, call and
// struct rewrites (via ast.Inspect), and finally test cases. The *Check maps
// track options that never matched so unmatched ones become errors.
func applyFunctionOptions(fileSet *token.FileSet, f *ast.FuncDecl, opts *functionOpts) (err error) {
	var newFunctionBody *ast.BlockStmt
	if opts.body != "" {
		newFunctionBody, err = codeToBlockStmt(fileSet, opts.body)
		if err != nil {
			return err
		}
	}

	appendCode, err := addCode(fileSet, opts.appendCode)
	if err != nil {
		return err
	}

	returnStmts, err := modifyReturnVars(fileSet, opts.returnVars)
	if err != nil {
		return err
	}

	// Clones are consumed (deleted from) as matches are found; leftovers
	// mean the option never matched anything.
	callMap := opts.insideCall.Map()
	callMapCheck := cloneFunctionCallsMap(callMap)
	structMap := opts.insideStruct.Map()
	structMapCheck := cloneFunctionStructsMap(structMap)
	switchesCasesMap := opts.appendSwitch.Map()
	switchesCasesMapCheck := cloneFunctionSwitchesMap(switchesCasesMap)

	if len(opts.removeCalls) > 0 {
		removeFunctionCalls(f, opts.removeCalls)
	}

	if err := addParams(f, opts.newParams); err != nil {
		return err
	}

	// Replace the whole body before any insertion options run against it.
	if newFunctionBody != nil {
		f.Body = newFunctionBody
		f.Body.Rbrace = f.Body.Pos()
	}

	if err := addNewLine(fileSet, f, opts.newLines); err != nil {
		return err
	}

	if err := modifyReturn(f, returnStmts, appendCode); err != nil {
		return err
	}

	// Match top-level switch statements by the printed form of their
	// tag/assign expression (the key the user supplied as "condition").
	for _, bodyList := range f.Body.List {
		var (
			stmt ast.Stmt
			key  string
		)
		switch expr := bodyList.(type) {
		case *ast.TypeSwitchStmt:
			stmt = expr
			key, err = formatNode(fileSet, expr.Assign)
		case *ast.SwitchStmt:
			stmt = expr
			key, err = formatNode(fileSet, expr.Tag)
		default:
			continue
		}
		if err != nil {
			return err
		}

		switchCase, ok := switchesCasesMap[key]
		if !ok {
			continue
		}

		if err := appendSwitchCase(fileSet, stmt, switchCase); err != nil {
			return err
		}

		delete(switchesCasesMapCheck, key)
	}

	// Walk the function to rewrite matching call arguments and struct
	// literals. Both the qualified and the bare selector name are tried, so
	// "foo.NewKeeper" can be matched by either "foo.NewKeeper" or "NewKeeper".
	ast.Inspect(f, func(n ast.Node) bool {
		switch expr := n.(type) {
		case *ast.CallExpr:
			name, exist := exprName(expr.Fun)
			if !exist {
				return true
			}

			var allCalls functionCalls
			if calls, ok := callMap[name]; ok {
				allCalls = append(allCalls, calls...)
				delete(callMapCheck, name)
			}

			if sel, isSel := expr.Fun.(*ast.SelectorExpr); isSel {
				simpleName := sel.Sel.Name
				if calls, ok := callMap[simpleName]; ok {
					allCalls = append(allCalls, calls...)
					delete(callMapCheck, simpleName)
				}
			}

			if len(allCalls) == 0 {
				return true
			}

			// err is the named return; returning false stops the walk so
			// the error propagates after Inspect finishes.
			if err = addFunctionCall(expr, allCalls); err != nil {
				return false
			}

		case *ast.CompositeLit:
			name, exist := exprName(expr.Type)
			if !exist {
				return true
			}

			var allStructs functionStructs
			if structs, ok := structMap[name]; ok {
				allStructs = append(allStructs, structs...)
				delete(structMapCheck, name)
			}

			if sel, isSel := expr.Type.(*ast.SelectorExpr); isSel {
				simpleName := sel.Sel.Name
				if structs, ok := structMap[simpleName]; ok {
					allStructs = append(allStructs, structs...)
					delete(structMapCheck, simpleName)
				}
			}

			if len(allStructs) == 0 {
				return true
			}

			addStructs(fileSet, expr, allStructs)
		}
		return true
	})
	if err != nil {
		return err
	}

	if err := addTestCase(fileSet, f, opts.appendTestCase); err != nil {
		return err
	}

	// Any option that never matched is a hard error.
	if len(callMapCheck) > 0 {
		return errors.Errorf("function calls not found: %v", callMapCheck)
	}
	if len(structMapCheck) > 0 {
		return errors.Errorf("function structs not found: %v", structMapCheck)
	}
	if len(switchesCasesMapCheck) > 0 {
		return errors.Errorf("function switch not found: %v", switchesCasesMapCheck)
	}
	return nil
}

// ModifyCaller replaces all arguments of a specific function call in the given content.
// The callerExpr should be in the format "pkgname.FuncName" or just "FuncName".
+// The modifiers function is called with the existing arguments and should return the new arguments. +func ModifyCaller(content, callerExpr string, modifiers func([]string) ([]string, error)) (string, error) { + // parse the caller expression to extract package name and function name + var pkgName, funcName string + parts := strings.Split(callerExpr, ".") + switch len(parts) { + case 1: + funcName = parts[0] + case 2: + pkgName = parts[0] + funcName = parts[1] + default: + return "", errors.New("invalid caller expression format, use 'pkgname.FuncName' or 'FuncName'") + } + + fileSet := token.NewFileSet() + // preserve original source positions for maintaining whitespace + fileSet.AddFile("", fileSet.Base(), len(content)) + + f, err := parser.ParseFile(fileSet, "", content, parser.ParseComments) + if err != nil { + return "", err + } + + // track positions of all call expressions that need modification + type callModification struct { + node *ast.CallExpr + newArgs []string + startPos token.Pos + endPos token.Pos + } + + var modifications []callModification + + errInspect := Inspect(f, func(n ast.Node) error { + callExpr, ok := n.(*ast.CallExpr) + if !ok { + return nil + } + + // check if this call matches our target function + match := false + switch fun := callExpr.Fun.(type) { + case *ast.Ident: + // handle case of FuncName() + if pkgName == "" && fun.Name == funcName { + match = true + } + case *ast.SelectorExpr: + // handle case of pkg.FuncName() + if ident, ok := fun.X.(*ast.Ident); ok && ident.Name == pkgName && fun.Sel.Name == funcName { + match = true + } + } + + if !match { + return nil + } + + // extract current arguments as strings + currentArgs := make([]string, len(callExpr.Args)) + for i, arg := range callExpr.Args { + var buf bytes.Buffer + if err := format.Node(&buf, fileSet, arg); err != nil { + return err + } + currentArgs[i] = buf.String() + } + + // apply the modifier function + newArgs, err := modifiers(currentArgs) + if err != nil { + return 
err + } + + // record this modification for later application + modifications = append(modifications, callModification{ + node: callExpr, + newArgs: newArgs, + startPos: callExpr.Lparen + 1, // position right after the left parenthesis + endPos: callExpr.Rparen, // position of the right parenthesis + }) + + return nil + }) + + if errInspect != nil { + return "", errInspect + } + + if len(modifications) == 0 { + return "", errors.Errorf("function call %s not found in file content", callerExpr) + } + + // apply modifications in reverse order to avoid position shifts + sort.Slice(modifications, func(i, j int) bool { + return modifications[i].startPos > modifications[j].startPos + }) + + // make modifications directly to the content string + result := []byte(content) + for _, mod := range modifications { + // build the new arguments string + newArgsStr := strings.Join(mod.newArgs, ", ") + + // replace the arguments in the original content + startOffset := fileSet.Position(mod.startPos).Offset + endOffset := fileSet.Position(mod.endPos).Offset + + result = append( + result[:startOffset], + append( + []byte(newArgsStr), + result[endOffset:]..., + )..., + ) + } + + return string(result), nil +} + +// RemoveFunction removes a function declaration from the file content. +func RemoveFunction(content, funcName string) (string, error) { + // Parse source into AST. + fset := token.NewFileSet() + file, err := parser.ParseFile(fset, "", content, parser.ParseComments) + if err != nil { + return "", errors.Errorf("failed to parse file: %w", err) + } + + cmap := ast.NewCommentMap(fset, file, file.Comments) + + // Find the function to remove. + var found bool + var newDecls []ast.Decl + for _, decl := range file.Decls { + if fd, ok := decl.(*ast.FuncDecl); ok && fd.Name.Name == funcName { + found = true + // Remove comments associated with this function. + delete(cmap, decl) + continue // Skip this declaration to remove it. 
+ } + newDecls = append(newDecls, decl) + } + + if !found { + return "", errors.Errorf("function %q not found", funcName) + } + + // Update file declarations and comments. + file.Decls = newDecls + file.Comments = cmap.Filter(file).Comments() + + return formatNode(fset, file) +} + +// removeFunctionCalls removes all function calls matching the specified names from a function. +func removeFunctionCalls(f *ast.FuncDecl, callNames []string) { + if f.Body == nil { + return + } + + callMap := make(map[string]bool, len(callNames)) + for _, name := range callNames { + callMap[name] = true + } + + matchesCall := func(callExpr *ast.CallExpr) bool { + switch fun := callExpr.Fun.(type) { + case *ast.Ident: + return callMap[fun.Name] + case *ast.SelectorExpr: + if ident, ok := fun.X.(*ast.Ident); ok { + return callMap[ident.Name+"."+fun.Sel.Name] + } + } + return false + } + + var filterStmts func([]ast.Stmt) []ast.Stmt + filterStmts = func(stmts []ast.Stmt) []ast.Stmt { + filtered := make([]ast.Stmt, 0, len(stmts)) + for _, stmt := range stmts { + if exprStmt, ok := stmt.(*ast.ExprStmt); ok { + if callExpr, ok := exprStmt.X.(*ast.CallExpr); ok && matchesCall(callExpr) { + continue + } + } + + switch typedStmt := stmt.(type) { + case *ast.BlockStmt: + typedStmt.List = filterStmts(typedStmt.List) + case *ast.IfStmt: + if typedStmt.Body != nil { + typedStmt.Body.List = filterStmts(typedStmt.Body.List) + } + switch elseNode := typedStmt.Else.(type) { + case *ast.BlockStmt: + elseNode.List = filterStmts(elseNode.List) + case *ast.IfStmt: + elseNode.Body.List = filterStmts(elseNode.Body.List) + if elseBlock, ok := elseNode.Else.(*ast.BlockStmt); ok { + elseBlock.List = filterStmts(elseBlock.List) + } + } + case *ast.ForStmt: + if typedStmt.Body != nil { + typedStmt.Body.List = filterStmts(typedStmt.Body.List) + } + case *ast.RangeStmt: + if typedStmt.Body != nil { + typedStmt.Body.List = filterStmts(typedStmt.Body.List) + } + case *ast.SwitchStmt: + if typedStmt.Body != nil { + for 
_, caseClause := range typedStmt.Body.List { + if cc, ok := caseClause.(*ast.CaseClause); ok { + cc.Body = filterStmts(cc.Body) + } + } + } + case *ast.TypeSwitchStmt: + if typedStmt.Body != nil { + for _, caseClause := range typedStmt.Body.List { + if cc, ok := caseClause.(*ast.CaseClause); ok { + cc.Body = filterStmts(cc.Body) + } + } + } + } + filtered = append(filtered, stmt) + } + return filtered + } + + f.Body.List = filterStmts(f.Body.List) +} diff --git a/ignite/pkg/xast/function_test.go b/ignite/pkg/xast/function_test.go new file mode 100644 index 0000000..a931e01 --- /dev/null +++ b/ignite/pkg/xast/function_test.go @@ -0,0 +1,1487 @@ +package xast + +import ( + "strconv" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func TestModifyFunction(t *testing.T) { + existingContent := `package main + +import ( + "fmt" +) + +// main function +func main() { + // print hello world + fmt.Println("Hello, world!") + // call new param function + New(param1, param2) +} + +// anotherFunction another function +func anotherFunction() bool { + // init param + p := bla.NewParam() + // start to call something + p.CallSomething("Another call") + // return always true + return true +} + +// TestValidate test the validations +func TestValidate(t *testing.T) { + tests := []struct { + desc string + genState types.GenesisState + }{ + { + desc: "default is valid", + genState: types.DefaultGenesis(), + }, + { + desc: "valid genesis state", + genState: types.GenesisState{}, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + err := tc.genState.Validate() + require.NoError(t, err) + }) + } +}` + + type args struct { + fileContent string + functionName string + functions []FunctionOptions + } + tests := []struct { + name string + args args + want string + err error + }{ + { + name: "add a case to switch statement", + args: args{ + fileContent: `package test + +func processPacket(packet 
interface{}) error { + switch packet := packet.(type) { + default: + return fmt.Errorf("unknown packet type: %T", packet) + } +}`, + functionName: "processPacket", + functions: []FunctionOptions{ + AppendSwitchCase( + "packet := packet.(type)", + "*types.FooPacket", + "return handleFooPacket(packet)", + ), + }, + }, + want: `package test + +func processPacket(packet interface{}) error { + switch packet := packet.(type) { + case *types.FooPacket: + return handleFooPacket(packet) + + default: + return fmt.Errorf("unknown packet type: %T", packet) + } +}`, + }, + { + name: "add multiple cases to switch statement", + args: args{ + fileContent: `package test + +func handlePacket(data interface{}) error { + switch v := data.(type) { + case string: + return processString(v) + default: + return fmt.Errorf("unsupported type: %T", v) + } +}`, + functionName: "handlePacket", + functions: []FunctionOptions{ + AppendSwitchCase( + "v := data.(type)", + "int", + "return processInt(v)", + ), + AppendSwitchCase( + "v := data.(type)", + "bool", + "return processBool(v)", + ), + }, + }, + want: `package test + +func handlePacket(data interface{}) error { + switch v := data.(type) { + case string: + return processString(v) + case int: + return processInt(v) + case bool: + return processBool(v) + + default: + return fmt.Errorf("unsupported type: %T", v) + } +}`, + }, + { + name: "add multiple cases to two switch statement", + args: args{ + fileContent: `package test + +func handlePacket(data interface{}) error { + switch v := data.(type) { + case string: + return processString(v) + default: + return fmt.Errorf("unsupported type: %T", v) + } + + switch x { + case 1: + return "one" + default: + return "unknown" + } +}`, + functionName: "handlePacket", + functions: []FunctionOptions{ + AppendSwitchCase( + "v := data.(type)", + "int", + "return processInt(v)", + ), + AppendSwitchCase( + "x", + "2", + `return "two"`, + ), + }, + }, + want: `package test + +func handlePacket(data 
interface{}) error { + switch v := data.(type) { + case string: + return processString(v) + case int: + return processInt(v) + + default: + return fmt.Errorf("unsupported type: %T", v) + } + + switch x { + case 1: + return "one" + case 2: + return "two" + + default: + return "unknown" + } +}`, + }, + { + name: "add case to switch with non-matching condition", + args: args{ + fileContent: `package test + +func process(x int) string { + switch x { + case 1: + return "one" + default: + return "unknown" + } +}`, + functionName: "process", + functions: []FunctionOptions{ + AppendSwitchCase( + "wrongCondition", + "2", + `return "two"`, + ), + }, + }, + err: errors.New("function switch not found: map[wrongCondition:[{wrongCondition 2 return \"two\"}]]"), + }, + + { + name: "add all modifications type", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{ + AppendFuncParams("param1", "string", 0), + ReplaceFuncBody(`return false`), + AppendFuncAtLine(`fmt.Println("Appended at line 0.")`, 0), + AppendFuncAtLine(`SimpleCall(foo, bar)`, 1), + AppendFuncAtLine(`if param1 == "" { + return false + }`, 2), + AppendFuncCode(`fmt.Println("Appended code.")`), + AppendFuncCode(`Param{ + Baz: baz, + Foo: foo, + }`), + NewFuncReturn("1"), + AppendInsideFuncCall("SimpleCall", "baz", 0), + AppendInsideFuncCall("SimpleCall", "bla", -1), + AppendInsideFuncCall("Println", strconv.Quote("test"), -1), + AppendFuncStruct("Param", "Bar", strconv.Quote("bar")), + AppendFuncTestCase(`{ + desc: "valid first genesis state", + genState: GenesisState{}, + }`), + }, + }, + want: `package main + +import ( + "fmt" +) + +// main function +func main() { + // print hello world + fmt.Println("Hello, world!") + // call new param function + New(param1, param2) +} + +// anotherFunction another function +func anotherFunction(param1 string) bool { + fmt.Println("Appended at line 0.", "test") + SimpleCall(baz, foo, bar, bla) + if param1 == "" { + return 
false + } + fmt.Println("Appended code.", "test") + Param{ + Baz: baz, + Foo: foo, + Bar: "bar", + } + return 1 +} + +// TestValidate test the validations +func TestValidate(t *testing.T) { + tests := []struct { + desc string + genState types.GenesisState + }{ + { + desc: "default is valid", + genState: types.DefaultGenesis(), + }, + { + desc: "valid genesis state", + genState: types.GenesisState{}, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + err := tc.genState.Validate() + require.NoError(t, err) + }) + } +}`, + }, + { + name: "add the replace body", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{ReplaceFuncBody(`return false`)}, + }, + want: `package main + +import ( + "fmt" +) + +// main function +func main() { + // print hello world + fmt.Println("Hello, world!") + // call new param function + New(param1, param2) +} + +// anotherFunction another function +func anotherFunction() bool { return false } + +// TestValidate test the validations +func TestValidate(t *testing.T) { + tests := []struct { + desc string + genState types.GenesisState + }{ + { + desc: "default is valid", + genState: types.DefaultGenesis(), + }, + { + desc: "valid genesis state", + genState: types.GenesisState{}, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + err := tc.genState.Validate() + require.NoError(t, err) + }) + } +}`, + }, + { + name: "add a new test case", + args: args{ + fileContent: existingContent, + functionName: "TestValidate", + functions: []FunctionOptions{ + AppendFuncTestCase(`{ + desc: "valid genesis state", + genState: GenesisState{}, +}`), + }, + }, + want: `package main + +import ( + "fmt" +) + +// main function +func main() { + // print hello world + fmt.Println("Hello, world!") + // call new param function + New(param1, param2) +} + +// anotherFunction another function +func anotherFunction() bool { + // init param + p := bla.NewParam() + 
// start to call something + p.CallSomething("Another call") + // return always true + return true +} + +// TestValidate test the validations +func TestValidate(t *testing.T) { + tests := []struct { + desc string + genState types.GenesisState + }{ + { + desc: "default is valid", + genState: types.DefaultGenesis(), + }, + { + desc: "valid genesis state", + genState: types.GenesisState{}, + }, { + desc: "valid genesis state", + genState: GenesisState{}, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + err := tc.genState.Validate() + require.NoError(t, err) + }) + } +}`, + }, + { + name: "add two test cases", + args: args{ + fileContent: existingContent, + functionName: "TestValidate", + functions: []FunctionOptions{ + AppendFuncTestCase(` +{ + desc: "valid first genesis state", + genState: GenesisState{}, +}`), + AppendFuncTestCase(` +{ + desc: "valid second genesis state", + genState: GenesisState{}, +}`), + }, + }, + want: `package main + +import ( + "fmt" +) + +// main function +func main() { + // print hello world + fmt.Println("Hello, world!") + // call new param function + New(param1, param2) +} + +// anotherFunction another function +func anotherFunction() bool { + // init param + p := bla.NewParam() + // start to call something + p.CallSomething("Another call") + // return always true + return true +} + +// TestValidate test the validations +func TestValidate(t *testing.T) { + tests := []struct { + desc string + genState types.GenesisState + }{ + { + desc: "default is valid", + genState: types.DefaultGenesis(), + }, + { + desc: "valid genesis state", + genState: types.GenesisState{}, + }, { + desc: "valid first genesis state", + genState: GenesisState{}, + }, { + desc: "valid second genesis state", + genState: GenesisState{}, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + err := tc.genState.Validate() + require.NoError(t, err) + }) + } +}`, + }, + { + name: "add append line and code modification", + 
args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{ + AppendFuncAtLine(`fmt.Println("Appended at line 0.")`, 0), + AppendFuncAtLine(`SimpleCall(foo, bar)`, 1), + AppendFuncCode(`fmt.Println("Appended code.")`), + }, + }, + want: `package main + +import ( + "fmt" +) + +// main function +func main() { + // print hello world + fmt.Println("Hello, world!") + // call new param function + New(param1, param2) +} + +// anotherFunction another function +func anotherFunction() bool { + fmt.Println("Appended at line 0.") + SimpleCall(foo, bar) + + // init param + p := bla.NewParam() + // start to call something + p.CallSomething("Another call") + fmt.Println("Appended code.") + + // return always true + return true +} + +// TestValidate test the validations +func TestValidate(t *testing.T) { + tests := []struct { + desc string + genState types.GenesisState + }{ + { + desc: "default is valid", + genState: types.DefaultGenesis(), + }, + { + desc: "valid genesis state", + genState: types.GenesisState{}, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + err := tc.genState.Validate() + require.NoError(t, err) + }) + } +}`, + }, + { + name: "add all modifications type", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{NewFuncReturn("1")}, + }, + want: strings.ReplaceAll(existingContent, "return true", "return 1\n"), + }, + { + name: "add inside call modifications", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{ + AppendInsideFuncCall("NewParam", "baz", 0), + AppendInsideFuncCall("NewParam", "bla", -1), + AppendInsideFuncCall("CallSomething", strconv.Quote("test1"), -1), + AppendInsideFuncCall("CallSomething", strconv.Quote("test2"), 0), + }, + }, + want: `package main + +import ( + "fmt" +) + +// main function +func main() { + // print hello world + fmt.Println("Hello, 
world!") + // call new param function + New(param1, param2) +} + +// anotherFunction another function +func anotherFunction() bool { + // init param + p := bla.NewParam(baz, bla) + // start to call something + p.CallSomething("test2", "Another call", "test1") + // return always true + return true +} + +// TestValidate test the validations +func TestValidate(t *testing.T) { + tests := []struct { + desc string + genState types.GenesisState + }{ + { + desc: "default is valid", + genState: types.DefaultGenesis(), + }, + { + desc: "valid genesis state", + genState: types.GenesisState{}, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + err := tc.genState.Validate() + require.NoError(t, err) + }) + } +}`, + }, + { + name: "add inside call modifications with qualified package name", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{ + AppendInsideFuncCall("bla.NewParam", "baz", 0), + AppendInsideFuncCall("bla.NewParam", "bla", -1), + AppendInsideFuncCall("CallSomething", strconv.Quote("test1"), -1), + }, + }, + want: `package main + +import ( + "fmt" +) + +// main function +func main() { + // print hello world + fmt.Println("Hello, world!") + // call new param function + New(param1, param2) +} + +// anotherFunction another function +func anotherFunction() bool { + // init param + p := bla.NewParam(baz, bla) + // start to call something + p.CallSomething("Another call", "test1") + // return always true + return true +} + +// TestValidate test the validations +func TestValidate(t *testing.T) { + tests := []struct { + desc string + genState types.GenesisState + }{ + { + desc: "default is valid", + genState: types.DefaultGenesis(), + }, + { + desc: "valid genesis state", + genState: types.GenesisState{}, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + err := tc.genState.Validate() + require.NoError(t, err) + }) + } +}`, + }, + { + name: "add inside call 
modifications with mixed qualified and unqualified names", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{ + AppendInsideFuncCall("bla.NewParam", "ctx", 0), + AppendInsideFuncCall("NewParam", "baz", -1), + AppendInsideFuncCall("p.CallSomething", strconv.Quote("test1"), 0), + AppendInsideFuncCall("CallSomething", strconv.Quote("test2"), -1), + }, + }, + want: `package main + +import ( + "fmt" +) + +// main function +func main() { + // print hello world + fmt.Println("Hello, world!") + // call new param function + New(param1, param2) +} + +// anotherFunction another function +func anotherFunction() bool { + // init param + p := bla.NewParam(ctx, baz) + // start to call something + p.CallSomething("test1", "Another call", "test2") + // return always true + return true +} + +// TestValidate test the validations +func TestValidate(t *testing.T) { + tests := []struct { + desc string + genState types.GenesisState + }{ + { + desc: "default is valid", + genState: types.DefaultGenesis(), + }, + { + desc: "valid genesis state", + genState: types.GenesisState{}, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + err := tc.genState.Validate() + require.NoError(t, err) + }) + } +}`, + }, + { + name: "add inside struct modifications", + args: args{ + fileContent: `package main + +import ( + "fmt" +) + +// anotherFunction another function +func anotherFunction() bool { + Param{ + Baz: baz, + Foo: foo, + } + Client{baz, foo} + // return always true + return true +} + +// TestValidate test the validations +func TestValidate(t *testing.T) { + tests := []struct { + desc string + genState types.GenesisState + }{ + { + desc: "default is valid", + genState: types.DefaultGenesis(), + }, + { + desc: "valid genesis state", + genState: types.GenesisState{}, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + err := tc.genState.Validate() + require.NoError(t, err) + }) + } 
+}`, + functionName: "anotherFunction", + functions: []FunctionOptions{ + AppendFuncStruct("Param", "Bar", "bar"), + AppendFuncStruct("Param", "Bla", "bla"), + AppendFuncStruct("Client", "", "bar"), + }, + }, + want: `package main + +import ( + "fmt" +) + +// anotherFunction another function +func anotherFunction() bool { + Param{ + Baz: baz, + Foo: foo, + Bar: bar, + Bla: bla, + } + Client{baz, foo, bar} + // return always true + return true +} + +// TestValidate test the validations +func TestValidate(t *testing.T) { + tests := []struct { + desc string + genState types.GenesisState + }{ + { + desc: "default is valid", + genState: types.DefaultGenesis(), + }, + { + desc: "valid genesis state", + genState: types.GenesisState{}, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + err := tc.genState.Validate() + require.NoError(t, err) + }) + } +}`, + }, + { + name: "function without test case assertion", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{ + AppendFuncTestCase(`{ + desc: "valid second genesis state", + genState: GenesisState{}, + }`), + }, + }, + want: existingContent, + }, + { + name: "params out of range", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{AppendFuncParams("param1", "string", 1)}, + }, + err: errors.New("params index 1 out of range"), + }, + { + name: "invalid params", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{AppendFuncParams("9#.(c", "string", 0)}, + }, + err: errors.New("format.Node internal error (16:22: expected ')', found 9 (and 1 more errors))"), + }, + { + name: "invalid content for replace body", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{ReplaceFuncBody("9#.(c")}, + }, + err: errors.New("1:24: illegal character U+0023 '#'"), + }, + { + 
name: "line number out of range", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{AppendFuncAtLine(`fmt.Println("")`, 4)}, + }, + err: errors.New("line number 4 out of range (max 2)"), + }, + { + name: "invalid code for append at line", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{AppendFuncAtLine("9#.(c", 0)}, + }, + err: errors.New("1:24: illegal character U+0023 '#'"), + }, + { + name: "invalid code append", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{AppendFuncCode("9#.(c")}, + }, + err: errors.New("1:24: illegal character U+0023 '#'"), + }, + { + name: "invalid new return", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{NewFuncReturn("9#.(c")}, + }, + err: errors.New("1:2: illegal character U+0023 '#'"), + }, + { + name: "call name not found", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{AppendInsideFuncCall("FooFunction", "baz", 0)}, + }, + err: errors.New("function calls not found: map[FooFunction:[{FooFunction baz 0}]]"), + }, + { + name: "invalid call param", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{AppendInsideFuncCall("NewParam", "9#.(c", 0)}, + }, + err: errors.New("format.Node internal error (18:21: illegal character U+0023 '#' (and 4 more errors))"), + }, + { + name: "call params out of range", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: []FunctionOptions{AppendInsideFuncCall("NewParam", "baz", 1)}, + }, + err: errors.New("function call index 1 out of range"), + }, + { + name: "empty modifications", + args: args{ + fileContent: existingContent, + functionName: "anotherFunction", + functions: 
[]FunctionOptions{}, + }, + want: existingContent, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := ModifyFunction(tt.args.fileContent, tt.args.functionName, tt.args.functions...) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestModifyCaller(t *testing.T) { + existingContent := `package main + +import ( + "context" + "fmt" +) + +// main function +func main() { + // Simple function call + // print hello world + fmt.Println("Hello, world!") + + // Call with multiple arguments + server.Foo(param1, param2, 42) + + // Call with no arguments + EmptyFunc() + + // Call with complex arguments + ComplexFunc([]string{"a", "b"}, map[string]int{"a": 1}) + + // Multiple calls to the same function + fmt.Println("First call") + fmt.Println("Second call") +} +` + + tests := []struct { + name string + content string + callerExpr string + modifierFunc func([]string) ([]string, error) + expected string + expectedError string + }{ + { + name: "replace arguments in fmt.Println", + content: existingContent, + callerExpr: "fmt.Println", + modifierFunc: func(args []string) ([]string, error) { + return []string{`"Modified output"`}, nil + }, + expected: `package main + +import ( + "context" + "fmt" +) + +// main function +func main() { + // Simple function call + // print hello world + fmt.Println("Modified output") + + // Call with multiple arguments + server.Foo(param1, param2, 42) + + // Call with no arguments + EmptyFunc() + + // Call with complex arguments + ComplexFunc([]string{"a", "b"}, map[string]int{"a": 1}) + + // Multiple calls to the same function + fmt.Println("Modified output") + fmt.Println("Modified output") +} +`, + }, + { + name: "replace server.Foo arguments", + content: existingContent, + callerExpr: "server.Foo", + modifierFunc: func(args []string) ([]string, error) { + return 
[]string{"context.Background()", "newParam", "123"}, nil + }, + expected: `package main + +import ( + "context" + "fmt" +) + +// main function +func main() { + // Simple function call + // print hello world + fmt.Println("Hello, world!") + + // Call with multiple arguments + server.Foo(context.Background(), newParam, 123) + + // Call with no arguments + EmptyFunc() + + // Call with complex arguments + ComplexFunc([]string{"a", "b"}, map[string]int{"a": 1}) + + // Multiple calls to the same function + fmt.Println("First call") + fmt.Println("Second call") +} +`, + }, + { + name: "add argument to EmptyFunc", + content: existingContent, + callerExpr: "EmptyFunc", + modifierFunc: func(args []string) ([]string, error) { + return []string{`"new argument"`}, nil + }, + expected: `package main + +import ( + "context" + "fmt" +) + +// main function +func main() { + // Simple function call + // print hello world + fmt.Println("Hello, world!") + + // Call with multiple arguments + server.Foo(param1, param2, 42) + + // Call with no arguments + EmptyFunc("new argument") + + // Call with complex arguments + ComplexFunc([]string{"a", "b"}, map[string]int{"a": 1}) + + // Multiple calls to the same function + fmt.Println("First call") + fmt.Println("Second call") +} +`, + }, + { + name: "modify complex arguments", + content: existingContent, + callerExpr: "ComplexFunc", + modifierFunc: func(args []string) ([]string, error) { + return []string{`[]string{"x", "y", "z"}`, `map[string]int{"x": 10}`}, nil + }, + expected: `package main + +import ( + "context" + "fmt" +) + +// main function +func main() { + // Simple function call + // print hello world + fmt.Println("Hello, world!") + + // Call with multiple arguments + server.Foo(param1, param2, 42) + + // Call with no arguments + EmptyFunc() + + // Call with complex arguments + ComplexFunc([]string{"x", "y", "z"}, map[string]int{"x": 10}) + + // Multiple calls to the same function + fmt.Println("First call") + fmt.Println("Second 
call") +} +`, + }, + { + name: "function not found", + content: existingContent, + callerExpr: "NonExistentFunc", + modifierFunc: func(args []string) ([]string, error) { + return []string{`"test"`}, nil + }, + expectedError: "function call NonExistentFunc not found in file content", + }, + { + name: "error in modifier function", + content: existingContent, + callerExpr: "fmt.Println", + modifierFunc: func(args []string) ([]string, error) { + return nil, errors.New("custom error in modifier") + }, + expectedError: "custom error in modifier", + }, + { + name: "invalid caller expression", + content: existingContent, + callerExpr: "pkg.sub.Function", + modifierFunc: func(args []string) ([]string, error) { + return []string{`"test"`}, nil + }, + expectedError: "invalid caller expression format, use 'pkgname.FuncName' or 'FuncName'", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, err := ModifyCaller(tt.content, tt.callerExpr, tt.modifierFunc) + + if tt.expectedError != "" { + require.Error(t, err) + require.Contains(t, err.Error(), tt.expectedError) + return + } + + require.NoError(t, err) + require.Equal(t, tt.expected, result) + }) + } +} + +func TestRemoveFunction(t *testing.T) { + tests := []struct { + name string + content string + funcName string + expected string + expectError bool + }{ + { + name: "remove a simple function", + content: `package main + +func main() { + println("hello") +} + +func anotherFunction() { + println("another") +} + +func thirdFunction() { + println("third") +} +`, + funcName: "anotherFunction", + expected: `package main + +func main() { + println("hello") +} + +func thirdFunction() { + println("third") +}`, + }, + { + name: "remove first function", + content: `package main + +func first() { + println("first") +} + +func second() { + println("second") +} +`, + funcName: "first", + expected: `package main + +func second() { + println("second") +}`, + }, + { + name: "remove last function", + content: 
`package main + +func first() { + println("first") +} + +func second() { + println("second") +} +`, + funcName: "second", + expected: `package main + +func first() { + println("first") +}`, + }, + { + name: "remove function with comments", + content: `package main + +// main is the entry point +func main() { + println("main") +} + +// helperFunc does something +func helperFunc() { + println("helper") +} +`, + funcName: "helperFunc", + expected: `package main + +// main is the entry point +func main() { + println("main") +}`, + }, + { + name: "function not found", + content: `package main + +func main() { + println("hello") +} +`, + funcName: "notFound", + expectError: true, + }, + { + name: "invalid source file", + content: `package main func`, + funcName: "main", + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, err := RemoveFunction(tt.content, tt.funcName) + + if tt.expectError { + require.Error(t, err) + return + } + + require.NoError(t, err) + require.Equal(t, tt.expected, result) + }) + } +} + +func TestRemoveFuncCall(t *testing.T) { + tests := []struct { + name string + content string + funcName string + callName string + expected string + }{ + { + name: "remove a function call", + content: `package main + +func main() { + fmt.Println("before") + doSomething() + fmt.Println("after") +} +`, + funcName: "main", + callName: "doSomething", + expected: `package main + +func main() { + fmt.Println("before") + + fmt.Println("after") +}`, + }, + { + name: "remove qualified function call", + content: `package main + +func main() { + fmt.Println("hello") + pkg.DoSomething() + fmt.Println("world") +} +`, + funcName: "main", + callName: "pkg.DoSomething", + expected: `package main + +func main() { + fmt.Println("hello") + + fmt.Println("world") +}`, + }, + { + name: "remove multiple calls to same function", + content: `package main + +func main() { + doSomething() + fmt.Println("middle") + doSomething() +} +`, + 
funcName: "main", + callName: "doSomething", + expected: `package main + +func main() { + + fmt.Println("middle") + +}`, + }, + { + name: "remove call with arguments", + content: `package main + +func process() { + validate(arg1, arg2) + execute() +} +`, + funcName: "process", + callName: "validate", + expected: `package main + +func process() { + + execute() +}`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, err := ModifyFunction(tt.content, tt.funcName, RemoveFuncCall(tt.callName)) + require.NoError(t, err) + require.Equal(t, tt.expected, result) + }) + } +} + +func TestModifyFunctionMissingTargets(t *testing.T) { + t.Run("invalid source content", func(t *testing.T) { + _, err := ModifyFunction("package main\nfunc", "anotherFunction") + require.Error(t, err) + require.Contains(t, err.Error(), "failed to parse file (anotherFunction)") + }) + + t.Run("function not found", func(t *testing.T) { + _, err := ModifyFunction(`package main + +func main() {} +`, "anotherFunction") + require.EqualError(t, err, `function "anotherFunction" not found`) + }) +} + +func TestModifyFunctionReturnStatementNotFound(t *testing.T) { + t.Run("non-empty body without return", func(t *testing.T) { + _, err := ModifyFunction(`package main + +func noReturn() { + doSomething() +} +`, "noReturn", NewFuncReturn("1")) + require.EqualError(t, err, "return statement not found") + }) + + t.Run("empty body without return", func(t *testing.T) { + _, err := ModifyFunction(`package main + +func empty() {} +`, "empty", NewFuncReturn("1")) + require.EqualError(t, err, "return statement not found") + }) +} + +func TestRemoveFuncCallNestedStatements(t *testing.T) { + content := `package main + +func process(values []int, anyValue interface{}) int { + if len(values) > 0 { + doRemove() + } else if len(values) == 0 { + doRemove() + } else { + doRemove() + } + + for i := 0; i < len(values); i++ { + doRemove() + } + + for _, v := range values { + _ = v + doRemove() + } 
+ + switch value := anyValue.(type) { + case int: + _ = value + doRemove() + default: + doKeep() + } + + switch len(values) { + case 1: + doRemove() + default: + doKeep() + } + + return 1 +} +` + got, err := ModifyFunction(content, "process", RemoveFuncCall("doRemove")) + require.NoError(t, err) + require.NotContains(t, got, "doRemove(") + require.Contains(t, got, "doKeep()") + require.Contains(t, got, "return 1") +} diff --git a/ignite/pkg/xast/global.go b/ignite/pkg/xast/global.go new file mode 100644 index 0000000..fde135d --- /dev/null +++ b/ignite/pkg/xast/global.go @@ -0,0 +1,417 @@ +package xast + +import ( + "bytes" + "go/ast" + "go/format" + "go/parser" + "go/token" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +type ( + // globalOpts represent the options for globals. + globalOpts struct { + globals []global + } + + // GlobalOptions configures code generation. + GlobalOptions func(*globalOpts) + + global struct { + name, varType, value string + } + + // GlobalType represents the global type. + GlobalType string +) + +const ( + GlobalTypeVar GlobalType = "var" + GlobalTypeConst GlobalType = "const" +) + +// WithGlobal add a new global. +func WithGlobal(name, varType, value string) GlobalOptions { + return func(c *globalOpts) { + c.globals = append(c.globals, global{ + name: name, + varType: varType, + value: value, + }) + } +} + +func newGlobalOptions() globalOpts { + return globalOpts{ + globals: make([]global, 0), + } +} + +// InsertGlobal inserts global variables or constants into the provided Go source code content after the import section. +// The function parses the provided content, locates the import section, and inserts the global declarations immediately after it. +// The type of globals (variables or constants) is specified by the globalType parameter. +// Each global declaration is defined by calling WithGlobal function with appropriate arguments. +// The function returns the modified content with the inserted global declarations. 
+func InsertGlobal(fileContent string, globalType GlobalType, globals ...GlobalOptions) (modifiedContent string, err error) { + // apply global options. + opts := newGlobalOptions() + for _, o := range globals { + o(&opts) + } + if len(opts.globals) == 0 { + return fileContent, nil + } + + tok, err := globalTypeToken(globalType) + if err != nil { + return "", err + } + + fileSet := token.NewFileSet() + + // Parse the Go source code content. + f, err := parser.ParseFile(fileSet, "", fileContent, parser.ParseComments) + if err != nil { + return "", err + } + cmap := ast.NewCommentMap(fileSet, f, f.Comments) + + // Find the index of the import declaration or package declaration if no imports. + var insertIndex int + for i, decl := range f.Decls { + if genDecl, ok := decl.(*ast.GenDecl); ok && genDecl.Tok == token.IMPORT { + insertIndex = i + 1 + break + } else if funcDecl, ok := decl.(*ast.FuncDecl); ok { + insertIndex = i + if funcDecl.Doc == nil { + insertIndex++ + } + break + } + } + + for _, global := range opts.globals { + spec, err := newGlobalValueSpec(fileSet, global) + if err != nil { + return "", err + } + + f.Decls = append( + f.Decls[:insertIndex], + append([]ast.Decl{ + &ast.GenDecl{ + TokPos: 1, + Tok: tok, + Specs: []ast.Spec{spec}, + }, + }, f.Decls[insertIndex:]...)..., + ) + insertIndex++ + } + + f.Comments = cmap.Filter(f).Comments() + + // Format the modified AST. + var buf bytes.Buffer + if err := format.Node(&buf, fileSet, f); err != nil { + return "", err + } + + // Return the modified content. + return buf.String(), nil +} + +// AppendFunction appends a new function to the end of the Go source code content. +func AppendFunction(fileContent string, function string) (modifiedContent string, err error) { + fileSet := token.NewFileSet() + + // Parse the function body as a separate file. 
+	funcFile, err := parser.ParseFile(fileSet, "", "package main\n"+function, parser.AllErrors)
+	if err != nil {
+		return "", err
+	}
+
+	// Extract the first declaration, assuming it's a function declaration.
+	var funcDecl *ast.FuncDecl
+	for _, decl := range funcFile.Decls {
+		if fDecl, ok := decl.(*ast.FuncDecl); ok {
+			funcDecl = fDecl
+			break
+		}
+	}
+	if funcDecl == nil {
+		return "", errors.Errorf("no function declaration found in the provided function body")
+	}
+
+	// Parse the Go source code content.
+	f, err := parser.ParseFile(fileSet, "", fileContent, parser.ParseComments)
+	if err != nil {
+		return "", err
+	}
+	cmap := ast.NewCommentMap(fileSet, f, f.Comments)
+
+	// Append the function declaration to the file's declarations.
+	f.Decls = append(f.Decls, funcDecl)
+
+	f.Comments = cmap.Filter(f).Comments()
+
+	// Format the modified AST.
+	var buf bytes.Buffer
+	if err := format.Node(&buf, fileSet, f); err != nil {
+		return "", err
+	}
+
+	return buf.String(), nil
+}
+
+type (
+	// structOpts represents the options for structs.
+	structOpts struct {
+		values []structValue
+	}
+
+	// StructOpts configures struct changes.
+	StructOpts func(*structOpts)
+
+	structValue struct {
+		value     string
+		valueType string
+	}
+)
+
+// AppendStructValue adds a new field inside a struct. For instance, if
+// the struct has only one field 'test struct{ test1 string }' and we add
+// 'test2 int', the result will be 'test struct{ test1 string, test2 int }'.
+func AppendStructValue(value, valueType string) StructOpts {
+	return func(c *structOpts) {
+		c.values = append(c.values, structValue{
+			value:     value,
+			valueType: valueType,
+		})
+	}
+}
+
+func newStructOptions() structOpts {
+	return structOpts{
+		values: make([]structValue, 0),
+	}
+}
+
+// ModifyStruct modifies a struct in the provided Go source code.
+func ModifyStruct(fileContent, structName string, options ...StructOpts) (string, error) {
+	// Apply struct options.
+	opts := newStructOptions()
+	for _, o := range options {
+		o(&opts)
+	}
+
+	fileSet := token.NewFileSet()
+
+	// Parse the Go source code content.
+	f, err := parser.ParseFile(fileSet, "", fileContent, parser.ParseComments)
+	if err != nil {
+		return "", err
+	}
+	cmap := ast.NewCommentMap(fileSet, f, f.Comments)
+
+	// Locate and modify the struct declaration.
+	var (
+		found      bool
+		structType *ast.StructType
+	)
+	for _, decl := range f.Decls {
+		genDecl, ok := decl.(*ast.GenDecl)
+		if !ok || genDecl.Tok != token.TYPE {
+			continue
+		}
+
+		for _, spec := range genDecl.Specs {
+			typeSpec, ok := spec.(*ast.TypeSpec)
+			if !ok || typeSpec.Name.Name != structName {
+				continue
+			}
+
+			structType, ok = typeSpec.Type.(*ast.StructType)
+			if !ok {
+				continue
+			}
+
+			found = true
+			break
+		}
+		if found {
+			break
+		}
+	}
+	if !found {
+		return "", errors.Errorf("struct %q not found in file content", structName)
+	}
+	for _, v := range opts.values {
+		structType.Fields.List = append(structType.Fields.List, &ast.Field{
+			Names: []*ast.Ident{ast.NewIdent(v.value)},
+			Type:  ast.NewIdent(v.valueType),
+		})
+	}
+
+	f.Comments = cmap.Filter(f).Comments()
+
+	// Format the modified AST.
+	var buf bytes.Buffer
+	if err := format.Node(&buf, fileSet, f); err != nil {
+		return "", err
+	}
+
+	// Return the modified content.
+	return buf.String(), nil
+}
+
+type (
+	// globalArrayOpts represents the options for global array variables.
+	globalArrayOpts struct {
+		values []string
+	}
+
+	// GlobalArrayOpts configures global array variable changes.
+	GlobalArrayOpts func(*globalArrayOpts)
+)
+
+// AppendGlobalArrayValue appends a new value to a global array variable. For instance,
+// if the variable is 'test = []string{"test1"}' and we append '"test2"',
+// the result will be 'test = []string{"test1", "test2"}'.
+func AppendGlobalArrayValue(value string) GlobalArrayOpts { + return func(c *globalArrayOpts) { + c.values = append(c.values, value) + } +} + +func newGlobalArrayOptions() globalArrayOpts { + return globalArrayOpts{ + values: make([]string, 0), + } +} + +// ModifyGlobalArrayVar modifies an array global array variable in the provided Go source code by appending new values. +func ModifyGlobalArrayVar(fileContent, globalName string, options ...GlobalArrayOpts) (string, error) { + opts := newGlobalArrayOptions() + for _, o := range options { + o(&opts) + } + if len(opts.values) == 0 { + return fileContent, nil + } + + fileSet := token.NewFileSet() + + f, err := parser.ParseFile(fileSet, "", fileContent, parser.ParseComments) + if err != nil { + return "", err + } + cmap := ast.NewCommentMap(fileSet, f, f.Comments) + + var ( + found bool + compLit *ast.CompositeLit + ) + for _, decl := range f.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok || genDecl.Tok != token.VAR { + continue + } + + for _, spec := range genDecl.Specs { + valueSpec, ok := spec.(*ast.ValueSpec) + if !ok || len(valueSpec.Names) == 0 || valueSpec.Names[0].Name != globalName || len(valueSpec.Values) == 0 { + continue + } + + compLit, ok = valueSpec.Values[0].(*ast.CompositeLit) + if !ok { + continue + } + + found = true + break + } + if found { + break + } + } + + if !found { + return "", errors.Errorf("global array %q not found in file content", globalName) + } + appendCompositeLiteralValues(fileSet, compLit, opts.values) + + f.Comments = cmap.Filter(f).Comments() + + var buf bytes.Buffer + if err := format.Node(&buf, fileSet, f); err != nil { + return "", err + } + + return buf.String(), nil +} + +func globalTypeToken(globalType GlobalType) (token.Token, error) { + switch globalType { + case GlobalTypeVar: + return token.VAR, nil + case GlobalTypeConst: + return token.CONST, nil + default: + return token.ILLEGAL, errors.Errorf("unsupported global type: %s", string(globalType)) + } +} + +func 
newGlobalValueSpec(fileSet *token.FileSet, global global) (*ast.ValueSpec, error) { + spec := &ast.ValueSpec{ + Names: []*ast.Ident{ast.NewIdent(global.name)}, + } + if global.varType != "" { + spec.Type = ast.NewIdent(global.varType) + } + if global.value == "" { + return spec, nil + } + + valueExpr, err := parser.ParseExprFrom(fileSet, "", []byte(global.value), parser.ParseComments) + if err != nil { + return nil, err + } + spec.Values = []ast.Expr{valueExpr} + return spec, nil +} + +func appendCompositeLiteralValues(fileSet *token.FileSet, compLit *ast.CompositeLit, values []string) { + file := fileSet.File(compLit.Pos()) + maxOffset := file.Offset(compLit.Rbrace) + for _, elt := range compLit.Elts { + if pos := elt.End(); pos.IsValid() { + if offset := file.Offset(pos); offset > maxOffset { + maxOffset = offset + } + } + } + + for i, valueName := range values { + insertPos := file.Pos(maxOffset + i) + value := ast.NewIdent(valueName) + value.NamePos = insertPos + + compLit.Elts = append(compLit.Elts, value) + compLit.Rbrace += token.Pos(i + 1) + } + + if len(compLit.Elts) == 0 { + return + } + + last := compLit.Elts[len(compLit.Elts)-1] + if file.Line(compLit.Rbrace) == file.Line(last.End())-1 { + file.AddLine(file.Offset(compLit.Rbrace)) + compLit.Rbrace += token.Pos(1) + } +} diff --git a/ignite/pkg/xast/global_test.go b/ignite/pkg/xast/global_test.go new file mode 100644 index 0000000..440e7c4 --- /dev/null +++ b/ignite/pkg/xast/global_test.go @@ -0,0 +1,752 @@ +package xast + +import ( + "go/ast" + "go/token" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func TestInsertGlobal(t *testing.T) { + type args struct { + fileContent string + globalType GlobalType + globals []GlobalOptions + } + tests := []struct { + name string + args args + want string + err error + }{ + { + name: "Insert global int var", + args: args{ + fileContent: `package main + +import ( + "fmt" +) + +// This is a comment +`, + 
globalType: GlobalTypeVar, + globals: []GlobalOptions{ + WithGlobal("myIntVar", "int", "42"), + }, + }, + want: `package main + +import ( + "fmt" +) + +var myIntVar int = 42 + +// This is a comment +`, + }, + { + name: "Insert global int var without type", + args: args{ + fileContent: `package main + +import ( + "fmt" +) + +`, + globalType: GlobalTypeVar, + globals: []GlobalOptions{ + WithGlobal("myIntVar", "", "42"), + }, + }, + want: `package main + +import ( + "fmt" +) + +var myIntVar = 42 +`, + }, + { + name: "Insert global int const", + args: args{ + fileContent: `package main + +import ( + "fmt" +) + +// This is a comment +`, + globalType: GlobalTypeConst, + globals: []GlobalOptions{ + WithGlobal("myIntConst", "int", "42"), + }, + }, + want: `package main + +import ( + "fmt" +) + +const myIntConst int = 42 + +// This is a comment +`, + }, + { + name: "Insert string const", + args: args{ + fileContent: `package main + +import ( + "fmt" +) + +// This is a comment +`, + globalType: GlobalTypeConst, + globals: []GlobalOptions{ + WithGlobal("myStringConst", "string", `"hello"`), + }, + }, + want: `package main + +import ( + "fmt" +) + +const myStringConst string = "hello" + +// This is a comment +`, + }, + { + name: "Insert string const when already exist one", + args: args{ + fileContent: `package main + +import ( + "fmt" +) + +// myIntConst is my const int +const myIntConst int = 42 + +// This is a comment +`, + globalType: GlobalTypeConst, + globals: []GlobalOptions{ + WithGlobal("myStringConst", "string", `"hello"`), + }, + }, + want: `package main + +import ( + "fmt" +) + +const myStringConst string = "hello" + +// myIntConst is my const int +const myIntConst int = 42 + +// This is a comment +`, + }, + { + name: "Insert multiples consts", + args: args{ + fileContent: `package main + +import ( + "fmt" +) + +// This is a comment +`, + globalType: GlobalTypeConst, + globals: []GlobalOptions{ + WithGlobal("myStringConst", "string", `"hello"`), + 
WithGlobal("myBoolConst", "bool", "true"), + WithGlobal("myUintConst", "uint64", "40"), + }, + }, + want: `package main + +import ( + "fmt" +) + +const myStringConst string = "hello" +const myBoolConst bool = true +const myUintConst uint64 = 40 + +// This is a comment +`, + }, + { + name: "Insert global int var with not imports", + args: args{ + fileContent: `package main + +// This is a comment +`, + globalType: GlobalTypeVar, + globals: []GlobalOptions{ + WithGlobal("myIntVar", "int", "42"), + }, + }, + want: `package main + +var myIntVar int = 42 + +// This is a comment +`, + }, + { + name: "Insert global int var int an empty file", + args: args{ + fileContent: ``, + globalType: GlobalTypeVar, + globals: []GlobalOptions{ + WithGlobal("myIntVar", "int", "42"), + }, + }, + err: errors.New("1:1: expected 'package', found 'EOF'"), + }, + { + name: "Insert a custom var", + args: args{ + fileContent: `package main`, + globalType: GlobalTypeVar, + globals: []GlobalOptions{ + WithGlobal("fooVar", "foo", "42"), + }, + }, + want: `package main + +var fooVar foo = 42 +`, + }, + { + name: "Insert an invalid var", + args: args{ + fileContent: `package main`, + globalType: GlobalTypeVar, + globals: []GlobalOptions{ + WithGlobal("myInvalidVar", "invalid", "AEF#3fa."), + }, + }, + err: errors.New("1:4: illegal character U+0023 '#'"), + }, + { + name: "Insert an invalid type", + args: args{ + fileContent: `package main`, + globalType: "invalid", + globals: []GlobalOptions{ + WithGlobal("fooVar", "foo", "42"), + }, + }, + err: errors.New("unsupported global type: invalid"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := InsertGlobal(tt.args.fileContent, tt.args.globalType, tt.args.globals...) 
+ if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestAppendFunction(t *testing.T) { + type args struct { + fileContent string + function string + } + tests := []struct { + name string + args args + want string + err error + }{ + { + name: "Append a function after the package declaration", + args: args{ + fileContent: `package main`, + function: `func add(a, b int) int { + return a + b +}`, + }, + want: `package main + +func add(a, b int) int { + return a + b +} +`, + }, + { + name: "Append a function after a var", + args: args{ + fileContent: `package main + +import ( + "fmt" +) + +var myIntVar int = 42 +`, + function: `func add(a, b int) int { + return a + b +}`, + }, + want: `package main + +import ( + "fmt" +) + +var myIntVar int = 42 + +func add(a, b int) int { + return a + b +} +`, + }, + { + name: "Append a function after the import", + args: args{ + fileContent: `package main + +import ( + "fmt" +) +`, + function: `func add(a, b int) int { + return a + b +}`, + }, + want: `package main + +import ( + "fmt" +) + +func add(a, b int) int { + return a + b +} +`, + }, + { + name: "Append a function after another function", + args: args{ + fileContent: `package main + +import ( + "fmt" +) + +var myIntVar int = 42 + +func myFunction() int { + return 42 +} +`, + function: `func add(a, b int) int { + return a + b +}`, + }, + want: `package main + +import ( + "fmt" +) + +var myIntVar int = 42 + +func myFunction() int { + return 42 +} +func add(a, b int) int { + return a + b +} +`, + }, + { + name: "Append a function in an empty file", + args: args{ + fileContent: ``, + function: `func add(a, b int) int { + return a + b +}`, + }, + err: errors.New("1:1: expected 'package', found 'EOF'"), + }, + { + name: "Append a empty function", + args: args{ + fileContent: `package main`, + function: ``, + }, + err: errors.New("no function 
declaration found in the provided function body"), + }, + { + name: "Append an invalid function", + args: args{ + fileContent: `package main`, + function: `@,.l.e,`, + }, + err: errors.New("2:1: illegal character U+0040 '@'"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := AppendFunction(tt.args.fileContent, tt.args.function) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestModifyStruct(t *testing.T) { + type args struct { + fileContent string + structName string + options []StructOpts + } + tests := []struct { + name string + args args + want string + err error + }{ + { + name: "Add field to existing struct", + args: args{ + fileContent: `package main + +type MyStruct struct { + ExistingField int +} +`, + structName: "MyStruct", + options: []StructOpts{AppendStructValue("NewField", "string")}, + }, + want: `package main + +type MyStruct struct { + ExistingField int + NewField string +} +`, + }, + { + name: "Add field to empty struct", + args: args{ + fileContent: `package main + +type EmptyStruct struct { +} +`, + structName: "EmptyStruct", + options: []StructOpts{AppendStructValue("NewField", "string")}, + }, + want: `package main + +type EmptyStruct struct { + NewField string +} +`, + }, + { + name: "Struct not found", + args: args{ + fileContent: `package main + +type AnotherStruct struct { + ExistingField int +} +`, + structName: "NonExistentStruct", + options: []StructOpts{AppendStructValue("NewField", "string")}, + }, + err: errors.New(`struct "NonExistentStruct" not found in file content`), + }, + { + name: "Invalid Go code", + args: args{ + fileContent: `package main + +type MyStruct`, + structName: "MyStruct", + options: []StructOpts{AppendStructValue("NewField", "string")}, + }, + err: errors.New("3:14: expected type, found newline"), + }, + { + name: "Add field after 
multiple existing fields", + args: args{ + fileContent: `package main + +type MyStruct struct { + Field1 int + Field2 string +} +`, + structName: "MyStruct", + options: []StructOpts{AppendStructValue("Field3", "bool")}, + }, + want: `package main + +type MyStruct struct { + Field1 int + Field2 string + Field3 bool +} +`, + }, + { + name: "Empty file input", + args: args{ + fileContent: ``, + structName: "MyStruct", + options: []StructOpts{AppendStructValue("NewField", "string")}, + }, + err: errors.New("1:1: expected 'package', found 'EOF'"), + }, + { + name: "Add field with pointer type", + args: args{ + fileContent: `package main + +type MyStruct struct { + ExistingField int +} +`, + structName: "MyStruct", + options: []StructOpts{AppendStructValue("PointerField", "*int")}, + }, + want: `package main + +type MyStruct struct { + ExistingField int + PointerField *int +} +`, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := ModifyStruct(tt.args.fileContent, tt.args.structName, tt.args.options...) 
+ if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestModifyGlobalArrayVar(t *testing.T) { + type args struct { + fileContent string + globalName string + options []GlobalArrayOpts + } + tests := []struct { + name string + args args + want string + err error + }{ + { + name: "Add field to custom variable array", + args: args{ + fileContent: `package app +var ( + moduleAccPerms = []*authmodulev1.ModuleAccountPermission{ + {Account: nft.ModuleName}, + {Account: ibctransfertypes.ModuleName, Permissions: []string{authtypes.Minter, authtypes.Burner}}, + } +) +`, + globalName: "moduleAccPerms", + options: []GlobalArrayOpts{AppendGlobalArrayValue("{Account: icatypes.ModuleName}")}, + }, + want: `package app + +var ( + moduleAccPerms = []*authmodulev1.ModuleAccountPermission{ + {Account: nft.ModuleName}, + {Account: ibctransfertypes.ModuleName, Permissions: []string{authtypes.Minter, authtypes.Burner}}, + {Account: icatypes.ModuleName}, + } +) +`, + }, + { + name: "Add field to string variable array", + args: args{ + fileContent: `package app + +var ( + blockAccAddrs = []string{ + authtypes.FeeCollectorName, + distrtypes.ModuleName, + minttypes.ModuleName, + stakingtypes.BondedPoolName, + stakingtypes.NotBondedPoolName, + } +) +`, + globalName: "blockAccAddrs", + options: []GlobalArrayOpts{AppendGlobalArrayValue("nft.ModuleName")}, + }, + want: `package app + +var ( + blockAccAddrs = []string{ + authtypes.FeeCollectorName, + distrtypes.ModuleName, + minttypes.ModuleName, + stakingtypes.BondedPoolName, + stakingtypes.NotBondedPoolName, + nft.ModuleName, + } +) +`, + }, + { + name: "name not found", + args: args{ + fileContent: `package app + +var ( + blockAccAddrs = []string{ + authtypes.FeeCollectorName, + distrtypes.ModuleName, + minttypes.ModuleName, + stakingtypes.BondedPoolName, + stakingtypes.NotBondedPoolName, + } +) +`, + globalName: 
"notFound", + options: []GlobalArrayOpts{AppendGlobalArrayValue("nft.ModuleName")}, + }, + err: errors.New("global array \"notFound\" not found in file content"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := ModifyGlobalArrayVar(tt.args.fileContent, tt.args.globalName, tt.args.options...) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestInsertGlobalNoOptions(t *testing.T) { + content := "not valid go source" + + got, err := InsertGlobal(content, GlobalTypeVar) + require.NoError(t, err) + require.Equal(t, content, got) +} + +func TestModifyGlobalArrayVarNoOptions(t *testing.T) { + content := "not valid go source" + + got, err := ModifyGlobalArrayVar(content, "moduleAccPerms") + require.NoError(t, err) + require.Equal(t, content, got) +} + +func TestModifyGlobalArrayVarWithNonArrayValue(t *testing.T) { + content := `package app + +var moduleAccPerms = 1 +` + + _, err := ModifyGlobalArrayVar(content, "moduleAccPerms", AppendGlobalArrayValue("newValue")) + require.EqualError(t, err, `global array "moduleAccPerms" not found in file content`) +} + +func TestModifyStructWithTypeAlias(t *testing.T) { + content := `package main + +type MyStruct = string +` + + _, err := ModifyStruct(content, "MyStruct", AppendStructValue("NewField", "string")) + require.EqualError(t, err, `struct "MyStruct" not found in file content`) +} + +func TestGlobalTypeToken(t *testing.T) { + tok, err := globalTypeToken(GlobalTypeVar) + require.NoError(t, err) + require.Equal(t, token.VAR, tok) + + tok, err = globalTypeToken(GlobalTypeConst) + require.NoError(t, err) + require.Equal(t, token.CONST, tok) + + tok, err = globalTypeToken("invalid") + require.Error(t, err) + require.Equal(t, token.ILLEGAL, tok) + require.Equal(t, "unsupported global type: invalid", err.Error()) +} + +func TestNewGlobalValueSpec(t 
*testing.T) { + fileSet := token.NewFileSet() + + spec, err := newGlobalValueSpec(fileSet, global{ + name: "myVar", + varType: "int", + value: "", + }) + require.NoError(t, err) + require.Len(t, spec.Names, 1) + require.Equal(t, "myVar", spec.Names[0].Name) + require.NotNil(t, spec.Type) + require.Equal(t, "int", spec.Type.(*ast.Ident).Name) + require.Len(t, spec.Values, 0) + + spec, err = newGlobalValueSpec(fileSet, global{ + name: "myExprVar", + value: "1 + 2", + }) + require.NoError(t, err) + require.Len(t, spec.Values, 1) + + _, err = newGlobalValueSpec(fileSet, global{ + name: "badVar", + value: "1 + #", + }) + require.Error(t, err) +} diff --git a/ignite/pkg/xast/import.go b/ignite/pkg/xast/import.go new file mode 100644 index 0000000..79871cf --- /dev/null +++ b/ignite/pkg/xast/import.go @@ -0,0 +1,172 @@ +package xast + +import ( + "bytes" + "go/ast" + "go/format" + "go/parser" + "go/token" + "strconv" + + "golang.org/x/tools/go/ast/astutil" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +type ( + // importOpts represent the options for imp. + importOpts struct { + imports []imp + } + + // ImportOptions configures code generation. + ImportOptions func(*importOpts) + + imp struct { + path string + name string + } +) + +// WithImport add a new import at the end of the imports. +func WithImport(repo string) ImportOptions { + return func(c *importOpts) { + c.imports = append(c.imports, imp{ + path: repo, + name: "", + }) + } +} + +// WithNamedImport add a new import with name at the end of the imports. +func WithNamedImport(name, repo string) ImportOptions { + return func(c *importOpts) { + c.imports = append(c.imports, imp{ + name: name, + path: repo, + }) + } +} + +func newImportOptions() importOpts { + return importOpts{ + imports: make([]imp, 0), + } +} + +// AppendImports appends import statements to the existing import block in Go source code content. 
+func AppendImports(fileContent string, imports ...ImportOptions) (string, error) { + // apply global options. + opts := newImportOptions() + for _, o := range imports { + o(&opts) + } + if len(opts.imports) == 0 { + return fileContent, nil + } + + fileSet := token.NewFileSet() + + // Parse the Go source code content. + f, err := parser.ParseFile(fileSet, "", fileContent, parser.ParseComments) + if err != nil { + return "", err + } + cmap := ast.NewCommentMap(fileSet, f, f.Comments) + + // Add new import statements. + for _, importPath := range opts.imports { + deleteImportsByPath(fileSet, f, importPath.path) + + if !astutil.AddNamedImport(fileSet, f, importPath.name, importPath.path) { + if hasImport(f, importPath.name, importPath.path) { + continue + } + return "", errors.Errorf("failed to add import %s - %s", importPath.name, importPath.path) + } + } + ast.SortImports(fileSet, f) + + f.Comments = cmap.Filter(f).Comments() + + // Format the modified AST. + var buf bytes.Buffer + if err := format.Node(&buf, fileSet, f); err != nil { + return "", err + } + + return buf.String(), nil +} + +// RemoveImports removes import statements from the existing import block in Go source code content. +func RemoveImports(fileContent string, imports ...ImportOptions) (string, error) { + // apply global options. + opts := newImportOptions() + for _, o := range imports { + o(&opts) + } + if len(opts.imports) == 0 { + return fileContent, nil + } + + fileSet := token.NewFileSet() + + // Parse the Go source code content. + f, err := parser.ParseFile(fileSet, "", fileContent, parser.ParseComments) + if err != nil { + return "", err + } + cmap := ast.NewCommentMap(fileSet, f, f.Comments) + + // Remove import statements. + for _, importPath := range opts.imports { + astutil.DeleteNamedImport(fileSet, f, importPath.name, importPath.path) + } + + f.Comments = cmap.Filter(f).Comments() + + // Format the modified AST. 
+ var buf bytes.Buffer + if err := format.Node(&buf, fileSet, f); err != nil { + return "", err + } + + return buf.String(), nil +} + +func deleteImportsByPath(fileSet *token.FileSet, file *ast.File, path string) { + names := make([]string, 0, len(file.Imports)) + for _, spec := range file.Imports { + if importPath(spec) == path { + names = append(names, importName(spec)) + } + } + + for _, name := range names { + astutil.DeleteNamedImport(fileSet, file, name, path) + } +} + +func hasImport(file *ast.File, name, path string) bool { + for _, spec := range file.Imports { + if importName(spec) == name && importPath(spec) == path { + return true + } + } + return false +} + +func importName(spec *ast.ImportSpec) string { + if spec.Name == nil { + return "" + } + return spec.Name.Name +} + +func importPath(spec *ast.ImportSpec) string { + value, err := strconv.Unquote(spec.Path.Value) + if err != nil { + return "" + } + return value +} diff --git a/ignite/pkg/xast/import_test.go b/ignite/pkg/xast/import_test.go new file mode 100644 index 0000000..9a725be --- /dev/null +++ b/ignite/pkg/xast/import_test.go @@ -0,0 +1,551 @@ +package xast + +import ( + "go/ast" + "go/parser" + "go/token" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func TestAppendImports(t *testing.T) { + existingContent := `package main + +import ( + "fmt" +) + +func main() { + // print hello world + fmt.Println("Hello, world!") +}` + + type args struct { + fileContent string + imports []ImportOptions + } + tests := []struct { + name string + args args + want string + err error + }{ + { + name: "add single import statement", + args: args{ + fileContent: existingContent, + imports: []ImportOptions{ + WithImport("strings"), + }, + }, + want: `package main + +import ( + "fmt" + "strings" +) + +func main() { + // print hello world + fmt.Println("Hello, world!") +} +`, + }, + { + name: "add multiple import statements", + args: args{ + fileContent: 
existingContent, + imports: []ImportOptions{ + WithNamedImport("st", "strings"), + WithImport("strconv"), + WithImport("os"), + }, + }, + want: `package main + +import ( + "fmt" + "os" + "strconv" + st "strings" +) + +func main() { + // print hello world + fmt.Println("Hello, world!") +} +`, + }, + { + name: "add multiple import statements with an existing one", + args: args{ + fileContent: existingContent, + imports: []ImportOptions{ + WithNamedImport("st", "strings"), + WithImport("strconv"), + WithImport("os"), + }, + }, + want: `package main + +import ( + "fmt" + "os" + "strconv" + st "strings" +) + +func main() { + // print hello world + fmt.Println("Hello, world!") +} +`, + }, + { + name: "add import to specific index", + args: args{ + fileContent: `package main + +import ( + "fmt" + "os" + st "strings" +)`, + imports: []ImportOptions{ + WithImport("strconv"), + }, + }, + want: `package main + +import ( + "fmt" + "os" + "strconv" + st "strings" +) +`, + }, + { + name: "add multiple imports to specific index", + args: args{ + fileContent: `package main + +import ( + "fmt" + "os" + st "strings" +)`, + imports: []ImportOptions{ + WithImport("strconv"), + WithNamedImport("", "testing"), + WithImport("bytes"), + }, + }, + want: `package main + +import ( + "bytes" + "fmt" + "os" + "strconv" + st "strings" + "testing" +) +`, + }, + { + name: "add duplicate import statement", + args: args{ + fileContent: existingContent, + imports: []ImportOptions{ + WithImport("fmt"), + }, + }, + want: `package main + +import "fmt" + +func main() { + // print hello world + fmt.Println("Hello, world!") +} +`, + }, + { + name: "add duplicate named import statement", + args: args{ + fileContent: `package main + +import sdk "github.com/cosmos/cosmos-sdk/types" + +func main() {} +`, + imports: []ImportOptions{ + WithNamedImport("sdk", "github.com/cosmos/cosmos-sdk/types"), + }, + }, + want: `package main + +import sdk "github.com/cosmos/cosmos-sdk/types" + +func main() {} +`, + }, + { + 
name: "replace import alias", + args: args{ + fileContent: `package main + +import types "github.com/cosmos/cosmos-sdk/types" + +func main() { + _ = types.AccAddress{} +} +`, + imports: []ImportOptions{ + WithNamedImport("sdk", "github.com/cosmos/cosmos-sdk/types"), + }, + }, + want: `package main + +import sdk "github.com/cosmos/cosmos-sdk/types" + +func main() { + _ = types.AccAddress{} +} +`, + }, + { + name: "no import statement", + args: args{ + fileContent: `package main + +func main() { + // print hello world + fmt.Println("Hello, world!") +}`, + imports: []ImportOptions{ + WithImport("fmt"), + }, + }, + want: `package main + +import "fmt" + +func main() { + // print hello world + fmt.Println("Hello, world!") +} +`, + }, + { + name: "no import statement and add two imports", + args: args{ + fileContent: `package main + +func main() { + // print hello world + fmt.Println("Hello, world!") +}`, + imports: []ImportOptions{ + WithImport("fmt"), + WithImport("os"), + }, + }, + want: `package main + +import ( + "fmt" + "os" +) + +func main() { + // print hello world + fmt.Println("Hello, world!") +} +`, + }, + { + name: "add invalid import name", + args: args{ + fileContent: existingContent, + imports: []ImportOptions{ + WithNamedImport("fmt\"", "fmt\""), + }, + }, + err: errors.New("format.Node internal error (5:8: expected ';', found fmt (and 2 more errors))"), + }, + { + name: "add empty file content", + args: args{ + fileContent: "", + imports: []ImportOptions{ + WithImport("fmt"), + }, + }, + err: errors.New("1:1: expected 'package', found 'EOF'"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := AppendImports(tt.args.fileContent, tt.args.imports...) 
+ if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestRemoveImports(t *testing.T) { + type args struct { + fileContent string + imports []ImportOptions + } + tests := []struct { + name string + args args + want string + err error + }{ + { + name: "remove single import statement", + args: args{ + fileContent: `package main + +import ( + "fmt" + "strings" +) + +func main() { + fmt.Println("Hello, world!") +}`, + imports: []ImportOptions{ + WithImport("strings"), + }, + }, + want: `package main + +import ( + "fmt" +) + +func main() { + fmt.Println("Hello, world!") +} +`, + }, + { + name: "remove multiple import statements", + args: args{ + fileContent: `package main + +import ( + "fmt" + "os" + "strconv" + st "strings" +) + +func main() { + fmt.Println("Hello, world!") +}`, + imports: []ImportOptions{ + WithNamedImport("st", "strings"), + WithImport("strconv"), + WithImport("os"), + }, + }, + want: `package main + +import ( + "fmt" +) + +func main() { + fmt.Println("Hello, world!") +} +`, + }, + { + name: "remove all imports", + args: args{ + fileContent: `package main + +import ( + "fmt" + "strings" +) + +func main() { + fmt.Println("Hello, world!") +}`, + imports: []ImportOptions{ + WithImport("fmt"), + WithImport("strings"), + }, + }, + want: `package main + +func main() { + fmt.Println("Hello, world!") +} +`, + }, + { + name: "remove non-existent import", + args: args{ + fileContent: `package main + +import "fmt" + +func main() { + fmt.Println("Hello, world!") +}`, + imports: []ImportOptions{ + WithImport("strings"), + }, + }, + want: `package main + +import "fmt" + +func main() { + fmt.Println("Hello, world!") +} +`, + }, + { + name: "remove named import", + args: args{ + fileContent: `package main + +import ( + "fmt" + st "strings" +) + +func main() { + fmt.Println("Hello, world!") +}`, + imports: []ImportOptions{ + 
WithNamedImport("st", "strings"), + }, + }, + want: `package main + +import ( + "fmt" +) + +func main() { + fmt.Println("Hello, world!") +} +`, + }, + { + name: "remove import from file with no imports", + args: args{ + fileContent: `package main + +func main() { + fmt.Println("Hello, world!") +}`, + imports: []ImportOptions{ + WithImport("fmt"), + }, + }, + want: `package main + +func main() { + fmt.Println("Hello, world!") +} +`, + }, + { + name: "remove empty file content", + args: args{ + fileContent: "", + imports: []ImportOptions{ + WithImport("fmt"), + }, + }, + err: errors.New("1:1: expected 'package', found 'EOF'"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := RemoveImports(tt.args.fileContent, tt.args.imports...) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestAppendImportsNoOptions(t *testing.T) { + content := `package main + +func main() {} +` + + got, err := AppendImports(content) + require.NoError(t, err) + require.Equal(t, content, got) +} + +func TestRemoveImportsNoOptions(t *testing.T) { + content := `package main + +import "fmt" + +func main() {} +` + + got, err := RemoveImports(content) + require.NoError(t, err) + require.Equal(t, content, got) +} + +func TestImportHelpers(t *testing.T) { + fileSet := token.NewFileSet() + content := `package main + +import ( + "fmt" + types "github.com/cosmos/cosmos-sdk/types" + sdk "github.com/cosmos/cosmos-sdk/types" +) + +func main() {} +` + file, err := parser.ParseFile(fileSet, "", content, parser.ParseComments) + require.NoError(t, err) + + deleteImportsByPath(fileSet, file, "github.com/cosmos/cosmos-sdk/types") + require.Len(t, file.Imports, 1) + require.Equal(t, "fmt", importPath(file.Imports[0])) + require.Equal(t, "", importName(file.Imports[0])) + + require.True(t, hasImport(file, "", "fmt")) + require.False(t, 
hasImport(file, "sdk", "fmt"))
+	require.False(t, hasImport(file, "", "strings"))
+
+	invalidSpec := &ast.ImportSpec{
+		Name: ast.NewIdent("broken"),
+		Path: &ast.BasicLit{Value: "\"unterminated"},
+	}
+	require.Equal(t, "broken", importName(invalidSpec))
+	require.Equal(t, "", importPath(invalidSpec))
+}
diff --git a/ignite/pkg/xast/testdata/inspect/test.go b/ignite/pkg/xast/testdata/inspect/test.go
new file mode 100644
index 0000000..80ad747
--- /dev/null
+++ b/ignite/pkg/xast/testdata/inspect/test.go
@@ -0,0 +1,5 @@
+package test
+
+type T struct {
+	A int
+}
diff --git a/ignite/pkg/xast/testdata/parseDir/file.go b/ignite/pkg/xast/testdata/parseDir/file.go
new file mode 100644
index 0000000..945797e
--- /dev/null
+++ b/ignite/pkg/xast/testdata/parseDir/file.go
@@ -0,0 +1 @@
+package file
diff --git a/ignite/pkg/xast/testdata/parseDir/file_test.go b/ignite/pkg/xast/testdata/parseDir/file_test.go
new file mode 100644
index 0000000..bb1c5e7
--- /dev/null
+++ b/ignite/pkg/xast/testdata/parseDir/file_test.go
@@ -0,0 +1 @@
+package file_test
diff --git a/ignite/pkg/xast/xast.go b/ignite/pkg/xast/xast.go
new file mode 100644
index 0000000..14bac07
--- /dev/null
+++ b/ignite/pkg/xast/xast.go
@@ -0,0 +1,55 @@
+package xast
+
+import (
+	"go/ast"
+	"go/parser"
+	"go/token"
+	"strings"
+
+	"github.com/ignite/cli/v29/ignite/pkg/errors"
+)
+
+var ErrStop = errors.New("ast stop")
+
+// Inspect is like ast.Inspect but with error handling.
+// Unlike ast.Inspect the function parameter f returns an error and not a bool.
+// The returned error is propagated to the caller, unless it is equal to
+// ErrStop, in which case it indicates the child nodes shouldn't be
+// inspected (like returning false in the function of ast.Inspect).
+func Inspect(n ast.Node, f func(n ast.Node) error) (err error) {
+	ast.Inspect(n, func(n ast.Node) bool {
+		err = errors.Join(err, f(n))
+		if err == nil {
+			return true
+		}
+		if errors.Is(err, ErrStop) {
+			err = nil
+		}
+		return false
+	})
+	return
+}
+
+// ParseDir invokes parser.ParseDir and returns the first package found that
+// doesn't have the "_test" suffix.
+func ParseDir(dir string) (*ast.Package, *token.FileSet, error) { //nolint:staticcheck,nolintlint
+	fileSet := token.NewFileSet()
+	pkgs, err := parser.ParseDir(fileSet, dir, nil, 0)
+	if err != nil {
+		return nil, nil, err
+	}
+	for name, pkg := range pkgs {
+		if strings.HasSuffix(name, "_test") {
+			continue
+		}
+		return pkg, fileSet, nil
+	}
+	return nil, nil, errors.Errorf("no valid package found in %s", dir)
+}
+
+// ParseFile invokes parser.ParseFile and returns the *ast.File.
+func ParseFile(filepath string) (*ast.File, *token.FileSet, error) {
+	fileSet := token.NewFileSet()
+	file, err := parser.ParseFile(fileSet, filepath, nil, 0)
+	return file, fileSet, err
+}
diff --git a/ignite/pkg/xast/xast_test.go b/ignite/pkg/xast/xast_test.go
new file mode 100644
index 0000000..2013775
--- /dev/null
+++ b/ignite/pkg/xast/xast_test.go
@@ -0,0 +1,105 @@
+package xast_test
+
+import (
+	"go/ast"
+	"go/parser"
+	"go/token"
+	"os"
+	"path/filepath"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+
+	"github.com/ignite/cli/v29/ignite/pkg/errors"
+	"github.com/ignite/cli/v29/ignite/pkg/xast"
+)
+
+func TestInspect(t *testing.T) {
+	fset := token.NewFileSet()
+	n, err := parser.ParseFile(fset, "testdata/inspect/test.go", nil, 0)
+	require.NoError(t, err)
+	var calls int
+	tests := []struct {
+		name          string
+		f             func(n ast.Node) error
+		expectedError string
+		expectedCalls int
+	}{
+		{
+			name: "random error",
+			f: func(ast.Node) error {
+				return errors.New("oups")
+			},
+			expectedError: "oups",
+		},
+		{
+			name: "stop error",
+			f: func(ast.Node) error {
+				calls++
+				return xast.ErrStop
+			},
+			expectedCalls: 1,
}, + { + name: "no error", + f: func(ast.Node) error { + calls++ + return nil + }, + expectedCalls: 20, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + calls = 0 + err = xast.Inspect(n, tt.f) + + if tt.expectedError != "" { + require.EqualError(t, err, tt.expectedError) + return + } + require.NoError(t, err) + require.Equal(t, tt.expectedCalls, calls) + }) + } +} + +func TestParseDir(t *testing.T) { + pkg, fileSet, err := xast.ParseDir("testdata/parseDir") + + require.NoError(t, err) + require.NotNil(t, fileSet) + require.Equal(t, "file", pkg.Name) +} + +func TestParseFile(t *testing.T) { + dir := t.TempDir() + + t.Run("parse valid file", func(t *testing.T) { + filePath := filepath.Join(dir, "valid.go") + content := `package sample + +func hello() {} +` + require.NoError(t, os.WriteFile(filePath, []byte(content), 0o644)) + + file, fileSet, err := xast.ParseFile(filePath) + require.NoError(t, err) + require.NotNil(t, fileSet) + require.Equal(t, "sample", file.Name.Name) + require.Len(t, file.Decls, 1) + }) + + t.Run("invalid file path", func(t *testing.T) { + _, _, err := xast.ParseFile(filepath.Join(dir, "missing.go")) + require.Error(t, err) + }) + + t.Run("invalid go file", func(t *testing.T) { + filePath := filepath.Join(dir, "invalid.go") + require.NoError(t, os.WriteFile(filePath, []byte("package sample\nfunc"), 0o644)) + + _, _, err := xast.ParseFile(filePath) + require.Error(t, err) + }) +} diff --git a/ignite/pkg/xembed/testdata/subtestdata/subfile.txt b/ignite/pkg/xembed/testdata/subtestdata/subfile.txt new file mode 100644 index 0000000..473a0f4 diff --git a/ignite/pkg/xembed/testdata/subtestdata/subtestdata/subfile2.txt b/ignite/pkg/xembed/testdata/subtestdata/subtestdata/subfile2.txt new file mode 100644 index 0000000..473a0f4 diff --git a/ignite/pkg/xembed/testdata/test.txt b/ignite/pkg/xembed/testdata/test.txt new file mode 100644 index 0000000..473a0f4 diff --git a/ignite/pkg/xembed/xembed.go 
b/ignite/pkg/xembed/xembed.go new file mode 100644 index 0000000..e96803c --- /dev/null +++ b/ignite/pkg/xembed/xembed.go @@ -0,0 +1,39 @@ +package xembed + +import ( + "embed" + "io/fs" + "path/filepath" +) + +// FileList list all files into an embed.FS in a provider path. +func FileList(efs embed.FS, path string) ([]string, error) { + return fileList(efs, path, path) +} + +func fileList(efs embed.FS, path, currentDir string) ([]string, error) { + dir, err := fs.ReadDir(efs, currentDir) + if err != nil { + return nil, err + } + + files := make([]string, 0) + for _, f := range dir { + if !f.IsDir() { + relPath, err := filepath.Rel(path, filepath.Join(currentDir, f.Name())) + if err != nil { + return nil, err + } + files = append(files, relPath) + continue + } + + newDir := filepath.Join(currentDir, f.Name()) + dirFiles, err := fileList(efs, path, newDir) + if err != nil { + return nil, err + } + files = append(files, dirFiles...) + } + return files, nil +} diff --git a/ignite/pkg/xembed/xembed_test.go b/ignite/pkg/xembed/xembed_test.go new file mode 100644 index 0000000..4d8720d --- /dev/null +++ b/ignite/pkg/xembed/xembed_test.go @@ -0,0 +1,78 @@ +package xembed + +import ( + "embed" + "testing" + + "github.com/stretchr/testify/require" +) + +//go:embed testdata/* +var fsProtoTest embed.FS + +func TestFileList(t *testing.T) { + type args struct { + efs embed.FS + path string + } + tests := []struct { + name string + args args + want []string + err error + }{ + { + name: "root folder", + args: args{ + efs: fsProtoTest, + path: ".", + }, + want: []string{ + "testdata/subtestdata/subfile.txt", + "testdata/subtestdata/subtestdata/subfile2.txt", + "testdata/test.txt", + }, + }, + { + name: "testdata folder", + args: args{ + efs: fsProtoTest, + path: "testdata", + }, + want: []string{ + "subtestdata/subfile.txt", + "subtestdata/subtestdata/subfile2.txt", + "test.txt", + }, + }, + { + name: "sub testdata folder", + args: args{ + efs: fsProtoTest, + path: 
"testdata/subtestdata", + }, + want: []string{ + "subfile.txt", + "subtestdata/subfile2.txt", + }, + }, + { + name: "sub sub testdata folder", //nolint:dupword + args: args{ + efs: fsProtoTest, + path: "testdata/subtestdata/subtestdata", + }, + want: []string{"subfile2.txt"}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := FileList(tt.args.efs, tt.args.path) + if tt.err != nil { + require.Error(t, err) + } + require.NoError(t, err) + require.EqualValues(t, tt.want, got) + }) + } +} diff --git a/ignite/pkg/xexec/testdata/bin.sh b/ignite/pkg/xexec/testdata/bin.sh new file mode 100755 index 0000000..cb9f3fa --- /dev/null +++ b/ignite/pkg/xexec/testdata/bin.sh @@ -0,0 +1 @@ +#!/bin/bash diff --git a/ignite/pkg/xexec/testdata/nobin b/ignite/pkg/xexec/testdata/nobin new file mode 100644 index 0000000..473a0f4 diff --git a/ignite/pkg/xexec/xexec.go b/ignite/pkg/xexec/xexec.go new file mode 100644 index 0000000..89a5a73 --- /dev/null +++ b/ignite/pkg/xexec/xexec.go @@ -0,0 +1,68 @@ +package xexec + +import ( + "os" + "os/exec" + "path/filepath" + + "github.com/ignite/cli/v29/ignite/pkg/goenv" +) + +// IsExec checks if a file is executable by anyone. +func IsExec(binaryPath string) (bool, error) { + info, err := os.Stat(binaryPath) + if err != nil { + return false, err + } + + if m := info.Mode(); !m.IsDir() && m&0o111 != 0 { + return true, nil + } + + return false, nil +} + +// ResolveAbsPath searches for an executable file in the current +// working directory, the directories defined by the PATH environment +// variable and in the Go binary path. Once found returns the absolute +// path to the file. 
+func ResolveAbsPath(filePath string) (path string, err error) { + // Check if file exists and it's an executable file + if path, err = filepath.Abs(filePath); err == nil { + if ok, _ := IsExec(path); ok { + return path, nil + } + } + + // Search file in the directories defined by the PATH env variable + path, err = exec.LookPath(filePath) + if err == nil { + return path, nil + } + + // When PATH search fails check if file is located in the Go binary path + path = filepath.Join(goenv.Bin(), filePath) + if ok, _ := IsExec(path); ok { + return path, nil + } + + return path, err +} + +// TryResolveAbsPath searches for an executable file in the current +// working directory, the directories defined by the PATH environment +// variable and in the Go binary path. Once found returns the absolute +// path to the file, or otherwise it returns the file path unmodified. +func TryResolveAbsPath(filePath string) string { + if path, err := ResolveAbsPath(filePath); err == nil { + return path + } + + return filePath +} + +// IsCommandAvailable checks if command is available on user's path. 
+func IsCommandAvailable(name string) bool { + _, err := ResolveAbsPath(name) + return err == nil +} diff --git a/ignite/pkg/xexec/xexec_test.go b/ignite/pkg/xexec/xexec_test.go new file mode 100644 index 0000000..b11d0ba --- /dev/null +++ b/ignite/pkg/xexec/xexec_test.go @@ -0,0 +1,152 @@ +package xexec_test + +import ( + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/xexec" +) + +func TestIsExec(t *testing.T) { + cases := []struct { + name, path string + want bool + }{ + { + name: "executable", + path: "testdata/bin.sh", + want: true, + }, + { + name: "not_executable", + path: "testdata/nobin", + want: false, + }, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + // Act + ok, err := xexec.IsExec(tt.path) + + // Assert + require.NoError(t, err) + require.Equal(t, tt.want, ok) + }) + } +} + +func TestResolveAbsPath(t *testing.T) { + // Get the absolute path to the testdata directory + testdata, err := filepath.Abs("testdata") + require.NoError(t, err) + + cases := []struct { + name, path, want string + env []string + }{ + { + name: "relative", + path: "testdata/bin.sh", + want: filepath.Join(testdata, "bin.sh"), + }, + { + name: "path", + path: "bin.sh", + env: []string{"PATH", testdata}, + want: filepath.Join(testdata, "bin.sh"), + }, + { + name: "go bin path", + path: "bin.sh", + env: []string{"GOBIN", testdata}, + want: filepath.Join(testdata, "bin.sh"), + }, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + // Arrange + if tt.env != nil { + t.Setenv(tt.env[0], tt.env[1]) + } + + // Act + path, err := xexec.ResolveAbsPath(tt.path) + + // Assert + require.NoError(t, err) + require.Equal(t, tt.want, path) + }) + } +} + +func TestResolveAbsPathError(t *testing.T) { + // Arrange + fileName := "invalid-file.ko" + + // Act + _, err := xexec.ResolveAbsPath(fileName) + + // Assert + require.Errorf(t, err, `exec: "%s": executable file not found in $PATH`, 
fileName) +} + +func TestTryResolveAbsPath(t *testing.T) { + // Get the absolute path to the testdata directory + testdata, err := filepath.Abs("testdata") + require.NoError(t, err) + + cases := []struct { + name, path, want string + env []string + }{ + { + name: "valid file", + path: "testdata/bin.sh", + want: filepath.Join(testdata, "bin.sh"), + }, + { + name: "invalid file", + path: "invalid-file.ko", + want: "invalid-file.ko", + }, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + // Act + path := xexec.TryResolveAbsPath(tt.path) + + // Assert + require.NoError(t, err) + require.Equal(t, tt.want, path) + }) + } +} + +func TestIsCommandAvailable(t *testing.T) { + cases := []struct { + name, path string + want bool + }{ + { + name: "available", + path: "testdata/bin.sh", + want: true, + }, + { + name: "not_available", + path: "invalid-file.ko", + want: false, + }, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + // Act + ok := xexec.IsCommandAvailable(tt.path) + + // Assert + require.Equal(t, tt.want, ok) + }) + } +} diff --git a/ignite/pkg/xfilepath/testdata/testfile b/ignite/pkg/xfilepath/testdata/testfile new file mode 100644 index 0000000..473a0f4 diff --git a/ignite/pkg/xfilepath/xfilepath.go b/ignite/pkg/xfilepath/xfilepath.go new file mode 100644 index 0000000..0ba0b4e --- /dev/null +++ b/ignite/pkg/xfilepath/xfilepath.go @@ -0,0 +1,118 @@ +// Package xfilepath defines functions to define path retrievers that support error handling +package xfilepath + +import ( + "os" + "path/filepath" +) + +// PathRetriever is a function that retrieves the contained path or an error. +type PathRetriever func() (path string, err error) + +// PathsRetriever is a function that retrieves the contained list of paths or an error. +type PathsRetriever func() (path []string, err error) + +// MustInvoke invokes the PathsRetriever func and panics if it returns an error. 
+func MustInvoke(p PathRetriever) string { + path, err := p() + if err != nil { + panic(err) + } + return path +} + +// Path returns a path retriever from the provided path. +func Path(path string) PathRetriever { + return func() (string, error) { return path, nil } +} + +// PathWithError returns a path retriever from the provided path and error. +func PathWithError(path string, err error) PathRetriever { + return func() (string, error) { return path, err } +} + +// Join returns a path retriever from the join of the provided path retrievers. +// The returned path retriever eventually returns the error from the first provided path retrievers +// that returns a non-nil error. +func Join(paths ...PathRetriever) PathRetriever { + return func() (string, error) { + var components []string + var err error + for _, path := range paths { + var component string + component, err = path() + if err != nil { + break + } + components = append(components, component) + } + path := filepath.Join(components...) + return path, err + } +} + +// JoinFromHome returns a path retriever from the join of the user home and the provided path retrievers. +// The returned path retriever eventually returns the error from the first provided path retrievers that returns a non-nil error. +func JoinFromHome(paths ...PathRetriever) PathRetriever { + return Join(append([]PathRetriever{os.UserHomeDir}, paths...)...) +} + +// List returns a paths retriever from a list of path retrievers. +// The returned paths retriever eventually returns the error from the first provided path retrievers that returns a non-nil error. +func List(paths ...PathRetriever) PathsRetriever { + return func() ([]string, error) { + var list []string + var err error + for _, path := range paths { + var resolved string + resolved, err = path() + if err != nil { + break + } + list = append(list, resolved) + } + + return list, err + } +} + +// Mkdir ensure path exists before returning it. 
+func Mkdir(path PathRetriever) PathRetriever { + return func() (string, error) { + p, err := path() + if err != nil { + return "", err + } + return p, os.MkdirAll(p, 0o755) + } +} + +// RelativePath return the relative app path from the current directory. +func RelativePath(appPath string) (string, error) { + pwd, err := os.Getwd() + if err != nil { + return "", err + } + path, err := filepath.Rel(pwd, appPath) + if err != nil { + return "", err + } + return path, nil +} + +// IsDir returns true if the path is a local directory. +func IsDir(path string) bool { + if info, err := os.Stat(path); err == nil && info.IsDir() { + return true + } + return false +} + +// MustAbs returns an absolute representation of path +// if the path is not absolute. +func MustAbs(path string) (string, error) { + if !filepath.IsAbs(path) { + return filepath.Abs(path) + } + return path, nil +} diff --git a/ignite/pkg/xfilepath/xfilepath_test.go b/ignite/pkg/xfilepath/xfilepath_test.go new file mode 100644 index 0000000..1ff9bec --- /dev/null +++ b/ignite/pkg/xfilepath/xfilepath_test.go @@ -0,0 +1,237 @@ +package xfilepath_test + +import ( + "os" + "path" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xfilepath" +) + +func TestJoin(t *testing.T) { + retriever := xfilepath.Join( + xfilepath.Path("foo"), + xfilepath.PathWithError("bar", nil), + xfilepath.Path("foobar/barfoo"), + ) + p, err := retriever() + require.NoError(t, err) + require.Equal(t, filepath.Join( + "foo", + "bar", + "foobar", + "barfoo", + ), p) + + retriever = xfilepath.Join( + xfilepath.Path("foo"), + xfilepath.PathWithError("bar", errors.New("foo")), + xfilepath.Path("foobar/barfoo"), + ) + _, err = retriever() + require.Error(t, err) +} + +func TestJoinFromHome(t *testing.T) { + home, err := os.UserHomeDir() + require.NoError(t, err) + + retriever := xfilepath.JoinFromHome( + xfilepath.Path("foo"), + 
xfilepath.PathWithError("bar", nil), + xfilepath.Path("foobar/barfoo"), + ) + p, err := retriever() + require.NoError(t, err) + require.Equal(t, filepath.Join( + home, + "foo", + "bar", + "foobar", + "barfoo", + ), p) + + retriever = xfilepath.JoinFromHome( + xfilepath.Path("foo"), + xfilepath.PathWithError("bar", errors.New("foo")), + xfilepath.Path("foobar/barfoo"), + ) + _, err = retriever() + require.Error(t, err) +} + +func TestList(t *testing.T) { + retriever := xfilepath.List() + list, err := retriever() + require.NoError(t, err) + require.Equal(t, []string(nil), list) + + retriever1 := xfilepath.Join( + xfilepath.Path("foo/bar"), + ) + retriever2 := xfilepath.Join( + xfilepath.Path("bar/foo"), + ) + retriever = xfilepath.List(retriever1, retriever2) + list, err = retriever() + require.NoError(t, err) + require.Equal(t, []string{ + filepath.Join("foo", "bar"), + filepath.Join("bar", "foo"), + }, list) + + retrieverError := xfilepath.PathWithError("foo", errors.New("foo")) + retriever = xfilepath.List(retriever1, retrieverError, retriever2) + _, err = retriever() + require.Error(t, err) +} + +func TestMkdir(t *testing.T) { + newdir := path.Join(t.TempDir(), "hey") + + dir, err := xfilepath.Mkdir(xfilepath.Path(newdir))() + + require.NoError(t, err) + require.Equal(t, newdir, dir) + require.DirExists(t, dir) +} + +func TestRelativePath(t *testing.T) { + pwd, err := os.Getwd() + require.NoError(t, err) + rootRelative, err := filepath.Rel(pwd, "/") + require.NoError(t, err) + + tests := []struct { + name string + appPath string + want string + err error + }{ + { + name: "same directory", + appPath: filepath.Join(pwd, "file.go"), + want: "file.go", + }, + { + name: "previous directory", + appPath: filepath.Join(filepath.Dir(pwd), "file.go"), + want: "../file.go", + }, + { + name: "root directory", + appPath: "/file.go", + want: filepath.Join(rootRelative, "file.go"), + }, + { + name: "absolute path", + appPath: pwd, + want: ".", + }, + { + name: 
"NonExistentPath", + appPath: filepath.Join(filepath.Base(pwd), "file.go"), + want: "", + err: errors.Errorf("Rel: can't make xfilepath/file.go relative to %s", pwd), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := xfilepath.RelativePath(tt.appPath) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestIsDir(t *testing.T) { + tests := []struct { + name string + path string + want bool + }{ + { + name: "existing directory", + path: ".", + want: true, + }, + { + name: "existing sub directory", + path: "./testdata", + want: true, + }, + { + name: "existing file", + path: "./testdata/testfile", + want: false, + }, + { + name: "non-existing directory", + path: "nonexistent", + want: false, + }, + { + name: "parent directory", + path: "..", + want: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := xfilepath.IsDir(tt.path) + require.Equal(t, tt.want, got) + }) + } +} + +func TestMustAbs(t *testing.T) { + pwd, err := os.Getwd() + require.NoError(t, err) + + tests := []struct { + name string + path string + want string + err error + }{ + { + name: "already absolute path", + path: "/absolute/path", + want: "/absolute/path", + err: nil, + }, + { + name: "relative path", + path: "relative/path", + want: filepath.Join(pwd, "relative/path"), + err: nil, + }, + { + name: "current directory", + path: ".", + want: pwd, + err: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := xfilepath.MustAbs(tt.path) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} diff --git a/ignite/pkg/xgenny/runner.go b/ignite/pkg/xgenny/runner.go new file mode 100644 index 0000000..ede862c --- /dev/null +++ 
b/ignite/pkg/xgenny/runner.go @@ -0,0 +1,208 @@ +package xgenny + +import ( + "context" + "io" + "os" + "path/filepath" + + "github.com/gobuffalo/genny/v2" + + "github.com/ignite/cli/v29/ignite/pkg/randstr" + "github.com/ignite/cli/v29/ignite/pkg/xos" +) + +type Runner struct { + *genny.Runner + ctx context.Context + results []genny.File + tmpPath string + root string +} + +// NewRunner is a xgenny Runner with a logger. +func NewRunner(ctx context.Context, root string) *Runner { + var ( + runner = genny.WetRunner(ctx) + tmpPath = filepath.Join(os.TempDir(), randstr.Runes(5)) + ) + runner.Root = root + r := &Runner{ + ctx: ctx, + Runner: runner, + tmpPath: tmpPath, + results: make([]genny.File, 0), + root: root, + } + runner.FileFn = wetFileFn(r) + return r +} + +// cleanup clears the underlying genny runner state so previously executed +// generators are not re-run on subsequent calls. +func (r *Runner) cleanup() { + runner := genny.WetRunner(r.ctx) + runner.Root = r.root + runner.FileFn = wetFileFn(r) + r.Runner = runner +} + +type ( + OverwriteCallback func(_, _, duplicated []string) error + + // ApplyOption holds the ApplyModifications options. + applyOptions struct { + preRun OverwriteCallback + postRun OverwriteCallback + } + + // ApplyOption configures the ApplyModifications options. + ApplyOption func(r *applyOptions) +) + +// ApplyPreRun sets pre-runner for the ApplyModifications function. +func ApplyPreRun(preRun OverwriteCallback) ApplyOption { + return func(o *applyOptions) { + o.preRun = preRun + } +} + +// ApplyPostRun sets pos-runner for the ApplyModifications function. +func ApplyPostRun(postRun OverwriteCallback) ApplyOption { + return func(o *applyOptions) { + o.postRun = postRun + } +} + +// ApplyModifications copy all modifications from the temporary folder to the target path. 
+func (r *Runner) ApplyModifications(options ...ApplyOption) (SourceModification, error) { + opts := applyOptions{} + for _, apply := range options { + apply(&opts) + } + + // fetch the source modification + sm := NewSourceModification() + for _, file := range r.results { + fileName := file.Name() + _, err := os.Stat(fileName) + switch { + case os.IsNotExist(err): + sm.AppendCreatedFiles(fileName) // if the file doesn't exist in the source, it means it has been created by the runner + case err != nil: + return sm, err + default: + sm.AppendModifiedFiles(fileName) // the file has been modified by the runner + } + } + r.results = make([]genny.File, 0) + + if _, err := os.Stat(r.tmpPath); os.IsNotExist(err) { + return sm, nil + } + + duplicatedFiles, err := xos.ValidateFolderCopy(r.tmpPath, r.Root, sm.ModifiedFiles()...) + if err != nil { + return sm, err + } + + if opts.preRun != nil { + if err := opts.preRun(sm.CreatedFiles(), sm.ModifiedFiles(), duplicatedFiles); err != nil { + return sm, err + } + } + + // Create the target path and copy the content from the temporary folder. + if err := os.MkdirAll(r.Root, os.ModePerm); err != nil { + return sm, err + } + + if err := xos.CopyFolder(r.tmpPath, r.Root); err != nil { + return sm, err + } + + if err := os.RemoveAll(r.tmpPath); err != nil { + return sm, err + } + + if opts.postRun != nil { + if err := opts.postRun(sm.CreatedFiles(), sm.ModifiedFiles(), duplicatedFiles); err != nil { + return sm, err + } + } + return sm, nil +} + +// RunAndApply run the generators and apply the modifications to the target path. +func (r *Runner) RunAndApply(gens *genny.Generator, options ...ApplyOption) (SourceModification, error) { + if err := r.Run(gens); err != nil { + return SourceModification{}, err + } + return r.ApplyModifications(options...) +} + +// Run all generators into a temp folder for we can apply the modifications later. 
+func (r *Runner) Run(gens ...*genny.Generator) error { + // ensure the underlying genny runner starts clean to avoid re-running previous generators + r.cleanup() + + for _, gen := range gens { + if err := r.Runner.With(gen); err != nil { + return err + } + } + + if err := r.Runner.Run(); err != nil { + return err + } + + r.results = append(r.results, r.Results().Files...) + + // reset again so a future Run call starts fresh + r.cleanup() + + return nil +} + +func wetFileFn(runner *Runner) func(genny.File) (genny.File, error) { + return func(f genny.File) (genny.File, error) { + if d, ok := f.(genny.Dir); ok { + if err := os.MkdirAll(d.Name(), d.Perm); err != nil { + return f, err + } + return d, nil + } + + var err error + if !filepath.IsAbs(runner.Root) { + runner.Root, err = filepath.Abs(runner.Root) + if err != nil { + return f, err + } + } + + name := f.Name() + if !filepath.IsAbs(name) { + name = filepath.Join(runner.Root, name) + } + relPath, err := filepath.Rel(runner.Root, name) + if err != nil { + return f, err + } + + dstPath := filepath.Join(runner.tmpPath, relPath) + dir := filepath.Dir(dstPath) + if err := os.MkdirAll(dir, 0o755); err != nil { + return f, err + } + ff, err := os.Create(dstPath) + if err != nil { + return f, err + } + defer ff.Close() + if _, err := io.Copy(ff, f); err != nil { + return f, err + } + return f, nil + } +} diff --git a/ignite/pkg/xgenny/runner_test.go b/ignite/pkg/xgenny/runner_test.go new file mode 100644 index 0000000..7e17231 --- /dev/null +++ b/ignite/pkg/xgenny/runner_test.go @@ -0,0 +1,35 @@ +package xgenny_test + +import ( + "context" + "testing" + + "github.com/gobuffalo/genny/v2" + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/xgenny" +) + +func TestMultipleGen(t *testing.T) { + var ( + runner = xgenny.NewRunner(context.Background(), t.TempDir()) + firstGen = genny.New() + secondGen = genny.New() + firstRunCount int + secondRunCount int + ) + + firstGen.RunFn(func(_ 
*genny.Runner) error { + firstRunCount++ + return nil + }) + + secondGen.RunFn(func(_ *genny.Runner) error { + secondRunCount++ + return nil + }) + + require.NoError(t, runner.Run(firstGen, secondGen)) + require.Equal(t, 1, firstRunCount, "first generator should run only once") + require.Equal(t, 1, secondRunCount, "second generator should run only once") +} diff --git a/ignite/pkg/xgenny/sourcemodification.go b/ignite/pkg/xgenny/sourcemodification.go new file mode 100644 index 0000000..48a3a53 --- /dev/null +++ b/ignite/pkg/xgenny/sourcemodification.go @@ -0,0 +1,116 @@ +package xgenny + +import ( + "path/filepath" + "sort" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/xfilepath" +) + +var ( + modifyPrefix = colors.Modified("modify ") + createPrefix = colors.Success("create ") + removePrefix = func(s string) string { + return strings.TrimPrefix(strings.TrimPrefix(s, modifyPrefix), createPrefix) + } +) + +// SourceModification describes modified and created files in the source code after a run. +type SourceModification struct { + modified map[string]struct{} + created map[string]struct{} +} + +func NewSourceModification() SourceModification { + return SourceModification{ + make(map[string]struct{}), + make(map[string]struct{}), + } +} + +// ModifiedFiles returns the modified files of the source modification. +func (sm SourceModification) ModifiedFiles() (modifiedFiles []string) { + for modified := range sm.modified { + modifiedFiles = append(modifiedFiles, modified) + } + return +} + +// CreatedFiles returns the created files of the source modification. +func (sm SourceModification) CreatedFiles() (createdFiles []string) { + for created := range sm.created { + createdFiles = append(createdFiles, created) + } + return +} + +// AppendModifiedFiles appends modified files in the source modification that are not already documented. 
+func (sm *SourceModification) AppendModifiedFiles(modifiedFiles ...string) { + for _, modifiedFile := range modifiedFiles { + _, alreadyModified := sm.modified[modifiedFile] + _, alreadyCreated := sm.created[modifiedFile] + if !alreadyModified && !alreadyCreated { + sm.modified[modifiedFile] = struct{}{} + } + } +} + +// AppendCreatedFiles appends a created files in the source modification that are not already documented. +func (sm *SourceModification) AppendCreatedFiles(createdFiles ...string) { + for _, createdFile := range createdFiles { + _, alreadyModified := sm.modified[createdFile] + _, alreadyCreated := sm.created[createdFile] + if !alreadyModified && !alreadyCreated { + sm.created[createdFile] = struct{}{} + } + } +} + +// Merge merges a new source modification to an existing one. +func (sm *SourceModification) Merge(newSm SourceModification) { + sm.AppendModifiedFiles(newSm.ModifiedFiles()...) + sm.AppendCreatedFiles(newSm.CreatedFiles()...) +} + +// String convert to string value. +func (sm *SourceModification) String() (string, error) { + appendPrefix := func(paths []string, prefix string) ([]string, error) { + files := make([]string, 0) + for _, path := range paths { + absPath, err := filepath.Abs(path) + if err != nil { + return nil, err + } + // get the relative app path from the current directory + relPath, err := xfilepath.RelativePath(absPath) + if err != nil { + return nil, err + } + files = append(files, prefix+relPath) + } + return files, nil + } + + files, err := appendPrefix(sm.CreatedFiles(), createPrefix) + if err != nil { + return "", err + } + modified, err := appendPrefix(sm.ModifiedFiles(), modifyPrefix) + if err != nil { + return "", err + } + + files = append(files, modified...) 
+ + // sort filenames without a prefix + sort.Slice(files, func(i, j int) bool { + s1 := removePrefix(files[i]) + s2 := removePrefix(files[j]) + + return strings.Compare(s1, s2) == -1 + }) + + return "\n" + strings.Join(files, "\n"), nil +} diff --git a/ignite/pkg/xgenny/sourcemodification_test.go b/ignite/pkg/xgenny/sourcemodification_test.go new file mode 100644 index 0000000..512c2cb --- /dev/null +++ b/ignite/pkg/xgenny/sourcemodification_test.go @@ -0,0 +1,83 @@ +package xgenny_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/xgenny" +) + +var ( + modifiedExample = []string{"mfoo", "mbar", "mfoobar"} + createdExample = []string{"cfoo", "cbar", "cfoobar"} +) + +func sourceModificationExample() xgenny.SourceModification { + sourceModification := xgenny.NewSourceModification() + sourceModification.AppendModifiedFiles(modifiedExample...) + sourceModification.AppendCreatedFiles(createdExample...) + return sourceModification +} + +func TestNewSourceModification(t *testing.T) { + sm := xgenny.NewSourceModification() + require.Empty(t, sm.ModifiedFiles()) + require.Empty(t, sm.CreatedFiles()) +} + +func TestModifiedFiles(t *testing.T) { + sm := sourceModificationExample() + require.Len(t, sm.ModifiedFiles(), len(modifiedExample)) + require.Subset(t, sm.ModifiedFiles(), modifiedExample) +} + +func TestCreatedFiles(t *testing.T) { + sm := sourceModificationExample() + require.Len(t, sm.CreatedFiles(), len(createdExample)) + require.Subset(t, sm.CreatedFiles(), createdExample) +} + +func TestAppendModifiedFiles(t *testing.T) { + sm := sourceModificationExample() + sm.AppendModifiedFiles("foo1") + require.Len(t, sm.ModifiedFiles(), len(modifiedExample)+1) + require.Contains(t, sm.ModifiedFiles(), "foo1") + + // Do not append a existing element + sm.AppendModifiedFiles("foo1") + require.Len(t, sm.ModifiedFiles(), len(modifiedExample)+1) + sm.AppendCreatedFiles("foo2") + sm.AppendModifiedFiles("foo2") + 
require.Len(t, sm.ModifiedFiles(), len(modifiedExample)+1) +} + +func TestAppendCreatedFiles(t *testing.T) { + sm := sourceModificationExample() + sm.AppendCreatedFiles("foo1") + require.Len(t, sm.CreatedFiles(), len(createdExample)+1) + require.Contains(t, sm.CreatedFiles(), "foo1") + + // Do not append a existing element + sm.AppendCreatedFiles("foo1") + require.Len(t, sm.CreatedFiles(), len(createdExample)+1) + sm.AppendModifiedFiles("foo2") + sm.AppendCreatedFiles("foo2") + require.Len(t, sm.ModifiedFiles(), len(modifiedExample)+1) +} + +func TestMerge(t *testing.T) { + sm1 := xgenny.NewSourceModification() + sm2 := xgenny.NewSourceModification() + + sm1.AppendModifiedFiles("foo1", "foo2", "foo3") + sm2.AppendModifiedFiles("foo3", "foo4", "foo5") + sm1.AppendCreatedFiles("bar1", "bar2", "bar3") + sm2.AppendCreatedFiles("foo1", "bar2", "bar3") + + sm1.Merge(sm2) + require.Len(t, sm1.ModifiedFiles(), 5) + require.Len(t, sm1.CreatedFiles(), 3) + require.Subset(t, sm1.ModifiedFiles(), []string{"foo1", "foo2", "foo3", "foo4", "foo5"}) + require.Subset(t, sm1.CreatedFiles(), []string{"bar1", "bar2", "bar3"}) +} diff --git a/ignite/pkg/xgenny/transformer.go b/ignite/pkg/xgenny/transformer.go new file mode 100644 index 0000000..be6233c --- /dev/null +++ b/ignite/pkg/xgenny/transformer.go @@ -0,0 +1,21 @@ +package xgenny + +import ( + "github.com/gobuffalo/genny/v2" + "github.com/gobuffalo/plush/v4" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// Transformer will plush-ify any file that has a ".plush" extension. 
+func Transformer(ctx *plush.Context) genny.Transformer { + t := genny.NewTransformer(".plush", func(f genny.File) (genny.File, error) { + s, err := plush.RenderR(f, ctx) + if err != nil { + return f, errors.Wrap(err, f.Name()) + } + return genny.NewFileS(f.Name(), s), nil + }) + t.StripExt = true + return t +} diff --git a/ignite/pkg/xgenny/transformer_test.go b/ignite/pkg/xgenny/transformer_test.go new file mode 100644 index 0000000..8954c99 --- /dev/null +++ b/ignite/pkg/xgenny/transformer_test.go @@ -0,0 +1,47 @@ +package xgenny_test + +import ( + "io" + "strings" + "testing" + + "github.com/gobuffalo/genny/v2" + "github.com/gobuffalo/plush/v4" + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/xgenny" +) + +func Test_Transformer(t *testing.T) { + r := require.New(t) + + ctx := plush.NewContext() + ctx.Set("name", "mark") + f := genny.NewFile("foo.plush.txt", strings.NewReader("Hello <%= name %>")) + + tr := xgenny.Transformer(ctx) + f, err := tr.Transform(f) + r.NoError(err) + r.Equal("foo.txt", f.Name()) + + b, err := io.ReadAll(f) + r.NoError(err) + r.Equal("Hello mark", string(b)) +} + +func Test_Transformer_No_Ext(t *testing.T) { + r := require.New(t) + + ctx := plush.NewContext() + ctx.Set("name", "mark") + f := genny.NewFile("foo.txt", strings.NewReader("Hello <%= name %>")) + + tr := xgenny.Transformer(ctx) + f, err := tr.Transform(f) + r.NoError(err) + r.Equal("foo.txt", f.Name()) + + b, err := io.ReadAll(f) + r.NoError(err) + r.Equal("Hello <%= name %>", string(b)) +} diff --git a/ignite/pkg/xgit/xgit.go b/ignite/pkg/xgit/xgit.go new file mode 100644 index 0000000..dedb9b3 --- /dev/null +++ b/ignite/pkg/xgit/xgit.go @@ -0,0 +1,181 @@ +package xgit + +import ( + "context" + "os" + "path/filepath" + "strings" + "time" + + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing" + "github.com/go-git/go-git/v5/plumbing/object" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +var ( + commitMsg = 
"Initialized with Ignite CLI" + defaultOpenOpts = git.PlainOpenOptions{DetectDotGit: true} + devXAuthor = &object.Signature{ + Name: "Developer Experience team at Ignite", + Email: "hello@ignite.com", + When: time.Now(), + } +) + +// InitAndCommit creates a git repo in path if path isn't already inside a git +// repository, then commits path content. +func InitAndCommit(path string) error { + repo, err := git.PlainOpenWithOptions(path, &defaultOpenOpts) + if err != nil { + if !errors.Is(err, git.ErrRepositoryNotExists) { + return errors.Errorf("open git repo %s: %w", path, err) + } + // not a git repo, creates a new one + repo, err = git.PlainInitWithOptions(path, &git.PlainInitOptions{ + InitOptions: git.InitOptions{ + DefaultBranch: plumbing.Main, + }, + Bare: false, + }) + if err != nil { + return errors.Errorf("init git repo %s: %w", path, err) + } + } + wt, err := repo.Worktree() + if err != nil { + return errors.Errorf("worktree %s: %w", path, err) + } + // wt.Add(path) takes only relative path, we need to turn path relative to + // repo path. + repoPath := wt.Filesystem.Root() + path, err = filepath.Rel(repoPath, path) + if err != nil { + return errors.Errorf("find relative path %s %s: %w", repoPath, path, err) + } + if _, err := wt.Add(path); err != nil { + return errors.Errorf("git add %s: %w", path, err) + } + _, err = wt.Commit(commitMsg, &git.CommitOptions{ + All: true, + Author: devXAuthor, + }) + if err != nil { + return errors.Errorf("git commit %s: %w", path, err) + } + return nil +} + +// AreChangesCommitted returns true if dir is a clean git repository with no +// pending changes. It returns also true if dir is NOT a git repository. 
+func AreChangesCommitted(dir string) (bool, error) { + dir, err := filepath.Abs(dir) + if err != nil { + return false, err + } + + repository, err := git.PlainOpen(dir) + if err != nil { + if errors.Is(err, git.ErrRepositoryNotExists) { + return true, nil + } + return false, err + } + + w, err := repository.Worktree() + if err != nil { + return false, err + } + + ws, err := w.Status() + if err != nil { + return false, err + } + return ws.IsClean(), nil +} + +// Clone clones a git repository represented by urlRef, into dir. +// urlRef is the URL of the repository, with an optional ref, suffixed to the +// URL with a `@`. Ref can be a tag, a branch or a hash. +// Valid examples of urlRef: github.com/org/repo, github.com/org/repo@v1, +// github.com/org/repo@develop, github.com/org/repo@ab88cdf. +func Clone(ctx context.Context, urlRef, dir string) error { + // Ensure dir is empty if it exists (if it doesn't exist, the call to + // git.PlainCloneContext below will create it). + files, _ := os.ReadDir(dir) + if len(files) > 0 { + return errors.Errorf("clone: target directory %q is not empty", dir) + } + // Split urlRef + var ( + parts = strings.Split(urlRef, "@") + url = parts[0] + ref string + ) + if len(parts) > 1 { + ref = parts[1] + } + // First clone the repo + repo, err := git.PlainCloneContext(ctx, dir, false, &git.CloneOptions{ + URL: url, + }) + if err != nil { + return err + } + if ref == "" { + // if ref is not provided, job is done + return nil + } + // Reference provided, try to resolve + wt, err := repo.Worktree() + if err != nil { + return err + } + var h *plumbing.Hash + for _, ref := range []string{ref, "origin/" + ref} { + h, err = repo.ResolveRevision(plumbing.Revision(ref)) + if err == nil { + break + } + } + if err != nil { + // Ref not found, clean up dir and return error + os.RemoveAll(dir) + return err + } + return wt.Checkout(&git.CheckoutOptions{ + Hash: *h, + }) +} + +// IsRepository checks if a path contains a Git repository. 
+func IsRepository(path string) (bool, error) { + if _, err := git.PlainOpenWithOptions(path, &defaultOpenOpts); err != nil { + if errors.Is(err, git.ErrRepositoryNotExists) { + return false, nil + } + return false, err + } + return true, nil +} + +// RepositoryURL returns the URL of the origin remote of a Git repository. +func RepositoryURL(path string) (string, error) { + repo, err := git.PlainOpenWithOptions(path, &defaultOpenOpts) + if err != nil { + return "", err + } + + cfg, err := repo.Config() + if err != nil { + return "", err + } + + origin, ok := cfg.Remotes["origin"] + if !ok { + return "", errors.Errorf("no origin remote found in %s", path) + } + + return origin.URLs[0], nil +} diff --git a/ignite/pkg/xgit/xgit_test.go b/ignite/pkg/xgit/xgit_test.go new file mode 100644 index 0000000..8fe9b38 --- /dev/null +++ b/ignite/pkg/xgit/xgit_test.go @@ -0,0 +1,445 @@ +package xgit_test + +import ( + "context" + "fmt" + "os" + "path" + "testing" + "time" + + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing" + "github.com/go-git/go-git/v5/plumbing/object" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/randstr" + "github.com/ignite/cli/v29/ignite/pkg/xgit" +) + +func TestInitAndCommit(t *testing.T) { + tests := []struct { + name string + dirFunc func(*testing.T) string + expectDotGitFolder bool + expectedNumCommits int + expectedFilesInCommit []string + }{ + { + name: "dir is not inside an existing repo", + dirFunc: func(t *testing.T) string { + t.Helper() + dir := t.TempDir() + err := os.WriteFile(path.Join(dir, "foo"), []byte("hello"), 0o755) + require.NoError(t, err) + return dir + }, + expectDotGitFolder: true, + expectedNumCommits: 1, + expectedFilesInCommit: []string{"foo"}, + }, + { + name: "dir is inside an existing repo", + // In this repo, there's no existing commit but a standalone uncommitted + // foo file that shouldn't be included in the 
xgit.InitAndCommit's commit. + dirFunc: func(t *testing.T) string { + t.Helper() + dir := t.TempDir() + _, err := git.PlainInit(dir, false) + require.NoError(t, err) + err = os.WriteFile(path.Join(dir, "foo"), []byte("hello"), 0o755) + require.NoError(t, err) + dirInsideRepo := path.Join(dir, "bar") + err = os.Mkdir(dirInsideRepo, 0o0755) + require.NoError(t, err) + err = os.WriteFile(path.Join(dirInsideRepo, "baz"), []byte("hello"), 0o755) + require.NoError(t, err) + return dirInsideRepo + }, + expectDotGitFolder: false, + expectedNumCommits: 1, + expectedFilesInCommit: []string{"bar/baz"}, + }, + { + name: "dir is an existing repo", + dirFunc: func(t *testing.T) string { + t.Helper() + // In this repo, there's one existing commit, and an uncommitted baz file + // that must be included in the xgit.InitAndCommit's commit. + dir := t.TempDir() + _, err := git.PlainInit(dir, false) + require.NoError(t, err) + err = os.WriteFile(path.Join(dir, "foo"), []byte("hello"), 0o755) + require.NoError(t, err) + repo, err := git.PlainOpenWithOptions(dir, &git.PlainOpenOptions{}) + require.NoError(t, err) + wt, err := repo.Worktree() + require.NoError(t, err) + _, err = wt.Add(".") + require.NoError(t, err) + _, err = wt.Commit("First commit", &git.CommitOptions{ + Author: &object.Signature{}, + }) + require.NoError(t, err) + err = os.WriteFile(path.Join(dir, "bar"), []byte("hello"), 0o755) + require.NoError(t, err) + return dir + }, + expectDotGitFolder: true, + expectedNumCommits: 2, + expectedFilesInCommit: []string{"bar"}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + dir := tt.dirFunc(t) + + err := xgit.InitAndCommit(dir) + + require.NoError(t, err) + _, err = os.Stat(path.Join(dir, ".git")) + require.Equal(t, tt.expectDotGitFolder, !os.IsNotExist(err)) + // Assert repository commits. For that we need to open the repo and + // iterate over existing commits. 
+ repo, err := git.PlainOpenWithOptions(dir, &git.PlainOpenOptions{ + DetectDotGit: true, + }) + require.NoError(t, err) + logs, err := repo.Log(&git.LogOptions{}) + require.NoError(t, err) + var ( + numCommits int + lastCommit *object.Commit + ) + err = logs.ForEach(func(c *object.Commit) error { + if numCommits == 0 { + lastCommit = c + } + numCommits++ + return nil + }) + require.NoError(t, err) + require.Equal(t, tt.expectedNumCommits, numCommits) + if assert.NotNil(t, lastCommit) { + require.Equal(t, "Initialized with Ignite CLI", lastCommit.Message) + require.WithinDuration(t, time.Now(), lastCommit.Committer.When, 10*time.Second) + require.Equal(t, "Developer Experience team at Ignite", lastCommit.Author.Name) + require.Equal(t, "hello@ignite.com", lastCommit.Author.Email) + stats, err := lastCommit.Stats() + require.NoError(t, err) + var files []string + for _, s := range stats { + files = append(files, s.Name) + } + require.Equal(t, tt.expectedFilesInCommit, files) + } + }) + } +} + +func TestAreChangesCommitted(t *testing.T) { + tests := []struct { + name string + dirFunc func(*testing.T) string + expectedResult bool + }{ + { + name: "dir is not a git repo", + dirFunc: func(t *testing.T) string { + t.Helper() + return t.TempDir() + }, + expectedResult: true, + }, + { + name: "dir is a empty git repo", + dirFunc: func(t *testing.T) string { + t.Helper() + dir := t.TempDir() + _, err := git.PlainInit(dir, false) + require.NoError(t, err) + return dir + }, + expectedResult: true, + }, + { + name: "dir is a dirty empty git repo", + dirFunc: func(t *testing.T) string { + t.Helper() + dir := t.TempDir() + _, err := git.PlainInit(dir, false) + require.NoError(t, err) + err = os.WriteFile(path.Join(dir, "foo"), []byte("hello"), 0o755) + require.NoError(t, err) + return dir + }, + expectedResult: false, + }, + { + name: "dir is a cleaned git repo", + dirFunc: func(t *testing.T) string { + t.Helper() + dir := t.TempDir() + _, err := git.PlainInit(dir, false) + 
require.NoError(t, err) + err = os.WriteFile(path.Join(dir, "foo"), []byte("hello"), 0o755) + require.NoError(t, err) + repo, err := git.PlainOpenWithOptions(dir, &git.PlainOpenOptions{}) + require.NoError(t, err) + wt, err := repo.Worktree() + require.NoError(t, err) + _, err = wt.Add(".") + require.NoError(t, err) + _, err = wt.Commit("First commit", &git.CommitOptions{ + Author: &object.Signature{}, + }) + require.NoError(t, err) + return dir + }, + expectedResult: true, + }, + { + name: "dir is a dirty git repo", + dirFunc: func(t *testing.T) string { + t.Helper() + dir := t.TempDir() + _, err := git.PlainInit(dir, false) + require.NoError(t, err) + err = os.WriteFile(path.Join(dir, "foo"), []byte("hello"), 0o755) + require.NoError(t, err) + repo, err := git.PlainOpenWithOptions(dir, &git.PlainOpenOptions{}) + require.NoError(t, err) + wt, err := repo.Worktree() + require.NoError(t, err) + _, err = wt.Add(".") + require.NoError(t, err) + _, err = wt.Commit("First commit", &git.CommitOptions{ + Author: &object.Signature{}, + }) + require.NoError(t, err) + err = os.WriteFile(path.Join(dir, "bar"), []byte("hello"), 0o755) + require.NoError(t, err) + return dir + }, + expectedResult: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + dir := tt.dirFunc(t) + + res, err := xgit.AreChangesCommitted(dir) + + require.NoError(t, err) + assert.Equal(t, tt.expectedResult, res) + }) + } +} + +func TestClone(t *testing.T) { + // Create a folder with content + notEmptyDir := t.TempDir() + err := os.WriteFile(path.Join(notEmptyDir, ".foo"), []byte("hello"), 0o755) + require.NoError(t, err) + // Create a local git repo for all the test cases + repoDir := t.TempDir() + repo, err := git.PlainInit(repoDir, false) + require.NoError(t, err) + err = os.WriteFile(path.Join(repoDir, "foo"), []byte("hello"), 0o755) + require.NoError(t, err) + // Add a first commit + w, err := repo.Worktree() + require.NoError(t, err) + _, err = w.Add(".") + 
require.NoError(t, err) + commit1, err := w.Commit("commit1", &git.CommitOptions{ + Author: &object.Signature{ + Name: "bob", + Email: "bob@example.com", + When: time.Now(), + }, + }) + // Add a branch on commit1 + require.NoError(t, err) + err = w.Checkout(&git.CheckoutOptions{ + Branch: plumbing.NewBranchReferenceName("my-branch"), + Create: true, + }) + require.NoError(t, err) + // Back to master + err = w.Checkout(&git.CheckoutOptions{Branch: plumbing.NewBranchReferenceName("master")}) + require.NoError(t, err) + // Add a tag on commit1 + _, err = repo.CreateTag("v1", commit1, &git.CreateTagOptions{ + Tagger: &object.Signature{Name: "me"}, + Message: "v1", + }) + require.NoError(t, err) + // Add a second commit + err = os.WriteFile(path.Join(repoDir, "bar"), []byte("hello"), 0o755) + require.NoError(t, err) + _, err = w.Add(".") + require.NoError(t, err) + commit2, err := w.Commit("commit2", &git.CommitOptions{ + Author: &object.Signature{ + Name: "bob", + Email: "bob@example.com", + When: time.Now(), + }, + }) + require.NoError(t, err) + + tests := []struct { + name string + dir string + urlRef string + expectedError string + expectedRef plumbing.Hash + }{ + { + name: "fail: repo doesn't exist", + dir: t.TempDir(), + urlRef: "/tmp/not/exists", + expectedError: "repository not found", + }, + { + name: "fail: target dir isn't empty", + dir: notEmptyDir, + urlRef: repoDir, + expectedError: fmt.Sprintf(`clone: target directory "%s" is not empty`, notEmptyDir), + }, + { + name: "ok: target dir doesn't exists", + dir: "/tmp/not/exists/" + randstr.Runes(6), + urlRef: repoDir, + expectedRef: commit2, + }, + { + name: "ok: no ref", + dir: t.TempDir(), + urlRef: repoDir, + expectedRef: commit2, + }, + { + name: "ok: empty ref", + dir: t.TempDir(), + urlRef: repoDir + "@", + expectedRef: commit2, + }, + { + name: "ok: with tag ref", + dir: t.TempDir(), + urlRef: repoDir + "@v1", + expectedRef: commit1, + }, + { + name: "ok: with branch ref", + dir: t.TempDir(), + urlRef: 
repoDir + "@my-branch", + expectedRef: commit1, + }, + { + name: "ok: with commit1 hash ref", + dir: t.TempDir(), + urlRef: repoDir + "@" + commit1.String(), + expectedRef: commit1, + }, + { + name: "ok: with commit2 hash ref", + dir: t.TempDir(), + urlRef: repoDir + "@" + commit2.String(), + expectedRef: commit2, + }, + { + name: "fail: ref not found", + dir: t.TempDir(), + urlRef: repoDir + "@what", + expectedError: "reference not found", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var ( + files, _ = os.ReadDir(tt.dir) + dirWasEmpty = len(files) == 0 + ) + + err := xgit.Clone(context.Background(), tt.urlRef, tt.dir) + + if tt.expectedError != "" { + require.EqualError(t, err, tt.expectedError) + if dirWasEmpty { + // If it was empty, ensure target dir is still clean + files, _ := os.ReadDir(tt.dir) + require.Empty(t, files, "target dir should be empty in case of error") + } + return + } + require.NoError(t, err) + _, err = os.Stat(tt.dir) + require.False(t, os.IsNotExist(err), "dir %s should exist", tt.dir) + repo, err := git.PlainOpen(tt.dir) + require.NoError(t, err) + h, err := repo.Head() + require.NoError(t, err) + require.Equal(t, tt.expectedRef, h.Hash()) + }) + } +} + +func TestIsRepository(t *testing.T) { + tests := []struct { + name string + dirFunc func(*testing.T) string + shouldFail bool + expected bool + }{ + { + name: "path is a repository", + dirFunc: func(t *testing.T) string { + t.Helper() + dir := t.TempDir() + _, err := git.PlainInit(dir, false) + require.NoError(t, err) + return dir + }, + expected: true, + }, + { + name: "path is not a repository", + dirFunc: func(t *testing.T) string { + t.Helper() + return t.TempDir() + }, + expected: false, + }, + { + name: "repository error", + dirFunc: func(t *testing.T) string { + t.Helper() + dir := t.TempDir() + err := os.Chmod(dir, 0) + require.NoError(t, err) + return dir + }, + shouldFail: true, + expected: false, + }, + } + for _, tt := range tests { + 
t.Run(tt.name, func(t *testing.T) { + // Act + exists, err := xgit.IsRepository(tt.dirFunc(t)) + + // Assert + require.Equal(t, tt.expected, exists) + + if tt.shouldFail { + require.Error(t, err) + } else { + require.NoError(t, err) + } + }) + } +} diff --git a/ignite/pkg/xhttp/response.go b/ignite/pkg/xhttp/response.go new file mode 100644 index 0000000..06241b7 --- /dev/null +++ b/ignite/pkg/xhttp/response.go @@ -0,0 +1,48 @@ +package xhttp + +import ( + "encoding/json" + "net/http" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// ResponseJSON writes a JSON response to w by using status as http status and data +// as payload. +func ResponseJSON(w http.ResponseWriter, status int, data interface{}) error { + var errMarhsal error + bz, err := json.Marshal(data) + if err != nil { + status = http.StatusInternalServerError + bz, errMarhsal = json.Marshal(NewErrorResponse(errors.New(http.StatusText(status)))) + + // wrap error + if errMarhsal != nil { + err = errors.Errorf("%w: %s", err, errMarhsal.Error()) + } + } + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(status) + _, _ = w.Write(bz) + return err +} + +// ErrorResponseBody is the skeleton for error messages that should be sent to +// client. +type ErrorResponseBody struct { + Error ErrorResponse `json:"error"` +} + +// ErrorResponse holds the error message. +type ErrorResponse struct { + Message string `json:"message"` +} + +// NewErrorResponse creates a new http error response from err. 
+func NewErrorResponse(err error) ErrorResponseBody { + return ErrorResponseBody{ + Error: ErrorResponse{ + Message: err.Error(), + }, + } +} diff --git a/ignite/pkg/xhttp/response_test.go b/ignite/pkg/xhttp/response_test.go new file mode 100644 index 0000000..9aa50c4 --- /dev/null +++ b/ignite/pkg/xhttp/response_test.go @@ -0,0 +1,37 @@ +package xhttp + +import ( + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +func TestResponseJSON(t *testing.T) { + w := httptest.NewRecorder() + data := map[string]interface{}{"a": 1} + require.NoError(t, ResponseJSON(w, http.StatusCreated, data)) + resp := w.Result() + defer resp.Body.Close() // Ensure the response body is closed + + require.Equal(t, http.StatusCreated, resp.StatusCode) + require.Equal(t, "application/json", resp.Header.Get("Content-Type")) + + body, _ := io.ReadAll(resp.Body) + dataJSON, err := json.Marshal(data) + require.NoError(t, err) + require.Equal(t, dataJSON, body) +} + +func TestNewErrorResponse(t *testing.T) { + require.Equal(t, ErrorResponseBody{ + Error: ErrorResponse{ + Message: "error", + }, + }, NewErrorResponse(errors.New("error"))) +} diff --git a/ignite/pkg/xhttp/server.go b/ignite/pkg/xhttp/server.go new file mode 100644 index 0000000..82e07f1 --- /dev/null +++ b/ignite/pkg/xhttp/server.go @@ -0,0 +1,31 @@ +package xhttp + +import ( + "context" + "net/http" + "time" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// ShutdownTimeout is the timeout for waiting all requests to complete. +const ShutdownTimeout = time.Minute + +// Serve starts s server and shutdowns it once the ctx is cancelled. 
+func Serve(ctx context.Context, s *http.Server) error { + go func() { + <-ctx.Done() + + shutdownCtx, cancel := context.WithTimeout(context.Background(), ShutdownTimeout) + defer cancel() + + _ = s.Shutdown(shutdownCtx) + }() + + err := s.ListenAndServe() + if errors.Is(err, http.ErrServerClosed) { + return nil + } + + return err +} diff --git a/ignite/pkg/xio/xio.go b/ignite/pkg/xio/xio.go new file mode 100644 index 0000000..11001ad --- /dev/null +++ b/ignite/pkg/xio/xio.go @@ -0,0 +1,16 @@ +package xio + +import "io" + +type nopWriteCloser struct { + io.Writer +} + +func (w *nopWriteCloser) Close() error { + return nil +} + +// NopWriteCloser returns a WriteCloser. +func NopWriteCloser(w io.Writer) io.WriteCloser { + return &nopWriteCloser{w} +} diff --git a/ignite/pkg/xio/xio_test.go b/ignite/pkg/xio/xio_test.go new file mode 100644 index 0000000..a257a44 --- /dev/null +++ b/ignite/pkg/xio/xio_test.go @@ -0,0 +1,19 @@ +package xio + +import ( + "bytes" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestNopWriteCloser(t *testing.T) { + var b bytes.Buffer + w := NopWriteCloser(&b) + + n, err := w.Write([]byte("ignite")) + require.NoError(t, err) + require.Equal(t, 6, n) + require.Equal(t, "ignite", b.String()) + require.NoError(t, w.Close()) +} diff --git a/ignite/pkg/xnet/xnet.go b/ignite/pkg/xnet/xnet.go new file mode 100644 index 0000000..9bb90ed --- /dev/null +++ b/ignite/pkg/xnet/xnet.go @@ -0,0 +1,55 @@ +package xnet + +import ( + "fmt" + "net" + "strconv" +) + +// LocalhostIPv4Address returns a localhost IPv4 address with a port +// that represents the localhost IP address listening on that port. +func LocalhostIPv4Address(port int) string { + return fmt.Sprintf("localhost:%d", port) +} + +// AnyIPv4Address returns an IPv4 meta address "0.0.0.0" with a port +// that represents any IP address listening on that port. 
+func AnyIPv4Address(port int) string { + return fmt.Sprintf("0.0.0.0:%d", port) +} + +// IncreasePort increases a port number by 1. +// This can be useful to generate port ranges or consecutive +// port numbers for the same address. +func IncreasePort(addr string) (string, error) { + return IncreasePortBy(addr, 1) +} + +// IncreasePortBy increases a port number by a factor of "inc". +// This can be useful to generate port ranges or consecutive +// port numbers for the same address. +func IncreasePortBy(addr string, inc uint64) (string, error) { + host, port, err := net.SplitHostPort(addr) + if err != nil { + return "", err + } + + v, err := strconv.ParseUint(port, 10, 0) + if err != nil { + return "", err + } + + port = strconv.FormatUint(v+inc, 10) + + return net.JoinHostPort(host, port), nil +} + +// MustIncreasePortBy calls IncreasePortBy and panics on error. +func MustIncreasePortBy(addr string, inc uint64) string { + s, err := IncreasePortBy(addr, inc) + if err != nil { + panic(err) + } + + return s +} diff --git a/ignite/pkg/xnet/xnet_test.go b/ignite/pkg/xnet/xnet_test.go new file mode 100644 index 0000000..fa1857f --- /dev/null +++ b/ignite/pkg/xnet/xnet_test.go @@ -0,0 +1,55 @@ +package xnet_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/xnet" +) + +func TestLocalhostIPv4Address(t *testing.T) { + require.Equal(t, "localhost:42", xnet.LocalhostIPv4Address(42)) +} + +func TestAnyIPv4Address(t *testing.T) { + require.Equal(t, "0.0.0.0:42", xnet.AnyIPv4Address(42)) +} + +func TestIncreasePort(t *testing.T) { + addr, err := xnet.IncreasePort("localhost:41") + + require.NoError(t, err) + require.Equal(t, "localhost:42", addr) +} + +func TestIncreasePortWithInvalidAddress(t *testing.T) { + _, err := xnet.IncreasePort("localhost:x:41") + + require.Error(t, err) +} + +func TestIncreasePortWithInvalidPort(t *testing.T) { + _, err := xnet.IncreasePort("localhost:x") + + require.Error(t, err) +} + 
+func TestIncreasePortBy(t *testing.T) { + addr, err := xnet.IncreasePortBy("localhost:32", 10) + + require.NoError(t, err) + require.Equal(t, "localhost:42", addr) +} + +func TestIncreasePortByWithInvalidAddress(t *testing.T) { + _, err := xnet.IncreasePortBy("localhost:x:32", 10) + + require.Error(t, err) +} + +func TestIncreasePortByWithInvalidPort(t *testing.T) { + _, err := xnet.IncreasePortBy("localhost:x", 10) + + require.Error(t, err) +} diff --git a/ignite/pkg/xos/cp.go b/ignite/pkg/xos/cp.go new file mode 100644 index 0000000..b08c013 --- /dev/null +++ b/ignite/pkg/xos/cp.go @@ -0,0 +1,131 @@ +package xos + +import ( + "io" + "os" + "path/filepath" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// CopyFolder copy the source folder to the destination folder. +func CopyFolder(srcPath, dstPath string) error { + return filepath.Walk(srcPath, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + + // Skip the root folder + if path == srcPath { + return nil + } + + // Get the relative path within the source folder + relativePath, err := filepath.Rel(srcPath, path) + if err != nil { + return err + } + + // Create the corresponding destination path + destPath := filepath.Join(dstPath, relativePath) + + if info.IsDir() { + // Create the directory in the destination + err = os.MkdirAll(destPath, 0o755) + if err != nil { + return err + } + } else { + // Copy the file content + err = CopyFile(path, destPath) + if err != nil { + return err + } + } + return nil + }) +} + +// ValidateFolderCopy validates that all files in source folder exist in destination folder +// with same name and relative path. 
+func ValidateFolderCopy(srcPath, dstPath string, exclude ...string) ([]string, error) { + if srcPath == dstPath { + return nil, errors.Errorf("source and destination paths are the same %s", srcPath) + } + + // Check if the destination path exists + if _, err := os.Stat(dstPath); errors.Is(err, os.ErrNotExist) { + return nil, errors.Errorf("destination path does not exist: %s", dstPath) + } else if err != nil { + return nil, err + } + + excludeMap := make(map[string]struct{}, len(exclude)) + for _, ex := range exclude { + excludeMap[ex] = struct{}{} + } + + var sameFiles []string + err := filepath.Walk(srcPath, func(path string, info os.FileInfo, err error) error { + if errors.Is(err, os.ErrNotExist) { + return errors.Errorf("source path does not exist: %s", path) + } + if err != nil { + return err + } + + // Skip dirs + if info.IsDir() { + return nil + } + + // Get the relative path within the source folder + relativePath, err := filepath.Rel(srcPath, path) + if err != nil { + return err + } + + // Skip excluded files + if _, ok := excludeMap[relativePath]; ok { + return nil + } + + // Create the corresponding destination path + destPath := filepath.Join(dstPath, relativePath) + + // Check if the destination path exists + destInfo, err := os.Stat(destPath) + if os.IsNotExist(err) { + return nil + } else if err != nil { + return err + } + + // Verify if directory/file types match + if info.IsDir() != destInfo.IsDir() { + return os.ErrInvalid + } + + sameFiles = append(sameFiles, relativePath) + return nil + }) + return sameFiles, err +} + +// CopyFile copy the source file to the destination file. 
+func CopyFile(srcPath, dstPath string) error { + srcFile, err := os.OpenFile(srcPath, os.O_RDONLY, 0o666) + if err != nil { + return err + } + defer srcFile.Close() + + destFile, err := os.Create(dstPath) + if err != nil { + return err + } + defer destFile.Close() + + _, err = io.Copy(destFile, srcFile) + return err +} diff --git a/ignite/pkg/xos/cp_test.go b/ignite/pkg/xos/cp_test.go new file mode 100644 index 0000000..b1a7f84 --- /dev/null +++ b/ignite/pkg/xos/cp_test.go @@ -0,0 +1,264 @@ +package xos_test + +import ( + "os" + "path/filepath" + "testing" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/xos" +) + +func TestCopyFolder(t *testing.T) { + tempDir, err := os.MkdirTemp("", "TestCopyFile") + require.NoError(t, err) + t.Cleanup(func() { + require.NoError(t, os.RemoveAll(tempDir)) + }) + + // Create temporary source and destination directories + srcDir := filepath.Join(tempDir, "source") + err = os.MkdirAll(srcDir, 0o755) + require.NoError(t, err) + + dstDir := filepath.Join(tempDir, "destination") + err = os.MkdirAll(dstDir, 0o755) + require.NoError(t, err) + + emptyDir := filepath.Join(tempDir, "empty") + err = os.MkdirAll(emptyDir, 0o755) + require.NoError(t, err) + + // Create a temporary source file + srcFile1 := filepath.Join(srcDir, "file_1.txt") + err = os.WriteFile(srcFile1, []byte("File content 1"), 0o644) + require.NoError(t, err) + + srcFile2 := filepath.Join(srcDir, "file_2.txt") + err = os.WriteFile(srcFile2, []byte("File content 2"), 0o644) + require.NoError(t, err) + + tests := []struct { + name string + srcPath string + dstPath string + expectedErr error + expectedFileCount int + }{ + { + name: "valid paths", + srcPath: srcDir, + dstPath: dstDir, + expectedFileCount: 2, + }, + { + name: "non existent destination", + srcPath: srcDir, + dstPath: filepath.Join(dstDir, "non-existent-destination"), + expectedErr: os.ErrNotExist, + }, + { + name: "non existent 
source", + srcPath: filepath.Join(dstDir, "non-existent-source"), + dstPath: dstDir, + expectedErr: os.ErrNotExist, + }, + { + name: "same source and destination", + srcPath: srcDir, + dstPath: srcDir, + expectedFileCount: 2, + }, + { + name: "empty source", + srcPath: emptyDir, + dstPath: filepath.Join(tempDir, "empty"), + expectedFileCount: 0, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := xos.CopyFolder(tt.srcPath, tt.dstPath) + if tt.expectedErr != nil { + require.ErrorIs(t, err, tt.expectedErr) + return + } + require.NoError(t, err) + + // Check the number of files in the destination directory + files, err := os.ReadDir(tt.dstPath) + require.NoError(t, err) + require.Equal(t, tt.expectedFileCount, len(files)) + }) + } +} + +func TestCopyFile(t *testing.T) { + tempDir, err := os.MkdirTemp("", "TestCopyFile") + require.NoError(t, err) + t.Cleanup(func() { + require.NoError(t, os.RemoveAll(tempDir)) + }) + + // Create temporary source and destination directories + srcDir := filepath.Join(tempDir, "source") + dstDir := filepath.Join(tempDir, "destination") + err = os.MkdirAll(srcDir, 0o755) + require.NoError(t, err) + err = os.MkdirAll(dstDir, 0o755) + require.NoError(t, err) + + // Create a temporary source file + srcFile := filepath.Join(srcDir, "file.txt") + err = os.WriteFile(srcFile, []byte("File content"), 0o644) + require.NoError(t, err) + + tests := []struct { + name string + srcPath string + dstPath string + expectedErr error + expectedBytes int64 // Provide the expected number of bytes copied + }{ + { + name: "valid path", + srcPath: srcFile, + dstPath: filepath.Join(dstDir, "test_1.txt"), + expectedBytes: 12, + }, + { + name: "non existent file", + srcPath: filepath.Join(srcDir, "non_existent_file.txt"), + dstPath: filepath.Join(dstDir, "test_2.txt"), + expectedErr: os.ErrNotExist, + }, + { + name: "non existent destination", + srcPath: srcFile, + dstPath: "/path/to/nonexistent/file.txt", + expectedErr: 
os.ErrNotExist, + }, + { + name: "same source and destination", + srcPath: srcFile, + dstPath: srcFile, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := xos.CopyFile(tt.srcPath, tt.dstPath) + if tt.expectedErr != nil { + require.ErrorIs(t, err, tt.expectedErr) + return + } + require.NoError(t, err) + + destFile, err := os.Open(tt.dstPath) + require.NoError(t, err) + + destFileInfo, err := destFile.Stat() + require.NoError(t, err) + require.NoError(t, destFile.Close()) + require.Equal(t, tt.expectedBytes, destFileInfo.Size()) + }) + } +} + +func TestValidateFolderCopy(t *testing.T) { + tempDir, err := os.MkdirTemp("", "TestValidateFolderCopy") + require.NoError(t, err) + t.Cleanup(func() { + require.NoError(t, os.RemoveAll(tempDir)) + }) + + // Create source and destination directories and files + var ( + srcPath = filepath.Join(tempDir, "source") + srcFile = filepath.Join(srcPath, "test.txt") + dstPath = filepath.Join(tempDir, "destination") + dstFile = filepath.Join(dstPath, "test.txt") + emptyPath = filepath.Join(tempDir, "empty") + nonExistentPath = filepath.Join(tempDir, "nonexistent") + ) + + err = os.MkdirAll(srcPath, 0o755) + require.NoError(t, err) + err = os.MkdirAll(dstPath, 0o755) + require.NoError(t, err) + err = os.MkdirAll(emptyPath, 0o755) + require.NoError(t, err) + err = os.WriteFile(srcFile, []byte("source test"), 0o644) + require.NoError(t, err) + err = os.WriteFile(dstFile, []byte("destination test"), 0o644) + require.NoError(t, err) + + type args struct { + srcPath string + dstPath string + } + tests := []struct { + name string + args args + want []string + err error + }{ + { + name: "valid paths", + args: args{ + srcPath: srcPath, + dstPath: dstPath, + }, + want: []string{"test.txt"}, + }, + { + name: "same source and destination", + args: args{ + srcPath: srcPath, + dstPath: srcPath, + }, + want: []string{}, + err: errors.Errorf("source and destination paths are the same %s", srcPath), + }, + { + name: 
"empty directory", + args: args{ + srcPath: emptyPath, + dstPath: dstPath, + }, + want: []string{}, + }, + { + name: "non existent source", + args: args{ + srcPath: nonExistentPath, + dstPath: dstPath, + }, + err: errors.Errorf("source path does not exist: %s", nonExistentPath), + }, + { + name: "non existent destination", + args: args{ + srcPath: srcPath, + dstPath: nonExistentPath, + }, + err: errors.Errorf("destination path does not exist: %s", nonExistentPath), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := xos.ValidateFolderCopy(tt.args.srcPath, tt.args.dstPath) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, err.Error(), tt.err.Error()) + return + } + require.NoError(t, err) + require.ElementsMatch(t, tt.want, got) + }) + } +} diff --git a/ignite/pkg/xos/files.go b/ignite/pkg/xos/files.go new file mode 100644 index 0000000..ca2f435 --- /dev/null +++ b/ignite/pkg/xos/files.go @@ -0,0 +1,86 @@ +package xos + +import ( + "fmt" + "os" + "path/filepath" + "strings" +) + +const ( + JSONFile = "json" + ProtoFile = "proto" + YAMLFile = "yaml" + YMLFile = "yml" +) + +type findFileOptions struct { + extension []string + prefix string +} + +type FindFileOptions func(o *findFileOptions) + +// WithExtension adds a file extension to the search options. +// It can be called multiple times to add multiple extensions. +func WithExtension(extension string) FindFileOptions { + return func(o *findFileOptions) { + o.extension = append(o.extension, extension) + } +} + +func WithPrefix(prefix string) FindFileOptions { + return func(o *findFileOptions) { + o.prefix = prefix + } +} + +// FindFiles searches for files in the specified directory based on the given options. +// It supports filtering files by extension and prefix. Returns a list of matching files or an error. 
+func FindFiles(directory string, options ...FindFileOptions) ([]string, error) { + opts := findFileOptions{} + for _, apply := range options { + apply(&opts) + } + + files := make([]string, 0) + return files, filepath.Walk(directory, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + + // Filter by file extension if provided + var matched bool + for _, ext := range opts.extension { + if filepath.Ext(path) == fmt.Sprintf(".%s", ext) { + matched = true + break + } + } + + if len(opts.extension) > 0 && !matched { + return nil // Skip files that don't match the extension + } + + // Filter by file prefix if provided + if opts.prefix != "" && !strings.HasPrefix(filepath.Base(path), opts.prefix) { + return nil // Skip files that don't match the prefix + } + + // Add file to the result list if it is not a directory + if !info.IsDir() { + files = append(files, path) + } + + return nil + }) +} + +// FileExists check if a file from a given path exists. +func FileExists(filename string) bool { + info, err := os.Stat(filename) + if os.IsNotExist(err) { + return false + } + return !info.IsDir() +} diff --git a/ignite/pkg/xos/files_test.go b/ignite/pkg/xos/files_test.go new file mode 100644 index 0000000..6093b4b --- /dev/null +++ b/ignite/pkg/xos/files_test.go @@ -0,0 +1,204 @@ +package xos_test + +import ( + "os" + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/xos" +) + +func TestFindFiles(t *testing.T) { + tests := []struct { + name string + files []string + extension []string + prefix string + want []string + err error + }{ + { + name: "test zero files", + files: []string{}, + want: []string{}, + err: nil, + }, + { + name: "test one file", + files: []string{"file.json"}, + want: []string{"file.json"}, + err: nil, + }, + { + name: "test 3 files", + files: []string{"file1.json", "file2.txt", "file3.json"}, + want: []string{"file1.json", "file2.txt", 
"file3.json"}, + err: nil, + }, + { + name: "test file prefix", + files: []string{"file.prefix.test.json"}, + prefix: "file.prefix", + want: []string{"file.prefix.test.json"}, + err: nil, + }, + { + name: "test bigger file prefix", + files: []string{"file.prefix.test.json"}, + prefix: "file.prefix.test", + want: []string{"file.prefix.test.json"}, + err: nil, + }, + { + name: "test 3 files prefix", + files: []string{"test.file1.json", "test.file2.txt", "test.file3.json"}, + prefix: "test.file", + want: []string{"test.file1.json", "test.file2.txt", "test.file3.json"}, + err: nil, + }, + { + name: "test 3 extension json files", + files: []string{"file1.json", "file2.txt", "file3.json", "file4.json"}, + extension: []string{"json"}, + want: []string{"file1.json", "file3.json", "file4.json"}, + err: nil, + }, + { + name: "test 3 extension json files with subfolder", + files: []string{"testdata/file1.json", "file2.txt", "foo/file3.json", "file4.json"}, + extension: []string{"json"}, + want: []string{"testdata/file1.json", "foo/file3.json", "file4.json"}, + err: nil, + }, + { + name: "test 1 extension txt files", + files: []string{"file1.json", "file2.txt", "file3.json", "file4.json"}, + extension: []string{"txt"}, + want: []string{"file2.txt"}, + err: nil, + }, + { + name: "test 1 extension json files", + files: []string{"file1.json"}, + extension: []string{"json"}, + want: []string{"file1.json"}, + err: nil, + }, + { + name: "test invalid files extension", + files: []string{"file1.json", "file2.json", "file3.json", "file4.json"}, + extension: []string{"txt"}, + want: []string{}, + err: nil, + }, + { + name: "test file prefix and extension", + files: []string{"test.file1.json", "test.file2.txt", "test.file3.json"}, + prefix: "test.file", + extension: []string{"json"}, + want: []string{"test.file1.json", "test.file3.json"}, + err: nil, + }, + { + name: "test 2 different extensions", + files: []string{"file1.json", "file2.txt", "file3.json", "file4.json", "file.yaml"}, + 
extension: []string{"txt", "yaml"}, + want: []string{"file2.txt", "file.yaml"}, + err: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + dirName := strings.ReplaceAll(t.Name(), "/", "_") + tempDir, err := os.MkdirTemp("", dirName) + require.NoError(t, err) + t.Cleanup(func() { + require.NoError(t, os.RemoveAll(tempDir)) + }) + + for _, filename := range tt.files { + filePath := filepath.Join(tempDir, filename) + require.NoError(t, os.MkdirAll(filepath.Dir(filePath), 0o755)) + file, err := os.Create(filePath) + require.NoError(t, err) + require.NoError(t, file.Close()) + } + + opts := make([]xos.FindFileOptions, 0) + if tt.prefix != "" { + opts = append(opts, xos.WithPrefix(tt.prefix)) + } + + for _, ext := range tt.extension { + opts = append(opts, xos.WithExtension(ext)) + } + + gotFiles, err := xos.FindFiles(tempDir, opts...) + if tt.err != nil { + require.Error(t, err) + require.ErrorIs(t, err, tt.err) + return + } + require.NoError(t, err) + + want := make([]string, len(tt.want)) + for i, filename := range tt.want { + want[i] = filepath.Join(tempDir, filename) + } + require.ElementsMatch(t, want, gotFiles) + }) + } +} + +func TestFileExists(t *testing.T) { + tempDir, err := os.MkdirTemp("", "TestCopyFile") + require.NoError(t, err) + t.Cleanup(func() { + require.NoError(t, os.RemoveAll(tempDir)) + }) + + srcDir := filepath.Join(tempDir, "source") + err = os.MkdirAll(srcDir, 0o755) + require.NoError(t, err) + + srcFile := filepath.Join(srcDir, "file.txt") + err = os.WriteFile(srcFile, []byte("File content"), 0o644) + require.NoError(t, err) + + tests := []struct { + name string + filename string + want bool + }{ + { + name: "existing file", + filename: srcFile, + want: true, + }, + { + name: "non existing file", + filename: "non_existing_file.txt", + want: false, + }, + { + name: "directory", + filename: srcDir, + want: false, + }, + { + name: "empty filename", + filename: "", + want: false, + }, + } + + for _, tt := range tests 
{ + t.Run(tt.name, func(t *testing.T) { + got := xos.FileExists(tt.filename) + require.EqualValues(t, tt.want, got) + }) + } +} diff --git a/ignite/pkg/xos/mv.go b/ignite/pkg/xos/mv.go new file mode 100644 index 0000000..53e599f --- /dev/null +++ b/ignite/pkg/xos/mv.go @@ -0,0 +1,34 @@ +package xos + +import ( + "io" + "os" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// Rename copy oldPath to newPath and then delete oldPath. +// Unlike os.Rename, it doesn't fail when the oldPath and newPath are in +// different partitions (error: invalid cross-device link). +func Rename(oldPath, newPath string) error { + inputFile, err := os.Open(oldPath) + if err != nil { + return errors.Errorf("rename %s %s: couldn't open oldpath: %w", oldPath, newPath, err) + } + defer inputFile.Close() + outputFile, err := os.Create(newPath) + if err != nil { + return errors.Errorf("rename %s %s: couldn't open dest file: %w", oldPath, newPath, err) + } + defer outputFile.Close() + _, err = io.Copy(outputFile, inputFile) + if err != nil { + return errors.Errorf("rename %s %s: writing to output file failed: %w", oldPath, newPath, err) + } + // The copy was successful, so now delete the original file + err = os.Remove(oldPath) + if err != nil { + return errors.Errorf("rename %s %s: failed removing original file: %w", oldPath, newPath, err) + } + return nil +} diff --git a/ignite/pkg/xos/mv_test.go b/ignite/pkg/xos/mv_test.go new file mode 100644 index 0000000..3d1f189 --- /dev/null +++ b/ignite/pkg/xos/mv_test.go @@ -0,0 +1,32 @@ +package xos_test + +import ( + "fmt" + "os" + "path" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/xos" +) + +func TestRename(t *testing.T) { + var ( + dir = t.TempDir() + oldpath = path.Join(dir, "old") + newpath = path.Join(dir, "new") + require = require.New(t) + ) + err := os.WriteFile(oldpath, []byte("foo"), os.ModePerm) + require.NoError(err) + + err = xos.Rename(oldpath, newpath) + + 
require.NoError(err) + bz, err := os.ReadFile(newpath) + require.NoError(err) + require.Equal([]byte("foo"), bz) + _, err = os.Open(oldpath) + require.EqualError(err, fmt.Sprintf("open %s: no such file or directory", oldpath)) +} diff --git a/ignite/pkg/xos/rm.go b/ignite/pkg/xos/rm.go new file mode 100644 index 0000000..47d8365 --- /dev/null +++ b/ignite/pkg/xos/rm.go @@ -0,0 +1,14 @@ +package xos + +import ( + "os" + "path/filepath" +) + +func RemoveAllUnderHome(path string) error { + home, err := os.UserHomeDir() + if err != nil { + return err + } + return os.RemoveAll(filepath.Join(home, path)) +} diff --git a/ignite/pkg/xstrcase/xstrcase.go b/ignite/pkg/xstrcase/xstrcase.go new file mode 100644 index 0000000..fa30d7a --- /dev/null +++ b/ignite/pkg/xstrcase/xstrcase.go @@ -0,0 +1,37 @@ +package xstrcase + +import ( + "strings" + + protogenerator "github.com/cosmos/gogoproto/protoc-gen-gogo/generator" + "github.com/iancoleman/strcase" + + "github.com/ignite/cli/v29/ignite/pkg/xstrings" +) + +// UpperCamel returns the name with upper camel and no special character. +func UpperCamel(name string) string { + return protogenerator.CamelCase(strcase.ToSnake(name)) +} + +// Lowercase returns the name with lower case and no special character. +func Lowercase(name string) string { + return strings.ToLower( + strings.ReplaceAll( + xstrings.NoDash(name), + "_", + "", + ), + ) +} + +// Uppercase returns the name with upper case and no special character. 
+func Uppercase(name string) string { + return strings.ToUpper( + strings.ReplaceAll( + xstrings.NoDash(name), + "_", + "", + ), + ) +} diff --git a/ignite/pkg/xstrcase/xstrcase_test.go b/ignite/pkg/xstrcase/xstrcase_test.go new file mode 100644 index 0000000..3e7ca9a --- /dev/null +++ b/ignite/pkg/xstrcase/xstrcase_test.go @@ -0,0 +1,112 @@ +package xstrcase + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestLowercase(t *testing.T) { + tests := []struct { + name string + arg string + want string + }{ + { + name: "simple lowercase", + arg: "Example-Test", + want: "exampletest", + }, + { + name: "already lowercase", + arg: "example_test", + want: "exampletest", + }, + { + name: "mixed case with dash", + arg: "Mixed-Case_String", + want: "mixedcasestring", + }, + { + name: "uppercase input", + arg: "UPPER-CASE", + want: "uppercase", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := Lowercase(tt.arg) + require.Equal(t, tt.want, got) + }) + } +} + +func TestUpperCamel(t *testing.T) { + tests := []struct { + name string + arg string + want string + }{ + { + name: "simple camel case", + arg: "example_test", + want: "ExampleTest", + }, + { + name: "mixed case with dash", + arg: "Mixed-Case_String", + want: "MixedCaseString", + }, + { + name: "uppercase input", + arg: "UPPER_CASE", + want: "UpperCase", + }, + { + name: "lowercase input", + arg: "lower_case", + want: "LowerCase", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := UpperCamel(tt.arg) + require.Equal(t, tt.want, got) + }) + } +} + +func TestUppercase(t *testing.T) { + tests := []struct { + name string + arg string + want string + }{ + { + name: "simple uppercase", + arg: "example-test", + want: "EXAMPLETEST", + }, + { + name: "already uppercase", + arg: "EXAMPLE_TEST", + want: "EXAMPLETEST", + }, + { + name: "mixed case input", + arg: "Mixed-Case_String", + want: "MIXEDCASESTRING", + }, + { + name: "lowercase 
input", + arg: "lower-case", + want: "LOWERCASE", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := Uppercase(tt.arg) + require.Equal(t, tt.want, got) + }) + } +} diff --git a/ignite/pkg/xstrings/xstrings.go b/ignite/pkg/xstrings/xstrings.go new file mode 100644 index 0000000..b887402 --- /dev/null +++ b/ignite/pkg/xstrings/xstrings.go @@ -0,0 +1,94 @@ +package xstrings + +import ( + "slices" + "strings" + "unicode" + + "golang.org/x/text/cases" + "golang.org/x/text/language" +) + +// AllOrSomeFilter filters elems out from the list as they present in filterList and +// returns the remaining ones. +// if filterList is empty, all elems from list returned. +func AllOrSomeFilter(list, filterList []string) []string { + if len(filterList) == 0 { + return list + } + + var elems []string + + for _, elem := range list { + if !slices.Contains(filterList, elem) { + elems = append(elems, elem) + } + } + + return elems +} + +// List returns a slice of strings captured after the value returned by do which is +// called n times. +func List(n int, do func(i int) string) []string { + var list []string + + for i := 0; i < n; i++ { + list = append(list, do(i)) + } + + return list +} + +// FormatUsername formats a username to make it usable as a variable. +func FormatUsername(s string) string { + return NoDash(NoNumberPrefix(s)) +} + +// NoDash removes dash from the string. +func NoDash(s string) string { + return strings.ReplaceAll(s, "-", "") +} + +// NoNumberPrefix adds an underscore at the beginning of the string if it stars with a number +// this is used for package of proto files template because the package name can't start with a number. +func NoNumberPrefix(s string) string { + // Check if it starts with a digit + if unicode.IsDigit(rune(s[0])) { + return "_" + s + } + return s +} + +// Title returns a copy of the string s with all Unicode letters that begin words +// mapped to their Unicode title case. 
+func Title(s string) string { + return cases.Title(language.English).String(s) +} + +// ToUpperFirst returns a copy of the string with the first unicode letter in upper case. +func ToUpperFirst(s string) string { + return strings.ToUpper(s[:1]) + s[1:] +} + +// StringBetween returns the string between two other strings. +// The comparison is not greedy so the between result includes the +// string between the start value and the first match of the end value. +func StringBetween(s, start, end string) string { + if s == "" || start == "" || end == "" { + return "" + } + + i := strings.Index(s, start) + if i == -1 { + return "" + } + + s = s[i+len(start):] + i = strings.Index(s, end) + if i == -1 { + return "" + } + + return s[:i] +} diff --git a/ignite/pkg/xstrings/xstrings_test.go b/ignite/pkg/xstrings/xstrings_test.go new file mode 100644 index 0000000..c54ead0 --- /dev/null +++ b/ignite/pkg/xstrings/xstrings_test.go @@ -0,0 +1,40 @@ +package xstrings_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/xstrings" +) + +func TestTitle(t *testing.T) { + require.Equal(t, "Foo", xstrings.Title("foo")) + require.Equal(t, "Foo", xstrings.Title("FOO")) + require.Equal(t, "Foobar", xstrings.Title("fooBar")) +} + +func TestToUpperFirst(t *testing.T) { + require.Equal(t, "Foo", xstrings.ToUpperFirst("foo")) + require.Equal(t, "FOO", xstrings.ToUpperFirst("FOO")) + require.Equal(t, "FooBar", xstrings.ToUpperFirst("fooBar")) +} + +func TestNoDash(t *testing.T) { + require.Equal(t, "foo", xstrings.NoDash("foo")) + require.Equal(t, "foo", xstrings.NoDash("-f-o-o---")) +} + +func TestNoNumberPrefix(t *testing.T) { + require.Equal(t, "foo", xstrings.NoNumberPrefix("foo")) + require.Equal(t, "_0foo", xstrings.NoNumberPrefix("0foo")) + require.Equal(t, "_999foo", xstrings.NoNumberPrefix("999foo")) +} + +func TestStringBetween(t *testing.T) { + require.Equal(t, "bar", xstrings.StringBetween("foobarbaz", "foo", "baz")) + 
require.Equal(t, "bar", xstrings.StringBetween("0foobarbaz1", "foo", "baz")) + require.Equal(t, "", xstrings.StringBetween("0foo", "0", "")) + require.Equal(t, "", xstrings.StringBetween("foo0", "", "0")) + require.Equal(t, "", xstrings.StringBetween("", "0", "1")) +} diff --git a/ignite/pkg/xtime/clock.go b/ignite/pkg/xtime/clock.go new file mode 100644 index 0000000..9caae79 --- /dev/null +++ b/ignite/pkg/xtime/clock.go @@ -0,0 +1,49 @@ +package xtime + +import "time" + +// Clock represents a clock that can retrieve current time. +type Clock interface { + Now() time.Time + Add(duration time.Duration) +} + +// ClockSystem is a clock that retrieves system time. +type ClockSystem struct{} + +// NewClockSystem returns a new ClockSystem. +func NewClockSystem() ClockSystem { + return ClockSystem{} +} + +// Now implements Clock. +func (ClockSystem) Now() time.Time { + return time.Now() +} + +// Add implements Clock. +func (ClockSystem) Add(_ time.Duration) { + panic("Add can't be called for ClockSystem") +} + +// ClockMock is a clock mocking time with an internal counter. +type ClockMock struct { + t time.Time +} + +// NewClockMock returns a new ClockMock. +func NewClockMock(originalTime time.Time) *ClockMock { + return &ClockMock{ + t: originalTime, + } +} + +// Now implements Clock. +func (c ClockMock) Now() time.Time { + return c.t +} + +// Add implements Clock. 
+func (c *ClockMock) Add(duration time.Duration) { + c.t = c.t.Add(duration) +} diff --git a/ignite/pkg/xtime/clock_test.go b/ignite/pkg/xtime/clock_test.go new file mode 100644 index 0000000..baaa92f --- /dev/null +++ b/ignite/pkg/xtime/clock_test.go @@ -0,0 +1,24 @@ +package xtime_test + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/xtime" +) + +func TestClockSystem(t *testing.T) { + c := xtime.NewClockSystem() + require.False(t, c.Now().IsZero()) + require.Panics(t, func() { c.Add(time.Second) }) +} + +func TestClockMock(t *testing.T) { + timeSample := time.Now() + c := xtime.NewClockMock(timeSample) + require.True(t, c.Now().Equal(timeSample)) + c.Add(time.Second) + require.True(t, c.Now().Equal(timeSample.Add(time.Second))) +} diff --git a/ignite/pkg/xtime/unix.go b/ignite/pkg/xtime/unix.go new file mode 100644 index 0000000..f1fc025 --- /dev/null +++ b/ignite/pkg/xtime/unix.go @@ -0,0 +1,26 @@ +package xtime + +import ( + "time" +) + +// Seconds creates a time.Duration based on the seconds parameter. +func Seconds(seconds int64) time.Duration { + return time.Duration(seconds) * time.Second +} + +// NowAfter returns a unix date string from now plus the duration. +func NowAfter(unix time.Duration) string { + date := time.Now().Add(unix) + return FormatUnix(date) +} + +// FormatUnix formats the time.Time to unix date string. +func FormatUnix(date time.Time) string { + return date.Format(time.UnixDate) +} + +// FormatUnixInt formats the int timestamp to unix date string. 
+func FormatUnixInt(unix int64) string { + return FormatUnix(time.Unix(unix, 0)) +} diff --git a/ignite/pkg/xtime/unix_test.go b/ignite/pkg/xtime/unix_test.go new file mode 100644 index 0000000..a005933 --- /dev/null +++ b/ignite/pkg/xtime/unix_test.go @@ -0,0 +1,68 @@ +package xtime_test + +import ( + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/xtime" +) + +func TestSeconds(t *testing.T) { + tests := []int64{ + 9999999999, + 10000, + 100, + 0, + } + for _, tt := range tests { + t.Run(fmt.Sprintf("test %d value", tt), func(t *testing.T) { + got := xtime.Seconds(tt) + require.Equal(t, time.Duration(tt)*time.Second, got) + }) + } +} + +func TestNowAfter(t *testing.T) { + tests := []int64{ + 9999999999, + 10000, + 100, + 0, + } + for _, tt := range tests { + t.Run(fmt.Sprintf("test %d value", tt), func(t *testing.T) { + got := xtime.NowAfter(xtime.Seconds(tt)) + date := time.Now().Add(time.Duration(tt) * time.Second) + require.Equal(t, date.Format(time.UnixDate), got) + }) + } +} + +func TestFormatUnix(t *testing.T) { + tests := []struct { + date time.Time + want string + }{ + { + date: time.Time{}, + want: "Mon Jan 1 00:00:00 UTC 0001", + }, + { + date: time.Unix(10000000000, 100).In(time.UTC), + want: "Sat Nov 20 17:46:40 UTC 2286", + }, + { + date: time.Date(2020, 10, 11, 12, 30, 50, 0, time.FixedZone("Europe/Berlin", 3*60*60)), + want: "Sun Oct 11 12:30:50 Europe/Berlin 2020", + }, + } + for _, tt := range tests { + t.Run("test date "+tt.date.String(), func(t *testing.T) { + got := xtime.FormatUnix(tt.date) + require.Equal(t, tt.want, got) + }) + } +} diff --git a/ignite/pkg/xurl/xurl.go b/ignite/pkg/xurl/xurl.go new file mode 100644 index 0000000..85aa14c --- /dev/null +++ b/ignite/pkg/xurl/xurl.go @@ -0,0 +1,139 @@ +package xurl + +import ( + "fmt" + "net" + "net/url" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +const ( + schemeTCP = "tcp" + schemeHTTP = "http" + 
schemeHTTPS = "https" + schemeWS = "ws" +) + +// TCP ensures that a URL contains a TCP scheme. +func TCP(s string) (string, error) { + u, err := parseURL(s) + if err != nil { + return "", err + } + + u.Scheme = schemeTCP + + return u.String(), nil +} + +// HTTP ensures that a URL contains an HTTP scheme. +func HTTP(s string) (string, error) { + u, err := parseURL(s) + if err != nil { + return "", err + } + + u.Scheme = schemeHTTP + + return u.String(), nil +} + +// HTTPS ensures that a URL contains an HTTPS scheme. +func HTTPS(s string) (string, error) { + u, err := parseURL(s) + if err != nil { + return "", err + } + + u.Scheme = schemeHTTPS + + return u.String(), nil +} + +// MightHTTPS ensures that a URL contains an HTTPS scheme when the current scheme is not HTTP. +// When the URL contains an HTTP scheme it is not modified. +func MightHTTPS(s string) (string, error) { + if strings.HasPrefix(strings.ToLower(s), "http://") { + return s, nil + } + + return HTTPS(s) +} + +// WS ensures that a URL contains a WS scheme. +func WS(s string) (string, error) { + u, err := parseURL(s) + if err != nil { + return "", err + } + + u.Scheme = schemeWS + + return u.String(), nil +} + +// HTTPEnsurePort ensures that url has a port number suits with the connection type. +func HTTPEnsurePort(s string) string { + u, err := url.Parse(s) + if err != nil || u.Port() != "" { + return s + } + + port := "80" + + if u.Scheme == schemeHTTPS { + port = "443" + } + + u.Host = fmt.Sprintf("%s:%s", u.Hostname(), port) + + return u.String() +} + +// Address ensures that address contains localhost as host if non specified. 
+func Address(address string) string { + if strings.HasPrefix(address, ":") { + return "localhost" + address + } + return address +} + +func IsHTTP(address string) bool { + return strings.HasPrefix(address, "http") +} + +func parseURL(s string) (*url.URL, error) { + if s == "" { + return nil, errors.New("url is empty") + } + + // Handle the case where the URI is an IP:PORT or HOST:PORT + // without scheme prefix because that case can't be URL parsed. + // When the URI has no scheme it is parsed as a path by "url.Parse" + // placing the colon within the path, which is invalid. + if host, isAddrPort := addressPort(s); isAddrPort { + return &url.URL{Host: host}, nil + } + + p, err := url.Parse(Address(s)) + return p, err +} + +func addressPort(s string) (string, bool) { + // Check that the value doesn't contain a URI path + if strings.Contains(s, "/") { + return "", false + } + + // Use the net split function to support IPv6 addresses + host, port, err := net.SplitHostPort(s) + if err != nil { + return "", false + } + if host == "" { + host = "0.0.0.0" + } + return net.JoinHostPort(host, port), true +} diff --git a/ignite/pkg/xurl/xurl_test.go b/ignite/pkg/xurl/xurl_test.go new file mode 100644 index 0000000..10e5893 --- /dev/null +++ b/ignite/pkg/xurl/xurl_test.go @@ -0,0 +1,378 @@ +package xurl + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestHTTPEnsurePort(t *testing.T) { + cases := []struct { + name string + addr string + want string + }{ + { + name: "http", + addr: "http://localhost", + want: "http://localhost:80", + }, + { + name: "https", + addr: "https://localhost", + want: "https://localhost:443", + }, + { + name: "custom", + addr: "http://localhost:4000", + want: "http://localhost:4000", + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + addr := HTTPEnsurePort(tt.addr) + require.Equal(t, tt.want, addr) + }) + } +} + +func TestTCP(t *testing.T) { + cases := []struct { + name string + addr string + 
want string + error bool + }{ + { + name: "with scheme", + addr: "tcp://github.com/ignite/cli/v29", + want: "tcp://github.com/ignite/cli/v29", + }, + { + name: "without scheme", + addr: "github.com/ignite/cli/v29", + want: "tcp://github.com/ignite/cli/v29", + }, + { + name: "with invalid scheme", + addr: "ftp://github.com/ignite/cli/v29", + want: "tcp://github.com/ignite/cli/v29", + }, + { + name: "with ip and port", + addr: "0.0.0.0:4500", + want: "tcp://0.0.0.0:4500", + }, + { + name: "with localhost and port", + addr: "localhost:4500", + want: "tcp://localhost:4500", + }, + { + name: "with invalid url", + addr: "tcp://github.com:x", + error: true, + }, + { + name: "empty", + addr: "", + error: true, + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + addr, err := TCP(tt.addr) + if tt.error { + require.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, tt.want, addr) + } + }) + } +} + +func TestHTTP(t *testing.T) { + cases := []struct { + name string + addr string + want string + error bool + }{ + { + name: "with scheme", + addr: "http://github.com/ignite/cli/v29", + want: "http://github.com/ignite/cli/v29", + }, + { + name: "without scheme", + addr: "github.com/ignite/cli/v29", + want: "http://github.com/ignite/cli/v29", + }, + { + name: "with invalid scheme", + addr: "ftp://github.com/ignite/cli/v29", + want: "http://github.com/ignite/cli/v29", + }, + { + name: "with ip and port", + addr: "0.0.0.0:4500", + want: "http://0.0.0.0:4500", + }, + { + name: "with localhost and port", + addr: "localhost:4500", + want: "http://localhost:4500", + }, + { + name: "with invalid url", + addr: "http://github.com:x", + error: true, + }, + { + name: "empty", + addr: "", + error: true, + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + addr, err := HTTP(tt.addr) + if tt.error { + require.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, tt.want, addr) + } + }) + } +} + +func 
TestHTTPS(t *testing.T) { + cases := []struct { + name string + addr string + want string + error bool + }{ + { + name: "with scheme", + addr: "https://github.com/ignite/cli/v29", + want: "https://github.com/ignite/cli/v29", + }, + { + name: "without scheme", + addr: "github.com/ignite/cli/v29", + want: "https://github.com/ignite/cli/v29", + }, + { + name: "with invalid scheme", + addr: "ftp://github.com/ignite/cli/v29", + want: "https://github.com/ignite/cli/v29", + }, + { + name: "with ip and port", + addr: "0.0.0.0:4500", + want: "https://0.0.0.0:4500", + }, + { + name: "with localhost and port", + addr: "localhost:4500", + want: "https://localhost:4500", + }, + { + name: "with invalid url", + addr: "https://github.com:x", + error: true, + }, + { + name: "empty", + addr: "", + error: true, + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + addr, err := HTTPS(tt.addr) + if tt.error { + require.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, tt.want, addr) + } + }) + } +} + +func TestWS(t *testing.T) { + cases := []struct { + name string + addr string + want string + error bool + }{ + { + name: "with scheme", + addr: "ws://github.com/ignite/cli/v29", + want: "ws://github.com/ignite/cli/v29", + }, + { + name: "without scheme", + addr: "github.com/ignite/cli/v29", + want: "ws://github.com/ignite/cli/v29", + }, + { + name: "with invalid scheme", + addr: "ftp://github.com/ignite/cli/v29", + want: "ws://github.com/ignite/cli/v29", + }, + { + name: "with ip and port", + addr: "0.0.0.0:4500", + want: "ws://0.0.0.0:4500", + }, + { + name: "with localhost and port", + addr: "localhost:4500", + want: "ws://localhost:4500", + }, + { + name: "with invalid url", + addr: "ws://github.com:x", + error: true, + }, + { + name: "empty", + addr: "", + error: true, + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + addr, err := WS(tt.addr) + if tt.error { + require.Error(t, err) + } else { + 
require.NoError(t, err) + require.Equal(t, tt.want, addr) + } + }) + } +} + +func TestMightHTTPS(t *testing.T) { + cases := []struct { + name string + addr string + want string + error bool + }{ + { + name: "with http scheme", + addr: "http://github.com/ignite/cli/v29", + want: "http://github.com/ignite/cli/v29", + }, + { + name: "with https scheme", + addr: "https://github.com/ignite/cli/v29", + want: "https://github.com/ignite/cli/v29", + }, + { + name: "without scheme", + addr: "github.com/ignite/cli/v29", + want: "https://github.com/ignite/cli/v29", + }, + { + name: "with invalid scheme", + addr: "ftp://github.com/ignite/cli/v29", + want: "https://github.com/ignite/cli/v29", + }, + { + name: "with ip and port", + addr: "0.0.0.0:4500", + want: "https://0.0.0.0:4500", + }, + { + name: "with localhost and port", + addr: "localhost:4500", + want: "https://localhost:4500", + }, + { + name: "with invalid url", + addr: "https://github.com:x", + error: true, + }, + { + name: "empty", + addr: "", + error: true, + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + addr, err := MightHTTPS(tt.addr) + if tt.error { + require.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, tt.want, addr) + } + }) + } +} + +func Test_addressPort(t *testing.T) { + tests := []struct { + name string + arg string + wantHost string + want bool + }{ + { + name: "URI path", + arg: "/test/false", + want: false, + }, + { + name: "invalid address", + arg: "aeihf3/aef/f..//", + want: false, + }, + { + name: "host and port", + arg: "102.33.3.43:10000", + wantHost: "102.33.3.43:10000", + want: true, + }, + { + name: "local port", + arg: "0.0.0.0:10000", + wantHost: "0.0.0.0:10000", + want: true, + }, + { + name: "only port", + arg: ":10000", + wantHost: "0.0.0.0:10000", + want: true, + }, + { + name: "only host", + arg: "102.33.3.43", + want: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotHost, got := 
addressPort(tt.arg) + require.Equal(t, tt.want, got) + require.Equal(t, tt.wantHost, gotHost) + }) + } +} diff --git a/ignite/pkg/xyaml/map.go b/ignite/pkg/xyaml/map.go new file mode 100644 index 0000000..f787859 --- /dev/null +++ b/ignite/pkg/xyaml/map.go @@ -0,0 +1,53 @@ +package xyaml + +// Map defines a map type that uses strings as key value. +// The map implements the Unmarshaller interface to convert +// the unmarshalled map keys type from interface{} to string. +type Map map[string]interface{} + +func (m *Map) UnmarshalYAML(unmarshal func(interface{}) error) error { + var raw map[interface{}]interface{} + + if err := unmarshal(&raw); err != nil { + return err + } + + *m = convertMapKeys(raw) + + return nil +} + +func convertSlice(raw []interface{}) []interface{} { + if len(raw) == 0 { + return raw + } + + if _, ok := raw[0].(map[interface{}]interface{}); !ok { + return raw + } + + values := make([]interface{}, len(raw)) + for i, v := range raw { + values[i] = convertMapKeys(v.(map[interface{}]interface{})) + } + + return values +} + +func convertMapKeys(raw map[interface{}]interface{}) map[string]interface{} { + m := make(map[string]interface{}) + + for k, v := range raw { + if value, _ := v.(map[interface{}]interface{}); value != nil { + // Convert map keys to string + v = convertMapKeys(value) + } else if values, _ := v.([]interface{}); values != nil { + // Make sure that maps inside slices also use strings as key + v = convertSlice(values) + } + + m[k.(string)] = v + } + + return m +} diff --git a/ignite/pkg/xyaml/map_test.go b/ignite/pkg/xyaml/map_test.go new file mode 100644 index 0000000..e88c334 --- /dev/null +++ b/ignite/pkg/xyaml/map_test.go @@ -0,0 +1,44 @@ +package xyaml_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" + + "github.com/ignite/cli/v29/ignite/pkg/xyaml" +) + +func TestUnmarshalWithCustomMapType(t *testing.T) { + // Arrange + input := ` + foo: + bar: baz + ` + output := xyaml.Map{} + + // 
Act + err := yaml.Unmarshal([]byte(input), &output) + + // Assert + require.NoError(t, err) + require.NotNil(t, output["foo"]) + require.IsType(t, (map[string]interface{})(nil), output["foo"]) +} + +func TestUnmarshalWithNativeMapType(t *testing.T) { + // Arrange + input := ` + foo: + bar: baz + ` + output := make(map[string]interface{}) + + // Act + err := yaml.Unmarshal([]byte(input), &output) + + // Assert + require.NoError(t, err) + require.NotNil(t, output["foo"]) + require.IsType(t, (map[string]interface{})(nil), output["foo"]) +} diff --git a/ignite/pkg/xyaml/yaml.go b/ignite/pkg/xyaml/yaml.go new file mode 100644 index 0000000..0de9805 --- /dev/null +++ b/ignite/pkg/xyaml/yaml.go @@ -0,0 +1,44 @@ +package xyaml + +import ( + "context" + "strings" + + "github.com/goccy/go-yaml" + "github.com/goccy/go-yaml/parser" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +// Marshal converts an object to a string in a YAML format and transforms +// the byte slice fields from the path to string to be more readable. 
func Marshal(ctx context.Context, obj interface{}, paths ...string) (string, error) {
	requestYaml, err := yaml.MarshalContext(ctx, obj)
	if err != nil {
		return "", err
	}
	// Parse the marshaled output into an AST so selected nodes can be replaced in place.
	file, err := parser.ParseBytes(requestYaml, 0)
	if err != nil {
		return "", err
	}

	// normalize the structure converting the byte slice fields to string
	for _, path := range paths {
		pathString, err := yaml.PathString(path)
		if err != nil {
			return "", err
		}
		// Read the raw value at the path from the original YAML bytes.
		// A missing node is tolerated: byteSlice stays nil and the output
		// for that path is left as-is (see the "wrong path" test case below).
		var byteSlice []byte
		err = pathString.Read(strings.NewReader(string(requestYaml)), &byteSlice)
		if err != nil && !errors.Is(err, yaml.ErrNotFoundNode) {
			return "", err
		}
		// Replace the node with its plain string representation.
		if err := pathString.ReplaceWithReader(file,
			strings.NewReader(string(byteSlice)),
		); err != nil {
			return "", err
		}
	}

	return file.String(), nil
}
diff --git a/ignite/pkg/xyaml/yaml_test.go b/ignite/pkg/xyaml/yaml_test.go
new file mode 100644
index 0000000..a2e38b6
--- /dev/null
+++ b/ignite/pkg/xyaml/yaml_test.go
@@ -0,0 +1,154 @@
package xyaml

import (
	"context"
	"testing"

	"github.com/stretchr/testify/require"
)

// TestMarshal covers Marshal with nil, map and struct inputs, with and
// without byte slice fields, and with valid and non-matching paths.
func TestMarshal(t *testing.T) {
	// Struct with byte slice fields at two nesting levels.
	type byteSliceParser struct {
		Field1 string `json:"field1"`
		Field2 struct {
			Field1 []byte `json:"field1"`
			Field2 struct {
				Field1 []byte `json:"field1"`
				Field2 string `json:"field2"`
			} `json:"field2"`
			Field3 string `json:"field3"`
		} `json:"field2"`
		Field3 string `json:"field3"`
	}
	bParser := &byteSliceParser{
		Field1: "field1",
		Field3: "field3",
	}
	bParser.Field2.Field1 = []byte("field1")
	bParser.Field2.Field3 = "field3"
	bParser.Field2.Field2.Field1 = []byte("field1")
	bParser.Field2.Field2.Field2 = "field2"

	// Struct with only string fields (no normalization needed).
	type simpleParser struct {
		Field1 string `json:"field1"`
		Field2 string `json:"field2"`
	}
	sParser := &simpleParser{
		Field1: "field1",
		Field2: "field2",
	}

	type args struct {
		obj   interface{}
		paths []string
	}
	tests := []struct {
		name string
		args args
		want string
		err  error
	}{
		{
			name: "parse nil obj",
			want: "null\n",
		},
		{
			name: "parse map without byte slice",
			args: args{
				obj: map[string]string{
					"field1": "field1",
					"field2": "field2",
				},
			},
			want: `field1: field1
field2: field2
`,
		},
		{
			name: "parse map with byte slice",
			args: args{
				obj: map[string][]byte{
					"field1": []byte("field1"),
					"field2": []byte("field2"),
				},
				paths: []string{
					"$.field1",
					"$.field2",
				},
			},
			want: `field1: field1
field2: field2
`,
		},
		{
			name: "parse struct without byte slice",
			args: args{
				obj: sParser,
			},
			want: `field1: field1
field2: field2
`,
		},
		{
			name: "parse struct with byte slice",
			args: args{
				obj: bParser,
				paths: []string{
					"$.field2.field1",
					"$.field2.field2.field1",
				},
			},
			want: `field1: field1
field2:
  field1: field1
  field2:
    field1: field1
    field2: field2
  field3: field3
field3: field3
`,
		},
		{
			// Non-matching paths leave the byte slices rendered as
			// YAML integer sequences (no normalization happens).
			name: "parse struct with byte slice and wrong path",
			args: args{
				obj: bParser,
				paths: []string{
					"$.field2.field30",
					"$.field2.field31",
				},
			},
			want: `field1: field1
field2:
  field1:
  - 102
  - 105
  - 101
  - 108
  - 100
  - 49
  field2:
    field1:
    - 102
    - 105
    - 101
    - 108
    - 100
    - 49
    field2: field2
  field3: field3
field3: field3
`,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := Marshal(context.Background(), tt.args.obj, tt.args.paths...)
			if tt.err != nil {
				require.ErrorIs(t, tt.err, err)
				return
			}
			require.NoError(t, err)
			require.Equal(t, tt.want, got)
		})
	}
}
diff --git a/ignite/services/chain/app.go b/ignite/services/chain/app.go
new file mode 100644
index 0000000..1d12e20
--- /dev/null
+++ b/ignite/services/chain/app.go
@@ -0,0 +1,49 @@
package chain

import (
	"path/filepath"

	"github.com/ignite/cli/v29/ignite/pkg/gomodulepath"
	"github.com/ignite/cli/v29/ignite/pkg/xstrings"
)

// App keeps info about chain.
type App struct {
	// Name is the app name derived from the Go module root (may contain dashes).
	Name string
	// Path is the filesystem path of the app's source directory.
	Path string
	// ImportPath is the app's Go module import path.
	ImportPath string
}

// NewAppAt creates an App from the blockchain source code located at path.
func NewAppAt(path string) (App, error) {
	p, appPath, err := gomodulepath.Find(path)
	if err != nil {
		return App{}, err
	}
	return App{
		Path:       appPath,
		Name:       p.Root,
		ImportPath: p.RawPath,
	}, nil
}

// N returns app name without dashes.
func (a App) N() string {
	return xstrings.NoDash(a.Name)
}

// D returns appd name.
func (a App) D() string {
	return a.Name + "d"
}

// ND returns no-dash appd name.
func (a App) ND() string {
	return a.N() + "d"
}

// Root returns the root path of app.
// The error from filepath.Abs is deliberately ignored; it can only fail
// when the working directory cannot be determined.
func (a App) Root() string {
	path, _ := filepath.Abs(a.Path)
	return path
}
diff --git a/ignite/services/chain/build.go b/ignite/services/chain/build.go
new file mode 100644
index 0000000..e4a4883
--- /dev/null
+++ b/ignite/services/chain/build.go
@@ -0,0 +1,280 @@
package chain

import (
	"context"
	"fmt"
	"os"
	"path/filepath"
	"runtime"
	"strings"

	"github.com/ignite/cli/v29/ignite/pkg/archive"
	"github.com/ignite/cli/v29/ignite/pkg/cache"
	"github.com/ignite/cli/v29/ignite/pkg/checksum"
	"github.com/ignite/cli/v29/ignite/pkg/cmdrunner"
	"github.com/ignite/cli/v29/ignite/pkg/cmdrunner/exec"
	"github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step"
	"github.com/ignite/cli/v29/ignite/pkg/dirchange"
	"github.com/ignite/cli/v29/ignite/pkg/errors"
	"github.com/ignite/cli/v29/ignite/pkg/events"
	"github.com/ignite/cli/v29/ignite/pkg/goanalysis"
	"github.com/ignite/cli/v29/ignite/pkg/gocmd"
	"github.com/ignite/cli/v29/ignite/pkg/xstrings"
)

const (
	// releaseDir is the default directory (under the app path) for release artifacts.
	releaseDir = "release"
	// releaseChecksumKey is the file name of the release checksum file.
	releaseChecksumKey = "release_checksum"
	// modChecksumKey is the cache key for the go.mod checksum.
	modChecksumKey = "go_mod_checksum"
	// buildDirchangeCacheNamespace is the cache namespace for dir change detection.
	buildDirchangeCacheNamespace = "build.dirchange"
	// consumerDevel is the build tag enabled for non-release consumer chain builds.
	consumerDevel = "consumer_devel"
)

// Build builds and installs app binaries.
func (c *Chain) Build(
	ctx context.Context,
	cacheStorage cache.Storage,
	buildTags []string,
	output string,
	skipProto, debug bool,
) (binaryName string, err error) {
	if err := c.setup(); err != nil {
		return "", err
	}

	// generateClients is false here: code generation for clients is only done
	// by the serve/generate flows, not by a plain build.
	if err := c.build(ctx, cacheStorage, buildTags, output, skipProto, false, debug); err != nil {
		return "", err
	}

	return c.Binary()
}

// build runs code generation (unless skipProto), resolves build flags and
// compiles the chain binary to output. The named return err is inspected by
// the deferred wrapper below.
func (c *Chain) build(
	ctx context.Context,
	cacheStorage cache.Storage,
	buildTags []string,
	output string,
	skipProto, generateClients, debug bool,
) (err error) {
	// Wrap compiler/tool failures into CannotBuildAppError so callers can
	// present a friendlier message.
	defer func() {
		var exitErr *exec.ExitError

		if errors.As(err, &exitErr) || errors.Is(err, goanalysis.ErrMultipleMainPackagesFound) {
			err = &CannotBuildAppError{err}
		}
	}()

	if !skipProto {
		// Generate code from proto files
		if err := c.generateFromConfig(ctx, cacheStorage, generateClients); err != nil {
			return err
		}
	}

	cfg, err := c.Config()
	if err != nil {
		return err
	}
	if cfg.IsConsumerChain() {
		// When building a non-release consumer chain (which is the case for this
		// build() method), enable consumerDevel (see templates consumer_devel and
		// consumer_final for more info).
		buildTags = append(buildTags, consumerDevel)
	}

	buildFlags, err := c.preBuild(ctx, cacheStorage, buildTags...)
	if err != nil {
		return err
	}

	if debug {
		// Add flags to disable binary optimizations and inlining to allow debugging
		buildFlags = append(buildFlags, gocmd.FlagGcflags, gocmd.FlagGcflagsValueDebug)
	}

	binary, err := c.Binary()
	if err != nil {
		return err
	}

	// Locate the main package of the chain (config override or discovery).
	path, err := c.discoverMain(c.app.Path)
	if err != nil {
		return err
	}

	return gocmd.BuildPath(ctx, output, binary, path, buildFlags)
}

// BuildRelease builds binaries for a release. targets is a list
// of GOOS:GOARCH when provided. It defaults to your system when no targets provided.
// prefix is used as prefix to tarballs containing each target.
+func (c *Chain) BuildRelease( + ctx context.Context, + cacheStorage cache.Storage, + buildParams []string, + output, prefix string, + targets ...string, +) (releasePath string, err error) { + if prefix == "" { + prefix = c.app.Name + } + if len(targets) == 0 { + targets = []string{gocmd.BuildTarget(runtime.GOOS, runtime.GOARCH)} + } + + // prepare for build. + if err := c.setup(); err != nil { + return "", err + } + + buildFlags, err := c.preBuild(ctx, cacheStorage, buildParams...) + if err != nil { + return "", err + } + + binary, err := c.Binary() + if err != nil { + return "", err + } + + mainPath, err := c.discoverMain(c.app.Path) + if err != nil { + return "", err + } + + releasePath = output + if releasePath == "" { + releasePath = filepath.Join(c.app.Path, releaseDir) + // reset the release dir. + if err := os.RemoveAll(releasePath); err != nil { + return "", err + } + } + + if err := os.MkdirAll(releasePath, 0o755); err != nil { + return "", err + } + + for _, t := range targets { + // build binary for a target, tarball it and save it under the release dir. 
+ goos, goarch, err := gocmd.ParseTarget(t) + if err != nil { + return "", err + } + + out, err := os.MkdirTemp("", "") + if err != nil { + return "", err + } + defer os.RemoveAll(out) + + buildOptions := []exec.Option{ + exec.StepOption(step.Env( + cmdrunner.Env(gocmd.EnvGOOS, goos), + cmdrunner.Env(gocmd.EnvGOARCH, goarch), + )), + } + + if err := gocmd.BuildPath(ctx, out, binary, mainPath, buildFlags, buildOptions...); err != nil { + return "", err + } + + tarName := fmt.Sprintf("%s_%s_%s.tar.gz", prefix, goos, goarch) + tarPath := filepath.Join(releasePath, tarName) + + tarf, err := os.Create(tarPath) + if err != nil { + return "", err + } + defer tarf.Close() + + if err := archive.CreateArchive(out, tarf); err != nil { + return "", errors.Errorf("error creating release archive: %w", err) + } + } + + checksumPath := filepath.Join(releasePath, releaseChecksumKey) + + // create a checksum.txt and return with the path to release dir. + return releasePath, checksum.Sum(releasePath, checksumPath) +} + +func (c *Chain) preBuild( + ctx context.Context, + cacheStorage cache.Storage, + buildTags ...string, +) (buildFlags []string, err error) { + config, err := c.Config() + if err != nil { + return nil, err + } + + chainID, err := c.ID() + if err != nil { + return nil, err + } + + ldFlags := config.Build.LDFlags + ldFlags = append(ldFlags, + fmt.Sprintf("-X github.com/cosmos/cosmos-sdk/version.Name=%s", xstrings.Title(c.app.Name)), + fmt.Sprintf("-X github.com/cosmos/cosmos-sdk/version.AppName=%sd", c.app.Name), + fmt.Sprintf("-X github.com/cosmos/cosmos-sdk/version.Version=%s", c.sourceVersion.tag), + fmt.Sprintf("-X github.com/cosmos/cosmos-sdk/version.Commit=%s", c.sourceVersion.hash), + fmt.Sprintf("-X github.com/cosmos/cosmos-sdk/version.BuildTags=%s", strings.Join(buildTags, ",")), + fmt.Sprintf("-X %s/cmd/%s/cmd.ChainID=%s", c.app.ImportPath, c.app.D(), chainID), + ) + buildFlags = []string{ + gocmd.FlagMod, gocmd.FlagModValueReadOnly, + gocmd.FlagTags, 
gocmd.Tags(buildTags...), + gocmd.FlagLdflags, gocmd.Ldflags(ldFlags...), + } + + c.ev.Send("Installing dependencies...", events.ProgressUpdate()) + + // We do mod tidy before checking for checksum changes, because go.mod gets modified often + // and the mod verify command is the expensive one anyway + if err := gocmd.ModTidy(ctx, c.app.Path); err != nil { + return nil, err + } + + dirCache := cache.New[[]byte](cacheStorage, buildDirchangeCacheNamespace) + modChanged, err := dirchange.HasDirChecksumChanged(dirCache, modChecksumKey, c.app.Path, "go.mod") + if err != nil { + return nil, err + } + + if modChanged { + // By default no dependencies are checked to avoid issues with module + // ziphash files in case a Go workspace is being used. + if c.options.checkDependencies { + if err := gocmd.ModVerify(ctx, c.app.Path); err != nil { + return nil, err + } + } + + if err := dirchange.SaveDirChecksum(dirCache, modChecksumKey, c.app.Path, "go.mod"); err != nil { + return nil, err + } + } + + c.ev.Send("Building the blockchain...", events.ProgressUpdate()) + + return buildFlags, nil +} + +func (c *Chain) discoverMain(path string) (pkgPath string, err error) { + conf, err := c.Config() + if err != nil { + return "", err + } + + if conf.Build.Main != "" { + return filepath.Join(c.app.Path, conf.Build.Main), nil + } + + path, err = goanalysis.DiscoverOneMain(path) + if errors.Is(err, goanalysis.ErrMultipleMainPackagesFound) { + return "", errors.Wrap(err, "specify the path to your chain's main package in your config.yml>build.main") + } + return path, err +} diff --git a/ignite/services/chain/chain.go b/ignite/services/chain/chain.go new file mode 100644 index 0000000..62ab4cd --- /dev/null +++ b/ignite/services/chain/chain.go @@ -0,0 +1,670 @@ +package chain + +import ( + "context" + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/go-git/go-git/v5" + "github.com/spf13/cobra" + + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + chainconfigv1 
"github.com/ignite/cli/v29/ignite/config/chain/v1" + "github.com/ignite/cli/v29/ignite/pkg/chaincmd" + chaincmdrunner "github.com/ignite/cli/v29/ignite/pkg/chaincmd/runner" + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + uilog "github.com/ignite/cli/v29/ignite/pkg/cliui/log" + "github.com/ignite/cli/v29/ignite/pkg/confile" + "github.com/ignite/cli/v29/ignite/pkg/cosmosaccount" + "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis" + "github.com/ignite/cli/v29/ignite/pkg/cosmosver" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/events" + "github.com/ignite/cli/v29/ignite/pkg/repoversion" + "github.com/ignite/cli/v29/ignite/pkg/xexec" + "github.com/ignite/cli/v29/ignite/pkg/xurl" + igniteversion "github.com/ignite/cli/v29/ignite/version" +) + +const ( + flagPath = "path" + flagHome = "home" +) + +type ( + // Chain provides programmatic access and tools for a Cosmos SDK blockchain. + Chain struct { + // app holds info about blockchain app. + app App + + options chainOptions + + Version cosmosver.Version + + sourceVersion version + serveCancel context.CancelFunc + serveRefresher chan struct{} + served bool + + ev events.Bus + logOutputer uilog.Outputer + } + + // chainOptions holds user given options that overwrites chain's defaults. + chainOptions struct { + // chainID is the chain's id. + chainID string + + // homePath of the chain's config dir. + homePath string + + // keyring backend used by commands if not specified in configuration + keyringBackend chaincmd.KeyringBackend + + // checkDependencies checks that cached Go dependencies of the chain have not + // been modified since they were downloaded. + checkDependencies bool + + // checkCosmosSDKVersion checks that the app was scaffolded with version of + // the Cosmos SDK that is supported by Ignite CLI. 
+ checkCosmosSDKVersion bool + + // printGeneratedPaths prints the output paths of the generated code + printGeneratedPaths bool + + // path of a custom config file + ConfigFile string + } + + version struct { + tag string + hash string + } + + // Option configures Chain. + Option func(*Chain) +) + +// ID replaces chain's id with given id. +func ID(id string) Option { + return func(c *Chain) { + c.options.chainID = id + } +} + +// HomePath replaces chain's configuration home path with given path. +func HomePath(path string) Option { + return func(c *Chain) { + c.options.homePath = path + } +} + +// KeyringBackend specifies the keyring backend to use for the chain command. +func KeyringBackend(keyringBackend chaincmd.KeyringBackend) Option { + return func(c *Chain) { + c.options.keyringBackend = keyringBackend + } +} + +// ConfigFile specifies a custom config file to use. +func ConfigFile(configFile string) Option { + return func(c *Chain) { + c.options.ConfigFile = configFile + } +} + +// WithOutputer sets the CLI outputer for the chain. +func WithOutputer(s uilog.Outputer) Option { + return func(c *Chain) { + c.logOutputer = s + } +} + +// CollectEvents collects events from the chain. +func CollectEvents(ev events.Bus) Option { + return func(c *Chain) { + c.ev = ev + } +} + +// CheckDependencies checks that cached Go dependencies of the chain have not +// been modified since they were downloaded. Dependencies are checked by +// running `go mod verify`. +func CheckDependencies() Option { + return func(c *Chain) { + c.options.checkDependencies = true + } +} + +// CheckCosmosSDKVersion checks that the app was scaffolded with a version of +// the Cosmos SDK that is supported by Ignite CLI. +func CheckCosmosSDKVersion() Option { + return func(c *Chain) { + c.options.checkCosmosSDKVersion = true + } +} + +// PrintGeneratedPaths prints the output paths of the generated code. 
func PrintGeneratedPaths() Option {
	return func(c *Chain) {
		c.options.printGeneratedPaths = true
	}
}

// New initializes a new Chain with options that its source lives at path.
func New(path string, options ...Option) (*Chain, error) {
	app, err := NewAppAt(path)
	if err != nil {
		return nil, err
	}

	c := &Chain{
		app:            app,
		serveRefresher: make(chan struct{}, 1),
	}

	// Apply the options
	for _, apply := range options {
		apply(c)
	}

	// A missing git repository is not an error: the source version simply
	// stays empty in that case.
	c.sourceVersion, err = c.appVersion()
	if err != nil && !errors.Is(err, git.ErrRepositoryNotExists) {
		return nil, err
	}

	c.Version, err = cosmosver.Detect(c.app.Path)
	if err != nil {
		return nil, err
	}

	if c.options.checkCosmosSDKVersion {
		if err := igniteversion.AssertSupportedCosmosSDKVersion(c.Version); err != nil {
			return nil, err
		}
	}

	return c, nil
}

// NewWithHomeFlags creates a Chain from the --path and --home flags of the
// given command, forwarding any extra chain options.
func NewWithHomeFlags(cmd *cobra.Command, chainOption ...Option) (*Chain, error) {
	var (
		home, _    = cmd.Flags().GetString(flagHome)
		appPath, _ = cmd.Flags().GetString(flagPath)
	)

	absPath, err := filepath.Abs(appPath)
	if err != nil {
		return nil, err
	}

	// Check if custom home is provided
	if home != "" {
		chainOption = append(chainOption, HomePath(home))
	}
	return New(absPath, chainOption...)
}

// appVersion resolves the chain's source version (git tag and commit hash)
// from its repository.
func (c *Chain) appVersion() (v version, err error) {
	ver, err := repoversion.Determine(c.app.Path)
	if err != nil {
		return version{}, err
	}

	v.hash = ver.Hash
	v.tag = ver.Tag

	return v, nil
}

// RPCPublicAddress points to the public address of Tendermint RPC, this is shared by
// other chains for relayer related actions.
+func (c *Chain) RPCPublicAddress() (string, error) { + conf, err := c.Config() + if err != nil { + return "", err + } + + validator, err := chainconfig.FirstValidator(conf) + if err != nil { + return "", err + } + + servers, err := validator.GetServers() + if err != nil { + return "", err + } + + return servers.RPC.Address, nil +} + +// ConfigPath returns the config path of the chain. +// Empty string means that the chain has no defined config. +func (c *Chain) ConfigPath() string { + if c.options.ConfigFile != "" { + return c.options.ConfigFile + } + path, err := chainconfig.LocateDefault(c.app.Path) + if err != nil { + return "" + } + return path +} + +// Config returns the config of the chain. +func (c *Chain) Config() (*chainconfig.Config, error) { + configPath := c.ConfigPath() + if configPath == "" { + return chainconfig.DefaultChainConfig(), nil + } + return chainconfig.ParseFile(configPath) +} + +// ID returns the chain's id. +func (c *Chain) ID() (string, error) { + // chainID in App has the most priority. + if c.options.chainID != "" { + return c.options.chainID, nil + } + + // otherwise uses defined in config.yml + chainConfig, err := c.Config() + if err != nil { + return "", err + } + genid, ok := chainConfig.Genesis["chain_id"] + if ok { + return genid.(string), nil + } + + // use app name by default. + return c.app.N(), nil +} + +// Name returns the chain's name. +func (c *Chain) Name() string { + return c.app.N() +} + +// Binary returns the name of app's default (appd) binary. +func (c *Chain) Binary() (string, error) { + conf, err := c.Config() + if err != nil { + return "", err + } + + if conf.Build.Binary != "" { + return conf.Build.Binary, nil + } + + return c.app.D(), nil +} + +// AbsBinaryPath returns the absolute path to the app's binary. +// Returned path includes the binary name. 
+func (c *Chain) AbsBinaryPath() (string, error) { + bin, err := c.Binary() + if err != nil { + return "", err + } + + return xexec.ResolveAbsPath(bin) +} + +// SetHome sets the chain home directory. +func (c *Chain) SetHome(home string) { + c.options.homePath = home +} + +// Home returns the blockchain node's home dir. +func (c *Chain) Home() (string, error) { + // check if home is explicitly defined for the app + home := c.options.homePath + if home == "" { + // return default home otherwise + var err error + home, err = c.DefaultHome() + if err != nil { + return "", err + } + + } + + // expand environment variables in home + home = os.ExpandEnv(home) + + return home, nil +} + +// AppPath returns the configured App's path. +func (c *Chain) AppPath() string { + return c.app.Path +} + +// DefaultHome returns the blockchain node's default home dir when not specified in the app. +func (c *Chain) DefaultHome() (string, error) { + // check if home is defined in config + cfg, err := c.Config() + if err != nil { + return "", err + } + validator, _ := chainconfig.FirstValidator(cfg) + if validator.Home != "" { + expandedHome, err := expandHome(validator.Home) + if err != nil { + return "", err + } + validator.Home = expandedHome + return validator.Home, nil + } + + return c.appHome(), nil +} + +// DefaultGentxPath returns default gentx.json path of the app. +func (c *Chain) DefaultGentxPath() (string, error) { + home, err := c.Home() + if err != nil { + return "", err + } + return filepath.Join(home, "config/gentx/gentx.json"), nil +} + +// GenesisPath returns genesis.json path of the app. +func (c *Chain) GenesisPath() (string, error) { + home, err := c.Home() + if err != nil { + return "", err + } + return filepath.Join(home, "config/genesis.json"), nil +} + +// GentxsPath returns the directory where gentxs are stored for the app. 
func (c *Chain) GentxsPath() (string, error) {
	home, err := c.Home()
	if err != nil {
		return "", err
	}
	return filepath.Join(home, "config/gentx"), nil
}

// AppTOMLPath returns app.toml path of the app.
func (c *Chain) AppTOMLPath() (string, error) {
	home, err := c.Home()
	if err != nil {
		return "", err
	}
	return filepath.Join(home, "config/app.toml"), nil
}

// ConfigTOMLPath returns config.toml path of the app.
func (c *Chain) ConfigTOMLPath() (string, error) {
	home, err := c.Home()
	if err != nil {
		return "", err
	}
	return filepath.Join(home, "config/config.toml"), nil
}

// ClientTOMLPath returns client.toml path of the app.
func (c *Chain) ClientTOMLPath() (string, error) {
	home, err := c.Home()
	if err != nil {
		return "", err
	}
	return filepath.Join(home, "config/client.toml"), nil
}

// KeyringBackend returns the keyring backend chosen for the chain.
// Resolution order: chain option, first validator's client config,
// client.toml, then the "test" backend as the final default.
func (c *Chain) KeyringBackend() (chaincmd.KeyringBackend, error) {
	// When keyring backend is initialized as a chain
	// option it overrides any configured backends.
	if c.options.keyringBackend != "" {
		return c.options.keyringBackend, nil
	}

	// Try to get keyring backend from the first configured validator
	cfg, err := c.Config()
	if err != nil {
		return "", err
	}

	validator, _ := chainconfig.FirstValidator(cfg)
	if validator.Client != nil {
		if v, ok := validator.Client["keyring-backend"]; ok {
			if backend, ok := v.(string); ok {
				return chaincmd.KeyringBackendFromString(backend)
			}
		}
	}

	// Try to get keyring backend from client.toml config file
	// NOTE(review): the variable is named configTOMLPath but it actually
	// holds the client.toml path returned by ClientTOMLPath.
	configTOMLPath, err := c.ClientTOMLPath()
	if err != nil {
		return "", err
	}
	cf := confile.New(confile.DefaultTOMLEncodingCreator, configTOMLPath)
	var conf struct {
		KeyringBackend string `toml:"keyring-backend"`
	}
	if err := cf.Load(&conf); err != nil {
		return "", err
	}
	if conf.KeyringBackend != "" {
		return chaincmd.KeyringBackendFromString(conf.KeyringBackend)
	}

	// Use test backend as default when none is configured
	return chaincmd.KeyringBackendTest, nil
}

// Commands returns the runner execute commands on the chain's binary.
func (c *Chain) Commands(ctx context.Context) (chaincmdrunner.Runner, error) {
	id, err := c.ID()
	if err != nil {
		return chaincmdrunner.Runner{}, err
	}

	home, err := c.Home()
	if err != nil {
		return chaincmdrunner.Runner{}, err
	}

	binary, err := c.Binary()
	if err != nil {
		return chaincmdrunner.Runner{}, err
	}

	// Try to make the binary path absolute. This will also
	// find the binary path when the Go bin path is not part
	// of the PATH environment variable.
	binary = xexec.TryResolveAbsPath(binary)

	backend, err := c.KeyringBackend()
	if err != nil {
		return chaincmdrunner.Runner{}, err
	}

	cfg, err := c.Config()
	if err != nil {
		return chaincmdrunner.Runner{}, err
	}

	// Default server addresses are used unless a validator is configured.
	servers := chainconfigv1.DefaultServers()
	if len(cfg.Validators) > 0 {
		validator, _ := chainconfig.FirstValidator(cfg)
		servers, err = validator.GetServers()
		if err != nil {
			return chaincmdrunner.Runner{}, err
		}
	}

	nodeAddr, err := xurl.TCP(servers.RPC.Address)
	if err != nil {
		return chaincmdrunner.Runner{}, err
	}

	chainCommandOptions := []chaincmd.Option{
		chaincmd.WithChainID(id),
		chaincmd.WithHome(home),
		chaincmd.WithVersion(c.Version),
		chaincmd.WithNodeAddress(nodeAddr),
		chaincmd.WithKeyringBackend(backend),
	}

	cc := chaincmd.New(binary, chainCommandOptions...)

	ccrOptions := []chaincmdrunner.Option{}

	// Enable command output only when CLI verbosity is enabled
	if c.logOutputer != nil && c.logOutputer.Verbosity() == uilog.VerbosityVerbose {
		out := c.logOutputer.NewOutput(c.app.D(), colors.Cyan)
		ccrOptions = append(
			ccrOptions,
			chaincmdrunner.Stdout(out.Stdout()),
			chaincmdrunner.Stderr(out.Stderr()),
		)
	}

	return chaincmdrunner.New(ctx, cc, ccrOptions...)
}

// appBackendSourceWatchPaths lists the app source directories (plus the proto
// directory) watched for changes during serve.
func appBackendSourceWatchPaths(protoDir string) []string {
	return []string{
		"app",
		"cmd",
		"x",
		"third_party",
		protoDir,
	}
}

// expandHome expands a path that may start with "~" and may contain environment variables.
func expandHome(path string) (string, error) {
	if strings.HasPrefix(path, "~") {
		home, err := os.UserHomeDir()
		if err != nil {
			return "", err
		}
		// Only replace the first occurrence at the start.
		path = home + strings.TrimPrefix(path, "~")
	}
	return os.ExpandEnv(path), nil
}

// Bech32Prefix returns the bech32 prefix of the chain.
func (c *Chain) Bech32Prefix() (string, error) {
	prefix, err := c.parseAddressPrefix()
	if err != nil || prefix == "" {
		// NOTE(review): on parse failure the default "cosmos" prefix is
		// returned together with the non-nil error, so callers that check
		// err first never use the fallback value — confirm this is intended.
		return cosmosaccount.AccountPrefixCosmos, err
	}

	return prefix, nil
}

// CoinType returns the coin type of the chain.
func (c *Chain) CoinType() (uint32, error) {
	coinType, err := c.parseCoinType()
	if err != nil || coinType == 0 {
		// NOTE(review): a parsed value of 0 is treated as "unset" and falls
		// back to the Cosmos default, even though 0 is a registered BIP-44
		// coin type (Bitcoin) — confirm this is intended.
		return cosmosaccount.CoinTypeCosmos, err
	}

	return coinType, nil
}

// parseAddressPrefix parses the address prefix from the app code.
// It scans app.go line by line for an AccountAddressPrefix assignment and
// returns the quoted value; an empty string means no prefix was found.
func (c *Chain) parseAddressPrefix() (string, error) {
	appGoPath, err := c.findAppGoFile()
	if err != nil {
		return "", err
	}

	content, err := os.ReadFile(appGoPath)
	if err != nil {
		return "", err
	}

	// try to find the AccountAddressPrefix constant
	lines := strings.SplitSeq(string(content), "\n")
	for line := range lines {
		// match both formats:
		// AccountAddressPrefix = "cosmos"
		// AccountAddressPrefix string = "cosmos"
		if strings.Contains(line, "AccountAddressPrefix") && strings.Contains(line, "=") {
			parts := strings.Split(line, "=")
			if len(parts) < 2 {
				continue
			}

			// extract the value within quotes
			value := strings.TrimSpace(parts[1])
			// remove comments if any
			if idx := strings.Index(value, "//"); idx >= 0 {
				value = value[:idx]
			}
			value = strings.TrimSpace(value)

			// extract string between quotes
			if start := strings.Index(value, "\""); start >= 0 {
				if end := strings.Index(value[start+1:], "\""); end >= 0 {
					return value[start+1 : start+1+end], nil
				}
			}
		}
	}

	return "", nil
}

// parseCoinType parses the coin type from the app code.
+func (c *Chain) parseCoinType() (uint32, error) { + appGoPath, err := c.findAppGoFile() + if err != nil { + return 0, err + } + + content, err := os.ReadFile(appGoPath) + if err != nil { + return 0, err + } + + // try to find the ChainCoinType constant + lines := strings.SplitSeq(string(content), "\n") + for line := range lines { + if strings.Contains(line, "ChainCoinType") && strings.Contains(line, "=") { + parts := strings.Split(line, "=") + if len(parts) < 2 { + continue + } + + // extract the numeric value + value := strings.TrimSpace(parts[1]) + // remove comments if any + if idx := strings.Index(value, "//"); idx >= 0 { + value = value[:idx] + } + value = strings.TrimSpace(value) + + // parse the value as uint32 + var coinType uint32 + if _, err := fmt.Sscanf(value, "%d", &coinType); err == nil { + return coinType, nil + } + } + } + + return 0, nil +} + +// findAppGoFile attempts to find the app.go file in the project. +func (c *Chain) findAppGoFile() (string, error) { + // Look for the app.go file in common locations + commonPath := filepath.Join(c.app.Path, "app", "app.go") + if _, err := os.Stat(commonPath); err == nil { + return commonPath, nil + } + + return cosmosanalysis.FindAppFilePath(c.app.Path) +} diff --git a/ignite/services/chain/chain_test.go b/ignite/services/chain/chain_test.go new file mode 100644 index 0000000..ba5b868 --- /dev/null +++ b/ignite/services/chain/chain_test.go @@ -0,0 +1,190 @@ +package chain + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/archive" +) + +func TestSourceVersion(t *testing.T) { + t.Run("tagged latest commit", func(t *testing.T) { + c, err := New(tempSource(t, "testdata/version/mars.v0.2.tar.gz")) + require.NoError(t, err) + + assert.Equal(t, "0.2", c.sourceVersion.tag) + assert.Equal(t, "503123b1ac552437c7db3d17f816fd4121ff400d", c.sourceVersion.hash) + }) + + t.Run("tagged older commit", 
func(t *testing.T) { + c, err := New(tempSource(t, "testdata/version/mars.v0.2-3-gaae48b7.tar.gz")) + require.NoError(t, err) + + assert.Equal(t, "0.2-aae48b7f", c.sourceVersion.tag) + assert.Equal(t, "aae48b7ffa4991bbe229f0969db8fe8623bf1fd4", c.sourceVersion.hash) + }) +} + +func TestBech32Prefix(t *testing.T) { + t.Run("default prefix when not specified", func(t *testing.T) { + dir, err := tempSourceWithApp(t) + require.NoError(t, err) + c, err := New(dir) + require.NoError(t, err) + + prefix, err := c.Bech32Prefix() + require.NoError(t, err) + + // Should return the default Cosmos prefix + assert.Equal(t, "cosmos", prefix) + }) + + t.Run("returns custom prefix when specified", func(t *testing.T) { + dir, err := tempSourceWithApp(t) + require.NoError(t, err) + + // Create mock app.go with custom prefix + mockAppGo := `package app + + const ( + AccountAddressPrefix = "mars" + ) + ` + require.NoError(t, os.WriteFile(filepath.Join(dir, "app", "app.go"), []byte(mockAppGo), 0o644)) + + c, err := New(dir) + require.NoError(t, err) + + prefix, err := c.Bech32Prefix() + require.NoError(t, err) + + assert.Equal(t, "mars", prefix) + }) + + t.Run("handles alternate prefix declaration format", func(t *testing.T) { + dir, err := tempSourceWithApp(t) + require.NoError(t, err) + + // Create mock app.go with custom prefix in alternate format + mockAppGo := `package app + + const AccountAddressPrefix string = "jupiter" // Some comment + ` + require.NoError(t, os.WriteFile(filepath.Join(dir, "app", "app.go"), []byte(mockAppGo), 0o644)) + + c, err := New(dir) + require.NoError(t, err) + + prefix, err := c.Bech32Prefix() + require.NoError(t, err) + + assert.Equal(t, "jupiter", prefix) + }) +} + +func TestCoinType(t *testing.T) { + t.Run("default coin type when not specified", func(t *testing.T) { + dir, err := tempSourceWithApp(t) + require.NoError(t, err) + + c, err := New(dir) + require.NoError(t, err) + + coinType, err := c.CoinType() + require.NoError(t, err) + + 
assert.Equal(t, uint32(118), coinType) + }) + + t.Run("returns custom coin type when specified", func(t *testing.T) { + dir, err := tempSourceWithApp(t) + require.NoError(t, err) + + // Create mock app.go with custom coin type + mockAppGo := `package app + + const ( + ChainCoinType = 529 + ) + ` + require.NoError(t, os.WriteFile(filepath.Join(dir, "app", "app.go"), []byte(mockAppGo), 0o644)) + + c, err := New(dir) + require.NoError(t, err) + + coinType, err := c.CoinType() + require.NoError(t, err) + + assert.Equal(t, uint32(529), coinType) + }) + + t.Run("handles coin type with comments", func(t *testing.T) { + dir, err := tempSourceWithApp(t) + require.NoError(t, err) + + mockAppGo := `package app + + // ChainCoinType is the coin type for this chain + const ChainCoinType = 330 // Custom coin type for test + ` + require.NoError(t, os.WriteFile(filepath.Join(dir, "app", "app.go"), []byte(mockAppGo), 0o644)) + + c, err := New(dir) + require.NoError(t, err) + + coinType, err := c.CoinType() + require.NoError(t, err) + + assert.Equal(t, uint32(330), coinType) + }) +} + +func tempSource(t *testing.T, tarPath string) (path string) { + t.Helper() + + f, err := os.Open(tarPath) + require.NoError(t, err) + + defer f.Close() + + dir := t.TempDir() + + require.NoError(t, archive.ExtractArchive(dir, f)) + + dirs, err := os.ReadDir(dir) + require.NoError(t, err) + + return filepath.Join(dir, dirs[0].Name()) +} + +func tempSourceWithApp(t *testing.T) (string, error) { + t.Helper() + + tmpDir := t.TempDir() + + emptyFilesPaths := []string{ + filepath.Join(tmpDir, "go.mod"), + filepath.Join(tmpDir, "app", "app.go"), + } + + if err := os.WriteFile(emptyFilesPaths[0], []byte("module my-new-chain"), 0o755); err != nil { + return "", err + } + + for _, f := range emptyFilesPaths[1:] { + if err := os.MkdirAll(filepath.Dir(f), 0o755); err != nil { + return "", err + } + + if err := os.WriteFile(f, []byte("package my-new-chain"), 0o755); err != nil { + return "", err + } + + } + + 
return tmpDir, nil +} diff --git a/ignite/services/chain/faucet.go b/ignite/services/chain/faucet.go new file mode 100644 index 0000000..f399c93 --- /dev/null +++ b/ignite/services/chain/faucet.go @@ -0,0 +1,153 @@ +package chain + +import ( + "context" + "fmt" + "os" + "strings" + "time" + + sdkmath "cosmossdk.io/math" + + sdk "github.com/cosmos/cosmos-sdk/types" + + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + chaincmdrunner "github.com/ignite/cli/v29/ignite/pkg/chaincmd/runner" + "github.com/ignite/cli/v29/ignite/pkg/cosmosfaucet" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xurl" + "github.com/ignite/cli/v29/ignite/pkg/xyaml" +) + +var ( + // ErrFaucetIsNotEnabled is returned when faucet is not enabled in the config.yml. + ErrFaucetIsNotEnabled = errors.New("faucet is not enabled in the config.yml") + + // ErrFaucetAccountDoesNotExist returned when specified faucet account in the config.yml does not exist. + ErrFaucetAccountDoesNotExist = errors.New("specified account (faucet.name) does not exist") +) + +var envAPIAddress = os.Getenv("API_ADDRESS") + +// Faucet returns the faucet for the chain or an error if the faucet +// configuration is wrong or not configured (not enabled) at all. +func (c *Chain) Faucet(ctx context.Context) (cosmosfaucet.Faucet, error) { + id, err := c.ID() + if err != nil { + return cosmosfaucet.Faucet{}, err + } + + conf, err := c.Config() + if err != nil { + return cosmosfaucet.Faucet{}, err + } + + commands, err := c.Commands(ctx) + if err != nil { + return cosmosfaucet.Faucet{}, err + } + + // validate if the faucet initialization in the config.yml is correct. 
+ if conf.Faucet.Name == nil { + return cosmosfaucet.Faucet{}, ErrFaucetIsNotEnabled + } + + if _, err := commands.ShowAccount(ctx, *conf.Faucet.Name); err != nil { + if errors.Is(err, chaincmdrunner.ErrAccountDoesNotExist) { + return cosmosfaucet.Faucet{}, ErrFaucetAccountDoesNotExist + } + return cosmosfaucet.Faucet{}, err + } + + // construct faucet options. + validator, err := chainconfig.FirstValidator(conf) + if err != nil { + return cosmosfaucet.Faucet{}, err + } + + servers, err := validator.GetServers() + if err != nil { + return cosmosfaucet.Faucet{}, err + } + + apiAddress := servers.API.Address + if envAPIAddress != "" { + apiAddress = envAPIAddress + } + + apiAddress, err = xurl.HTTP(apiAddress) + if err != nil { + return cosmosfaucet.Faucet{}, errors.Errorf("invalid host api address format: %w", err) + } + + faucetOptions := []cosmosfaucet.Option{ + cosmosfaucet.Account(*conf.Faucet.Name, "", "", "", ""), + cosmosfaucet.ChainID(id), + cosmosfaucet.OpenAPI(apiAddress), + cosmosfaucet.Version(c.Version), + } + + // check if indexer is enabled or not. + if indexerDisabled(validator.Config) { + faucetOptions = append(faucetOptions, cosmosfaucet.IndexerDisabled()) + c.ev.Send("⚠️ CometBFT indexer disabled. Faucet can't check account limits or verify transaction status.") + } + + // parse coins to pass to the faucet as coins. + for _, coin := range conf.Faucet.Coins { + parsedCoin, err := sdk.ParseCoinNormalized(coin) + if err != nil { + return cosmosfaucet.Faucet{}, errors.Errorf("%w: %s", err, coin) + } + + var amountMax sdkmath.Int + + // find out the max amount for this coin. 
+ for _, coinMax := range conf.Faucet.CoinsMax { + parsedMax, err := sdk.ParseCoinNormalized(coinMax) + if err != nil { + return cosmosfaucet.Faucet{}, errors.Errorf("%w: %s", err, coin) + } + if parsedMax.Denom == parsedCoin.Denom { + amountMax = parsedMax.Amount + break + } + } + + faucetOptions = append(faucetOptions, cosmosfaucet.Coin(parsedCoin.Amount, amountMax, parsedCoin.Denom)) + } + + // parse fees to pass to the faucet as fees. + if fee := conf.Faucet.TxFee; fee != "" { + parsedFee, err := sdk.ParseCoinNormalized(fee) + if err != nil { + return cosmosfaucet.Faucet{}, errors.Errorf("%w: %s", err, fee) + } + + faucetOptions = append(faucetOptions, cosmosfaucet.FeeAmount(parsedFee.Amount, parsedFee.Denom)) + } + + if conf.Faucet.RateLimitWindow != "" { + rateLimitWindow, err := time.ParseDuration(conf.Faucet.RateLimitWindow) + if err != nil { + return cosmosfaucet.Faucet{}, errors.Errorf("%w: %s", err, conf.Faucet.RateLimitWindow) + } + + faucetOptions = append(faucetOptions, cosmosfaucet.RefreshWindow(rateLimitWindow)) + } + + // init the faucet with options and return. + return cosmosfaucet.New(ctx, commands, faucetOptions...) +} + +// indexerDisabled checks if the indexer is disabled in the config.yml. +// More specifically, it checks if a kv indexer is used (psql indexer is not supported). 
+func indexerDisabled(valCfg xyaml.Map) bool { + const txIndexKey = "tx_index" + v, ok := valCfg[txIndexKey] + if !ok { + return false + } + + return !strings.Contains(fmt.Sprintf("%v", v), "kv") +} diff --git a/ignite/services/chain/generate.go b/ignite/services/chain/generate.go new file mode 100644 index 0000000..c60596d --- /dev/null +++ b/ignite/services/chain/generate.go @@ -0,0 +1,278 @@ +package chain + +import ( + "context" + "fmt" + "os" + "path/filepath" + + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + "github.com/ignite/cli/v29/ignite/config/chain/base" + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + "github.com/ignite/cli/v29/ignite/pkg/cosmosgen" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/events" +) + +type generateOptions struct { + useCache bool + isProtoVendorEnabled bool + isGoEnabled bool + isTSClientEnabled bool + isComposablesEnabled bool + isOpenAPIEnabled bool + openAPIExcludeList []string + tsClientPath string + composablesPath string +} + +// GenerateTarget is a target to generate code for from proto files. +type GenerateTarget func(*generateOptions) + +// GenerateGo enables generating proto based Go code needed for the chain's source code. +func GenerateGo() GenerateTarget { + return func(o *generateOptions) { + o.isGoEnabled = true + } +} + +// GenerateTSClient enables generating proto based Typescript Client. +// The path assigns the output path to use for the generated Typescript client +// overriding the configured or default path. Path can be an empty string. +func GenerateTSClient(path string, useCache bool) GenerateTarget { + return func(o *generateOptions) { + o.isTSClientEnabled = true + o.tsClientPath = path + o.useCache = useCache + } +} + +// GenerateComposables enables generating proto based Typescript Client and Vue 3 composables. 
+func GenerateComposables(path string) GenerateTarget { + return func(o *generateOptions) { + o.isTSClientEnabled = true + o.isComposablesEnabled = true + o.composablesPath = path + } +} + +// GenerateOpenAPI enables generating OpenAPI spec for your chain. +func GenerateOpenAPI(excludeList []string) GenerateTarget { + return func(o *generateOptions) { + o.isOpenAPIEnabled = true + o.openAPIExcludeList = excludeList + } +} + +// GenerateProtoVendor enables `proto_vendor` folder generation. +// Proto vendor is generated from Go dependencies that contain proto files that +// are not included in the app's Buf config. +// Enabling proto vendoring might update Buf config with missing dependencies +// if a Go dependency contains proto files and a Buf config with a name that is +// not listed in the Buf dependencies. +func GenerateProtoVendor() GenerateTarget { + return func(o *generateOptions) { + o.isProtoVendorEnabled = true + } +} + +// generateFromConfig makes code generation from proto files from the given config. +func (c *Chain) generateFromConfig(ctx context.Context, cacheStorage cache.Storage, generateClients bool) error { + conf, err := c.Config() + if err != nil { + return err + } + + // Additional code generation targets + var targets []GenerateTarget + + if conf.Client.OpenAPI.Path != "" { + targets = append(targets, GenerateOpenAPI(conf.Client.OpenAPI.ExcludeList)) + } + + if generateClients { + if p := conf.Client.Typescript.Path; p != "" { + targets = append(targets, GenerateTSClient(p, true)) + } + + if p := conf.Client.Composables.Path; p != "" { + targets = append(targets, GenerateComposables(p)) + } + } + + // Generate proto based code for Go and optionally for any optional targets + return c.Generate(ctx, cacheStorage, GenerateGo(), targets...) +} + +// Generate makes code generation from proto files for given target and additionalTargets. 
+func (c *Chain) Generate( + ctx context.Context, + cacheStorage cache.Storage, + target GenerateTarget, + additionalTargets ...GenerateTarget, +) error { + var targetOptions generateOptions + + for _, apply := range append(additionalTargets, target) { + apply(&targetOptions) + } + + conf, err := c.Config() + if err != nil { + return err + } + + c.ev.Send("Building proto...", events.ProgressUpdate()) + + options := []cosmosgen.Option{cosmosgen.CollectEvents(c.ev)} + + if targetOptions.isGoEnabled { + options = append(options, cosmosgen.WithGoGeneration()) + } + + if targetOptions.isProtoVendorEnabled { + options = append(options, cosmosgen.UpdateBufModule()) + } + + var ( + openAPIPath, tsClientPath, composablesPath string + updateConfig bool + ) + + if targetOptions.isOpenAPIEnabled { + openAPIPath = conf.Client.OpenAPI.Path + if openAPIPath == "" { + openAPIPath = chainconfig.DefaultOpenAPIPath + } + + // Non-absolute OpenAPI paths must be treated as relative to the app directory + if !filepath.IsAbs(openAPIPath) { + openAPIPath = filepath.Join(c.app.Path, openAPIPath) + } + + options = append(options, cosmosgen.WithOpenAPIGeneration(openAPIPath, targetOptions.openAPIExcludeList)) + } + + if targetOptions.isTSClientEnabled { + tsClientPath = targetOptions.tsClientPath + if tsClientPath == "" { + tsClientPath = chainconfig.TSClientPath(*conf) + + // When TS client is generated make sure the config is updated + // with the output path when the client path option is empty. 
+ if conf.Client.Typescript.Path == "" { + conf.Client.Typescript.Path = tsClientPath + updateConfig = true + } + } + + // Non-absolute TS client output paths must be treated as relative to the app directory + if !filepath.IsAbs(tsClientPath) { + tsClientPath = filepath.Join(c.app.Path, tsClientPath) + } + + options = append(options, + cosmosgen.WithTSClientGeneration( + cosmosgen.TypescriptModulePath(tsClientPath), + tsClientPath, + targetOptions.useCache, + ), + ) + } + + if targetOptions.isComposablesEnabled { + composablesPath = targetOptions.composablesPath + + if composablesPath == "" { + composablesPath = chainconfig.ComposablesPath(conf) + + if conf.Client.Composables.Path == "" { + conf.Client.Composables.Path = composablesPath + updateConfig = true + } + } + + // Non-absolute Composables output paths must be treated as relative to the app directory + if !filepath.IsAbs(composablesPath) { + composablesPath = filepath.Join(c.app.Path, composablesPath) + } + + options = append(options, + cosmosgen.WithComposablesGeneration( + cosmosgen.ComposableModulePath(composablesPath), + composablesPath, + ), + ) + } + + if err := cosmosgen.Generate( + ctx, + cacheStorage, + c.app.Path, + conf.Build.Proto.Path, + c.app.ImportPath, + chainconfig.DefaultVuePath, + options..., + ); err != nil { + return &CannotBuildAppError{err} + } + + // Check if the client config options have to be updated with the paths of the generated code + if updateConfig { + if err := c.saveClientConfig(conf.Client); err != nil { + return errors.Errorf("error adding generated paths to config file: %w", err) + } + } + + if c.options.printGeneratedPaths { + if targetOptions.isTSClientEnabled { + c.ev.Send( + fmt.Sprintf("Typescript client path: %s", tsClientPath), + events.Icon(icons.Bullet), + events.ProgressFinish(), + ) + } + + if targetOptions.isComposablesEnabled { + c.ev.Send( + fmt.Sprintf("Vue composables path: %s", composablesPath), + events.Icon(icons.Bullet), + events.ProgressFinish(), + 
) + } + + if targetOptions.isOpenAPIEnabled { + c.ev.Send( + fmt.Sprintf("OpenAPI path: %s", openAPIPath), + events.Icon(icons.Bullet), + events.ProgressFinish(), + ) + } + } + + return nil +} + +func (c Chain) saveClientConfig(client base.Client) error { + path := c.ConfigPath() + file, err := os.Open(path) + if err != nil { + return err + } + + defer file.Close() + + // Initialize the config to the file values ignoring empty + // values that otherwise would be initialized to defaults. + // Defaults must be ignored to avoid writing them to the + // YAML config file when they are not present. + var cfg chainconfig.Config + if err := cfg.Decode(file); err != nil { + return err + } + + cfg.Client = client + + return chainconfig.Save(cfg, path) +} diff --git a/ignite/services/chain/init.go b/ignite/services/chain/init.go new file mode 100644 index 0000000..09b6705 --- /dev/null +++ b/ignite/services/chain/init.go @@ -0,0 +1,333 @@ +package chain + +import ( + "context" + "fmt" + "os" + "path/filepath" + "strings" + + "dario.cat/mergo" + + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + chaincmdrunner "github.com/ignite/cli/v29/ignite/pkg/chaincmd/runner" + "github.com/ignite/cli/v29/ignite/pkg/cliui/view/accountview" + "github.com/ignite/cli/v29/ignite/pkg/confile" + "github.com/ignite/cli/v29/ignite/pkg/events" +) + +type ( + // InitArgs represents argument to add additional initialization for the chain. + // InitAccounts initializes chain accounts from the Ignite config. + // InitConfiguration initializes node configuration from the Ignite config. + // InitGenesis initializes genesis state for the chain from Ignite config. + InitArgs struct { + InitAccounts bool + InitConfiguration bool + InitGenesis bool + } +) + +const ( + moniker = "mynode" +) + +var ( + // InitArgsAll performs all initialization for the chain. 
+ InitArgsAll = InitArgs{ + InitAccounts: true, + InitConfiguration: true, + InitGenesis: true, + } + + // InitArgsNone performs minimal initialization for the chain by only initializing a node. + InitArgsNone = InitArgs{ + InitAccounts: false, + InitConfiguration: false, + InitGenesis: false, + } +) + +// Init initializes the chain and accounts. +func (c *Chain) Init(ctx context.Context, args InitArgs) error { + if err := c.InitChain(ctx, args.InitConfiguration, args.InitGenesis); err != nil { + return err + } + + if args.InitAccounts { + conf, err := c.Config() + if err != nil { + return &CannotBuildAppError{err} + } + + return c.InitAccounts(ctx, conf) + } + return nil +} + +// InitChain initializes the chain. +func (c *Chain) InitChain(ctx context.Context, initConfiguration, initGenesis bool) error { + chainID, err := c.ID() + if err != nil { + return err + } + + // cleanup persistent data from previous `serve`. + home, err := c.Home() + if err != nil { + return err + } + if err := os.RemoveAll(home); err != nil { + return err + } + + var conf *chainconfig.Config + if initConfiguration || initGenesis { + conf, err = c.Config() + if err != nil { + return err + } + } + + commands, err := c.Commands(ctx) + if err != nil { + return err + } + + // init node. + var initOptions []string + if denom := conf.DefaultDenom; len(denom) > 0 { + initOptions = append(initOptions, + fmt.Sprintf("--default-denom=%s", denom), + ) + } + + if err := commands.Init(ctx, moniker, initOptions...); err != nil { + return err + } + + // overwrite app config files with the values defined in Ignite's config file + if initConfiguration { + if err := c.Configure(home, chainID, conf); err != nil { + return err + } + } + + if initGenesis { + // make sure that chain id given during chain.New() has the most priority. 
+ if conf.Genesis != nil { + conf.Genesis["chain_id"] = chainID + } + + // update genesis file with the genesis values defined in the config + if err := c.UpdateGenesisFile(conf.Genesis); err != nil { + return err + } + } + + return nil +} + +// InitAccounts initializes the chain accounts and creates validator gentxs. +func (c *Chain) InitAccounts(ctx context.Context, cfg *chainconfig.Config) error { + commands, err := c.Commands(ctx) + if err != nil { + return err + } + + c.ev.Send("Initializing accounts...", events.ProgressUpdate()) + + var accounts accountview.Accounts + + // add accounts from config into genesis + for _, account := range cfg.Accounts { + var generatedAccount chaincmdrunner.Account + accountAddress := account.Address + + // If the account doesn't provide an address, we create one + if accountAddress == "" { + generatedAccount, err = commands.AddAccount( + ctx, + account.Name, + account.Mnemonic, + account.CoinType, + account.AccountNumber, + account.AddressIndex, + ) + if err != nil { + return err + } + accountAddress = generatedAccount.Address + } + + coins := strings.Join(account.Coins, ",") + if err := commands.AddGenesisAccount(ctx, accountAddress, coins); err != nil { + return err + } + + if account.Address == "" { + accounts = accounts.Append(accountview.NewAccount( + generatedAccount.Name, + accountAddress, + accountview.WithMnemonic(generatedAccount.Mnemonic), + )) + } else { + accounts = accounts.Append(accountview.NewAccount(account.Name, accountAddress)) + } + } + + c.ev.SendView(accounts, events.ProgressFinish()) + + // 0 length validator set when using network config + if len(cfg.Validators) == 0 { + return nil + } + + if cfg.IsConsumerChain() { + // we skip early if the chain is a consumer chain + return nil + } + + // Sovereign chain writes validators in gentxs. 
+ if _, err := c.IssueGentx(ctx, createValidatorFromConfig(cfg)); err != nil { + return err + } + + return nil +} + +// IssueGentx generates a gentx from the validator information in chain config and imports it in the chain genesis. +func (c Chain) IssueGentx(ctx context.Context, v Validator) (string, error) { + commands, err := c.Commands(ctx) + if err != nil { + return "", err + } + + // create the gentx from the validator from the config + gentxPath, err := c.Gentx(ctx, commands, v) + if err != nil { + return "", err + } + + // import the gentx into the genesis + return gentxPath, commands.CollectGentxs(ctx) +} + +// IsInitialized checks if the chain is initialized. +// The check is performed by checking if the gentx dir exists in the config, +// unless c is a consumer chain, in which case the check relies on checking if +// the consumer genesis module is filled with validators. +func (c *Chain) IsInitialized() (bool, error) { + home, err := c.Home() + if err != nil { + return false, err + } + + cfg, err := c.Config() + if err != nil { + return false, err + } + if cfg.IsConsumerChain() { + // when consumer chain, we skip the IsInialized logic + return true, nil + } + + gentxDir := filepath.Join(home, "config", "gentx") + + if _, err := os.Stat(gentxDir); os.IsNotExist(err) { + return false, nil + } else if err != nil { + return false, err + } + + return true, nil +} + +// UpdateGenesisFile updates the chain genesis with a generic map of data. +// Updates are made using an override merge strategy. 
+func (c Chain) UpdateGenesisFile(data map[string]interface{}) error { + path, err := c.GenesisPath() + if err != nil { + return err + } + + genesis := make(map[string]interface{}) + cf := confile.New(confile.DefaultJSONEncodingCreator, path) + if err := cf.Load(&genesis); err != nil { + return err + } + + if err := mergo.Merge(&genesis, data, mergo.WithOverride); err != nil { + return err + } + + return cf.Save(genesis) +} + +type Validator struct { + Name string + Moniker string + StakingAmount string + CommissionRate string + CommissionMaxRate string + CommissionMaxChangeRate string + MinSelfDelegation string + GasPrices string + Details string + Identity string + Website string + SecurityContact string +} + +// Account represents an account in the chain. +type Account struct { + Name string + Address string + Mnemonic string `json:"mnemonic"` + CoinType string + Coins string +} + +func createValidatorFromConfig(conf *chainconfig.Config) (validator Validator) { + // Currently, we support the config file with one valid validator. 
+ validatorFromConfig := conf.Validators[0] + validator.Name = validatorFromConfig.Name + validator.StakingAmount = validatorFromConfig.Bonded + + if validatorFromConfig.Gentx != nil { + if validatorFromConfig.Gentx.Amount != "" { + validator.StakingAmount = validatorFromConfig.Gentx.Amount + } + if validatorFromConfig.Gentx.Moniker != "" { + validator.Moniker = validatorFromConfig.Gentx.Moniker + } + if validatorFromConfig.Gentx.CommissionRate != "" { + validator.CommissionRate = validatorFromConfig.Gentx.CommissionRate + } + if validatorFromConfig.Gentx.CommissionMaxRate != "" { + validator.CommissionMaxRate = validatorFromConfig.Gentx.CommissionMaxRate + } + if validatorFromConfig.Gentx.CommissionMaxChangeRate != "" { + validator.CommissionMaxChangeRate = validatorFromConfig.Gentx.CommissionMaxChangeRate + } + if validatorFromConfig.Gentx.GasPrices != "" { + validator.GasPrices = validatorFromConfig.Gentx.GasPrices + } + if validatorFromConfig.Gentx.Details != "" { + validator.Details = validatorFromConfig.Gentx.Details + } + if validatorFromConfig.Gentx.Identity != "" { + validator.Identity = validatorFromConfig.Gentx.Identity + } + if validatorFromConfig.Gentx.Website != "" { + validator.Website = validatorFromConfig.Gentx.Website + } + if validatorFromConfig.Gentx.SecurityContact != "" { + validator.SecurityContact = validatorFromConfig.Gentx.SecurityContact + } + if validatorFromConfig.Gentx.MinSelfDelegation != "" { + validator.MinSelfDelegation = validatorFromConfig.Gentx.MinSelfDelegation + } + } + return validator +} diff --git a/ignite/services/chain/lint.go b/ignite/services/chain/lint.go new file mode 100644 index 0000000..aa4d45d --- /dev/null +++ b/ignite/services/chain/lint.go @@ -0,0 +1,22 @@ +package chain + +import ( + "context" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/exec" +) + +// Lint runs the linting process for the chain. +// It uses golangci-lint to lint the chain's codebase. 
+func (c *Chain) Lint(ctx context.Context) error { + cmd := []string{ + "go", + "tool", + "github.com/golangci/golangci-lint/cmd/golangci-lint", + "run", + "./...", + "--out-format=tab", + } + + return exec.Exec(ctx, cmd, exec.IncludeStdLogsToError()) +} diff --git a/ignite/services/chain/runtime.go b/ignite/services/chain/runtime.go new file mode 100644 index 0000000..93f9a6c --- /dev/null +++ b/ignite/services/chain/runtime.go @@ -0,0 +1,248 @@ +package chain + +import ( + "context" + "os" + "path/filepath" + + "github.com/nqd/flat" + "github.com/pelletier/go-toml" + + sdktypes "github.com/cosmos/cosmos-sdk/types" + + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + "github.com/ignite/cli/v29/ignite/pkg/chaincmd" + chaincmdrunner "github.com/ignite/cli/v29/ignite/pkg/chaincmd/runner" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xurl" +) + +// Gentx wraps the "testd gentx" command for generating a gentx for a validator. +// Returns path of generated gentx. 
+func (c Chain) Gentx(ctx context.Context, runner chaincmdrunner.Runner, v Validator) (path string, err error) { + return runner.Gentx( + ctx, + v.Name, + v.StakingAmount, + chaincmd.GentxWithMoniker(v.Moniker), + chaincmd.GentxWithCommissionRate(v.CommissionRate), + chaincmd.GentxWithCommissionMaxRate(v.CommissionMaxRate), + chaincmd.GentxWithCommissionMaxChangeRate(v.CommissionMaxChangeRate), + chaincmd.GentxWithMinSelfDelegation(v.MinSelfDelegation), + chaincmd.GentxWithGasPrices(v.GasPrices), + chaincmd.GentxWithDetails(v.Details), + chaincmd.GentxWithIdentity(v.Identity), + chaincmd.GentxWithWebsite(v.Website), + chaincmd.GentxWithSecurityContact(v.SecurityContact), + ) +} + +func (c Chain) InPlace(ctx context.Context, runner chaincmdrunner.Runner, args InPlaceArgs) error { + err := runner.InPlace(ctx, + args.NewChainID, + args.NewOperatorAddress, + chaincmd.InPlaceWithPrvKey(args.PrvKeyValidator), + chaincmd.InPlaceWithAccountToFund(args.AccountsToFund), + chaincmd.InPlaceWithSkipConfirmation(), + ) + return err +} + +// MultiNode sets up multiple nodes in the chain network with the specified arguments and returns an error if any issue occurs. +func (c Chain) MultiNode(ctx context.Context, runner chaincmdrunner.Runner, args MultiNodeArgs) error { + err := runner.MultiNode(ctx, + chaincmd.MultiNodeWithDirOutput(args.OutputDir), + chaincmd.MultiNodeWithNumValidator(args.NumValidator), + chaincmd.MultiNodeWithValidatorsStakeAmount(args.ValidatorsStakeAmount), + chaincmd.MultiNodeDirPrefix(args.NodeDirPrefix), + chaincmd.MultiNodePorts(args.ConvertPorts()), + ) + return err +} + +// Start wraps the "appd start" command to begin running a chain from the daemon. 
+func (c Chain) Start(ctx context.Context, runner chaincmdrunner.Runner, cfg *chainconfig.Config) error { + validator, err := chainconfig.FirstValidator(cfg) + if err != nil { + return err + } + + servers, err := validator.GetServers() + if err != nil { + return err + } + + err = runner.Start(ctx, "--pruning", "nothing", "--grpc.address", servers.GRPC.Address) + + return &CannotStartAppError{runner.Cmd().Name(), err} +} + +// Configure sets the runtime configurations files for a chain (app.toml, client.toml, config.toml). +func (c Chain) Configure(homePath, chainID string, cfg *chainconfig.Config) error { + if err := appTOML(homePath, cfg); err != nil { + return err + } + if err := clientTOML(homePath, chainID, cfg); err != nil { + return err + } + return configTOML(homePath, cfg) +} + +func appTOML(homePath string, cfg *chainconfig.Config) error { + validator, err := chainconfig.FirstValidator(cfg) + if err != nil { + return err + } + + // TODO find a better way in order to not delete comments in the toml.yml + path := filepath.Join(homePath, "config/app.toml") + appConfig, err := toml.LoadFile(path) + if err != nil { + return err + } + + servers, err := validator.GetServers() + if err != nil { + return err + } + + apiAddr, err := xurl.TCP(servers.API.Address) + if err != nil { + return errors.Errorf("invalid api address format %s: %w", servers.API.Address, err) + } + + // Set default config values + appConfig.Set("api.enable", true) + appConfig.Set("api.enabled-unsafe-cors", true) + appConfig.Set("rpc.cors_allowed_origins", []string{"*"}) + + staked, err := sdktypes.ParseCoinNormalized(validator.Bonded) + if err != nil { + return err + } + gas := sdktypes.NewInt64Coin(staked.Denom, 0) + appConfig.Set("minimum-gas-prices", gas.String()) + + // Update config values with the validator's Cosmos SDK app config + if err := updateTomlTreeValues(appConfig, validator.App); err != nil { + return err + } + + // Make sure the API address have the protocol prefix + 
appConfig.Set("api.address", apiAddr) + + file, err := os.OpenFile(path, os.O_RDWR|os.O_TRUNC, 0o644) + if err != nil { + return err + } + defer file.Close() + + _, err = appConfig.WriteTo(file) + return err +} + +func configTOML(homePath string, cfg *chainconfig.Config) error { + validator, err := chainconfig.FirstValidator(cfg) + if err != nil { + return err + } + + // TODO find a better way in order to not delete comments in the toml.yml + path := filepath.Join(homePath, "config/config.toml") + tmConfig, err := toml.LoadFile(path) + if err != nil { + return err + } + + servers, err := validator.GetServers() + if err != nil { + return err + } + + rpcAddr, err := xurl.TCP(servers.RPC.Address) + if err != nil { + return errors.Errorf("invalid rpc address format %s: %w", servers.RPC.Address, err) + } + + p2pAddr, err := xurl.TCP(servers.P2P.Address) + if err != nil { + return errors.Errorf("invalid p2p address format %s: %w", servers.P2P.Address, err) + } + + // Set default config values + tmConfig.Set("mode", "validator") + tmConfig.Set("rpc.cors_allowed_origins", []string{"*"}) + tmConfig.Set("consensus.timeout_commit", "1s") + tmConfig.Set("consensus.timeout_propose", "1s") + + // Update config values with the validator's Tendermint config + if err := updateTomlTreeValues(tmConfig, validator.Config); err != nil { + return err + } + + // Make sure the addresses have the protocol prefix + tmConfig.Set("rpc.laddr", rpcAddr) + tmConfig.Set("p2p.laddr", p2pAddr) + + file, err := os.OpenFile(path, os.O_RDWR|os.O_TRUNC, 0o644) + if err != nil { + return err + } + defer file.Close() + + _, err = tmConfig.WriteTo(file) + return err +} + +func clientTOML(homePath, chainID string, cfg *chainconfig.Config) error { + validator, err := chainconfig.FirstValidator(cfg) + if err != nil { + return err + } + + path := filepath.Join(homePath, "config/client.toml") + clientConfig, err := toml.LoadFile(path) + if os.IsNotExist(err) { + return nil + } + + if err != nil { + return err + 
} + + // Set default config values + clientConfig.Set("chain-id", chainID) + clientConfig.Set("keyring-backend", "test") + clientConfig.Set("broadcast-mode", "sync") + + // Update config values with the validator's client config + if err := updateTomlTreeValues(clientConfig, validator.Client); err != nil { + return err + } + + file, err := os.OpenFile(path, os.O_RDWR|os.O_TRUNC, 0o644) + if err != nil { + return err + } + defer file.Close() + + _, err = clientConfig.WriteTo(file) + return err +} + +func (c Chain) appHome() string { + home, _ := os.UserHomeDir() + return filepath.Join(home, "."+c.app.Name) +} + +func updateTomlTreeValues(t *toml.Tree, values map[string]interface{}) error { + flatValues, err := flat.Flatten(values, nil) + if err != nil { + return err + } + + for name, v := range flatValues { + t.Set(name, v) + } + return nil +} diff --git a/ignite/services/chain/serve.go b/ignite/services/chain/serve.go new file mode 100644 index 0000000..78b7ac2 --- /dev/null +++ b/ignite/services/chain/serve.go @@ -0,0 +1,693 @@ +package chain + +import ( + "context" + "fmt" + "net/http" + "os" + "os/exec" + "path/filepath" + "regexp" + "strings" + "time" + + "github.com/otiai10/copy" + "golang.org/x/sync/errgroup" + + "github.com/ignite/cli/v29/ignite/config" + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + "github.com/ignite/cli/v29/ignite/config/chain/defaults" + "github.com/ignite/cli/v29/ignite/pkg/cache" + chaincmdrunner "github.com/ignite/cli/v29/ignite/pkg/chaincmd/runner" + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + "github.com/ignite/cli/v29/ignite/pkg/cliui/view/accountview" + "github.com/ignite/cli/v29/ignite/pkg/cliui/view/errorview" + "github.com/ignite/cli/v29/ignite/pkg/cosmosfaucet" + "github.com/ignite/cli/v29/ignite/pkg/dirchange" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/events" + 
"github.com/ignite/cli/v29/ignite/pkg/localfs" + "github.com/ignite/cli/v29/ignite/pkg/xexec" + "github.com/ignite/cli/v29/ignite/pkg/xfilepath" + "github.com/ignite/cli/v29/ignite/pkg/xhttp" + "github.com/ignite/cli/v29/ignite/pkg/xurl" +) + +const ( + // EvtGroupPath is the group to use for path related events. + EvtGroupPath = "path" + + // exportedGenesis is the name of the exported genesis file for a chain. + exportedGenesis = "exported_genesis.json" + + // sourceChecksumKey is the cache key for the checksum to detect source modification. + sourceChecksumKey = "source_checksum" + + // binaryChecksumKey is the cache key for the checksum to detect binary modification. + binaryChecksumKey = "binary_checksum" + + // configChecksumKey is the cache key for containing the checksum to detect config modification. + configChecksumKey = "config_checksum" + + // serveDirchangeCacheNamespace is the name of the cache namespace for detecting changes in directories. + serveDirchangeCacheNamespace = "serve.dirchange" +) + +var ( + // ignoredExts holds a list of ignored files from watching. + ignoredExts = []string{"pb.go", "pb.gw.go"} + + // starportSavePath is the place where chain exported genesis are saved. + starportSavePath = xfilepath.Join( + config.DirPath, + xfilepath.Path("local-chains"), + ) +) + +type serveOptions struct { + forceReset bool + resetOnce bool + skipProto bool + skipBuild bool + quitOnFail bool + generateClients bool + buildTags []string +} + +func newServeOption() serveOptions { + return serveOptions{ + forceReset: false, + resetOnce: false, + } +} + +// ServeOption provides options for the serve command. +type ServeOption func(*serveOptions) + +// ServeForceReset allows to force reset of the state when the chain is served and on every source change. +func ServeForceReset() ServeOption { + return func(c *serveOptions) { + c.forceReset = true + } +} + +// ServeResetOnce allows to reset of the state when the chain is served once. 
+func ServeResetOnce() ServeOption { + return func(c *serveOptions) { + c.resetOnce = true + } +} + +// QuitOnFail exits the serve immediately if an error occurs. +func QuitOnFail() ServeOption { + return func(c *serveOptions) { + c.quitOnFail = true + } +} + +// GenerateClients enables client code generation. +func GenerateClients() ServeOption { + return func(c *serveOptions) { + c.generateClients = true + } +} + +// ServeSkipProto allows to serve the app without generate Go from proto. +func ServeSkipProto() ServeOption { + return func(c *serveOptions) { + c.skipProto = true + } +} + +// ServeSkipBuild allows to serve the app without rebuilding it. +// It looks up the binary in the PATH. +func ServeSkipBuild() ServeOption { + return func(c *serveOptions) { + c.skipBuild = true + } +} + +// BuildTags set the build tags for the go build. +func BuildTags(buildTags ...string) ServeOption { + return func(c *serveOptions) { + c.buildTags = buildTags + } +} + +// Serve serves an app. +func (c *Chain) Serve(ctx context.Context, cacheStorage cache.Storage, options ...ServeOption) error { + serveOptions := newServeOption() + + // apply the options + for _, apply := range options { + apply(&serveOptions) + } + + // initial checks and setup. + if err := c.setup(); err != nil { + return err + } + + // make sure that config.yml exists + if c.options.ConfigFile != "" { + if _, err := os.Stat(c.options.ConfigFile); err != nil { + return err + } + } else if _, err := chainconfig.LocateDefault(c.app.Path); err != nil { + return err + } + + // start serving components. 
+ g, ctx := errgroup.WithContext(ctx) + + // blockchain node routine + g.Go(func() error { + c.refreshServe() + + for { + if ctx.Err() != nil { + return ctx.Err() + } + + select { + case <-ctx.Done(): + return ctx.Err() + + case <-c.serveRefresher: + commands, err := c.Commands(ctx) + if err != nil { + return err + } + + var ( + serveCtx context.Context + buildErr *CannotBuildAppError + startErr *CannotStartAppError + validationErr *chainconfig.ValidationError + ) + + serveCtx, c.serveCancel = context.WithCancel(ctx) + + // determine if the chain should reset the state + shouldReset := serveOptions.forceReset || serveOptions.resetOnce + + // serve the app. + err = c.serve( + serveCtx, + cacheStorage, + serveOptions.buildTags, + shouldReset, + serveOptions.skipProto, + serveOptions.skipBuild, + serveOptions.generateClients, + ) + serveOptions.resetOnce = false + + switch { + case err == nil: + case errors.Is(err, context.Canceled): + // If the app has been served, we save the genesis state + if c.served { + c.served = false + + c.ev.Send("Saving genesis state...", events.ProgressStart()) + + // If serve has been stopped, save the genesis state + if err := c.saveChainState(context.TODO(), commands); err != nil { + c.ev.SendError(err, events.ProgressFinish()) + return err + } + + genesisPath, err := c.exportedGenesisPath() + if err != nil { + return err + } + + // Inform where the genesis file is saved without using + // progress update to keep the event text in the terminal. 
+ c.ev.Send( + fmt.Sprintf("Genesis state saved in %s", genesisPath), + events.Icon(icons.CD), + ) + } + case errors.As(err, &validationErr): + if serveOptions.quitOnFail { + return err + } + + // Change error message to add a link to the configuration docs + err = errors.Errorf("%w\nsee: https://github.com/ignite/cli#configure", err) + + c.ev.SendView(errorview.NewError(err), events.ProgressFinish(), events.Group(events.GroupError)) + case errors.As(err, &buildErr): + if serveOptions.quitOnFail { + return err + } + + c.ev.SendView(errorview.NewError(err), events.ProgressFinish(), events.Group(events.GroupError)) + case errors.As(err, &startErr): + // Parse returned error logs + parsedErr := startErr.ParseStartError() + + // If empty, we cannot recognized the error + // Therefore, the error may be caused by a new logic that is not compatible with the old app state + // We suggest the user to eventually reset the app state + if parsedErr == "" { + info := colors.Info( + "Blockchain failed to start.\n", + "If the new code is no longer compatible with the saved\n", + "state, you can reset the database by launching:\n", + "For more verbose logging, add -v to the command.", + ) + command := colors.SprintFunc(colors.White)("ignite chain serve --reset-once -v") + return errors.Errorf("cannot run %s\n\n%s\n%s", startErr.AppName, info, command) + } + + // return the clear parsed error + return errors.New(parsedErr) + default: + return err + } + } + } + }) + + // routine to watch back-end + g.Go(func() error { + return c.watchAppBackend(ctx) + }) + + return g.Wait() +} + +func (c *Chain) setup() error { + c.ev.Sendf("Cosmos SDK's version is: %s\n", colors.Info(c.Version)) + + return c.checkSystem() +} + +// checkSystem checks if developer's work environment comply must to have +// dependencies and pre-conditions. +func (c *Chain) checkSystem() error { + // check if Go has installed. 
+ if !xexec.IsCommandAvailable("go") { + return errors.New("Please, check that Go language is installed correctly in $PATH. See https://golang.org/doc/install") + } + return nil +} + +func (c *Chain) refreshServe() { + if c.serveCancel != nil { + c.serveCancel() + } + // send event changes detected + c.serveRefresher <- struct{}{} +} + +func (c *Chain) watchAppBackend(ctx context.Context) error { + watchPaths := appBackendSourceWatchPaths(defaults.ProtoDir) + + if c.ConfigPath() != "" { + conf, err := c.Config() + if err != nil { + return err + } + watchPaths = append(appBackendSourceWatchPaths(conf.Build.Proto.Path), c.ConfigPath()) + } + + return localfs.Watch( + ctx, + watchPaths, + localfs.WatcherWorkdir(c.app.Path), + localfs.WatcherOnChange(c.refreshServe), + localfs.WatcherIgnoreHidden(), + localfs.WatcherIgnoreFolders(), + localfs.WatcherIgnoreExt(ignoredExts...), + ) +} + +// serve performs the operations to serve the blockchain: build, init and start. +// If the chain is already initialized and the file weren't changed, the app is directly started. +// If the files changed, the state is imported. 
+func (c *Chain) serve( + ctx context.Context, + cacheStorage cache.Storage, + buildTags []string, + forceReset, skipProto, skipBuild, generateClients bool, +) error { + conf, err := c.Config() + if err != nil { + return &CannotBuildAppError{err} + } + + sourceWatchPaths := appBackendSourceWatchPaths(conf.Build.Proto.Path) + + commands, err := c.Commands(ctx) + if err != nil { + return err + } + + // isInit determines if the app is initialized + var isInit bool + + dirCache := cache.New[[]byte](cacheStorage, serveDirchangeCacheNamespace) + + // determine if the app must reset the state + // if the state must be reset, then we consider the chain as being not initialized + isInit, err = c.IsInitialized() + if err != nil { + return err + } + if isInit { + configModified := false + if c.ConfigPath() != "" { + configModified, err = dirchange.HasDirChecksumChanged(dirCache, configChecksumKey, c.app.Path, c.ConfigPath()) + if err != nil { + return err + } + } + + if forceReset || configModified { + // if forceReset is set, we consider the app as being not initialized + c.ev.Send("Resetting the app state...", events.ProgressUpdate()) + isInit = false + } + } + + // check if source has been modified since last serve + // if the state must not be reset but the source has changed, we rebuild the chain and import the exported state + sourceModified, err := dirchange.HasDirChecksumChanged(dirCache, sourceChecksumKey, c.app.Path, sourceWatchPaths...) 
+ if err != nil { + return err + } + + // we also consider the binary in the checksum to ensure the binary has not been changed by a third party + var binaryModified bool + binaryName, err := c.Binary() + if err != nil { + return err + } + binaryPath, err := xexec.ResolveAbsPath(binaryName) + if err != nil { + if !errors.Is(err, exec.ErrNotFound) { + return err + } + binaryModified = true + } else { + binaryModified, err = dirchange.HasDirChecksumChanged(dirCache, binaryChecksumKey, "", binaryPath) + if err != nil { + return err + } + } + + appModified := sourceModified || binaryModified + + // check if exported genesis exists + exportGenesisExists := true + exportedGenesisPath, err := c.exportedGenesisPath() + if err != nil { + return err + } + if _, err := os.Stat(exportedGenesisPath); os.IsNotExist(err) { + exportGenesisExists = false + } else if err != nil { + return err + } + + // build phase + // if the app is not initialized or the source/binary has been modified + // and if the --skip-build flag is not set + if skipBuild { + c.ev.SendInfo("Skip building activated. 
Binary won't be rebuilt, nor refresh on changes") + } + + if (!isInit || appModified) && !skipBuild { + // build the blockchain app + if err := c.build(ctx, cacheStorage, buildTags, "", skipProto, generateClients, true); err != nil { + return err + } + } + + // init phase + initApp := !isInit || (appModified && !exportGenesisExists) + + //nolint:gocritic + if initApp { + c.ev.Send("Initializing the app...", events.ProgressUpdate()) + + if err := c.Init(ctx, InitArgsAll); err != nil { + return err + } + } else if appModified { + // if the chain is already initialized but the source has been modified + // we reset the chain database and import the genesis state + c.ev.Send("Existent genesis detected, restoring the database...", events.ProgressUpdate()) + + if err := commands.UnsafeReset(ctx); err != nil { + return err + } + + if err := c.importChainState(); err != nil { + return err + } + } else { + c.ev.Send("Restarting existing app...", events.ProgressUpdate()) + } + + // save checksums + if c.ConfigPath() != "" { + if err := dirchange.SaveDirChecksum(dirCache, configChecksumKey, c.app.Path, c.ConfigPath()); err != nil { + return err + } + } + + if err := dirchange.SaveDirChecksum(dirCache, sourceChecksumKey, c.app.Path, sourceWatchPaths...); err != nil { + return err + } + + if err := dirchange.SaveDirChecksum(dirCache, binaryChecksumKey, "", binaryPath); err != nil { + return err + } + + // Display existing accounts if they were not initialized. + // Note that chain init displays accounts when the app is initialized. 
+ if !initApp { + accounts, err := commands.ListAccounts(ctx) + if err != nil { + return err + } + + var view accountview.Accounts + for _, a := range accounts { + view = view.Append(accountview.NewAccount(a.Name, a.Address)) + } + + c.ev.SendView(view, events.ProgressFinish()) + } + + // start the blockchain + return c.start(ctx, conf) +} + +func (c *Chain) start(ctx context.Context, cfg *chainconfig.Config) error { + commands, err := c.Commands(ctx) + if err != nil { + return err + } + + g, ctx := errgroup.WithContext(ctx) + + // start the blockchain. + g.Go(func() error { return c.Start(ctx, commands, cfg) }) + + // start the faucet if enabled. + faucet, err := c.Faucet(ctx) + isFaucetEnabled := !errors.Is(err, ErrFaucetIsNotEnabled) + + if isFaucetEnabled { + if errors.Is(err, ErrFaucetAccountDoesNotExist) { + return &CannotBuildAppError{errors.Wrap(err, "faucet account doesn't exist")} + } + if err != nil { + return err + } + + g.Go(func() (err error) { + if err := c.runFaucetServer(ctx, faucet); err != nil { + return &CannotBuildAppError{err} + } + return nil + }) + } + + // set the app as being served + c.served = true + + validator, err := chainconfig.FirstValidator(cfg) + if err != nil { + return err + } + + servers, err := validator.GetServers() + if err != nil { + return err + } + + // note: address format errors are handled by the + // error group, so they can be safely ignored here + + rpcAddr, _ := xurl.HTTP(servers.RPC.Address) + apiAddr, _ := xurl.HTTP(servers.API.Address) + + c.ev.Send( + fmt.Sprintf("Tendermint node: %s", rpcAddr), + events.Icon(icons.Earth), + events.ProgressFinish(), + ) + c.ev.Send( + fmt.Sprintf("Blockchain API: %s", apiAddr), + events.Icon(icons.Earth), + ) + + if isFaucetEnabled { + faucetAddr, _ := xurl.HTTP(chainconfig.FaucetHost(cfg)) + + c.ev.Send( + fmt.Sprintf("Token faucet: %s", faucetAddr), + events.Icon(icons.Earth), + ) + } + + appHome, _ := c.Home() + appBin, _ := c.AbsBinaryPath() + + c.ev.Send( + 
fmt.Sprintf("Data directory: %s", colors.Faint(appHome)), + events.Icon(icons.Bullet), + events.Group(EvtGroupPath), + ) + c.ev.Send( + fmt.Sprintf("App binary: %s", colors.Faint(appBin)), + events.Icon(icons.Bullet), + events.Group(EvtGroupPath), + ) + + return g.Wait() +} + +func (c *Chain) runFaucetServer(ctx context.Context, faucet cosmosfaucet.Faucet) error { + cfg, err := c.Config() + if err != nil { + return err + } + + return xhttp.Serve(ctx, &http.Server{ + Addr: chainconfig.FaucetHost(cfg), + Handler: faucet, + ReadHeaderTimeout: 5 * time.Second, // Set a reasonable timeout + }) +} + +// saveChainState runs the export command of the chain and store the exported genesis in the chain saved config. +func (c *Chain) saveChainState(ctx context.Context, commands chaincmdrunner.Runner) error { + genesisPath, err := c.exportedGenesisPath() + if err != nil { + return err + } + + return commands.Export(ctx, genesisPath) +} + +// importChainState imports the saved genesis in chain config to use it as the genesis. +func (c *Chain) importChainState() error { + exportGenesisPath, err := c.exportedGenesisPath() + if err != nil { + return err + } + genesisPath, err := c.GenesisPath() + if err != nil { + return err + } + + return copy.Copy(exportGenesisPath, genesisPath) +} + +// chainSavePath returns the path where the chain state is saved. +// Creates the path if it doesn't exist. +func (c *Chain) chainSavePath() (string, error) { + savePath, err := starportSavePath() + if err != nil { + return "", err + } + + chainID, err := c.ID() + if err != nil { + return "", err + } + chainSavePath := filepath.Join(savePath, chainID) + + // ensure the path exists + if err := os.MkdirAll(savePath, 0o700); err != nil && !os.IsExist(err) { + return "", err + } + + return chainSavePath, nil +} + +// exportedGenesisPath returns the path of the exported genesis file. 
+func (c *Chain) exportedGenesisPath() (string, error) { + savePath, err := c.chainSavePath() + if err != nil { + return "", err + } + + return filepath.Join(savePath, exportedGenesis), nil +} + +type CannotBuildAppError struct { + Err error +} + +func (e *CannotBuildAppError) Error() string { + // TODO: Find at which point the error is wrapped twice + var buildErr *CannotBuildAppError + if errors.As(e.Err, &buildErr) { + return buildErr.Error() + } + + return fmt.Sprintf("cannot build app:\n\n%s", e.Err) +} + +func (e *CannotBuildAppError) Unwrap() error { + return e.Err +} + +type CannotStartAppError struct { + AppName string + Err error +} + +func (e *CannotStartAppError) Error() string { + return fmt.Sprintf("cannot run %sd start:\n%s", e.AppName, errors.Unwrap(e.Err)) +} + +func (e *CannotStartAppError) Unwrap() error { + return e.Err +} + +// ParseStartError parses the error into a clear error string. +// The error logs from Cosmos SDK application are too extensive to be directly printed. +// If the error is not recognized, returns an empty string. 
+func (e *CannotStartAppError) ParseStartError() string { + errorLogs := errors.Unwrap(e.Err).Error() + switch { + case strings.Contains(errorLogs, "bind: address already in use"): + r := regexp.MustCompile(`listen .* bind: address already in use`) + return r.FindString(errorLogs) + case strings.Contains(errorLogs, "validator set is nil in genesis"): + return "Error: error during handshake: error on replay: validator set is nil in genesis and still empty after InitChain" + default: + return "" + } +} diff --git a/ignite/services/chain/simulate.go b/ignite/services/chain/simulate.go new file mode 100644 index 0000000..9239281 --- /dev/null +++ b/ignite/services/chain/simulate.go @@ -0,0 +1,69 @@ +package chain + +import ( + "context" + + "github.com/cosmos/cosmos-sdk/types/simulation" +) + +type simappOptions struct { + simulationTestName string + enabled bool + config simulation.Config + genesisTime int64 +} + +func newSimappOptions() simappOptions { + return simappOptions{ + config: simulation.Config{ + Commit: true, + }, + enabled: true, + genesisTime: 0, + } +} + +// SimappOption provides options for the simapp command. +type SimappOption func(*simappOptions) + +// SimappWithGenesisTime allows overriding genesis UNIX time instead of using a random UNIX time. +func SimappWithGenesisTime(genesisTime int64) SimappOption { + return func(c *simappOptions) { + c.genesisTime = genesisTime + } +} + +// SimappWithConfig allows to add a simulation config. +func SimappWithConfig(config simulation.Config) SimappOption { + return func(c *simappOptions) { + c.config = config + } +} + +// SimappWithSimulationTestName allows to set the simulation test name. 
+func SimappWithSimulationTestName(name string) SimappOption { + return func(c *simappOptions) { + c.simulationTestName = name + } +} + +func (c *Chain) Simulate(ctx context.Context, options ...SimappOption) error { + simappOptions := newSimappOptions() + + // apply the options + for _, apply := range options { + apply(&simappOptions) + } + + commands, err := c.Commands(ctx) + if err != nil { + return err + } + return commands.Simulation(ctx, + c.app.Path, + simappOptions.simulationTestName, + simappOptions.enabled, + simappOptions.config, + simappOptions.genesisTime, + ) +} diff --git a/ignite/services/chain/testdata/version/mars.v0.2-3-gaae48b7.tar.gz b/ignite/services/chain/testdata/version/mars.v0.2-3-gaae48b7.tar.gz new file mode 100644 index 0000000..2fba72e Binary files /dev/null and b/ignite/services/chain/testdata/version/mars.v0.2-3-gaae48b7.tar.gz differ diff --git a/ignite/services/chain/testdata/version/mars.v0.2.tar.gz b/ignite/services/chain/testdata/version/mars.v0.2.tar.gz new file mode 100644 index 0000000..1d820b2 Binary files /dev/null and b/ignite/services/chain/testdata/version/mars.v0.2.tar.gz differ diff --git a/ignite/services/chain/testnet.go b/ignite/services/chain/testnet.go new file mode 100644 index 0000000..186bc49 --- /dev/null +++ b/ignite/services/chain/testnet.go @@ -0,0 +1,77 @@ +package chain + +import ( + "context" + "fmt" + "os" + "strings" + + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" +) + +type InPlaceArgs struct { + NewChainID string + NewOperatorAddress string + PrvKeyValidator string + AccountsToFund string +} + +func (c Chain) TestnetInPlace(ctx context.Context, args InPlaceArgs) error { + commands, err := c.Commands(ctx) + if err != nil { + return err + } + + // make sure that config.yml exists + if c.options.ConfigFile != "" { + if _, err := os.Stat(c.options.ConfigFile); err != nil { + return err + } + } else if _, err := chainconfig.LocateDefault(c.app.Path); err != nil { + return err + } + + err = 
c.InPlace(ctx, commands, args) + if err != nil { + return err + } + return nil +} + +type MultiNodeArgs struct { + OutputDir string + NumValidator string + ValidatorsStakeAmount string + NodeDirPrefix string + ListPorts []uint +} + +func (m MultiNodeArgs) ConvertPorts() string { + var result []string + + for _, port := range m.ListPorts { + result = append(result, fmt.Sprintf("%d", port)) + } + + return strings.Join(result, ",") +} + +// If the app state still exists, TestnetMultiNode will reuse it. +// Otherwise, it will automatically re-initialize the app state from the beginning. +func (c Chain) TestnetMultiNode(ctx context.Context, args MultiNodeArgs) error { + commands, err := c.Commands(ctx) + if err != nil { + return err + } + + // make sure that config.yml exists + if c.options.ConfigFile != "" { + if _, err := os.Stat(c.options.ConfigFile); err != nil { + return err + } + } else if _, err := chainconfig.LocateDefault(c.app.Path); err != nil { + return err + } + + return c.MultiNode(ctx, commands, args) +} diff --git a/ignite/services/doctor/doctor.go b/ignite/services/doctor/doctor.go new file mode 100644 index 0000000..c98316a --- /dev/null +++ b/ignite/services/doctor/doctor.go @@ -0,0 +1,248 @@ +package doctor + +import ( + "bytes" + "context" + "fmt" + "os" + "path" + + "golang.org/x/mod/modfile" + + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + "github.com/ignite/cli/v29/ignite/pkg/cosmosbuf" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/events" + "github.com/ignite/cli/v29/ignite/pkg/goanalysis" + "github.com/ignite/cli/v29/ignite/pkg/xast" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/templates/app" +) + +// DONTCOVER: Doctor read and write the filesystem intensively, so it's better +// to rely on 
integration tests only. See integration/doctor package. +type Doctor struct { + ev events.Bus +} + +// New returns a new doctor. +func New(opts ...Option) *Doctor { + d := &Doctor{} + for _, opt := range opts { + opt(d) + } + return d +} + +type Option func(*Doctor) + +// CollectEvents sets doctor event bus. +func CollectEvents(ev events.Bus) Option { + return func(d *Doctor) { + d.ev = ev + } +} + +// MigrateBufConfig migrates the buf chain config if required. +func (d *Doctor) MigrateBufConfig(ctx context.Context, cacheStorage cache.Storage, appPath, configPath string) error { + errf := func(err error) error { + return errors.Errorf("doctor migrate buf config: %w", err) + } + + d.ev.Send("Checking buf config file version:") + + // Check if the appPath contains the buf.work.yaml file in the root folder. + // The buf.work.yaml file does not exist in buf v2 config, so it is a good + // indicator that the buf config is already migrated. + bufWorkFile := path.Join(appPath, "buf.work.yaml") + if _, err := os.Stat(bufWorkFile); os.IsNotExist(err) { + d.ev.Send( + fmt.Sprintf("buf files %s", colors.Success("OK")), + events.Icon(icons.OK), + events.Indent(1), + ) + return nil + } else if err != nil { + return errf(errors.Errorf("unable to check buf files have been migrated: %w", err)) + } + + d.ev.Send("Migrating buf config file to v2") + + configFile, err := os.Open(configPath) + if err != nil { + return err + } + defer configFile.Close() + + protoPath, err := chainconfig.ReadProtoPath(configFile) + if err != nil { + return errf(err) + } + + b, err := cosmosbuf.New(cacheStorage, appPath) + if err != nil { + return errf(err) + } + + if err := b.Migrate(ctx, protoPath); err != nil { + return errf(err) + } + + runner := xgenny.NewRunner(ctx, appPath) + _, err = boxBufFiles(runner, protoPath) + if err != nil { + return err + } + + d.ev.Send( + "buf config files migrated", + events.Icon(icons.OK), + events.Indent(1), + events.ProgressFinish(), + ) + + return nil +} + +// 
BoxBufFiles box all buf files. +func boxBufFiles(runner *xgenny.Runner, protoDir string) (xgenny.SourceModification, error) { + g, err := app.NewBufGenerator(protoDir) + if err != nil { + return xgenny.SourceModification{}, err + } + return runner.RunAndApply(g) +} + +// MigrateChainConfig migrates the chain config if required. +func (d *Doctor) MigrateChainConfig(configPath string) error { + errf := func(err error) error { + return errors.Errorf("doctor migrate config: %w", err) + } + + d.ev.Send("Checking chain config file:") + configFile, err := os.Open(configPath) + if err != nil { + return err + } + defer configFile.Close() + + version, err := chainconfig.ReadConfigVersion(configFile) + if err != nil { + return errf(err) + } + + status := "OK" + if version != chainconfig.LatestVersion { + _, err := configFile.Seek(0, 0) + if err != nil { + return errf(errors.Errorf("failed to reset the file: %w", err)) + } + // migrate config file + // Convert the current config to the latest version and update the YAML file + var buf bytes.Buffer + if err := chainconfig.MigrateLatest(configFile, &buf); err != nil { + return errf(err) + } + + if err := os.WriteFile(configPath, buf.Bytes(), 0o600); err != nil { + return errf(errors.Errorf("config file migration failed: %w", err)) + } + + status = "migrated" + } + + d.ev.Send( + fmt.Sprintf("config file %s", colors.Success(status)), + events.Icon(icons.OK), + events.Indent(1), + events.ProgressFinish(), + ) + + return nil +} + +// MigrateToolsGo ensures that. +// - go.mod is bumped to go 1.25. +// - removes tools.go file from chain. +// - add all tools to go.mod. +func (d *Doctor) MigrateToolsGo(appPath string) error { + errf := func(err error) error { + return errors.Errorf("doctor migrate tools.go: %w", err) + } + + const ( + // toolsFile defines the app relative path to the Go tools file. + toolsFile = "tools/tools.go" + // goModFile defines the app relative path to the Go module file. 
+ goModFile = "go.mod" + ) + + _, err := os.Stat(toolsFile) + if os.IsNotExist(err) { // file doesn't exist, nothing to do + return nil + } + + d.ev.Send("Migrating dependency tools:") + + toolsAst, _, err := xast.ParseFile(toolsFile) + if err != nil { + return errf(errors.Errorf("failed to parse tools.go file: %w", err)) + } + + imports := goanalysis.FormatImports(toolsAst) + if len(imports) == 0 { + d.ev.Send( + "no tools to migrate", + events.Icon(icons.OK), + events.Indent(1), + events.ProgressFinish(), + ) + return nil + } + + goModPath := path.Join(appPath, goModFile) + data, err := os.ReadFile(goModPath) + if err != nil { + return errf(errors.Errorf("failed to read go.mod file: %w", err)) + } + + goModAst, err := modfile.Parse(goModPath, data, nil) + if err != nil { + return errf(errors.Errorf("failed to parse go.mod file: %w", err)) + } + + // bump to go 1.25 + if goModAst.Go.Version < "1.24" { + goModAst.Go.Version = "1.25" + } + + for _, imp := range imports { + _ = goModAst.AddTool(imp) + } + + // remove the tools.go file + if err := os.Remove(toolsFile); err != nil { + return errf(errors.Errorf("failed to remove tools.go file: %w", err)) + } + + // write the updated go.mod file + data, err = goModAst.Format() + if err != nil { + return errf(errors.Errorf("failed to format go.mod file: %w", err)) + } + + if err := os.WriteFile(goModPath, data, 0o600); err != nil { + return errf(errors.Errorf("failed to write go.mod file: %w", err)) + } + d.ev.Send( + fmt.Sprintf("tools migrated to %s", colors.Success(goModFile)), + events.Icon(icons.OK), + events.Indent(1), + events.ProgressFinish(), + ) + + return nil +} diff --git a/ignite/services/doctor/doctor_plugins.go b/ignite/services/doctor/doctor_plugins.go new file mode 100644 index 0000000..bddb906 --- /dev/null +++ b/ignite/services/doctor/doctor_plugins.go @@ -0,0 +1,210 @@ +package doctor + +import ( + "bytes" + "fmt" + "io" + "os" + "path/filepath" + + "gopkg.in/yaml.v3" + + 
"github.com/ignite/cli/v29/ignite/config" + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + "github.com/ignite/cli/v29/ignite/pkg/cliui/colors" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/events" +) + +// MigratePluginsConfig migrates plugins config to Ignite App config if required. +func (d Doctor) MigratePluginsConfig() error { + errf := func(err error) error { + return errors.Errorf("doctor migrate plugins config: %w", err) + } + + d.ev.Send("Checking for legacy plugin config files:") + d.ev.Send("Searching global plugins config file", events.ProgressStart()) + + if err := d.migrateGlobalPluginConfig(); err != nil { + return errf(err) + } + + d.ev.Send("Searching local plugins config file", events.ProgressUpdate()) + + if err := d.migrateLocalPluginsConfig(); err != nil { + return errf(err) + } + + d.ev.Send( + fmt.Sprintf("plugin config files %s", colors.Success("OK")), + events.Icon(icons.OK), + events.Indent(1), + events.ProgressFinish(), + ) + + return nil +} + +func (d Doctor) migrateGlobalPluginConfig() error { + globalPath, err := config.DirPath() + if err != nil { + return err + } + + // Global apps directory is always available because it is + // created if it doesn't exists when any command is executed. 
+ appsPath := filepath.Join(globalPath, "apps", "igniteapps.yml") + if _, err := os.Stat(appsPath); err == nil { + d.ev.Send( + fmt.Sprintf("%s %s", appsPath, colors.Success("exists")), + events.Icon(icons.OK), + events.Indent(1), + ) + + // Ignite apps config file exists in global directory + return nil + } + + legacyPath, err := findPluginsConfigPath(filepath.Join(globalPath, "plugins")) + if err != nil { + return err + } else if legacyPath == "" { + // Nothing to migrate when the legacy plugins config path doesn't exist + return nil + } + + if err := d.migratePluginsConfigFiles(legacyPath, appsPath); err != nil { + return err + } + + d.ev.SendInfo( + fmt.Sprintf("directory %s can safely be removed", filepath.Dir(legacyPath)), + events.Icon(icons.Info), + events.Indent(1), + ) + + return nil +} + +func (d Doctor) migrateLocalPluginsConfig() error { + localPath, err := chainconfig.LocateDefault(".") + if err != nil { + if errors.Is(err, chainconfig.ErrConfigNotFound) { + // When app config is not found it means the doctor + // command is not being run within a blockchain app, + // so there is not local config to migrate + return nil + } + + return err + } + + localPath, err = filepath.Abs(filepath.Dir(localPath)) + if err != nil { + return err + } + + appsPath := filepath.Join(localPath, "igniteapps.yml") + if _, err := os.Stat(appsPath); err == nil { + d.ev.Send( + fmt.Sprintf("%s %s", appsPath, colors.Success("exists")), + events.Icon(icons.OK), + events.Indent(1), + ) + + // Ignite apps config file exists in current directory + return nil + } + + legacyPath, err := findPluginsConfigPath(localPath) + if err != nil { + return err + } else if legacyPath == "" { + // Nothing to migrate when plugins config file is not found in current directory + return nil + } + + return d.migratePluginsConfigFiles(legacyPath, appsPath) +} + +func (d Doctor) migratePluginsConfigFiles(pluginsPath, appsPath string) error { + pluginsFile, err := os.Open(pluginsPath) + if err != nil { 
+ return err + } + + defer pluginsFile.Close() + + appsFile, err := os.OpenFile(appsPath, os.O_WRONLY|os.O_CREATE, 0o644) + if err != nil { + return err + } + + defer appsFile.Close() + + if err = migratePluginsConfig(pluginsFile, appsFile); err != nil { + return err + } + + d.ev.Send( + fmt.Sprintf("migrated config file %s to %s", colors.Faint(pluginsPath), colors.Faint(appsPath)), + events.Icon(icons.OK), + events.Indent(1), + ) + d.ev.SendInfo( + fmt.Sprintf("file %s can safely be removed", pluginsPath), + events.Icon(icons.Info), + events.Indent(1), + ) + + return nil +} + +func migratePluginsConfig(r io.Reader, w io.Writer) error { + bz, err := updatePluginsConfig(r) + if err != nil { + return err + } + + _, err = w.Write(bz) + if err != nil { + return err + } + return nil +} + +func updatePluginsConfig(r io.Reader) ([]byte, error) { + var cfg map[string]any + err := yaml.NewDecoder(r).Decode(&cfg) + if err != nil && !errors.Is(err, io.EOF) { + return nil, err + } + + if apps, ok := cfg["plugins"]; ok { + cfg["apps"] = apps + delete(cfg, "plugins") + } + + var buf bytes.Buffer + if err = yaml.NewEncoder(&buf).Encode(cfg); err != nil { + return nil, err + } + return buf.Bytes(), nil +} + +func findPluginsConfigPath(dir string) (string, error) { + for _, ext := range []string{"yml", "yaml"} { + path := filepath.Join(dir, fmt.Sprintf("plugins.%s", ext)) + _, err := os.Stat(path) + if err == nil { + // File found + return path, nil + } + + if !os.IsNotExist(err) { + return "", err + } + } + return "", nil +} diff --git a/ignite/services/plugin/apps_config.go b/ignite/services/plugin/apps_config.go new file mode 100644 index 0000000..ca90bbe --- /dev/null +++ b/ignite/services/plugin/apps_config.go @@ -0,0 +1,14 @@ +package plugin + +// AppsConfig is the structure of app.ignite.yml file. 
+type AppsConfig struct { + Version uint `yaml:"version"` + Apps map[string]AppInfo `yaml:"apps"` +} + +// AppInfo is the structure of app info in app.ignite.yml file which only holds +// the description and the relative path of the app. +type AppInfo struct { + Description string `yaml:"description"` + Path string `yaml:"path"` +} diff --git a/ignite/services/plugin/cache.go b/ignite/services/plugin/cache.go new file mode 100644 index 0000000..46dca0a --- /dev/null +++ b/ignite/services/plugin/cache.go @@ -0,0 +1,95 @@ +package plugin + +import ( + "encoding/gob" + "net" + "path" + + hplugin "github.com/hashicorp/go-plugin" + + "github.com/ignite/cli/v29/ignite/pkg/cache" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/version" +) + +const ( + cacheFileName = "ignite_plugin_cache.db" + cacheNamespace = "plugin.rpc.context" +) + +// Caches configuration for shared plugin hosts. +// The cached configuration can be used to re-attach to running plugins. +// These type of plugins must have "shared_host: true" in their manifest. 
+var storageCache *cache.Cache[hplugin.ReattachConfig] + +func init() { + gob.Register(hplugin.ReattachConfig{}) + gob.Register(&net.UnixAddr{}) +} + +func writeConfigCache(pluginPath string, conf hplugin.ReattachConfig) error { + if pluginPath == "" { + return errors.Errorf("provided path is invalid: %s", pluginPath) + } + if conf.Addr == nil { + return errors.Errorf("app Address info cannot be empty") + } + cache, err := newCache() + if err != nil { + return err + } + return cache.Put(pluginPath, conf) +} + +func readConfigCache(pluginPath string) (hplugin.ReattachConfig, error) { + if pluginPath == "" { + return hplugin.ReattachConfig{}, errors.Errorf("provided path is invalid: %s", pluginPath) + } + cache, err := newCache() + if err != nil { + return hplugin.ReattachConfig{}, err + } + return cache.Get(pluginPath) +} + +func checkConfCache(pluginPath string) bool { + if pluginPath == "" { + return false + } + cache, err := newCache() + if err != nil { + return false + } + _, err = cache.Get(pluginPath) + return err == nil +} + +func deleteConfCache(pluginPath string) error { + if pluginPath == "" { + return errors.Errorf("provided path is invalid: %s", pluginPath) + } + cache, err := newCache() + if err != nil { + return err + } + return cache.Delete(pluginPath) +} + +func newCache() (*cache.Cache[hplugin.ReattachConfig], error) { + cacheRootDir, err := PluginsPath() + if err != nil { + return nil, err + } + if storageCache == nil { + storage, err := cache.NewStorage( + path.Join(cacheRootDir, cacheFileName), + cache.WithVersion(version.Version), + ) + if err != nil { + return nil, err + } + c := cache.New[hplugin.ReattachConfig](storage, cacheNamespace) + storageCache = &c + } + return storageCache, nil +} diff --git a/ignite/services/plugin/cache_test.go b/ignite/services/plugin/cache_test.go new file mode 100644 index 0000000..85b3d26 --- /dev/null +++ b/ignite/services/plugin/cache_test.go @@ -0,0 +1,109 @@ +package plugin + +import ( + "net" + "testing" + 
+ hplugin "github.com/hashicorp/go-plugin" + "github.com/stretchr/testify/require" +) + +func TestReadWriteConfigCache(t *testing.T) { + t.Run("Should cache plugin config and read from cache", func(t *testing.T) { + const path = "/path/to/awesome/plugin" + unixFD, _ := net.ResolveUnixAddr("unix", "/var/folders/5k/sv4bxrs102n_6rr7430jc7j80000gn/T/plugin193424090") + + rc := hplugin.ReattachConfig{ + Protocol: hplugin.ProtocolGRPC, + ProtocolVersion: hplugin.CoreProtocolVersion, + Addr: unixFD, + Pid: 24464, + } + + err := writeConfigCache(path, rc) + require.NoError(t, err) + + c, err := readConfigCache(path) + require.NoError(t, err) + require.Equal(t, rc, c) + }) + + t.Run("Should error writing bad plugin config to cache", func(t *testing.T) { + const path = "/path/to/awesome/plugin" + rc := hplugin.ReattachConfig{ + Protocol: hplugin.ProtocolGRPC, + ProtocolVersion: hplugin.CoreProtocolVersion, + Addr: nil, + Pid: 24464, + } + + err := writeConfigCache(path, rc) + require.Error(t, err) + }) + + t.Run("Should error with invalid plugin path", func(t *testing.T) { + const path = "" + rc := hplugin.ReattachConfig{ + Protocol: hplugin.ProtocolGRPC, + ProtocolVersion: hplugin.CoreProtocolVersion, + Addr: nil, + Pid: 24464, + } + + err := writeConfigCache(path, rc) + require.Error(t, err) + }) +} + +func TestDeleteConfCache(t *testing.T) { + t.Run("Delete plugin config after write to cache should remove from cache", func(t *testing.T) { + const path = "/path/to/awesome/plugin" + unixFD, _ := net.ResolveUnixAddr("unix", "/var/folders/5k/sv4bxrs102n_6rr7430jc7j80000gn/T/plugin193424090") + + rc := hplugin.ReattachConfig{ + Protocol: hplugin.ProtocolGRPC, + ProtocolVersion: hplugin.CoreProtocolVersion, + Addr: unixFD, + Pid: 24464, + } + + err := writeConfigCache(path, rc) + require.NoError(t, err) + + err = deleteConfCache(path) + require.NoError(t, err) + + // there should be an error after deleting the config from the cache + _, err = readConfigCache(path) + 
require.Error(t, err) + }) + + t.Run("Delete plugin config should return error given empty path", func(t *testing.T) { + const path = "" + err := deleteConfCache(path) + require.Error(t, err) + }) +} + +func TestCheckConfCache(t *testing.T) { + const path = "/path/to/awesome/plugin" + unixFD, _ := net.ResolveUnixAddr("unix", "/var/folders/5k/sv4bxrs102n_6rr7430jc7j80000gn/T/plugin193424090") + + rc := hplugin.ReattachConfig{ + Protocol: hplugin.ProtocolGRPC, + ProtocolVersion: hplugin.CoreProtocolVersion, + Addr: unixFD, + Pid: 24464, + } + + t.Run("Cache should be hydrated", func(t *testing.T) { + err := writeConfigCache(path, rc) + require.NoError(t, err) + require.Equal(t, true, checkConfCache(path)) + }) + + t.Run("Cache should be empty", func(t *testing.T) { + _ = deleteConfCache(path) + require.Equal(t, false, checkConfCache(path)) + }) +} diff --git a/ignite/services/plugin/client_api.go b/ignite/services/plugin/client_api.go new file mode 100644 index 0000000..bbff11d --- /dev/null +++ b/ignite/services/plugin/client_api.go @@ -0,0 +1,113 @@ +package plugin + +import ( + "context" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/version" +) + +// ErrAppChainNotFound indicates that the plugin command is not running inside a blockchain app. +var ErrAppChainNotFound = errors.New("blockchain app not found") + +//go:generate mockery --srcpkg . --name Chainer --structname ChainerInterface --filename chainer.go --with-expecter +type Chainer interface { + // AppPath returns the configured App's path. + AppPath() string + + // ID returns the configured App's chain id. + ID() (string, error) + + // ConfigPath returns the path to the App's config file. + ConfigPath() string + + // RPCPublicAddress returns the configured App's rpc endpoint. + RPCPublicAddress() (string, error) + + // Home returns the App's home dir. + Home() (string, error) +} + +// APIOption defines options for the client API. 
+type APIOption func(*apiOptions) + +type apiOptions struct { + chain Chainer +} + +// WithChain configures the chain to use for the client API. +func WithChain(c Chainer) APIOption { + return func(o *apiOptions) { + o.chain = c + } +} + +// NewClientAPI creates a new app ClientAPI. +func NewClientAPI(options ...APIOption) ClientAPI { + o := apiOptions{} + for _, apply := range options { + apply(&o) + } + return clientAPI{o} +} + +type clientAPI struct { + o apiOptions +} + +func (api clientAPI) GetChainInfo(context.Context) (*ChainInfo, error) { + chain, err := api.getChain() + if err != nil { + return nil, err + } + + chainID, err := chain.ID() + if err != nil { + return nil, err + } + + rpc, err := chain.RPCPublicAddress() + if err != nil { + return nil, err + } + + home, err := chain.Home() + if err != nil { + return nil, err + } + + return &ChainInfo{ + ChainId: chainID, + AppPath: chain.AppPath(), + ConfigPath: chain.ConfigPath(), + RpcAddress: rpc, + Home: home, + }, nil +} + +func (api clientAPI) getChain() (Chainer, error) { + if api.o.chain == nil { + return nil, ErrAppChainNotFound + } + return api.o.chain, nil +} + +func (api clientAPI) GetIgniteInfo(ctx context.Context) (*IgniteInfo, error) { + info, err := version.GetInfo(ctx) + if err != nil { + return nil, err + } + + return &IgniteInfo{ + CliVersion: info.CLIVersion, + GoVersion: info.GoVersion, + SdkVersion: info.SDKVersion, + BufVersion: info.BufVersion, + BuildDate: info.BuildDate, + SourceHash: info.SourceHash, + ConfigVersion: info.ConfigVersion, + Os: info.OS, + Arch: info.Arch, + BuildFromSource: info.BuildFromSource, + }, nil +} diff --git a/ignite/services/plugin/flag.go b/ignite/services/plugin/flag.go new file mode 100644 index 0000000..226f0a4 --- /dev/null +++ b/ignite/services/plugin/flag.go @@ -0,0 +1,152 @@ +package plugin + +import ( + "strconv" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +var ( + // ErrFlagNotFound error key flag not found. 
+ ErrFlagNotFound = errors.New("flag not found") + // ErrInvalidFlagType error invalid flag type. + ErrInvalidFlagType = errors.New("invalid flag type") + // ErrFlagAssertion error flag type assertion failed. + ErrFlagAssertion = errors.New("flag type assertion failed") +) + +// Flags represents a slice of Flag pointers. +type Flags []*Flag + +// getValue returns the value of the flag with the specified key and type. +// It uses the provided conversion function to convert the string value to the desired type. +func (f Flags) getValue(key string, flagType FlagType, convFunc func(v string) (interface{}, error)) (interface{}, error) { + for _, flag := range f { + if flag.Name == key { + if flag.Type != flagType { + return nil, errors.Wrapf(ErrInvalidFlagType, "invalid flag type %v for key %s", flag.Type, key) + } + return convFunc(flagValue(flag)) + } + } + return nil, errors.Wrap(ErrFlagNotFound, key) +} + +// GetString retrieves the string value of the flag with the specified key. +func (f Flags) GetString(key string) (string, error) { + v, err := f.getValue(key, FlagTypeString, func(v string) (interface{}, error) { + return strings.TrimSpace(v), nil + }) + if err != nil { + return "", err + } + result, ok := v.(string) + if !ok { + return "", errors.Wrapf(ErrFlagAssertion, "invalid assertion type %T for key %s", v, key) + } + return result, nil +} + +// GetStringSlice retrieves the string slice value of the flag with the specified key. 
+func (f Flags) GetStringSlice(key string) ([]string, error) { + v, err := f.getValue(key, FlagTypeStringSlice, func(v string) (interface{}, error) { + v = strings.Trim(v, "[]") + s := strings.Split(v, ",") + if len(s) == 0 || (len(s) == 1 && s[0] == "") { + return []string{}, nil + } + return s, nil + }) + if err != nil { + return []string{}, err + } + result, ok := v.([]string) + if !ok { + return []string{}, errors.Wrapf(ErrFlagAssertion, "invalid string slice assertion type %T for key %s", v, key) + } + return result, nil +} + +// GetBool retrieves the boolean value of the flag with the specified key. +func (f Flags) GetBool(key string) (bool, error) { + v, err := f.getValue(key, FlagTypeBool, func(v string) (interface{}, error) { + return strconv.ParseBool(v) + }) + if err != nil { + return false, err + } + result, ok := v.(bool) + if !ok { + return false, errors.Wrapf(ErrFlagAssertion, "invalid bool assertion type %T for key %s", v, key) + } + return result, nil +} + +// GetInt retrieves the integer value of the flag with the specified key. +func (f Flags) GetInt(key string) (int, error) { + v, err := f.getValue(key, FlagTypeInt, func(v string) (interface{}, error) { + return strconv.Atoi(v) + }) + if err != nil { + return 0, err + } + result, ok := v.(int) + if !ok { + return 0, errors.Wrapf(ErrFlagAssertion, "invalid int assertion type %T for key %s", v, key) + } + return result, nil +} + +// GetInt64 retrieves the int64 value of the flag with the specified key. +func (f Flags) GetInt64(key string) (int64, error) { + v, err := f.getValue(key, FlagTypeInt64, func(v string) (interface{}, error) { + return strconv.ParseInt(v, 10, 64) + }) + if err != nil { + return int64(0), err + } + result, ok := v.(int64) + if !ok { + return int64(0), errors.Wrapf(ErrFlagAssertion, "invalid int64 assertion type %T for key %s", v, key) + } + return result, nil +} + +// GetUint retrieves the uint value of the flag with the specified key. 
+func (f Flags) GetUint(key string) (uint, error) { + v, err := f.getValue(key, FlagTypeUint, func(v string) (interface{}, error) { + return strconv.ParseUint(v, 10, 64) + }) + if err != nil { + return uint(0), err + } + result, ok := v.(uint64) + if !ok { + return uint(0), errors.Wrapf(ErrFlagAssertion, "invalid uint assertion type %T for key %s", v, key) + } + return uint(result), nil +} + +// GetUint64 retrieves the uint64 value of the flag with the specified key. +func (f Flags) GetUint64(key string) (uint64, error) { + v, err := f.getValue(key, FlagTypeUint64, func(v string) (interface{}, error) { + return strconv.ParseUint(v, 10, 64) + }) + if err != nil { + return uint64(0), err + } + result, ok := v.(uint64) + if !ok { + return uint64(0), errors.Wrapf(ErrFlagAssertion, "invalid uint64 assertion type %T for key %s", v, key) + } + return result, nil +} + +// flagValue returns the value of the flag if set, otherwise returns the default value. +func flagValue(flag *Flag) string { + if flag.Value != "" { + return flag.Value + } + return flag.DefaultValue +} diff --git a/ignite/services/plugin/flag_test.go b/ignite/services/plugin/flag_test.go new file mode 100644 index 0000000..731a43d --- /dev/null +++ b/ignite/services/plugin/flag_test.go @@ -0,0 +1,594 @@ +package plugin + +import ( + "strconv" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +const ( + flagString1 = "string_flag_1" + flagString2 = "string_flag_2" + flagString3 = "string_flag_3" + flagStringSlice1 = "string_slice_flag_1" + flagStringSlice2 = "string_slice_flag_2" + flagStringSlice3 = "string_slice_flag_3" + flagBool1 = "bool_flag_1" + flagBool2 = "bool_flag_2" + flagBool3 = "bool_flag_3" + flagInt1 = "int_flag_1" + flagInt2 = "int_flag_2" + flagInt3 = "int_flag_3" + flagUint1 = "uint_flag_1" + flagUint2 = "uint_flag_2" + flagUint3 = "uint_flag_3" + flagInt641 = "int64_flag_1" + flagInt642 = "int64_flag_2" + flagInt643 = 
"int64_flag_3" + flagUint641 = "uint64_flag_1" + flagUint642 = "uint64_flag_2" + flagUint643 = "uint64_flag_3" + flagWrongType1 = "wrong_type_1" + flagWrongType2 = "wrong_type_2" + flagWrongType3 = "wrong_type_3" +) + +var testFlags = Flags{ + {Name: flagString1, Value: "text_1", DefaultValue: "def_text_1", Type: FlagTypeString}, + {Name: flagString2, DefaultValue: "def_text_2", Type: FlagTypeString}, + {Name: flagString3, Type: FlagTypeString}, + + {Name: flagStringSlice1, Value: "slice_1,slice_2", DefaultValue: "slice_1,slice_2,slice_3", Type: FlagTypeStringSlice}, + {Name: flagStringSlice2, DefaultValue: "slice_1,slice_2,slice_3", Type: FlagTypeStringSlice}, + {Name: flagStringSlice3, Type: FlagTypeStringSlice}, + + {Name: flagInt1, Value: "-100", DefaultValue: "300", Type: FlagTypeInt}, + {Name: flagInt2, DefaultValue: "200", Type: FlagTypeInt}, + {Name: flagInt3, Type: FlagTypeInt}, + + {Name: flagUint1, Value: "22", DefaultValue: "34", Type: FlagTypeUint}, + {Name: flagUint2, DefaultValue: "40", Type: FlagTypeUint}, + {Name: flagUint3, Type: FlagTypeUint}, + + {Name: flagInt641, Value: "123", DefaultValue: "641", Type: FlagTypeInt64}, + {Name: flagInt642, DefaultValue: "344", Type: FlagTypeInt64}, + {Name: flagInt643, Type: FlagTypeInt64}, + + {Name: flagUint641, Value: "123", DefaultValue: "433333", Type: FlagTypeUint64}, + {Name: flagUint642, DefaultValue: "100000", Type: FlagTypeUint64}, + {Name: flagUint643, Type: FlagTypeUint64}, + + {Name: flagBool1, Value: "true", DefaultValue: "false", Type: FlagTypeBool}, + {Name: flagBool2, DefaultValue: "true", Type: FlagTypeBool}, + {Name: flagBool3, Type: FlagTypeBool}, + + {Name: flagWrongType1, Value: "text_wrong", DefaultValue: "def_text", Type: FlagTypeUint64}, + {Name: flagWrongType2, DefaultValue: "text_wrong", Type: FlagTypeBool}, + {Name: flagWrongType3, Type: FlagTypeInt}, +} + +func TestFlags_GetBool(t *testing.T) { + tests := []struct { + name string + key string + f Flags + want bool + err error + }{ 
+ { + name: "flag with value", + key: flagBool1, + f: testFlags, + want: true, + }, + { + name: "flag with default value", + key: flagBool2, + f: testFlags, + want: true, + }, + { + name: "flag without value and default value", + key: flagBool3, + f: testFlags, + err: errors.New("strconv.ParseBool: parsing \"\": invalid syntax"), + }, + { + name: "invalid flag type", + key: flagString1, + f: testFlags, + err: errors.Wrapf(ErrInvalidFlagType, "invalid flag type %v for key %s", FlagTypeString, flagString1), + }, + { + name: "invalid flag", + key: "invalid_key", + f: testFlags, + err: errors.Wrap(ErrFlagNotFound, "invalid_key"), + }, + { + name: "wrong flag type", + key: flagWrongType1, + f: testFlags, + err: errors.Wrap(ErrInvalidFlagType, "invalid flag type TYPE_FLAG_UINT64 for key wrong_type_1"), + }, + { + name: "wrong flag value", + key: flagWrongType2, + f: testFlags, + err: errors.New("strconv.ParseBool: parsing \"text_wrong\": invalid syntax"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.f.GetBool(tt.key) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestFlags_GetInt(t *testing.T) { + tests := []struct { + name string + f Flags + key string + want int + err error + }{ + { + name: "flag with value", + key: flagInt1, + f: testFlags, + want: -100, + }, + { + name: "flag with default value", + key: flagInt2, + f: testFlags, + want: 200, + }, + { + name: "flag without value and default value", + key: flagInt3, + f: testFlags, + err: errors.New("strconv.Atoi: parsing \"\": invalid syntax"), + }, + { + name: "invalid flag type", + key: flagString1, + f: testFlags, + err: errors.Wrapf(ErrInvalidFlagType, "invalid flag type %v for key %s", FlagTypeString, flagString1), + }, + { + name: "invalid flag", + key: "invalid_key", + f: testFlags, + err: errors.Wrap(ErrFlagNotFound, 
"invalid_key"), + }, + { + name: "wrong flag type", + key: flagWrongType2, + f: testFlags, + err: errors.Wrap(ErrInvalidFlagType, "invalid flag type TYPE_FLAG_BOOL for key wrong_type_2"), + }, + { + name: "wrong flag value without default or value", + key: flagWrongType3, + f: testFlags, + err: errors.New("strconv.Atoi: parsing \"\": invalid syntax"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.f.GetInt(tt.key) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestFlags_GetInt64(t *testing.T) { + tests := []struct { + name string + f Flags + key string + want int64 + err error + }{ + { + name: "flag with value", + key: flagInt641, + f: testFlags, + want: 123, + }, + { + name: "flag with default value", + key: flagInt642, + f: testFlags, + want: 344, + }, + { + name: "flag without value and default value", + key: flagInt643, + f: testFlags, + err: errors.New("strconv.ParseInt: parsing \"\": invalid syntax"), + }, + { + name: "invalid flag type", + key: flagString1, + f: testFlags, + err: errors.Wrapf(ErrInvalidFlagType, "invalid flag type %v for key %s", FlagTypeString, flagString1), + }, + { + name: "invalid flag", + key: "invalid_key", + f: testFlags, + err: errors.Wrap(ErrFlagNotFound, "invalid_key"), + }, + { + name: "wrong flag type", + key: flagWrongType3, + f: testFlags, + err: errors.Wrap(ErrInvalidFlagType, "invalid flag type TYPE_FLAG_INT for key wrong_type_3"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.f.GetInt64(tt.key) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestFlags_GetString(t *testing.T) { + tests := []struct { + name string + f Flags + key string + want string + err error + 
}{ + { + name: "flag with value", + key: flagString1, + f: testFlags, + want: "text_1", + }, + { + name: "flag with default value", + key: flagString2, + f: testFlags, + want: "def_text_2", + }, + { + name: "flag without value and default value", + key: flagString3, + f: testFlags, + want: "", + }, + { + name: "invalid flag type", + key: flagInt1, + f: testFlags, + err: errors.Wrapf(ErrInvalidFlagType, "invalid flag type %v for key %s", FlagTypeInt, flagInt1), + }, + { + name: "invalid flag", + key: "invalid_key", + f: testFlags, + err: errors.Wrap(ErrFlagNotFound, "invalid_key"), + }, + { + name: "wrong flag type", + key: flagWrongType2, + f: testFlags, + err: errors.Wrap(ErrInvalidFlagType, "invalid flag type TYPE_FLAG_BOOL for key wrong_type_2"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.f.GetString(tt.key) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestFlags_GetStringSlice(t *testing.T) { + tests := []struct { + name string + f Flags + key string + want []string + err error + }{ + { + name: "flag with default value", + key: flagStringSlice1, + f: testFlags, + want: []string{"slice_1", "slice_2"}, + }, + { + name: "flag with default value", + key: flagStringSlice2, + f: testFlags, + want: []string{"slice_1", "slice_2", "slice_3"}, + }, + { + name: "flag without value and default value", + key: flagStringSlice3, + f: testFlags, + want: []string{}, + }, + { + name: "invalid flag type", + key: flagString1, + f: testFlags, + err: errors.Wrapf(ErrInvalidFlagType, "invalid flag type %v for key %s", FlagTypeString, flagString1), + }, + { + name: "invalid flag", + key: "invalid_key", + f: testFlags, + err: errors.Wrap(ErrFlagNotFound, "invalid_key"), + }, + { + name: "wrong flag type", + key: flagWrongType1, + f: testFlags, + err: errors.Wrap(ErrInvalidFlagType, "invalid flag type 
TYPE_FLAG_UINT64 for key wrong_type_1"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.f.GetStringSlice(tt.key) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestFlags_GetUint(t *testing.T) { + tests := []struct { + name string + f Flags + key string + want uint + err error + }{ + { + name: "flag with value", + key: flagUint1, + f: testFlags, + want: 22, + }, + { + name: "flag with default value", + key: flagUint2, + f: testFlags, + want: 40, + }, + { + name: "flag without value and default value", + key: flagUint3, + f: testFlags, + err: errors.New("strconv.ParseUint: parsing \"\": invalid syntax"), + }, + { + name: "invalid flag type", + key: flagString1, + f: testFlags, + err: errors.Wrapf(ErrInvalidFlagType, "invalid flag type %v for key %s", FlagTypeString, flagString1), + }, + { + name: "invalid flag", + key: "invalid_key", + f: testFlags, + err: errors.Wrap(ErrFlagNotFound, "invalid_key"), + }, + { + name: "wrong flag type", + key: flagWrongType1, + f: testFlags, + err: errors.Wrap(ErrInvalidFlagType, "invalid flag type TYPE_FLAG_UINT64 for key wrong_type_1"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.f.GetUint(tt.key) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestFlags_GetUint64(t *testing.T) { + tests := []struct { + name string + f Flags + key string + want uint64 + err error + }{ + { + name: "flag with value", + key: flagUint641, + f: testFlags, + want: 123, + }, + { + name: "flag with default value", + key: flagUint642, + f: testFlags, + want: 100000, + }, + { + name: "flag without value and default value", + key: flagUint643, + f: testFlags, + err: errors.New("strconv.ParseUint: 
parsing \"\": invalid syntax"), + }, + { + name: "invalid flag type", + key: flagString1, + f: testFlags, + err: errors.Wrapf(ErrInvalidFlagType, "invalid flag type %v for key %s", FlagTypeString, flagString1), + }, + { + name: "invalid flag", + key: "invalid_key", + f: testFlags, + err: errors.Wrap(ErrFlagNotFound, "invalid_key"), + }, + { + name: "wrong flag type", + key: flagWrongType1, + f: testFlags, + err: errors.New("strconv.ParseUint: parsing \"text_wrong\": invalid syntax"), + }, + { + name: "wrong flag type", + key: flagWrongType3, + f: testFlags, + err: errors.Wrap(ErrInvalidFlagType, "invalid flag type TYPE_FLAG_INT for key wrong_type_3"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.f.GetUint64(tt.key) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestFlags_getValue(t *testing.T) { + tests := []struct { + name string + f Flags + key string + flagType FlagType + convFunc func(v string) (interface{}, error) + want interface{} + err error + }{ + { + name: "valid string conversion", + f: testFlags, + key: flagString1, + flagType: FlagTypeString, + convFunc: func(v string) (interface{}, error) { return v, nil }, + want: "text_1", + }, + { + name: "valid int conversion", + f: testFlags, + key: flagInt1, + flagType: FlagTypeInt, + convFunc: func(v string) (interface{}, error) { return strconv.Atoi(v) }, + want: -100, + }, + { + name: "invalid flag type", + f: testFlags, + key: flagString1, + flagType: FlagTypeInt, + convFunc: func(v string) (interface{}, error) { return v, nil }, + err: errors.Wrapf(ErrInvalidFlagType, "invalid flag type %v for key %s", FlagTypeString, flagString1), + }, + { + name: "flag not found", + f: testFlags, + key: "non_existing_flag", + flagType: FlagTypeString, + convFunc: func(v string) (interface{}, error) { return v, nil }, + err: 
errors.Wrap(ErrFlagNotFound, "non_existing_flag"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.f.getValue(tt.key, tt.flagType, tt.convFunc) + if tt.err != nil { + require.Error(t, err) + require.Equal(t, tt.err.Error(), err.Error()) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func Test_flagValue(t *testing.T) { + tests := []struct { + name string + flag *Flag + want string + }{ + { + name: "with value", + flag: &Flag{Name: flagString1, Value: "actual_value", DefaultValue: "default_value"}, + want: "actual_value", + }, + { + name: "with default value", + flag: &Flag{Name: flagString1, DefaultValue: "default_value"}, + want: "default_value", + }, + { + name: "without value and default value", + flag: &Flag{Name: flagString1}, + want: "", + }, + { + name: "number without value and default value", + flag: &Flag{Name: flagUint642, Type: FlagTypeUint64}, + want: "", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := flagValue(tt.flag) + require.Equal(t, tt.want, got) + }) + } +} diff --git a/ignite/services/plugin/grpc/v1/client_api.pb.go b/ignite/services/plugin/grpc/v1/client_api.pb.go new file mode 100644 index 0000000..4ad6fbc --- /dev/null +++ b/ignite/services/plugin/grpc/v1/client_api.pb.go @@ -0,0 +1,297 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.36.6 +// protoc (unknown) +// source: ignite/services/plugin/grpc/v1/client_api.proto + +package v1 + +import ( + reflect "reflect" + sync "sync" + unsafe "unsafe" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type ChainInfo struct { + state protoimpl.MessageState `protogen:"open.v1"` + ChainId string `protobuf:"bytes,1,opt,name=chain_id,json=chainId,proto3" json:"chain_id,omitempty"` + AppPath string `protobuf:"bytes,2,opt,name=app_path,json=appPath,proto3" json:"app_path,omitempty"` + ConfigPath string `protobuf:"bytes,3,opt,name=config_path,json=configPath,proto3" json:"config_path,omitempty"` + RpcAddress string `protobuf:"bytes,4,opt,name=rpc_address,json=rpcAddress,proto3" json:"rpc_address,omitempty"` + Home string `protobuf:"bytes,5,opt,name=home,proto3" json:"home,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ChainInfo) Reset() { + *x = ChainInfo{} + mi := &file_ignite_services_plugin_grpc_v1_client_api_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ChainInfo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ChainInfo) ProtoMessage() {} + +func (x *ChainInfo) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_client_api_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ChainInfo.ProtoReflect.Descriptor instead. 
+func (*ChainInfo) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_client_api_proto_rawDescGZIP(), []int{0} +} + +func (x *ChainInfo) GetChainId() string { + if x != nil { + return x.ChainId + } + return "" +} + +func (x *ChainInfo) GetAppPath() string { + if x != nil { + return x.AppPath + } + return "" +} + +func (x *ChainInfo) GetConfigPath() string { + if x != nil { + return x.ConfigPath + } + return "" +} + +func (x *ChainInfo) GetRpcAddress() string { + if x != nil { + return x.RpcAddress + } + return "" +} + +func (x *ChainInfo) GetHome() string { + if x != nil { + return x.Home + } + return "" +} + +type IgniteInfo struct { + state protoimpl.MessageState `protogen:"open.v1"` + CliVersion string `protobuf:"bytes,1,opt,name=cli_version,json=cliVersion,proto3" json:"cli_version,omitempty"` + GoVersion string `protobuf:"bytes,2,opt,name=go_version,json=goVersion,proto3" json:"go_version,omitempty"` + SdkVersion string `protobuf:"bytes,3,opt,name=sdk_version,json=sdkVersion,proto3" json:"sdk_version,omitempty"` + BufVersion string `protobuf:"bytes,4,opt,name=buf_version,json=bufVersion,proto3" json:"buf_version,omitempty"` + BuildDate string `protobuf:"bytes,5,opt,name=build_date,json=buildDate,proto3" json:"build_date,omitempty"` + SourceHash string `protobuf:"bytes,6,opt,name=source_hash,json=sourceHash,proto3" json:"source_hash,omitempty"` + ConfigVersion string `protobuf:"bytes,7,opt,name=config_version,json=configVersion,proto3" json:"config_version,omitempty"` + Os string `protobuf:"bytes,8,opt,name=os,proto3" json:"os,omitempty"` + Arch string `protobuf:"bytes,9,opt,name=arch,proto3" json:"arch,omitempty"` + BuildFromSource bool `protobuf:"varint,10,opt,name=build_from_source,json=buildFromSource,proto3" json:"build_from_source,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *IgniteInfo) Reset() { + *x = IgniteInfo{} + mi := 
&file_ignite_services_plugin_grpc_v1_client_api_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *IgniteInfo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*IgniteInfo) ProtoMessage() {} + +func (x *IgniteInfo) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_client_api_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use IgniteInfo.ProtoReflect.Descriptor instead. +func (*IgniteInfo) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_client_api_proto_rawDescGZIP(), []int{1} +} + +func (x *IgniteInfo) GetCliVersion() string { + if x != nil { + return x.CliVersion + } + return "" +} + +func (x *IgniteInfo) GetGoVersion() string { + if x != nil { + return x.GoVersion + } + return "" +} + +func (x *IgniteInfo) GetSdkVersion() string { + if x != nil { + return x.SdkVersion + } + return "" +} + +func (x *IgniteInfo) GetBufVersion() string { + if x != nil { + return x.BufVersion + } + return "" +} + +func (x *IgniteInfo) GetBuildDate() string { + if x != nil { + return x.BuildDate + } + return "" +} + +func (x *IgniteInfo) GetSourceHash() string { + if x != nil { + return x.SourceHash + } + return "" +} + +func (x *IgniteInfo) GetConfigVersion() string { + if x != nil { + return x.ConfigVersion + } + return "" +} + +func (x *IgniteInfo) GetOs() string { + if x != nil { + return x.Os + } + return "" +} + +func (x *IgniteInfo) GetArch() string { + if x != nil { + return x.Arch + } + return "" +} + +func (x *IgniteInfo) GetBuildFromSource() bool { + if x != nil { + return x.BuildFromSource + } + return false +} + +var File_ignite_services_plugin_grpc_v1_client_api_proto protoreflect.FileDescriptor + +const 
file_ignite_services_plugin_grpc_v1_client_api_proto_rawDesc = "" + + "\n" + + "/ignite/services/plugin/grpc/v1/client_api.proto\x12\x1eignite.services.plugin.grpc.v1\"\x97\x01\n" + + "\tChainInfo\x12\x19\n" + + "\bchain_id\x18\x01 \x01(\tR\achainId\x12\x19\n" + + "\bapp_path\x18\x02 \x01(\tR\aappPath\x12\x1f\n" + + "\vconfig_path\x18\x03 \x01(\tR\n" + + "configPath\x12\x1f\n" + + "\vrpc_address\x18\x04 \x01(\tR\n" + + "rpcAddress\x12\x12\n" + + "\x04home\x18\x05 \x01(\tR\x04home\"\xc5\x02\n" + + "\n" + + "IgniteInfo\x12\x1f\n" + + "\vcli_version\x18\x01 \x01(\tR\n" + + "cliVersion\x12\x1d\n" + + "\n" + + "go_version\x18\x02 \x01(\tR\tgoVersion\x12\x1f\n" + + "\vsdk_version\x18\x03 \x01(\tR\n" + + "sdkVersion\x12\x1f\n" + + "\vbuf_version\x18\x04 \x01(\tR\n" + + "bufVersion\x12\x1d\n" + + "\n" + + "build_date\x18\x05 \x01(\tR\tbuildDate\x12\x1f\n" + + "\vsource_hash\x18\x06 \x01(\tR\n" + + "sourceHash\x12%\n" + + "\x0econfig_version\x18\a \x01(\tR\rconfigVersion\x12\x0e\n" + + "\x02os\x18\b \x01(\tR\x02os\x12\x12\n" + + "\x04arch\x18\t \x01(\tR\x04arch\x12*\n" + + "\x11build_from_source\x18\n" + + " \x01(\bR\x0fbuildFromSourceB:Z8github.com/ignite/cli/v29/ignite/services/plugin/grpc/v1b\x06proto3" + +var ( + file_ignite_services_plugin_grpc_v1_client_api_proto_rawDescOnce sync.Once + file_ignite_services_plugin_grpc_v1_client_api_proto_rawDescData []byte +) + +func file_ignite_services_plugin_grpc_v1_client_api_proto_rawDescGZIP() []byte { + file_ignite_services_plugin_grpc_v1_client_api_proto_rawDescOnce.Do(func() { + file_ignite_services_plugin_grpc_v1_client_api_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_ignite_services_plugin_grpc_v1_client_api_proto_rawDesc), len(file_ignite_services_plugin_grpc_v1_client_api_proto_rawDesc))) + }) + return file_ignite_services_plugin_grpc_v1_client_api_proto_rawDescData +} + +var file_ignite_services_plugin_grpc_v1_client_api_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var 
file_ignite_services_plugin_grpc_v1_client_api_proto_goTypes = []any{ + (*ChainInfo)(nil), // 0: ignite.services.plugin.grpc.v1.ChainInfo + (*IgniteInfo)(nil), // 1: ignite.services.plugin.grpc.v1.IgniteInfo +} +var file_ignite_services_plugin_grpc_v1_client_api_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_ignite_services_plugin_grpc_v1_client_api_proto_init() } +func file_ignite_services_plugin_grpc_v1_client_api_proto_init() { + if File_ignite_services_plugin_grpc_v1_client_api_proto != nil { + return + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_ignite_services_plugin_grpc_v1_client_api_proto_rawDesc), len(file_ignite_services_plugin_grpc_v1_client_api_proto_rawDesc)), + NumEnums: 0, + NumMessages: 2, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_ignite_services_plugin_grpc_v1_client_api_proto_goTypes, + DependencyIndexes: file_ignite_services_plugin_grpc_v1_client_api_proto_depIdxs, + MessageInfos: file_ignite_services_plugin_grpc_v1_client_api_proto_msgTypes, + }.Build() + File_ignite_services_plugin_grpc_v1_client_api_proto = out.File + file_ignite_services_plugin_grpc_v1_client_api_proto_goTypes = nil + file_ignite_services_plugin_grpc_v1_client_api_proto_depIdxs = nil +} diff --git a/ignite/services/plugin/grpc/v1/interface.pb.go b/ignite/services/plugin/grpc/v1/interface.pb.go new file mode 100644 index 0000000..394e0bc --- /dev/null +++ b/ignite/services/plugin/grpc/v1/interface.pb.go @@ -0,0 +1,728 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// versions: +// protoc-gen-go v1.36.6 +// protoc (unknown) +// source: ignite/services/plugin/grpc/v1/interface.proto + +package v1 + +import ( + reflect "reflect" + sync "sync" + unsafe "unsafe" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// Type represents the flag type. +type Flag_Type int32 + +const ( + Flag_TYPE_FLAG_STRING_UNSPECIFIED Flag_Type = 0 + Flag_TYPE_FLAG_INT Flag_Type = 1 + Flag_TYPE_FLAG_UINT Flag_Type = 2 + Flag_TYPE_FLAG_INT64 Flag_Type = 3 + Flag_TYPE_FLAG_UINT64 Flag_Type = 4 + Flag_TYPE_FLAG_BOOL Flag_Type = 5 + Flag_TYPE_FLAG_STRING_SLICE Flag_Type = 6 +) + +// Enum value maps for Flag_Type. +var ( + Flag_Type_name = map[int32]string{ + 0: "TYPE_FLAG_STRING_UNSPECIFIED", + 1: "TYPE_FLAG_INT", + 2: "TYPE_FLAG_UINT", + 3: "TYPE_FLAG_INT64", + 4: "TYPE_FLAG_UINT64", + 5: "TYPE_FLAG_BOOL", + 6: "TYPE_FLAG_STRING_SLICE", + } + Flag_Type_value = map[string]int32{ + "TYPE_FLAG_STRING_UNSPECIFIED": 0, + "TYPE_FLAG_INT": 1, + "TYPE_FLAG_UINT": 2, + "TYPE_FLAG_INT64": 3, + "TYPE_FLAG_UINT64": 4, + "TYPE_FLAG_BOOL": 5, + "TYPE_FLAG_STRING_SLICE": 6, + } +) + +func (x Flag_Type) Enum() *Flag_Type { + p := new(Flag_Type) + *p = x + return p +} + +func (x Flag_Type) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Flag_Type) Descriptor() protoreflect.EnumDescriptor { + return file_ignite_services_plugin_grpc_v1_interface_proto_enumTypes[0].Descriptor() +} + +func (Flag_Type) Type() protoreflect.EnumType { + return &file_ignite_services_plugin_grpc_v1_interface_proto_enumTypes[0] +} + +func (x Flag_Type) Number() protoreflect.EnumNumber { + return 
protoreflect.EnumNumber(x) +} + +// Deprecated: Use Flag_Type.Descriptor instead. +func (Flag_Type) EnumDescriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_interface_proto_rawDescGZIP(), []int{4, 0} +} + +// ExecutedCommand represents a plugin command under execution. +type ExecutedCommand struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Use is the one-line usage message. + Use string `protobuf:"bytes,1,opt,name=use,proto3" json:"use,omitempty"` + // Path contains the command path, e.g. `ignite scaffold foo`. + Path string `protobuf:"bytes,2,opt,name=path,proto3" json:"path,omitempty"` + // Args are the command arguments. + Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"` + // Full list of args taken from the command line. + OsArgs []string `protobuf:"bytes,4,rep,name=os_args,json=osArgs,proto3" json:"os_args,omitempty"` + // With contains the plugin config parameters. + With map[string]string `protobuf:"bytes,5,rep,name=with,proto3" json:"with,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + // Flags holds the list of command flags. 
+ Flags []*Flag `protobuf:"bytes,6,rep,name=flags,proto3" json:"flags,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ExecutedCommand) Reset() { + *x = ExecutedCommand{} + mi := &file_ignite_services_plugin_grpc_v1_interface_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ExecutedCommand) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExecutedCommand) ProtoMessage() {} + +func (x *ExecutedCommand) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_interface_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExecutedCommand.ProtoReflect.Descriptor instead. +func (*ExecutedCommand) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_interface_proto_rawDescGZIP(), []int{0} +} + +func (x *ExecutedCommand) GetUse() string { + if x != nil { + return x.Use + } + return "" +} + +func (x *ExecutedCommand) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +func (x *ExecutedCommand) GetArgs() []string { + if x != nil { + return x.Args + } + return nil +} + +func (x *ExecutedCommand) GetOsArgs() []string { + if x != nil { + return x.OsArgs + } + return nil +} + +func (x *ExecutedCommand) GetWith() map[string]string { + if x != nil { + return x.With + } + return nil +} + +func (x *ExecutedCommand) GetFlags() []*Flag { + if x != nil { + return x.Flags + } + return nil +} + +// ExecutedHook represents a plugin hook under execution. +type ExecutedHook struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Hook is a copy of the original Hook defined in the Manifest. 
+ Hook *Hook `protobuf:"bytes,1,opt,name=hook,proto3" json:"hook,omitempty"` + // ExecutedCommand gives access to the command attached by the hook. + ExecutedCommand *ExecutedCommand `protobuf:"bytes,2,opt,name=executed_command,json=executedCommand,proto3" json:"executed_command,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ExecutedHook) Reset() { + *x = ExecutedHook{} + mi := &file_ignite_services_plugin_grpc_v1_interface_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ExecutedHook) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExecutedHook) ProtoMessage() {} + +func (x *ExecutedHook) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_interface_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExecutedHook.ProtoReflect.Descriptor instead. +func (*ExecutedHook) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_interface_proto_rawDescGZIP(), []int{1} +} + +func (x *ExecutedHook) GetHook() *Hook { + if x != nil { + return x.Hook + } + return nil +} + +func (x *ExecutedHook) GetExecutedCommand() *ExecutedCommand { + if x != nil { + return x.ExecutedCommand + } + return nil +} + +// Manifest represents the plugin behavior. +type Manifest struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Plugin name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Commands contains the commands that will be added to the list of ignite commands. + // Each commands are independent, for nested commands use the inner Commands field. 
+ SharedHost bool `protobuf:"varint,2,opt,name=shared_host,json=sharedHost,proto3" json:"shared_host,omitempty"` + // Hooks contains the hooks that will be attached to the existing ignite commands. + Commands []*Command `protobuf:"bytes,3,rep,name=commands,proto3" json:"commands,omitempty"` + // Enables sharing a single plugin server across all running instances of a plugin. + // Useful if a plugin adds or extends long running commands. + // + // Example: if a plugin defines a hook on `ignite chain serve`, a plugin server is + // instanciated when the command is run. Now if you want to interact with that instance + // from commands defined in that plugin, you need to enable shared host, or else the + // commands will just instantiate separate plugin servers. + // + // When enabled, all plugins of the same path loaded from the same configuration will + // attach it's RPC client to a an existing RPC server. + // + // If a plugin instance has no other running plugin servers, it will create one and it + // will be the host. + Hooks []*Hook `protobuf:"bytes,4,rep,name=hooks,proto3" json:"hooks,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *Manifest) Reset() { + *x = Manifest{} + mi := &file_ignite_services_plugin_grpc_v1_interface_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Manifest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Manifest) ProtoMessage() {} + +func (x *Manifest) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_interface_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Manifest.ProtoReflect.Descriptor instead. 
+func (*Manifest) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_interface_proto_rawDescGZIP(), []int{2} +} + +func (x *Manifest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Manifest) GetSharedHost() bool { + if x != nil { + return x.SharedHost + } + return false +} + +func (x *Manifest) GetCommands() []*Command { + if x != nil { + return x.Commands + } + return nil +} + +func (x *Manifest) GetHooks() []*Hook { + if x != nil { + return x.Hooks + } + return nil +} + +// Command represents a plugin command. +type Command struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Use is the one-line usage message. + // + // Recommended syntax is as follow: + // + // [ ] identifies an optional argument. Arguments that are not enclosed in brackets are required. + // ... indicates that you can specify multiple values for the previous argument. + // | indicates mutually exclusive information. You can use the argument to the left of the separator or the + // argument to the right of the separator. You cannot use both arguments in a single use of the command. + // { } delimits a set of mutually exclusive arguments when one of the arguments is required. If the arguments are + // optional, they are enclosed in brackets ([ ]). + // + // Example: add [-F file | -D dir]... [-f format] profile + Use string `protobuf:"bytes,1,opt,name=use,proto3" json:"use,omitempty"` + // Aliases is an array of aliases that can be used instead of the first word in Use. + // Note: Aliases have no effect on runnable commands. + Aliases []string `protobuf:"bytes,2,rep,name=aliases,proto3" json:"aliases,omitempty"` + // Short is the short description shown in the 'help' output. + Short string `protobuf:"bytes,3,opt,name=short,proto3" json:"short,omitempty"` + // Long is the long message shown in the 'help ' output. 
+ Long string `protobuf:"bytes,4,opt,name=long,proto3" json:"long,omitempty"` + // Hidden defines, if this command is hidden and should NOT show up in the list of available commands. + Hidden bool `protobuf:"varint,5,opt,name=hidden,proto3" json:"hidden,omitempty"` + // Flags holds the list of command flags. + Flags []*Flag `protobuf:"bytes,6,rep,name=flags,proto3" json:"flags,omitempty"` + // Indicates where the command should be placed. + // For instance `ignite scaffold` will place the command at the `scaffold` command. + // An empty value is interpreted as `ignite` (==root). + PlaceCommandUnder string `protobuf:"bytes,7,opt,name=place_command_under,json=placeCommandUnder,proto3" json:"place_command_under,omitempty"` + // List of sub commands. + Commands []*Command `protobuf:"bytes,8,rep,name=commands,proto3" json:"commands,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *Command) Reset() { + *x = Command{} + mi := &file_ignite_services_plugin_grpc_v1_interface_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Command) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Command) ProtoMessage() {} + +func (x *Command) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_interface_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Command.ProtoReflect.Descriptor instead. 
+func (*Command) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_interface_proto_rawDescGZIP(), []int{3} +} + +func (x *Command) GetUse() string { + if x != nil { + return x.Use + } + return "" +} + +func (x *Command) GetAliases() []string { + if x != nil { + return x.Aliases + } + return nil +} + +func (x *Command) GetShort() string { + if x != nil { + return x.Short + } + return "" +} + +func (x *Command) GetLong() string { + if x != nil { + return x.Long + } + return "" +} + +func (x *Command) GetHidden() bool { + if x != nil { + return x.Hidden + } + return false +} + +func (x *Command) GetFlags() []*Flag { + if x != nil { + return x.Flags + } + return nil +} + +func (x *Command) GetPlaceCommandUnder() string { + if x != nil { + return x.PlaceCommandUnder + } + return "" +} + +func (x *Command) GetCommands() []*Command { + if x != nil { + return x.Commands + } + return nil +} + +// Flag represents of a command line flag. +type Flag struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Name as it appears in the command line. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // One letter abbreviation of the flag. + Shorthand string `protobuf:"bytes,2,opt,name=shorthand,proto3" json:"shorthand,omitempty"` + // Help message. + Usage string `protobuf:"bytes,3,opt,name=usage,proto3" json:"usage,omitempty"` + // Default flag value. + DefaultValue string `protobuf:"bytes,4,opt,name=default_value,json=defaultValue,proto3" json:"default_value,omitempty"` + // Flag type. + Type Flag_Type `protobuf:"varint,5,opt,name=type,proto3,enum=ignite.services.plugin.grpc.v1.Flag_Type" json:"type,omitempty"` + // Flag value. + Value string `protobuf:"bytes,6,opt,name=value,proto3" json:"value,omitempty"` + // Indicates wether or not the flag is propagated on children commands. 
+ Persistent bool `protobuf:"varint,7,opt,name=persistent,proto3" json:"persistent,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *Flag) Reset() { + *x = Flag{} + mi := &file_ignite_services_plugin_grpc_v1_interface_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Flag) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Flag) ProtoMessage() {} + +func (x *Flag) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_interface_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Flag.ProtoReflect.Descriptor instead. +func (*Flag) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_interface_proto_rawDescGZIP(), []int{4} +} + +func (x *Flag) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Flag) GetShorthand() string { + if x != nil { + return x.Shorthand + } + return "" +} + +func (x *Flag) GetUsage() string { + if x != nil { + return x.Usage + } + return "" +} + +func (x *Flag) GetDefaultValue() string { + if x != nil { + return x.DefaultValue + } + return "" +} + +func (x *Flag) GetType() Flag_Type { + if x != nil { + return x.Type + } + return Flag_TYPE_FLAG_STRING_UNSPECIFIED +} + +func (x *Flag) GetValue() string { + if x != nil { + return x.Value + } + return "" +} + +func (x *Flag) GetPersistent() bool { + if x != nil { + return x.Persistent + } + return false +} + +// Hook represents a user defined action within a plugin. +type Hook struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Identifies the hook for the client to invoke the correct hook. + // It must be unique. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Indicates the command where to register the hooks. + PlaceHookOn string `protobuf:"bytes,2,opt,name=place_hook_on,json=placeHookOn,proto3" json:"place_hook_on,omitempty"` + // Flags holds the list of command flags. + Flags []*Flag `protobuf:"bytes,3,rep,name=flags,proto3" json:"flags,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *Hook) Reset() { + *x = Hook{} + mi := &file_ignite_services_plugin_grpc_v1_interface_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Hook) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Hook) ProtoMessage() {} + +func (x *Hook) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_interface_proto_msgTypes[5] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Hook.ProtoReflect.Descriptor instead. 
+func (*Hook) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_interface_proto_rawDescGZIP(), []int{5} +} + +func (x *Hook) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Hook) GetPlaceHookOn() string { + if x != nil { + return x.PlaceHookOn + } + return "" +} + +func (x *Hook) GetFlags() []*Flag { + if x != nil { + return x.Flags + } + return nil +} + +var File_ignite_services_plugin_grpc_v1_interface_proto protoreflect.FileDescriptor + +const file_ignite_services_plugin_grpc_v1_interface_proto_rawDesc = "" + + "\n" + + ".ignite/services/plugin/grpc/v1/interface.proto\x12\x1eignite.services.plugin.grpc.v1\"\xa8\x02\n" + + "\x0fExecutedCommand\x12\x10\n" + + "\x03use\x18\x01 \x01(\tR\x03use\x12\x12\n" + + "\x04path\x18\x02 \x01(\tR\x04path\x12\x12\n" + + "\x04args\x18\x03 \x03(\tR\x04args\x12\x17\n" + + "\aos_args\x18\x04 \x03(\tR\x06osArgs\x12M\n" + + "\x04with\x18\x05 \x03(\v29.ignite.services.plugin.grpc.v1.ExecutedCommand.WithEntryR\x04with\x12:\n" + + "\x05flags\x18\x06 \x03(\v2$.ignite.services.plugin.grpc.v1.FlagR\x05flags\x1a7\n" + + "\tWithEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\"\xa4\x01\n" + + "\fExecutedHook\x128\n" + + "\x04hook\x18\x01 \x01(\v2$.ignite.services.plugin.grpc.v1.HookR\x04hook\x12Z\n" + + "\x10executed_command\x18\x02 \x01(\v2/.ignite.services.plugin.grpc.v1.ExecutedCommandR\x0fexecutedCommand\"\xc0\x01\n" + + "\bManifest\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12\x1f\n" + + "\vshared_host\x18\x02 \x01(\bR\n" + + "sharedHost\x12C\n" + + "\bcommands\x18\x03 \x03(\v2'.ignite.services.plugin.grpc.v1.CommandR\bcommands\x12:\n" + + "\x05hooks\x18\x04 \x03(\v2$.ignite.services.plugin.grpc.v1.HookR\x05hooks\"\xa8\x02\n" + + "\aCommand\x12\x10\n" + + "\x03use\x18\x01 \x01(\tR\x03use\x12\x18\n" + + "\aaliases\x18\x02 \x03(\tR\aaliases\x12\x14\n" + + "\x05short\x18\x03 \x01(\tR\x05short\x12\x12\n" + + 
"\x04long\x18\x04 \x01(\tR\x04long\x12\x16\n" + + "\x06hidden\x18\x05 \x01(\bR\x06hidden\x12:\n" + + "\x05flags\x18\x06 \x03(\v2$.ignite.services.plugin.grpc.v1.FlagR\x05flags\x12.\n" + + "\x13place_command_under\x18\a \x01(\tR\x11placeCommandUnder\x12C\n" + + "\bcommands\x18\b \x03(\v2'.ignite.services.plugin.grpc.v1.CommandR\bcommands\"\x95\x03\n" + + "\x04Flag\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12\x1c\n" + + "\tshorthand\x18\x02 \x01(\tR\tshorthand\x12\x14\n" + + "\x05usage\x18\x03 \x01(\tR\x05usage\x12#\n" + + "\rdefault_value\x18\x04 \x01(\tR\fdefaultValue\x12=\n" + + "\x04type\x18\x05 \x01(\x0e2).ignite.services.plugin.grpc.v1.Flag.TypeR\x04type\x12\x14\n" + + "\x05value\x18\x06 \x01(\tR\x05value\x12\x1e\n" + + "\n" + + "persistent\x18\a \x01(\bR\n" + + "persistent\"\xaa\x01\n" + + "\x04Type\x12 \n" + + "\x1cTYPE_FLAG_STRING_UNSPECIFIED\x10\x00\x12\x11\n" + + "\rTYPE_FLAG_INT\x10\x01\x12\x12\n" + + "\x0eTYPE_FLAG_UINT\x10\x02\x12\x13\n" + + "\x0fTYPE_FLAG_INT64\x10\x03\x12\x14\n" + + "\x10TYPE_FLAG_UINT64\x10\x04\x12\x12\n" + + "\x0eTYPE_FLAG_BOOL\x10\x05\x12\x1a\n" + + "\x16TYPE_FLAG_STRING_SLICE\x10\x06\"z\n" + + "\x04Hook\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12\"\n" + + "\rplace_hook_on\x18\x02 \x01(\tR\vplaceHookOn\x12:\n" + + "\x05flags\x18\x03 \x03(\v2$.ignite.services.plugin.grpc.v1.FlagR\x05flagsB:Z8github.com/ignite/cli/v29/ignite/services/plugin/grpc/v1b\x06proto3" + +var ( + file_ignite_services_plugin_grpc_v1_interface_proto_rawDescOnce sync.Once + file_ignite_services_plugin_grpc_v1_interface_proto_rawDescData []byte +) + +func file_ignite_services_plugin_grpc_v1_interface_proto_rawDescGZIP() []byte { + file_ignite_services_plugin_grpc_v1_interface_proto_rawDescOnce.Do(func() { + file_ignite_services_plugin_grpc_v1_interface_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_ignite_services_plugin_grpc_v1_interface_proto_rawDesc), 
len(file_ignite_services_plugin_grpc_v1_interface_proto_rawDesc))) + }) + return file_ignite_services_plugin_grpc_v1_interface_proto_rawDescData +} + +var file_ignite_services_plugin_grpc_v1_interface_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_ignite_services_plugin_grpc_v1_interface_proto_msgTypes = make([]protoimpl.MessageInfo, 7) +var file_ignite_services_plugin_grpc_v1_interface_proto_goTypes = []any{ + (Flag_Type)(0), // 0: ignite.services.plugin.grpc.v1.Flag.Type + (*ExecutedCommand)(nil), // 1: ignite.services.plugin.grpc.v1.ExecutedCommand + (*ExecutedHook)(nil), // 2: ignite.services.plugin.grpc.v1.ExecutedHook + (*Manifest)(nil), // 3: ignite.services.plugin.grpc.v1.Manifest + (*Command)(nil), // 4: ignite.services.plugin.grpc.v1.Command + (*Flag)(nil), // 5: ignite.services.plugin.grpc.v1.Flag + (*Hook)(nil), // 6: ignite.services.plugin.grpc.v1.Hook + nil, // 7: ignite.services.plugin.grpc.v1.ExecutedCommand.WithEntry +} +var file_ignite_services_plugin_grpc_v1_interface_proto_depIdxs = []int32{ + 7, // 0: ignite.services.plugin.grpc.v1.ExecutedCommand.with:type_name -> ignite.services.plugin.grpc.v1.ExecutedCommand.WithEntry + 5, // 1: ignite.services.plugin.grpc.v1.ExecutedCommand.flags:type_name -> ignite.services.plugin.grpc.v1.Flag + 6, // 2: ignite.services.plugin.grpc.v1.ExecutedHook.hook:type_name -> ignite.services.plugin.grpc.v1.Hook + 1, // 3: ignite.services.plugin.grpc.v1.ExecutedHook.executed_command:type_name -> ignite.services.plugin.grpc.v1.ExecutedCommand + 4, // 4: ignite.services.plugin.grpc.v1.Manifest.commands:type_name -> ignite.services.plugin.grpc.v1.Command + 6, // 5: ignite.services.plugin.grpc.v1.Manifest.hooks:type_name -> ignite.services.plugin.grpc.v1.Hook + 5, // 6: ignite.services.plugin.grpc.v1.Command.flags:type_name -> ignite.services.plugin.grpc.v1.Flag + 4, // 7: ignite.services.plugin.grpc.v1.Command.commands:type_name -> ignite.services.plugin.grpc.v1.Command + 0, // 8: 
ignite.services.plugin.grpc.v1.Flag.type:type_name -> ignite.services.plugin.grpc.v1.Flag.Type + 5, // 9: ignite.services.plugin.grpc.v1.Hook.flags:type_name -> ignite.services.plugin.grpc.v1.Flag + 10, // [10:10] is the sub-list for method output_type + 10, // [10:10] is the sub-list for method input_type + 10, // [10:10] is the sub-list for extension type_name + 10, // [10:10] is the sub-list for extension extendee + 0, // [0:10] is the sub-list for field type_name +} + +func init() { file_ignite_services_plugin_grpc_v1_interface_proto_init() } +func file_ignite_services_plugin_grpc_v1_interface_proto_init() { + if File_ignite_services_plugin_grpc_v1_interface_proto != nil { + return + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_ignite_services_plugin_grpc_v1_interface_proto_rawDesc), len(file_ignite_services_plugin_grpc_v1_interface_proto_rawDesc)), + NumEnums: 1, + NumMessages: 7, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_ignite_services_plugin_grpc_v1_interface_proto_goTypes, + DependencyIndexes: file_ignite_services_plugin_grpc_v1_interface_proto_depIdxs, + EnumInfos: file_ignite_services_plugin_grpc_v1_interface_proto_enumTypes, + MessageInfos: file_ignite_services_plugin_grpc_v1_interface_proto_msgTypes, + }.Build() + File_ignite_services_plugin_grpc_v1_interface_proto = out.File + file_ignite_services_plugin_grpc_v1_interface_proto_goTypes = nil + file_ignite_services_plugin_grpc_v1_interface_proto_depIdxs = nil +} diff --git a/ignite/services/plugin/grpc/v1/interface_command.go b/ignite/services/plugin/grpc/v1/interface_command.go new file mode 100644 index 0000000..2b6617f --- /dev/null +++ b/ignite/services/plugin/grpc/v1/interface_command.go @@ -0,0 +1,92 @@ +package v1 + +import ( + "strings" + + "github.com/spf13/cobra" + "github.com/spf13/pflag" +) + +const igniteBinaryName = "ignite" + +// Path 
returns the absolute command path including the binary name as prefix. +func (c *Command) Path() string { + return ensureFullCommandPath(c.PlaceCommandUnder) +} + +// ToCobraCommand returns a new Cobra command that matches the current command. +func (c *Command) ToCobraCommand() (*cobra.Command, error) { + cmd := &cobra.Command{ + Use: c.Use, + Aliases: c.Aliases, + Short: c.Short, + Long: c.Long, + Hidden: c.Hidden, + } + + for _, f := range c.Flags { + var fs *pflag.FlagSet + if f.Persistent { + fs = cmd.PersistentFlags() + } else { + fs = cmd.Flags() + } + + if err := f.ExportToFlagSet(fs); err != nil { + return nil, err + } + } + + return cmd, nil +} + +// ImportFlags imports flags from a Cobra command. +func (c *ExecutedCommand) ImportFlags(cmd *cobra.Command) { + c.Flags = extractCobraFlags(cmd) +} + +// NewFlags creates a new flags set initialized with the executed command's flags. +func (c *ExecutedCommand) NewFlags() (*pflag.FlagSet, error) { + fs := pflag.NewFlagSet(igniteBinaryName, pflag.ContinueOnError) + + for _, f := range c.Flags { + if f.Persistent { + continue + } + + if err := f.ExportToFlagSet(fs); err != nil { + return nil, err + } + } + + return fs, nil +} + +// NewPersistentFlags creates a new flags set initialized with the executed command's persistent flags. 
+func (c *ExecutedCommand) NewPersistentFlags() (*pflag.FlagSet, error) {
+	// ContinueOnError makes parse failures surface as errors instead of exiting.
+	fs := pflag.NewFlagSet(igniteBinaryName, pflag.ContinueOnError)
+
+	for _, f := range c.Flags {
+		// Mirror of NewFlags: keep only the persistent flags.
+		if !f.Persistent {
+			continue
+		}
+
+		if err := f.ExportToFlagSet(fs); err != nil {
+			return nil, err
+		}
+	}
+
+	return fs, nil
+}
+
+// ensureFullCommandPath normalizes a command path so it always starts with the
+// "ignite" binary name. An empty or blank path resolves to just the binary name.
+// NOTE(review): HasPrefix also matches names like "ignited foo"; confirm that
+// callers always pass space-delimited command words.
+func ensureFullCommandPath(path string) string {
+	path = strings.TrimSpace(path)
+	if path == "" {
+		return igniteBinaryName
+	}
+
+	if !strings.HasPrefix(path, igniteBinaryName) {
+		path = igniteBinaryName + " " + path
+	}
+	return path
+}
diff --git a/ignite/services/plugin/grpc/v1/interface_flag.go b/ignite/services/plugin/grpc/v1/interface_flag.go
new file mode 100644
index 0000000..f4105ba
--- /dev/null
+++ b/ignite/services/plugin/grpc/v1/interface_flag.go
@@ -0,0 +1,169 @@
+package v1
+
+import (
+	"strconv"
+	"strings"
+
+	"github.com/spf13/pflag"
+
+	"github.com/ignite/cli/v29/ignite/pkg/errors"
+)
+
+// Type names pflag uses to identify flag value types (pflag.Value.Type()).
+const (
+	cobraFlagTypeBool        = "bool"
+	cobraFlagTypeInt         = "int"
+	cobraFlagTypeInt64       = "int64"
+	cobraFlagTypeString      = "string"
+	cobraFlagTypeStringSlice = "stringSlice"
+	cobraFlagTypeUint        = "uint"
+	cobraFlagTypeUint64      = "uint64"
+)
+
+// flagTypes maps pflag type names to the plugin protobuf Flag_Type enum.
+var flagTypes = map[string]Flag_Type{
+	cobraFlagTypeBool:        Flag_TYPE_FLAG_BOOL,
+	cobraFlagTypeInt:         Flag_TYPE_FLAG_INT,
+	cobraFlagTypeInt64:       Flag_TYPE_FLAG_INT64,
+	cobraFlagTypeString:      Flag_TYPE_FLAG_STRING_UNSPECIFIED,
+	cobraFlagTypeStringSlice: Flag_TYPE_FLAG_STRING_SLICE,
+	cobraFlagTypeUint:        Flag_TYPE_FLAG_UINT,
+	cobraFlagTypeUint64:      Flag_TYPE_FLAG_UINT64,
+}
+
+// newDefaultFlagValueError reports a flag value that cannot be parsed for its type.
+// NOTE(review): it is also reused below for invalid runtime values (fs.Set),
+// where the "default value" wording can mislead — consider a distinct message.
+func newDefaultFlagValueError(typeName, value string) error {
+	return errors.Errorf("invalid default value for plugin command %s flag: %s", typeName, value)
+}
+
+// ExportToFlagSet registers the flag into fs, parsing DefaultValue according to
+// the flag's type and then overriding with Value (via fs.Set) when non-empty.
+func (f *Flag) ExportToFlagSet(fs *pflag.FlagSet) error {
+	// Bool and numeric flags fall back to "0" when no default is given, so the
+	// strconv parsing below never fails on an empty string.
+	switch f.Type { //nolint:exhaustive
+	case Flag_TYPE_FLAG_BOOL,
+		Flag_TYPE_FLAG_INT,
+		Flag_TYPE_FLAG_INT64,
+		Flag_TYPE_FLAG_UINT,
+		Flag_TYPE_FLAG_UINT64:
+		if f.DefaultValue == "" {
+			f.DefaultValue = "0"
+		}
+	}
+
+	switch f.Type
{
+	case Flag_TYPE_FLAG_BOOL:
+		v, err := strconv.ParseBool(f.DefaultValue)
+		if err != nil {
+			return newDefaultFlagValueError(cobraFlagTypeBool, f.DefaultValue)
+		}
+
+		// Register the flag (with shorthand), then apply the captured runtime
+		// value, if any, via Set.
+		fs.BoolP(f.Name, f.Shorthand, v, f.Usage)
+		if f.Value != "" {
+			if err := fs.Set(f.Name, f.Value); err != nil {
+				return newDefaultFlagValueError(cobraFlagTypeBool, f.Value)
+			}
+		}
+	case Flag_TYPE_FLAG_INT:
+		v, err := strconv.Atoi(f.DefaultValue)
+		if err != nil {
+			return newDefaultFlagValueError(cobraFlagTypeInt, f.DefaultValue)
+		}
+
+		fs.IntP(f.Name, f.Shorthand, v, f.Usage)
+		if f.Value != "" {
+			if err := fs.Set(f.Name, f.Value); err != nil {
+				return newDefaultFlagValueError(cobraFlagTypeInt, f.Value)
+			}
+		}
+	case Flag_TYPE_FLAG_UINT:
+		// NOTE(review): parsed with 64-bit width and then narrowed to uint,
+		// which truncates values above MaxUint32 on 32-bit platforms — confirm
+		// this range is acceptable.
+		v, err := strconv.ParseUint(f.DefaultValue, 10, 64)
+		if err != nil {
+			return newDefaultFlagValueError(cobraFlagTypeUint, f.DefaultValue)
+		}
+
+		fs.UintP(f.Name, f.Shorthand, uint(v), f.Usage)
+		if f.Value != "" {
+			if err := fs.Set(f.Name, f.Value); err != nil {
+				return newDefaultFlagValueError(cobraFlagTypeUint, f.Value)
+			}
+		}
+	case Flag_TYPE_FLAG_INT64:
+		v, err := strconv.ParseInt(f.DefaultValue, 10, 64)
+		if err != nil {
+			return newDefaultFlagValueError(cobraFlagTypeInt64, f.DefaultValue)
+		}
+
+		fs.Int64P(f.Name, f.Shorthand, v, f.Usage)
+		if f.Value != "" {
+			if err := fs.Set(f.Name, f.Value); err != nil {
+				return newDefaultFlagValueError(cobraFlagTypeInt64, f.Value)
+			}
+		}
+	case Flag_TYPE_FLAG_UINT64:
+		v, err := strconv.ParseUint(f.DefaultValue, 10, 64)
+		if err != nil {
+			return newDefaultFlagValueError(cobraFlagTypeUint64, f.DefaultValue)
+		}
+
+		fs.Uint64P(f.Name, f.Shorthand, v, f.Usage)
+		if f.Value != "" {
+			if err := fs.Set(f.Name, f.Value); err != nil {
+				return newDefaultFlagValueError(cobraFlagTypeUint64, f.Value)
+			}
+		}
+	case Flag_TYPE_FLAG_STRING_SLICE:
+		// Slice defaults arrive bracketed; strip the brackets and split on
+		// whitespace.
+		// NOTE(review): pflag renders slice defaults comma-separated ("[a,b]");
+		// strings.Fields keeps that as a single element — confirm multi-element
+		// defaults round-trip correctly.
+		s := strings.Trim(f.DefaultValue, "[]")
+		fs.StringSliceP(f.Name, f.Shorthand, strings.Fields(s), f.Usage)
+		if f.Value != "" {
+			if err := fs.Set(f.Name,
strings.Trim(f.Value, "[]")); err != nil {
+				return newDefaultFlagValueError(cobraFlagTypeStringSlice, f.Value)
+			}
+		}
+	case Flag_TYPE_FLAG_STRING_UNSPECIFIED:
+		// Strings need no parsing; the default is used verbatim.
+		fs.StringP(f.Name, f.Shorthand, f.DefaultValue, f.Usage)
+		if f.Value != "" {
+			if err := fs.Set(f.Name, f.Value); err != nil {
+				return newDefaultFlagValueError(cobraFlagTypeString, f.Value)
+			}
+		}
+	}
+	return nil
+}
+
+// flagger is the minimal subset of *cobra.Command needed by extractCobraFlags.
+type flagger interface {
+	Flags() *pflag.FlagSet
+	PersistentFlags() *pflag.FlagSet
+}
+
+// extractCobraFlags converts a command's local and persistent Cobra flags into
+// plugin Flag messages. A flag present in both sets is emitted only once, as
+// persistent. Unknown pflag value types map to the enum zero value
+// (string/unspecified).
+func extractCobraFlags(cmd flagger) []*Flag {
+	var flags []*Flag
+
+	if cmd.Flags() != nil {
+		cmd.Flags().VisitAll(func(pf *pflag.Flag) {
+			// Skip persistent flags; they are collected in the second pass below.
+			if cmd.PersistentFlags().Lookup(pf.Name) != nil {
+				return
+			}
+
+			flags = append(flags, &Flag{
+				Name:         pf.Name,
+				Shorthand:    pf.Shorthand,
+				Usage:        pf.Usage,
+				DefaultValue: pf.DefValue,
+				Value:        pf.Value.String(),
+				Type:         flagTypes[pf.Value.Type()],
+			})
+		})
+	}
+
+	if cmd.PersistentFlags() != nil {
+		cmd.PersistentFlags().VisitAll(func(pf *pflag.Flag) {
+			flags = append(flags, &Flag{
+				Name:         pf.Name,
+				Shorthand:    pf.Shorthand,
+				Usage:        pf.Usage,
+				DefaultValue: pf.DefValue,
+				Value:        pf.Value.String(),
+				Type:         flagTypes[pf.Value.Type()],
+				Persistent:   true,
+			})
+		})
+	}
+
+	return flags
+}
diff --git a/ignite/services/plugin/grpc/v1/interface_hook.go b/ignite/services/plugin/grpc/v1/interface_hook.go
new file mode 100644
index 0000000..c03fecb
--- /dev/null
+++ b/ignite/services/plugin/grpc/v1/interface_hook.go
@@ -0,0 +1,13 @@
+package v1
+
+import "github.com/spf13/cobra"
+
+// CommandPath returns the absolute command path including the binary name as prefix.
+func (h *Hook) CommandPath() string {
+	return ensureFullCommandPath(h.PlaceHookOn)
+}
+
+// ImportFlags imports flags from a Cobra command.
+func (h *Hook) ImportFlags(cmd *cobra.Command) {
+	h.Flags = extractCobraFlags(cmd)
+}
diff --git a/ignite/services/plugin/grpc/v1/interface_manifest.go b/ignite/services/plugin/grpc/v1/interface_manifest.go
new file mode 100644
index 0000000..a388fcc
--- /dev/null
+++ b/ignite/services/plugin/grpc/v1/interface_manifest.go
@@ -0,0 +1,27 @@
+package v1
+
+import "github.com/spf13/cobra"
+
+// ImportCobraCommand appends Cobra command definitions to the list of plugin commands.
+// This method can be used in cases where a plugin defines the commands using Cobra.
+func (m *Manifest) ImportCobraCommand(cmd *cobra.Command, placeCommandUnder string) {
+	m.Commands = append(m.Commands, convertCobraCommand(cmd, placeCommandUnder))
+}
+
+// convertCobraCommand recursively maps a Cobra command tree to plugin Command
+// messages. Only the root receives placeCommandUnder; subcommands are converted
+// with an empty placement string.
+func convertCobraCommand(c *cobra.Command, placeCommandUnder string) *Command {
+	cmd := &Command{
+		Use:               c.Use,
+		Aliases:           c.Aliases,
+		Short:             c.Short,
+		Long:              c.Long,
+		Hidden:            c.Hidden,
+		PlaceCommandUnder: placeCommandUnder,
+		Flags:             extractCobraFlags(c),
+	}
+
+	for _, c := range c.Commands() {
+		cmd.Commands = append(cmd.Commands, convertCobraCommand(c, ""))
+	}
+
+	return cmd
+}
diff --git a/ignite/services/plugin/grpc/v1/service.pb.go b/ignite/services/plugin/grpc/v1/service.pb.go
new file mode 100644
index 0000000..0e7b5b2
--- /dev/null
+++ b/ignite/services/plugin/grpc/v1/service.pb.go
@@ -0,0 +1,750 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// versions:
+// protoc-gen-go v1.36.6
+// protoc (unknown)
+// source: ignite/services/plugin/grpc/v1/service.proto
+
+package v1
+
+import (
+	reflect "reflect"
+	sync "sync"
+	unsafe "unsafe"
+
+	protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+	protoimpl "google.golang.org/protobuf/runtime/protoimpl"
+)
+
+const (
+	// Verify that this generated code is sufficiently up-to-date.
+	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
+	// Verify that runtime/protoimpl is sufficiently up-to-date.
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type ManifestRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ManifestRequest) Reset() { + *x = ManifestRequest{} + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ManifestRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ManifestRequest) ProtoMessage() {} + +func (x *ManifestRequest) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ManifestRequest.ProtoReflect.Descriptor instead. +func (*ManifestRequest) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_service_proto_rawDescGZIP(), []int{0} +} + +type ManifestResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + Manifest *Manifest `protobuf:"bytes,1,opt,name=manifest,proto3" json:"manifest,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ManifestResponse) Reset() { + *x = ManifestResponse{} + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ManifestResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ManifestResponse) ProtoMessage() {} + +func (x *ManifestResponse) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + 
} + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ManifestResponse.ProtoReflect.Descriptor instead. +func (*ManifestResponse) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_service_proto_rawDescGZIP(), []int{1} +} + +func (x *ManifestResponse) GetManifest() *Manifest { + if x != nil { + return x.Manifest + } + return nil +} + +type ExecuteRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Cmd *ExecutedCommand `protobuf:"bytes,1,opt,name=cmd,proto3" json:"cmd,omitempty"` + ClientApi uint32 `protobuf:"varint,2,opt,name=client_api,json=clientApi,proto3" json:"client_api,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ExecuteRequest) Reset() { + *x = ExecuteRequest{} + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ExecuteRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExecuteRequest) ProtoMessage() {} + +func (x *ExecuteRequest) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExecuteRequest.ProtoReflect.Descriptor instead. 
+func (*ExecuteRequest) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_service_proto_rawDescGZIP(), []int{2} +} + +func (x *ExecuteRequest) GetCmd() *ExecutedCommand { + if x != nil { + return x.Cmd + } + return nil +} + +func (x *ExecuteRequest) GetClientApi() uint32 { + if x != nil { + return x.ClientApi + } + return 0 +} + +type ExecuteResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ExecuteResponse) Reset() { + *x = ExecuteResponse{} + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ExecuteResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExecuteResponse) ProtoMessage() {} + +func (x *ExecuteResponse) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExecuteResponse.ProtoReflect.Descriptor instead. 
+func (*ExecuteResponse) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_service_proto_rawDescGZIP(), []int{3} +} + +type ExecuteHookPreRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Hook *ExecutedHook `protobuf:"bytes,1,opt,name=hook,proto3" json:"hook,omitempty"` + ClientApi uint32 `protobuf:"varint,2,opt,name=client_api,json=clientApi,proto3" json:"client_api,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ExecuteHookPreRequest) Reset() { + *x = ExecuteHookPreRequest{} + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ExecuteHookPreRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExecuteHookPreRequest) ProtoMessage() {} + +func (x *ExecuteHookPreRequest) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExecuteHookPreRequest.ProtoReflect.Descriptor instead. 
+func (*ExecuteHookPreRequest) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_service_proto_rawDescGZIP(), []int{4} +} + +func (x *ExecuteHookPreRequest) GetHook() *ExecutedHook { + if x != nil { + return x.Hook + } + return nil +} + +func (x *ExecuteHookPreRequest) GetClientApi() uint32 { + if x != nil { + return x.ClientApi + } + return 0 +} + +type ExecuteHookPreResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ExecuteHookPreResponse) Reset() { + *x = ExecuteHookPreResponse{} + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ExecuteHookPreResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExecuteHookPreResponse) ProtoMessage() {} + +func (x *ExecuteHookPreResponse) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[5] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExecuteHookPreResponse.ProtoReflect.Descriptor instead. 
+func (*ExecuteHookPreResponse) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_service_proto_rawDescGZIP(), []int{5} +} + +type ExecuteHookPostRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Hook *ExecutedHook `protobuf:"bytes,1,opt,name=hook,proto3" json:"hook,omitempty"` + ClientApi uint32 `protobuf:"varint,2,opt,name=client_api,json=clientApi,proto3" json:"client_api,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ExecuteHookPostRequest) Reset() { + *x = ExecuteHookPostRequest{} + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ExecuteHookPostRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExecuteHookPostRequest) ProtoMessage() {} + +func (x *ExecuteHookPostRequest) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExecuteHookPostRequest.ProtoReflect.Descriptor instead. 
+func (*ExecuteHookPostRequest) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_service_proto_rawDescGZIP(), []int{6} +} + +func (x *ExecuteHookPostRequest) GetHook() *ExecutedHook { + if x != nil { + return x.Hook + } + return nil +} + +func (x *ExecuteHookPostRequest) GetClientApi() uint32 { + if x != nil { + return x.ClientApi + } + return 0 +} + +type ExecuteHookPostResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ExecuteHookPostResponse) Reset() { + *x = ExecuteHookPostResponse{} + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ExecuteHookPostResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExecuteHookPostResponse) ProtoMessage() {} + +func (x *ExecuteHookPostResponse) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExecuteHookPostResponse.ProtoReflect.Descriptor instead. 
+func (*ExecuteHookPostResponse) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_service_proto_rawDescGZIP(), []int{7} +} + +type ExecuteHookCleanUpRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Hook *ExecutedHook `protobuf:"bytes,1,opt,name=hook,proto3" json:"hook,omitempty"` + ClientApi uint32 `protobuf:"varint,2,opt,name=client_api,json=clientApi,proto3" json:"client_api,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ExecuteHookCleanUpRequest) Reset() { + *x = ExecuteHookCleanUpRequest{} + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ExecuteHookCleanUpRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExecuteHookCleanUpRequest) ProtoMessage() {} + +func (x *ExecuteHookCleanUpRequest) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[8] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExecuteHookCleanUpRequest.ProtoReflect.Descriptor instead. 
+func (*ExecuteHookCleanUpRequest) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_service_proto_rawDescGZIP(), []int{8} +} + +func (x *ExecuteHookCleanUpRequest) GetHook() *ExecutedHook { + if x != nil { + return x.Hook + } + return nil +} + +func (x *ExecuteHookCleanUpRequest) GetClientApi() uint32 { + if x != nil { + return x.ClientApi + } + return 0 +} + +type ExecuteHookCleanUpResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ExecuteHookCleanUpResponse) Reset() { + *x = ExecuteHookCleanUpResponse{} + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ExecuteHookCleanUpResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExecuteHookCleanUpResponse) ProtoMessage() {} + +func (x *ExecuteHookCleanUpResponse) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[9] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExecuteHookCleanUpResponse.ProtoReflect.Descriptor instead. 
+func (*ExecuteHookCleanUpResponse) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_service_proto_rawDescGZIP(), []int{9} +} + +type GetChainInfoRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GetChainInfoRequest) Reset() { + *x = GetChainInfoRequest{} + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetChainInfoRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetChainInfoRequest) ProtoMessage() {} + +func (x *GetChainInfoRequest) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[10] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetChainInfoRequest.ProtoReflect.Descriptor instead. 
+func (*GetChainInfoRequest) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_service_proto_rawDescGZIP(), []int{10} +} + +type GetChainInfoResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + ChainInfo *ChainInfo `protobuf:"bytes,1,opt,name=chain_info,json=chainInfo,proto3" json:"chain_info,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GetChainInfoResponse) Reset() { + *x = GetChainInfoResponse{} + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetChainInfoResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetChainInfoResponse) ProtoMessage() {} + +func (x *GetChainInfoResponse) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[11] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetChainInfoResponse.ProtoReflect.Descriptor instead. 
+func (*GetChainInfoResponse) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_service_proto_rawDescGZIP(), []int{11} +} + +func (x *GetChainInfoResponse) GetChainInfo() *ChainInfo { + if x != nil { + return x.ChainInfo + } + return nil +} + +type GetIgniteInfoRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GetIgniteInfoRequest) Reset() { + *x = GetIgniteInfoRequest{} + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetIgniteInfoRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetIgniteInfoRequest) ProtoMessage() {} + +func (x *GetIgniteInfoRequest) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[12] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetIgniteInfoRequest.ProtoReflect.Descriptor instead. 
+func (*GetIgniteInfoRequest) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_service_proto_rawDescGZIP(), []int{12} +} + +type GetIgniteInfoResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + IgniteInfo *IgniteInfo `protobuf:"bytes,1,opt,name=ignite_info,json=igniteInfo,proto3" json:"ignite_info,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GetIgniteInfoResponse) Reset() { + *x = GetIgniteInfoResponse{} + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetIgniteInfoResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetIgniteInfoResponse) ProtoMessage() {} + +func (x *GetIgniteInfoResponse) ProtoReflect() protoreflect.Message { + mi := &file_ignite_services_plugin_grpc_v1_service_proto_msgTypes[13] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetIgniteInfoResponse.ProtoReflect.Descriptor instead. 
+func (*GetIgniteInfoResponse) Descriptor() ([]byte, []int) { + return file_ignite_services_plugin_grpc_v1_service_proto_rawDescGZIP(), []int{13} +} + +func (x *GetIgniteInfoResponse) GetIgniteInfo() *IgniteInfo { + if x != nil { + return x.IgniteInfo + } + return nil +} + +var File_ignite_services_plugin_grpc_v1_service_proto protoreflect.FileDescriptor + +const file_ignite_services_plugin_grpc_v1_service_proto_rawDesc = "" + + "\n" + + ",ignite/services/plugin/grpc/v1/service.proto\x12\x1eignite.services.plugin.grpc.v1\x1a/ignite/services/plugin/grpc/v1/client_api.proto\x1a.ignite/services/plugin/grpc/v1/interface.proto\"\x11\n" + + "\x0fManifestRequest\"X\n" + + "\x10ManifestResponse\x12D\n" + + "\bmanifest\x18\x01 \x01(\v2(.ignite.services.plugin.grpc.v1.ManifestR\bmanifest\"r\n" + + "\x0eExecuteRequest\x12A\n" + + "\x03cmd\x18\x01 \x01(\v2/.ignite.services.plugin.grpc.v1.ExecutedCommandR\x03cmd\x12\x1d\n" + + "\n" + + "client_api\x18\x02 \x01(\rR\tclientApi\"\x11\n" + + "\x0fExecuteResponse\"x\n" + + "\x15ExecuteHookPreRequest\x12@\n" + + "\x04hook\x18\x01 \x01(\v2,.ignite.services.plugin.grpc.v1.ExecutedHookR\x04hook\x12\x1d\n" + + "\n" + + "client_api\x18\x02 \x01(\rR\tclientApi\"\x18\n" + + "\x16ExecuteHookPreResponse\"y\n" + + "\x16ExecuteHookPostRequest\x12@\n" + + "\x04hook\x18\x01 \x01(\v2,.ignite.services.plugin.grpc.v1.ExecutedHookR\x04hook\x12\x1d\n" + + "\n" + + "client_api\x18\x02 \x01(\rR\tclientApi\"\x19\n" + + "\x17ExecuteHookPostResponse\"|\n" + + "\x19ExecuteHookCleanUpRequest\x12@\n" + + "\x04hook\x18\x01 \x01(\v2,.ignite.services.plugin.grpc.v1.ExecutedHookR\x04hook\x12\x1d\n" + + "\n" + + "client_api\x18\x02 \x01(\rR\tclientApi\"\x1c\n" + + "\x1aExecuteHookCleanUpResponse\"\x15\n" + + "\x13GetChainInfoRequest\"`\n" + + "\x14GetChainInfoResponse\x12H\n" + + "\n" + + "chain_info\x18\x01 \x01(\v2).ignite.services.plugin.grpc.v1.ChainInfoR\tchainInfo\"\x16\n" + + "\x14GetIgniteInfoRequest\"d\n" + + "\x15GetIgniteInfoResponse\x12K\n" + + 
"\vignite_info\x18\x01 \x01(\v2*.ignite.services.plugin.grpc.v1.IgniteInfoR\n" + + "igniteInfo2\x81\x05\n" + + "\x10InterfaceService\x12m\n" + + "\bManifest\x12/.ignite.services.plugin.grpc.v1.ManifestRequest\x1a0.ignite.services.plugin.grpc.v1.ManifestResponse\x12j\n" + + "\aExecute\x12..ignite.services.plugin.grpc.v1.ExecuteRequest\x1a/.ignite.services.plugin.grpc.v1.ExecuteResponse\x12\x7f\n" + + "\x0eExecuteHookPre\x125.ignite.services.plugin.grpc.v1.ExecuteHookPreRequest\x1a6.ignite.services.plugin.grpc.v1.ExecuteHookPreResponse\x12\x82\x01\n" + + "\x0fExecuteHookPost\x126.ignite.services.plugin.grpc.v1.ExecuteHookPostRequest\x1a7.ignite.services.plugin.grpc.v1.ExecuteHookPostResponse\x12\x8b\x01\n" + + "\x12ExecuteHookCleanUp\x129.ignite.services.plugin.grpc.v1.ExecuteHookCleanUpRequest\x1a:.ignite.services.plugin.grpc.v1.ExecuteHookCleanUpResponse2\x8b\x02\n" + + "\x10ClientAPIService\x12y\n" + + "\fGetChainInfo\x123.ignite.services.plugin.grpc.v1.GetChainInfoRequest\x1a4.ignite.services.plugin.grpc.v1.GetChainInfoResponse\x12|\n" + + "\rGetIgniteInfo\x124.ignite.services.plugin.grpc.v1.GetIgniteInfoRequest\x1a5.ignite.services.plugin.grpc.v1.GetIgniteInfoResponseB:Z8github.com/ignite/cli/v29/ignite/services/plugin/grpc/v1b\x06proto3" + +var ( + file_ignite_services_plugin_grpc_v1_service_proto_rawDescOnce sync.Once + file_ignite_services_plugin_grpc_v1_service_proto_rawDescData []byte +) + +func file_ignite_services_plugin_grpc_v1_service_proto_rawDescGZIP() []byte { + file_ignite_services_plugin_grpc_v1_service_proto_rawDescOnce.Do(func() { + file_ignite_services_plugin_grpc_v1_service_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_ignite_services_plugin_grpc_v1_service_proto_rawDesc), len(file_ignite_services_plugin_grpc_v1_service_proto_rawDesc))) + }) + return file_ignite_services_plugin_grpc_v1_service_proto_rawDescData +} + +var file_ignite_services_plugin_grpc_v1_service_proto_msgTypes = make([]protoimpl.MessageInfo, 
14) +var file_ignite_services_plugin_grpc_v1_service_proto_goTypes = []any{ + (*ManifestRequest)(nil), // 0: ignite.services.plugin.grpc.v1.ManifestRequest + (*ManifestResponse)(nil), // 1: ignite.services.plugin.grpc.v1.ManifestResponse + (*ExecuteRequest)(nil), // 2: ignite.services.plugin.grpc.v1.ExecuteRequest + (*ExecuteResponse)(nil), // 3: ignite.services.plugin.grpc.v1.ExecuteResponse + (*ExecuteHookPreRequest)(nil), // 4: ignite.services.plugin.grpc.v1.ExecuteHookPreRequest + (*ExecuteHookPreResponse)(nil), // 5: ignite.services.plugin.grpc.v1.ExecuteHookPreResponse + (*ExecuteHookPostRequest)(nil), // 6: ignite.services.plugin.grpc.v1.ExecuteHookPostRequest + (*ExecuteHookPostResponse)(nil), // 7: ignite.services.plugin.grpc.v1.ExecuteHookPostResponse + (*ExecuteHookCleanUpRequest)(nil), // 8: ignite.services.plugin.grpc.v1.ExecuteHookCleanUpRequest + (*ExecuteHookCleanUpResponse)(nil), // 9: ignite.services.plugin.grpc.v1.ExecuteHookCleanUpResponse + (*GetChainInfoRequest)(nil), // 10: ignite.services.plugin.grpc.v1.GetChainInfoRequest + (*GetChainInfoResponse)(nil), // 11: ignite.services.plugin.grpc.v1.GetChainInfoResponse + (*GetIgniteInfoRequest)(nil), // 12: ignite.services.plugin.grpc.v1.GetIgniteInfoRequest + (*GetIgniteInfoResponse)(nil), // 13: ignite.services.plugin.grpc.v1.GetIgniteInfoResponse + (*Manifest)(nil), // 14: ignite.services.plugin.grpc.v1.Manifest + (*ExecutedCommand)(nil), // 15: ignite.services.plugin.grpc.v1.ExecutedCommand + (*ExecutedHook)(nil), // 16: ignite.services.plugin.grpc.v1.ExecutedHook + (*ChainInfo)(nil), // 17: ignite.services.plugin.grpc.v1.ChainInfo + (*IgniteInfo)(nil), // 18: ignite.services.plugin.grpc.v1.IgniteInfo +} +var file_ignite_services_plugin_grpc_v1_service_proto_depIdxs = []int32{ + 14, // 0: ignite.services.plugin.grpc.v1.ManifestResponse.manifest:type_name -> ignite.services.plugin.grpc.v1.Manifest + 15, // 1: ignite.services.plugin.grpc.v1.ExecuteRequest.cmd:type_name -> 
ignite.services.plugin.grpc.v1.ExecutedCommand + 16, // 2: ignite.services.plugin.grpc.v1.ExecuteHookPreRequest.hook:type_name -> ignite.services.plugin.grpc.v1.ExecutedHook + 16, // 3: ignite.services.plugin.grpc.v1.ExecuteHookPostRequest.hook:type_name -> ignite.services.plugin.grpc.v1.ExecutedHook + 16, // 4: ignite.services.plugin.grpc.v1.ExecuteHookCleanUpRequest.hook:type_name -> ignite.services.plugin.grpc.v1.ExecutedHook + 17, // 5: ignite.services.plugin.grpc.v1.GetChainInfoResponse.chain_info:type_name -> ignite.services.plugin.grpc.v1.ChainInfo + 18, // 6: ignite.services.plugin.grpc.v1.GetIgniteInfoResponse.ignite_info:type_name -> ignite.services.plugin.grpc.v1.IgniteInfo + 0, // 7: ignite.services.plugin.grpc.v1.InterfaceService.Manifest:input_type -> ignite.services.plugin.grpc.v1.ManifestRequest + 2, // 8: ignite.services.plugin.grpc.v1.InterfaceService.Execute:input_type -> ignite.services.plugin.grpc.v1.ExecuteRequest + 4, // 9: ignite.services.plugin.grpc.v1.InterfaceService.ExecuteHookPre:input_type -> ignite.services.plugin.grpc.v1.ExecuteHookPreRequest + 6, // 10: ignite.services.plugin.grpc.v1.InterfaceService.ExecuteHookPost:input_type -> ignite.services.plugin.grpc.v1.ExecuteHookPostRequest + 8, // 11: ignite.services.plugin.grpc.v1.InterfaceService.ExecuteHookCleanUp:input_type -> ignite.services.plugin.grpc.v1.ExecuteHookCleanUpRequest + 10, // 12: ignite.services.plugin.grpc.v1.ClientAPIService.GetChainInfo:input_type -> ignite.services.plugin.grpc.v1.GetChainInfoRequest + 12, // 13: ignite.services.plugin.grpc.v1.ClientAPIService.GetIgniteInfo:input_type -> ignite.services.plugin.grpc.v1.GetIgniteInfoRequest + 1, // 14: ignite.services.plugin.grpc.v1.InterfaceService.Manifest:output_type -> ignite.services.plugin.grpc.v1.ManifestResponse + 3, // 15: ignite.services.plugin.grpc.v1.InterfaceService.Execute:output_type -> ignite.services.plugin.grpc.v1.ExecuteResponse + 5, // 16: 
ignite.services.plugin.grpc.v1.InterfaceService.ExecuteHookPre:output_type -> ignite.services.plugin.grpc.v1.ExecuteHookPreResponse + 7, // 17: ignite.services.plugin.grpc.v1.InterfaceService.ExecuteHookPost:output_type -> ignite.services.plugin.grpc.v1.ExecuteHookPostResponse + 9, // 18: ignite.services.plugin.grpc.v1.InterfaceService.ExecuteHookCleanUp:output_type -> ignite.services.plugin.grpc.v1.ExecuteHookCleanUpResponse + 11, // 19: ignite.services.plugin.grpc.v1.ClientAPIService.GetChainInfo:output_type -> ignite.services.plugin.grpc.v1.GetChainInfoResponse + 13, // 20: ignite.services.plugin.grpc.v1.ClientAPIService.GetIgniteInfo:output_type -> ignite.services.plugin.grpc.v1.GetIgniteInfoResponse + 14, // [14:21] is the sub-list for method output_type + 7, // [7:14] is the sub-list for method input_type + 7, // [7:7] is the sub-list for extension type_name + 7, // [7:7] is the sub-list for extension extendee + 0, // [0:7] is the sub-list for field type_name +} + +func init() { file_ignite_services_plugin_grpc_v1_service_proto_init() } +func file_ignite_services_plugin_grpc_v1_service_proto_init() { + if File_ignite_services_plugin_grpc_v1_service_proto != nil { + return + } + file_ignite_services_plugin_grpc_v1_client_api_proto_init() + file_ignite_services_plugin_grpc_v1_interface_proto_init() + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_ignite_services_plugin_grpc_v1_service_proto_rawDesc), len(file_ignite_services_plugin_grpc_v1_service_proto_rawDesc)), + NumEnums: 0, + NumMessages: 14, + NumExtensions: 0, + NumServices: 2, + }, + GoTypes: file_ignite_services_plugin_grpc_v1_service_proto_goTypes, + DependencyIndexes: file_ignite_services_plugin_grpc_v1_service_proto_depIdxs, + MessageInfos: file_ignite_services_plugin_grpc_v1_service_proto_msgTypes, + }.Build() + File_ignite_services_plugin_grpc_v1_service_proto = 
out.File + file_ignite_services_plugin_grpc_v1_service_proto_goTypes = nil + file_ignite_services_plugin_grpc_v1_service_proto_depIdxs = nil +} diff --git a/ignite/services/plugin/grpc/v1/service_grpc.pb.go b/ignite/services/plugin/grpc/v1/service_grpc.pb.go new file mode 100644 index 0000000..8b6e83c --- /dev/null +++ b/ignite/services/plugin/grpc/v1/service_grpc.pb.go @@ -0,0 +1,460 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc (unknown) +// source: ignite/services/plugin/grpc/v1/service.proto + +package v1 + +import ( + context "context" + + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + InterfaceService_Manifest_FullMethodName = "/ignite.services.plugin.grpc.v1.InterfaceService/Manifest" + InterfaceService_Execute_FullMethodName = "/ignite.services.plugin.grpc.v1.InterfaceService/Execute" + InterfaceService_ExecuteHookPre_FullMethodName = "/ignite.services.plugin.grpc.v1.InterfaceService/ExecuteHookPre" + InterfaceService_ExecuteHookPost_FullMethodName = "/ignite.services.plugin.grpc.v1.InterfaceService/ExecuteHookPost" + InterfaceService_ExecuteHookCleanUp_FullMethodName = "/ignite.services.plugin.grpc.v1.InterfaceService/ExecuteHookCleanUp" +) + +// InterfaceServiceClient is the client API for InterfaceService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +// +// InterfaceService defines the interface that must be implemented by all plugins. +type InterfaceServiceClient interface { + // Manifest declares the plugin's Command(s) and Hook(s). 
+ Manifest(ctx context.Context, in *ManifestRequest, opts ...grpc.CallOption) (*ManifestResponse, error) + // Execute will be invoked by ignite when a plugin Command is executed. + // It is global for all commands declared in Manifest, if you have declared + // multiple commands, use cmd.Path to distinguish them. + Execute(ctx context.Context, in *ExecuteRequest, opts ...grpc.CallOption) (*ExecuteResponse, error) + // ExecuteHookPre is invoked by ignite when a command specified by the Hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + ExecuteHookPre(ctx context.Context, in *ExecuteHookPreRequest, opts ...grpc.CallOption) (*ExecuteHookPreResponse, error) + // ExecuteHookPost is invoked by ignite when a command specified by the hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + ExecuteHookPost(ctx context.Context, in *ExecuteHookPostRequest, opts ...grpc.CallOption) (*ExecuteHookPostResponse, error) + // ExecuteHookCleanUp is invoked by ignite when a command specified by the + // hook path is invoked. Unlike ExecuteHookPost, it is invoked regardless of + // execution status of the command and hooks. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. 
+ ExecuteHookCleanUp(ctx context.Context, in *ExecuteHookCleanUpRequest, opts ...grpc.CallOption) (*ExecuteHookCleanUpResponse, error) +} + +type interfaceServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewInterfaceServiceClient(cc grpc.ClientConnInterface) InterfaceServiceClient { + return &interfaceServiceClient{cc} +} + +func (c *interfaceServiceClient) Manifest(ctx context.Context, in *ManifestRequest, opts ...grpc.CallOption) (*ManifestResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ManifestResponse) + err := c.cc.Invoke(ctx, InterfaceService_Manifest_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *interfaceServiceClient) Execute(ctx context.Context, in *ExecuteRequest, opts ...grpc.CallOption) (*ExecuteResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ExecuteResponse) + err := c.cc.Invoke(ctx, InterfaceService_Execute_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *interfaceServiceClient) ExecuteHookPre(ctx context.Context, in *ExecuteHookPreRequest, opts ...grpc.CallOption) (*ExecuteHookPreResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ExecuteHookPreResponse) + err := c.cc.Invoke(ctx, InterfaceService_ExecuteHookPre_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *interfaceServiceClient) ExecuteHookPost(ctx context.Context, in *ExecuteHookPostRequest, opts ...grpc.CallOption) (*ExecuteHookPostResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ExecuteHookPostResponse) + err := c.cc.Invoke(ctx, InterfaceService_ExecuteHookPost_FullMethodName, in, out, cOpts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *interfaceServiceClient) ExecuteHookCleanUp(ctx context.Context, in *ExecuteHookCleanUpRequest, opts ...grpc.CallOption) (*ExecuteHookCleanUpResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ExecuteHookCleanUpResponse) + err := c.cc.Invoke(ctx, InterfaceService_ExecuteHookCleanUp_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// InterfaceServiceServer is the server API for InterfaceService service. +// All implementations must embed UnimplementedInterfaceServiceServer +// for forward compatibility. +// +// InterfaceService defines the interface that must be implemented by all plugins. +type InterfaceServiceServer interface { + // Manifest declares the plugin's Command(s) and Hook(s). + Manifest(context.Context, *ManifestRequest) (*ManifestResponse, error) + // Execute will be invoked by ignite when a plugin Command is executed. + // It is global for all commands declared in Manifest, if you have declared + // multiple commands, use cmd.Path to distinguish them. + Execute(context.Context, *ExecuteRequest) (*ExecuteResponse, error) + // ExecuteHookPre is invoked by ignite when a command specified by the Hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + ExecuteHookPre(context.Context, *ExecuteHookPreRequest) (*ExecuteHookPreResponse, error) + // ExecuteHookPost is invoked by ignite when a command specified by the hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + ExecuteHookPost(context.Context, *ExecuteHookPostRequest) (*ExecuteHookPostResponse, error) + // ExecuteHookCleanUp is invoked by ignite when a command specified by the + // hook path is invoked. 
Unlike ExecuteHookPost, it is invoked regardless of + // execution status of the command and hooks. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + ExecuteHookCleanUp(context.Context, *ExecuteHookCleanUpRequest) (*ExecuteHookCleanUpResponse, error) + mustEmbedUnimplementedInterfaceServiceServer() +} + +// UnimplementedInterfaceServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedInterfaceServiceServer struct{} + +func (UnimplementedInterfaceServiceServer) Manifest(context.Context, *ManifestRequest) (*ManifestResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Manifest not implemented") +} +func (UnimplementedInterfaceServiceServer) Execute(context.Context, *ExecuteRequest) (*ExecuteResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Execute not implemented") +} +func (UnimplementedInterfaceServiceServer) ExecuteHookPre(context.Context, *ExecuteHookPreRequest) (*ExecuteHookPreResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ExecuteHookPre not implemented") +} +func (UnimplementedInterfaceServiceServer) ExecuteHookPost(context.Context, *ExecuteHookPostRequest) (*ExecuteHookPostResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ExecuteHookPost not implemented") +} +func (UnimplementedInterfaceServiceServer) ExecuteHookCleanUp(context.Context, *ExecuteHookCleanUpRequest) (*ExecuteHookCleanUpResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ExecuteHookCleanUp not implemented") +} +func (UnimplementedInterfaceServiceServer) mustEmbedUnimplementedInterfaceServiceServer() {} +func (UnimplementedInterfaceServiceServer) testEmbeddedByValue() {} + +// UnsafeInterfaceServiceServer may be embedded 
to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to InterfaceServiceServer will +// result in compilation errors. +type UnsafeInterfaceServiceServer interface { + mustEmbedUnimplementedInterfaceServiceServer() +} + +func RegisterInterfaceServiceServer(s grpc.ServiceRegistrar, srv InterfaceServiceServer) { + // If the following call pancis, it indicates UnimplementedInterfaceServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. + if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&InterfaceService_ServiceDesc, srv) +} + +func _InterfaceService_Manifest_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ManifestRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InterfaceServiceServer).Manifest(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: InterfaceService_Manifest_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InterfaceServiceServer).Manifest(ctx, req.(*ManifestRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _InterfaceService_Execute_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExecuteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InterfaceServiceServer).Execute(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: InterfaceService_Execute_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) 
(interface{}, error) { + return srv.(InterfaceServiceServer).Execute(ctx, req.(*ExecuteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _InterfaceService_ExecuteHookPre_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExecuteHookPreRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InterfaceServiceServer).ExecuteHookPre(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: InterfaceService_ExecuteHookPre_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InterfaceServiceServer).ExecuteHookPre(ctx, req.(*ExecuteHookPreRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _InterfaceService_ExecuteHookPost_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExecuteHookPostRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InterfaceServiceServer).ExecuteHookPost(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: InterfaceService_ExecuteHookPost_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InterfaceServiceServer).ExecuteHookPost(ctx, req.(*ExecuteHookPostRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _InterfaceService_ExecuteHookCleanUp_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExecuteHookCleanUpRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InterfaceServiceServer).ExecuteHookCleanUp(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
InterfaceService_ExecuteHookCleanUp_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InterfaceServiceServer).ExecuteHookCleanUp(ctx, req.(*ExecuteHookCleanUpRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// InterfaceService_ServiceDesc is the grpc.ServiceDesc for InterfaceService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var InterfaceService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "ignite.services.plugin.grpc.v1.InterfaceService", + HandlerType: (*InterfaceServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Manifest", + Handler: _InterfaceService_Manifest_Handler, + }, + { + MethodName: "Execute", + Handler: _InterfaceService_Execute_Handler, + }, + { + MethodName: "ExecuteHookPre", + Handler: _InterfaceService_ExecuteHookPre_Handler, + }, + { + MethodName: "ExecuteHookPost", + Handler: _InterfaceService_ExecuteHookPost_Handler, + }, + { + MethodName: "ExecuteHookCleanUp", + Handler: _InterfaceService_ExecuteHookCleanUp_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "ignite/services/plugin/grpc/v1/service.proto", +} + +const ( + ClientAPIService_GetChainInfo_FullMethodName = "/ignite.services.plugin.grpc.v1.ClientAPIService/GetChainInfo" + ClientAPIService_GetIgniteInfo_FullMethodName = "/ignite.services.plugin.grpc.v1.ClientAPIService/GetIgniteInfo" +) + +// ClientAPIServiceClient is the client API for ClientAPIService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +// +// ClientAPIService defines the interface that allows plugins to get chain app analysis info. 
+type ClientAPIServiceClient interface { + // GetChainInfo returns basic chain info for the configured app + GetChainInfo(ctx context.Context, in *GetChainInfoRequest, opts ...grpc.CallOption) (*GetChainInfoResponse, error) + // GetIgniteInfo returns basic ignite info + GetIgniteInfo(ctx context.Context, in *GetIgniteInfoRequest, opts ...grpc.CallOption) (*GetIgniteInfoResponse, error) +} + +type clientAPIServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewClientAPIServiceClient(cc grpc.ClientConnInterface) ClientAPIServiceClient { + return &clientAPIServiceClient{cc} +} + +func (c *clientAPIServiceClient) GetChainInfo(ctx context.Context, in *GetChainInfoRequest, opts ...grpc.CallOption) (*GetChainInfoResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetChainInfoResponse) + err := c.cc.Invoke(ctx, ClientAPIService_GetChainInfo_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clientAPIServiceClient) GetIgniteInfo(ctx context.Context, in *GetIgniteInfoRequest, opts ...grpc.CallOption) (*GetIgniteInfoResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetIgniteInfoResponse) + err := c.cc.Invoke(ctx, ClientAPIService_GetIgniteInfo_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ClientAPIServiceServer is the server API for ClientAPIService service. +// All implementations must embed UnimplementedClientAPIServiceServer +// for forward compatibility. +// +// ClientAPIService defines the interface that allows plugins to get chain app analysis info. 
+type ClientAPIServiceServer interface { + // GetChainInfo returns basic chain info for the configured app + GetChainInfo(context.Context, *GetChainInfoRequest) (*GetChainInfoResponse, error) + // GetIgniteInfo returns basic ignite info + GetIgniteInfo(context.Context, *GetIgniteInfoRequest) (*GetIgniteInfoResponse, error) + mustEmbedUnimplementedClientAPIServiceServer() +} + +// UnimplementedClientAPIServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedClientAPIServiceServer struct{} + +func (UnimplementedClientAPIServiceServer) GetChainInfo(context.Context, *GetChainInfoRequest) (*GetChainInfoResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetChainInfo not implemented") +} +func (UnimplementedClientAPIServiceServer) GetIgniteInfo(context.Context, *GetIgniteInfoRequest) (*GetIgniteInfoResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetIgniteInfo not implemented") +} +func (UnimplementedClientAPIServiceServer) mustEmbedUnimplementedClientAPIServiceServer() {} +func (UnimplementedClientAPIServiceServer) testEmbeddedByValue() {} + +// UnsafeClientAPIServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to ClientAPIServiceServer will +// result in compilation errors. +type UnsafeClientAPIServiceServer interface { + mustEmbedUnimplementedClientAPIServiceServer() +} + +func RegisterClientAPIServiceServer(s grpc.ServiceRegistrar, srv ClientAPIServiceServer) { + // If the following call pancis, it indicates UnimplementedClientAPIServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. 
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&ClientAPIService_ServiceDesc, srv) +} + +func _ClientAPIService_GetChainInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetChainInfoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClientAPIServiceServer).GetChainInfo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ClientAPIService_GetChainInfo_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClientAPIServiceServer).GetChainInfo(ctx, req.(*GetChainInfoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClientAPIService_GetIgniteInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetIgniteInfoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClientAPIServiceServer).GetIgniteInfo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ClientAPIService_GetIgniteInfo_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClientAPIServiceServer).GetIgniteInfo(ctx, req.(*GetIgniteInfoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// ClientAPIService_ServiceDesc is the grpc.ServiceDesc for ClientAPIService service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var ClientAPIService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "ignite.services.plugin.grpc.v1.ClientAPIService", + HandlerType: (*ClientAPIServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetChainInfo", + Handler: _ClientAPIService_GetChainInfo_Handler, + }, + { + MethodName: "GetIgniteInfo", + Handler: _ClientAPIService_GetIgniteInfo_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "ignite/services/plugin/grpc/v1/service.proto", +} diff --git a/ignite/services/plugin/grpc/v1/types_command_test.go b/ignite/services/plugin/grpc/v1/types_command_test.go new file mode 100644 index 0000000..ab0d8f6 --- /dev/null +++ b/ignite/services/plugin/grpc/v1/types_command_test.go @@ -0,0 +1,358 @@ +package v1_test + +import ( + "fmt" + "testing" + + "github.com/spf13/cobra" + "github.com/spf13/pflag" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + v1 "github.com/ignite/cli/v29/ignite/services/plugin/grpc/v1" +) + +func TestCommandToCobraCommand(t *testing.T) { + var ( + require = require.New(t) + assert = assert.New(t) + pcmd = v1.Command{ + Use: "new", + Aliases: []string{"n"}, + Short: "short", + Long: "long", + Hidden: true, + Flags: []*v1.Flag{ + { + Name: "bool", + Shorthand: "b", + DefaultValue: "true", + Value: "true", + Usage: "a bool", + Type: v1.Flag_TYPE_FLAG_BOOL, + }, + { + Name: "string", + DefaultValue: "hello", + Value: "hello", + Usage: "a string", + Type: v1.Flag_TYPE_FLAG_STRING_UNSPECIFIED, + Persistent: true, + }, + }, + Commands: []*v1.Command{ + { + Use: "sub", + Aliases: []string{"s"}, + Short: "sub short", + Long: "sub long", + }, + }, + } + ) + + cmd, err := pcmd.ToCobraCommand() + + require.NoError(err) + require.NotNil(cmd) + assert.Empty(cmd.Commands()) // subcommands aren't converted + assert.Equal(pcmd.Use, cmd.Use) + assert.Equal(pcmd.Short, 
cmd.Short) + assert.Equal(pcmd.Long, cmd.Long) + assert.Equal(pcmd.Aliases, cmd.Aliases) + assert.Equal(pcmd.Hidden, cmd.Hidden) + for _, f := range pcmd.Flags { + if f.Persistent { + assert.NotNil(cmd.PersistentFlags().Lookup(f.Name), "missing pflag %s", f.Name) + } else { + assert.NotNil(cmd.Flags().Lookup(f.Name), "missing flag %s", f.Name) + } + } +} + +func TestCommandPath(t *testing.T) { + cases := []struct { + name, wantPath string + cmd *v1.Command + }{ + { + name: "relative path", + cmd: &v1.Command{ + PlaceCommandUnder: "chain", + }, + wantPath: "ignite chain", + }, + { + name: "full path", + cmd: &v1.Command{ + PlaceCommandUnder: "ignite chain", + }, + wantPath: "ignite chain", + }, + { + name: "path with spaces", + cmd: &v1.Command{ + PlaceCommandUnder: " ignite scaffold ", + }, + wantPath: "ignite scaffold", + }, + } + + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + path := tc.cmd.Path() + require.Equal(t, tc.wantPath, path) + }) + } +} + +func TestExecutedCommandImportFlags(t *testing.T) { + // Arrange + execCmd := &v1.ExecutedCommand{} + wantFlags := []*v1.Flag{ + { + Name: "foo", + Shorthand: "f", + Usage: "foo usage", + DefaultValue: "bar", + Value: "baz", + Type: v1.Flag_TYPE_FLAG_STRING_UNSPECIFIED, + }, { + Name: "test", + Shorthand: "t", + Usage: "test usage", + DefaultValue: "1", + Value: "42", + Type: v1.Flag_TYPE_FLAG_INT, + Persistent: true, + }, + } + + cmd := cobra.Command{} + cmd.Flags().StringP("foo", "f", "bar", "foo usage") + cmd.PersistentFlags().IntP("test", "t", 1, "test usage") + err := cmd.ParseFlags([]string{"--foo", "baz", "--test", "42"}) + require.NoError(t, err) + + // Act + execCmd.ImportFlags(&cmd) + + // Assert + require.Equal(t, wantFlags, execCmd.Flags) +} + +func TestExecutedCommandNewFlags(t *testing.T) { + // Arrange + execCmd := &v1.ExecutedCommand{ + Flags: []*v1.Flag{ + { + Name: "bool", + Shorthand: "b", + Usage: "bool usage", + DefaultValue: "false", + Value: "true", + Type: 
v1.Flag_TYPE_FLAG_BOOL, + }, + { + Name: "int", + Shorthand: "i", + Usage: "int usage", + DefaultValue: "0", + Value: "42", + Type: v1.Flag_TYPE_FLAG_INT, + }, + { + Name: "uint", + Shorthand: "u", + Usage: "uint usage", + DefaultValue: "0", + Value: "42", + Type: v1.Flag_TYPE_FLAG_UINT, + }, + { + Name: "int64", + Shorthand: "j", + Usage: "int64 usage", + DefaultValue: "0", + Value: "42", + Type: v1.Flag_TYPE_FLAG_INT64, + }, + { + Name: "uint64", + Shorthand: "k", + Usage: "uint64 usage", + DefaultValue: "0", + Value: "42", + Type: v1.Flag_TYPE_FLAG_UINT64, + }, + { + Name: "string", + Shorthand: "s", + Usage: "string usage", + DefaultValue: "", + Value: "hello", + Type: v1.Flag_TYPE_FLAG_STRING_UNSPECIFIED, + }, + { + Name: "string-slice", + Shorthand: "l", + Usage: "string slice usage", + DefaultValue: "[]", + Value: "[]", + Type: v1.Flag_TYPE_FLAG_STRING_SLICE, + }, + { + Name: "persistent", + Persistent: true, + }, + }, + } + + wantFlags := make(map[string]pflag.Flag) + for _, f := range execCmd.Flags { + wantFlags[f.Name] = pflag.Flag{ + Name: f.Name, + Shorthand: f.Shorthand, + Usage: f.Usage, + DefValue: f.DefaultValue, + } + } + + var ( + flagCount int + + // Persistent flag should not be included + wantFlagCount = len(execCmd.Flags) - 1 + ) + + // Act + flags, err := execCmd.NewFlags() + + // Assert + require.NoError(t, err) + + flags.VisitAll(func(f *pflag.Flag) { + flag, ok := wantFlags[f.Name] + + require.True(t, ok, fmt.Sprintf("missing flag: %s", f.Name)) + require.Equal(t, flag.Name, f.Name) + require.Equal(t, flag.Shorthand, f.Shorthand) + require.Equal(t, flag.Usage, f.Usage) + require.Equal(t, flag.DefValue, f.DefValue) + + flagCount++ + }) + + require.Equal(t, wantFlagCount, flagCount) +} + +func TestExecutedCommandNewPersistentFlags(t *testing.T) { + // Arrange + execCmd := &v1.ExecutedCommand{ + Flags: []*v1.Flag{ + { + Name: "bool", + Shorthand: "b", + Usage: "bool usage", + DefaultValue: "false", + Value: "true", + Type: 
v1.Flag_TYPE_FLAG_BOOL, + Persistent: true, + }, + { + Name: "int", + Shorthand: "i", + Usage: "int usage", + DefaultValue: "0", + Value: "42", + Type: v1.Flag_TYPE_FLAG_INT, + Persistent: true, + }, + { + Name: "uint", + Shorthand: "u", + Usage: "uint usage", + DefaultValue: "0", + Value: "42", + Type: v1.Flag_TYPE_FLAG_UINT, + Persistent: true, + }, + { + Name: "int64", + Shorthand: "j", + Usage: "int64 usage", + DefaultValue: "0", + Value: "42", + Type: v1.Flag_TYPE_FLAG_INT64, + Persistent: true, + }, + { + Name: "uint64", + Shorthand: "k", + Usage: "uint64 usage", + DefaultValue: "0", + Value: "42", + Type: v1.Flag_TYPE_FLAG_UINT64, + Persistent: true, + }, + { + Name: "string", + Shorthand: "s", + Usage: "string usage", + DefaultValue: "", + Value: "hello", + Type: v1.Flag_TYPE_FLAG_STRING_UNSPECIFIED, + Persistent: true, + }, + { + Name: "string-slice", + Shorthand: "l", + Usage: "string slice usage", + DefaultValue: "[]", + Value: "[]", + Type: v1.Flag_TYPE_FLAG_STRING_SLICE, + Persistent: true, + }, + { + Name: "non-persistent", + }, + }, + } + + wantFlags := make(map[string]pflag.Flag) + for _, f := range execCmd.Flags { + wantFlags[f.Name] = pflag.Flag{ + Name: f.Name, + Shorthand: f.Shorthand, + Usage: f.Usage, + DefValue: f.DefaultValue, + } + } + + var ( + flagCount int + + // Non persistent flag should not be included + wantFlagCount = len(execCmd.Flags) - 1 + ) + + // Act + flags, err := execCmd.NewPersistentFlags() + + // Assert + require.NoError(t, err) + + flags.VisitAll(func(f *pflag.Flag) { + flag, ok := wantFlags[f.Name] + + require.True(t, ok, fmt.Sprintf("missing flag: %s", f.Name)) + require.Equal(t, flag.Name, f.Name) + require.Equal(t, flag.Shorthand, f.Shorthand) + require.Equal(t, flag.Usage, f.Usage) + require.Equal(t, flag.DefValue, f.DefValue) + + flagCount++ + }) + + require.Equal(t, wantFlagCount, flagCount) +} diff --git a/ignite/services/plugin/grpc/v1/types_hook_test.go b/ignite/services/plugin/grpc/v1/types_hook_test.go new 
file mode 100644 index 0000000..d14c707 --- /dev/null +++ b/ignite/services/plugin/grpc/v1/types_hook_test.go @@ -0,0 +1,45 @@ +package v1_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + v1 "github.com/ignite/cli/v29/ignite/services/plugin/grpc/v1" +) + +func TestHookCommandPath(t *testing.T) { + cases := []struct { + name, wantPath string + hook *v1.Hook + }{ + { + name: "relative path", + hook: &v1.Hook{ + PlaceHookOn: "chain", + }, + wantPath: "ignite chain", + }, + { + name: "full path", + hook: &v1.Hook{ + PlaceHookOn: "ignite chain", + }, + wantPath: "ignite chain", + }, + { + name: "path with spaces", + hook: &v1.Hook{ + PlaceHookOn: " ignite scaffold ", + }, + wantPath: "ignite scaffold", + }, + } + + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + path := tc.hook.CommandPath() + require.Equal(t, tc.wantPath, path) + }) + } +} diff --git a/ignite/services/plugin/grpc/v1/types_manifest_test.go b/ignite/services/plugin/grpc/v1/types_manifest_test.go new file mode 100644 index 0000000..b6688dd --- /dev/null +++ b/ignite/services/plugin/grpc/v1/types_manifest_test.go @@ -0,0 +1,110 @@ +package v1_test + +import ( + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + + v1 "github.com/ignite/cli/v29/ignite/services/plugin/grpc/v1" +) + +func TestManifestImportCobraCommand(t *testing.T) { + manifest := &v1.Manifest{ + Name: "hey", + Commands: []*v1.Command{ + {Use: "existing"}, + }, + } + cmd := &cobra.Command{ + Use: "new", + Aliases: []string{"n"}, + Short: "short", + Long: "long", + Hidden: true, + } + cmd.Flags().BoolP("bool", "b", true, "a bool") + cmd.Flags().String("string", "hello", "a string") + cmd.PersistentFlags().String("persistent", "hello", "a persistent string") + subcmd := &cobra.Command{ + Use: "sub", + Aliases: []string{"s"}, + Short: "sub short", + Long: "sub long", + } + subcmd.Flags().BoolP("subbool", "b", true, "a bool") + subcmd.Flags().String("substring", "hello", "a 
string") + subcmd.AddCommand(&cobra.Command{ + Use: "subsub", + }) + cmd.AddCommand(subcmd) + + manifest.ImportCobraCommand(cmd, "under") + + expectedManifest := &v1.Manifest{ + Name: "hey", + Commands: []*v1.Command{ + {Use: "existing"}, + { + Use: "new", + Aliases: []string{"n"}, + Short: "short", + Long: "long", + Hidden: true, + PlaceCommandUnder: "under", + Flags: []*v1.Flag{ + { + Name: "bool", + Shorthand: "b", + DefaultValue: "true", + Value: "true", + Usage: "a bool", + Type: v1.Flag_TYPE_FLAG_BOOL, + }, + { + Name: "string", + DefaultValue: "hello", + Value: "hello", + Usage: "a string", + Type: v1.Flag_TYPE_FLAG_STRING_UNSPECIFIED, + }, + { + Name: "persistent", + DefaultValue: "hello", + Value: "hello", + Usage: "a persistent string", + Type: v1.Flag_TYPE_FLAG_STRING_UNSPECIFIED, + Persistent: true, + }, + }, + Commands: []*v1.Command{ + { + Use: "sub", + Aliases: []string{"s"}, + Short: "sub short", + Long: "sub long", + Flags: []*v1.Flag{ + { + Name: "subbool", + Shorthand: "b", + DefaultValue: "true", + Value: "true", + Usage: "a bool", + Type: v1.Flag_TYPE_FLAG_BOOL, + }, + { + Name: "substring", + DefaultValue: "hello", + Value: "hello", + Usage: "a string", + Type: v1.Flag_TYPE_FLAG_STRING_UNSPECIFIED, + }, + }, + Commands: []*v1.Command{{Use: "subsub"}}, + }, + }, + }, + }, + } + assert.Equal(t, expectedManifest, manifest) +} diff --git a/ignite/services/plugin/interface.go b/ignite/services/plugin/interface.go new file mode 100644 index 0000000..be7bf22 --- /dev/null +++ b/ignite/services/plugin/interface.go @@ -0,0 +1,77 @@ +package plugin + +import ( + "context" + + v1 "github.com/ignite/cli/v29/ignite/services/plugin/grpc/v1" +) + +// Flag type aliases. 
+const ( + FlagTypeString = v1.Flag_TYPE_FLAG_STRING_UNSPECIFIED + FlagTypeInt = v1.Flag_TYPE_FLAG_INT + FlagTypeUint = v1.Flag_TYPE_FLAG_UINT + FlagTypeInt64 = v1.Flag_TYPE_FLAG_INT64 + FlagTypeUint64 = v1.Flag_TYPE_FLAG_UINT64 + FlagTypeBool = v1.Flag_TYPE_FLAG_BOOL + FlagTypeStringSlice = v1.Flag_TYPE_FLAG_STRING_SLICE +) + +// Type aliases for the current plugin version. +type ( + Command = v1.Command + ChainInfo = v1.ChainInfo + IgniteInfo = v1.IgniteInfo + ExecutedCommand = v1.ExecutedCommand + ExecutedHook = v1.ExecutedHook + Flag = v1.Flag + FlagType = v1.Flag_Type + Hook = v1.Hook + Manifest = v1.Manifest +) + +// Interface defines the interface that all Ignite App must implement. +// +//go:generate mockery --srcpkg . --name Interface --structname PluginInterface --filename interface.go --with-expecter +type Interface interface { + // Manifest declares the app's Command(s) and Hook(s). + Manifest(context.Context) (*Manifest, error) + + // Execute will be invoked by ignite when an app Command is executed. + // It is global for all commands declared in Manifest, if you have declared + // multiple commands, use cmd.Path to distinguish them. + // The clientAPI argument can be used by plugins to get chain app analysis info. + Execute(context.Context, *ExecutedCommand, ClientAPI) error + + // ExecuteHookPre is invoked by ignite when a command specified by the Hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + // The clientAPI argument can be used by plugins to get chain app analysis info. + ExecuteHookPre(context.Context, *ExecutedHook, ClientAPI) error + + // ExecuteHookPost is invoked by ignite when a command specified by the hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. 
+ // The clientAPI argument can be used by plugins to get chain app analysis info. + ExecuteHookPost(context.Context, *ExecutedHook, ClientAPI) error + + // ExecuteHookCleanUp is invoked by ignite when a command specified by the + // hook path is invoked. Unlike ExecuteHookPost, it is invoked regardless of + // execution status of the command and hooks. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + // The clientAPI argument can be used by plugins to get chain app analysis info. + ExecuteHookCleanUp(context.Context, *ExecutedHook, ClientAPI) error +} + +// ClientAPI defines the interface for plugins to get chain app code analysis info. +// +//go:generate mockery --srcpkg . --name ClientAPI --structname PluginClientAPI --filename client_api.go --with-expecter +type ClientAPI interface { + // GetChainInfo returns basic info for the configured blockchain app. + GetChainInfo(context.Context) (*ChainInfo, error) + // GetIgniteInfo returns basic info for the Ignite. + GetIgniteInfo(context.Context) (*IgniteInfo, error) +} diff --git a/ignite/services/plugin/mocks/chainer.go b/ignite/services/plugin/mocks/chainer.go new file mode 100644 index 0000000..5653b75 --- /dev/null +++ b/ignite/services/plugin/mocks/chainer.go @@ -0,0 +1,287 @@ +// Code generated by mockery v2.53.3. DO NOT EDIT. 
+ +package mocks + +import mock "github.com/stretchr/testify/mock" + +// ChainerInterface is an autogenerated mock type for the Chainer type +type ChainerInterface struct { + mock.Mock +} + +type ChainerInterface_Expecter struct { + mock *mock.Mock +} + +func (_m *ChainerInterface) EXPECT() *ChainerInterface_Expecter { + return &ChainerInterface_Expecter{mock: &_m.Mock} +} + +// AppPath provides a mock function with no fields +func (_m *ChainerInterface) AppPath() string { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for AppPath") + } + + var r0 string + if rf, ok := ret.Get(0).(func() string); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// ChainerInterface_AppPath_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AppPath' +type ChainerInterface_AppPath_Call struct { + *mock.Call +} + +// AppPath is a helper method to define mock.On call +func (_e *ChainerInterface_Expecter) AppPath() *ChainerInterface_AppPath_Call { + return &ChainerInterface_AppPath_Call{Call: _e.mock.On("AppPath")} +} + +func (_c *ChainerInterface_AppPath_Call) Run(run func()) *ChainerInterface_AppPath_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *ChainerInterface_AppPath_Call) Return(_a0 string) *ChainerInterface_AppPath_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ChainerInterface_AppPath_Call) RunAndReturn(run func() string) *ChainerInterface_AppPath_Call { + _c.Call.Return(run) + return _c +} + +// ConfigPath provides a mock function with no fields +func (_m *ChainerInterface) ConfigPath() string { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for ConfigPath") + } + + var r0 string + if rf, ok := ret.Get(0).(func() string); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// ChainerInterface_ConfigPath_Call is a *mock.Call that shadows Run/Return methods with type 
explicit version for method 'ConfigPath' +type ChainerInterface_ConfigPath_Call struct { + *mock.Call +} + +// ConfigPath is a helper method to define mock.On call +func (_e *ChainerInterface_Expecter) ConfigPath() *ChainerInterface_ConfigPath_Call { + return &ChainerInterface_ConfigPath_Call{Call: _e.mock.On("ConfigPath")} +} + +func (_c *ChainerInterface_ConfigPath_Call) Run(run func()) *ChainerInterface_ConfigPath_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *ChainerInterface_ConfigPath_Call) Return(_a0 string) *ChainerInterface_ConfigPath_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ChainerInterface_ConfigPath_Call) RunAndReturn(run func() string) *ChainerInterface_ConfigPath_Call { + _c.Call.Return(run) + return _c +} + +// Home provides a mock function with no fields +func (_m *ChainerInterface) Home() (string, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Home") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func() (string, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() string); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ChainerInterface_Home_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Home' +type ChainerInterface_Home_Call struct { + *mock.Call +} + +// Home is a helper method to define mock.On call +func (_e *ChainerInterface_Expecter) Home() *ChainerInterface_Home_Call { + return &ChainerInterface_Home_Call{Call: _e.mock.On("Home")} +} + +func (_c *ChainerInterface_Home_Call) Run(run func()) *ChainerInterface_Home_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *ChainerInterface_Home_Call) Return(_a0 string, _a1 error) *ChainerInterface_Home_Call { + _c.Call.Return(_a0, _a1) + return _c +} + 
+func (_c *ChainerInterface_Home_Call) RunAndReturn(run func() (string, error)) *ChainerInterface_Home_Call { + _c.Call.Return(run) + return _c +} + +// ID provides a mock function with no fields +func (_m *ChainerInterface) ID() (string, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for ID") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func() (string, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() string); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ChainerInterface_ID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ID' +type ChainerInterface_ID_Call struct { + *mock.Call +} + +// ID is a helper method to define mock.On call +func (_e *ChainerInterface_Expecter) ID() *ChainerInterface_ID_Call { + return &ChainerInterface_ID_Call{Call: _e.mock.On("ID")} +} + +func (_c *ChainerInterface_ID_Call) Run(run func()) *ChainerInterface_ID_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *ChainerInterface_ID_Call) Return(_a0 string, _a1 error) *ChainerInterface_ID_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *ChainerInterface_ID_Call) RunAndReturn(run func() (string, error)) *ChainerInterface_ID_Call { + _c.Call.Return(run) + return _c +} + +// RPCPublicAddress provides a mock function with no fields +func (_m *ChainerInterface) RPCPublicAddress() (string, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for RPCPublicAddress") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func() (string, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() string); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = 
ret.Error(1) + } + + return r0, r1 +} + +// ChainerInterface_RPCPublicAddress_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RPCPublicAddress' +type ChainerInterface_RPCPublicAddress_Call struct { + *mock.Call +} + +// RPCPublicAddress is a helper method to define mock.On call +func (_e *ChainerInterface_Expecter) RPCPublicAddress() *ChainerInterface_RPCPublicAddress_Call { + return &ChainerInterface_RPCPublicAddress_Call{Call: _e.mock.On("RPCPublicAddress")} +} + +func (_c *ChainerInterface_RPCPublicAddress_Call) Run(run func()) *ChainerInterface_RPCPublicAddress_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *ChainerInterface_RPCPublicAddress_Call) Return(_a0 string, _a1 error) *ChainerInterface_RPCPublicAddress_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *ChainerInterface_RPCPublicAddress_Call) RunAndReturn(run func() (string, error)) *ChainerInterface_RPCPublicAddress_Call { + _c.Call.Return(run) + return _c +} + +// NewChainerInterface creates a new instance of ChainerInterface. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewChainerInterface(t interface { + mock.TestingT + Cleanup(func()) +}) *ChainerInterface { + mock := &ChainerInterface{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/ignite/services/plugin/mocks/client_api.go b/ignite/services/plugin/mocks/client_api.go new file mode 100644 index 0000000..9d275d7 --- /dev/null +++ b/ignite/services/plugin/mocks/client_api.go @@ -0,0 +1,154 @@ +// Code generated by mockery v2.53.3. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + mock "github.com/stretchr/testify/mock" + + v1 "github.com/ignite/cli/v29/ignite/services/plugin/grpc/v1" +) + +// PluginClientAPI is an autogenerated mock type for the ClientAPI type +type PluginClientAPI struct { + mock.Mock +} + +type PluginClientAPI_Expecter struct { + mock *mock.Mock +} + +func (_m *PluginClientAPI) EXPECT() *PluginClientAPI_Expecter { + return &PluginClientAPI_Expecter{mock: &_m.Mock} +} + +// GetChainInfo provides a mock function with given fields: _a0 +func (_m *PluginClientAPI) GetChainInfo(_a0 context.Context) (*v1.ChainInfo, error) { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for GetChainInfo") + } + + var r0 *v1.ChainInfo + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (*v1.ChainInfo, error)); ok { + return rf(_a0) + } + if rf, ok := ret.Get(0).(func(context.Context) *v1.ChainInfo); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*v1.ChainInfo) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PluginClientAPI_GetChainInfo_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetChainInfo' +type PluginClientAPI_GetChainInfo_Call struct { + *mock.Call +} + +// GetChainInfo is a helper method to define mock.On call +// - _a0 context.Context +func (_e *PluginClientAPI_Expecter) GetChainInfo(_a0 interface{}) *PluginClientAPI_GetChainInfo_Call { + return &PluginClientAPI_GetChainInfo_Call{Call: _e.mock.On("GetChainInfo", _a0)} +} + +func (_c *PluginClientAPI_GetChainInfo_Call) Run(run func(_a0 context.Context)) *PluginClientAPI_GetChainInfo_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *PluginClientAPI_GetChainInfo_Call) Return(_a0 *v1.ChainInfo, _a1 error) *PluginClientAPI_GetChainInfo_Call { + 
_c.Call.Return(_a0, _a1) + return _c +} + +func (_c *PluginClientAPI_GetChainInfo_Call) RunAndReturn(run func(context.Context) (*v1.ChainInfo, error)) *PluginClientAPI_GetChainInfo_Call { + _c.Call.Return(run) + return _c +} + +// GetIgniteInfo provides a mock function with given fields: _a0 +func (_m *PluginClientAPI) GetIgniteInfo(_a0 context.Context) (*v1.IgniteInfo, error) { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for GetIgniteInfo") + } + + var r0 *v1.IgniteInfo + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (*v1.IgniteInfo, error)); ok { + return rf(_a0) + } + if rf, ok := ret.Get(0).(func(context.Context) *v1.IgniteInfo); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*v1.IgniteInfo) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PluginClientAPI_GetIgniteInfo_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetIgniteInfo' +type PluginClientAPI_GetIgniteInfo_Call struct { + *mock.Call +} + +// GetIgniteInfo is a helper method to define mock.On call +// - _a0 context.Context +func (_e *PluginClientAPI_Expecter) GetIgniteInfo(_a0 interface{}) *PluginClientAPI_GetIgniteInfo_Call { + return &PluginClientAPI_GetIgniteInfo_Call{Call: _e.mock.On("GetIgniteInfo", _a0)} +} + +func (_c *PluginClientAPI_GetIgniteInfo_Call) Run(run func(_a0 context.Context)) *PluginClientAPI_GetIgniteInfo_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *PluginClientAPI_GetIgniteInfo_Call) Return(_a0 *v1.IgniteInfo, _a1 error) *PluginClientAPI_GetIgniteInfo_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *PluginClientAPI_GetIgniteInfo_Call) RunAndReturn(run func(context.Context) (*v1.IgniteInfo, error)) *PluginClientAPI_GetIgniteInfo_Call { + _c.Call.Return(run) + return _c +} + +// 
NewPluginClientAPI creates a new instance of PluginClientAPI. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewPluginClientAPI(t interface { + mock.TestingT + Cleanup(func()) +}) *PluginClientAPI { + mock := &PluginClientAPI{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/ignite/services/plugin/mocks/interface.go b/ignite/services/plugin/mocks/interface.go new file mode 100644 index 0000000..55c72eb --- /dev/null +++ b/ignite/services/plugin/mocks/interface.go @@ -0,0 +1,290 @@ +// Code generated by mockery v2.53.3. DO NOT EDIT. + +package mocks + +import ( + context "context" + + mock "github.com/stretchr/testify/mock" + + plugin "github.com/ignite/cli/v29/ignite/services/plugin" + + v1 "github.com/ignite/cli/v29/ignite/services/plugin/grpc/v1" +) + +// PluginInterface is an autogenerated mock type for the Interface type +type PluginInterface struct { + mock.Mock +} + +type PluginInterface_Expecter struct { + mock *mock.Mock +} + +func (_m *PluginInterface) EXPECT() *PluginInterface_Expecter { + return &PluginInterface_Expecter{mock: &_m.Mock} +} + +// Execute provides a mock function with given fields: _a0, _a1, _a2 +func (_m *PluginInterface) Execute(_a0 context.Context, _a1 *v1.ExecutedCommand, _a2 plugin.ClientAPI) error { + ret := _m.Called(_a0, _a1, _a2) + + if len(ret) == 0 { + panic("no return value specified for Execute") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *v1.ExecutedCommand, plugin.ClientAPI) error); ok { + r0 = rf(_a0, _a1, _a2) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// PluginInterface_Execute_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Execute' +type PluginInterface_Execute_Call struct { + *mock.Call +} + +// Execute is a helper method to define mock.On call +// - _a0 
context.Context +// - _a1 *v1.ExecutedCommand +// - _a2 plugin.ClientAPI +func (_e *PluginInterface_Expecter) Execute(_a0 interface{}, _a1 interface{}, _a2 interface{}) *PluginInterface_Execute_Call { + return &PluginInterface_Execute_Call{Call: _e.mock.On("Execute", _a0, _a1, _a2)} +} + +func (_c *PluginInterface_Execute_Call) Run(run func(_a0 context.Context, _a1 *v1.ExecutedCommand, _a2 plugin.ClientAPI)) *PluginInterface_Execute_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*v1.ExecutedCommand), args[2].(plugin.ClientAPI)) + }) + return _c +} + +func (_c *PluginInterface_Execute_Call) Return(_a0 error) *PluginInterface_Execute_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *PluginInterface_Execute_Call) RunAndReturn(run func(context.Context, *v1.ExecutedCommand, plugin.ClientAPI) error) *PluginInterface_Execute_Call { + _c.Call.Return(run) + return _c +} + +// ExecuteHookCleanUp provides a mock function with given fields: _a0, _a1, _a2 +func (_m *PluginInterface) ExecuteHookCleanUp(_a0 context.Context, _a1 *v1.ExecutedHook, _a2 plugin.ClientAPI) error { + ret := _m.Called(_a0, _a1, _a2) + + if len(ret) == 0 { + panic("no return value specified for ExecuteHookCleanUp") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *v1.ExecutedHook, plugin.ClientAPI) error); ok { + r0 = rf(_a0, _a1, _a2) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// PluginInterface_ExecuteHookCleanUp_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ExecuteHookCleanUp' +type PluginInterface_ExecuteHookCleanUp_Call struct { + *mock.Call +} + +// ExecuteHookCleanUp is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 *v1.ExecutedHook +// - _a2 plugin.ClientAPI +func (_e *PluginInterface_Expecter) ExecuteHookCleanUp(_a0 interface{}, _a1 interface{}, _a2 interface{}) *PluginInterface_ExecuteHookCleanUp_Call { + return 
&PluginInterface_ExecuteHookCleanUp_Call{Call: _e.mock.On("ExecuteHookCleanUp", _a0, _a1, _a2)} +} + +func (_c *PluginInterface_ExecuteHookCleanUp_Call) Run(run func(_a0 context.Context, _a1 *v1.ExecutedHook, _a2 plugin.ClientAPI)) *PluginInterface_ExecuteHookCleanUp_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*v1.ExecutedHook), args[2].(plugin.ClientAPI)) + }) + return _c +} + +func (_c *PluginInterface_ExecuteHookCleanUp_Call) Return(_a0 error) *PluginInterface_ExecuteHookCleanUp_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *PluginInterface_ExecuteHookCleanUp_Call) RunAndReturn(run func(context.Context, *v1.ExecutedHook, plugin.ClientAPI) error) *PluginInterface_ExecuteHookCleanUp_Call { + _c.Call.Return(run) + return _c +} + +// ExecuteHookPost provides a mock function with given fields: _a0, _a1, _a2 +func (_m *PluginInterface) ExecuteHookPost(_a0 context.Context, _a1 *v1.ExecutedHook, _a2 plugin.ClientAPI) error { + ret := _m.Called(_a0, _a1, _a2) + + if len(ret) == 0 { + panic("no return value specified for ExecuteHookPost") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *v1.ExecutedHook, plugin.ClientAPI) error); ok { + r0 = rf(_a0, _a1, _a2) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// PluginInterface_ExecuteHookPost_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ExecuteHookPost' +type PluginInterface_ExecuteHookPost_Call struct { + *mock.Call +} + +// ExecuteHookPost is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 *v1.ExecutedHook +// - _a2 plugin.ClientAPI +func (_e *PluginInterface_Expecter) ExecuteHookPost(_a0 interface{}, _a1 interface{}, _a2 interface{}) *PluginInterface_ExecuteHookPost_Call { + return &PluginInterface_ExecuteHookPost_Call{Call: _e.mock.On("ExecuteHookPost", _a0, _a1, _a2)} +} + +func (_c *PluginInterface_ExecuteHookPost_Call) Run(run func(_a0 context.Context, _a1 
*v1.ExecutedHook, _a2 plugin.ClientAPI)) *PluginInterface_ExecuteHookPost_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*v1.ExecutedHook), args[2].(plugin.ClientAPI)) + }) + return _c +} + +func (_c *PluginInterface_ExecuteHookPost_Call) Return(_a0 error) *PluginInterface_ExecuteHookPost_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *PluginInterface_ExecuteHookPost_Call) RunAndReturn(run func(context.Context, *v1.ExecutedHook, plugin.ClientAPI) error) *PluginInterface_ExecuteHookPost_Call { + _c.Call.Return(run) + return _c +} + +// ExecuteHookPre provides a mock function with given fields: _a0, _a1, _a2 +func (_m *PluginInterface) ExecuteHookPre(_a0 context.Context, _a1 *v1.ExecutedHook, _a2 plugin.ClientAPI) error { + ret := _m.Called(_a0, _a1, _a2) + + if len(ret) == 0 { + panic("no return value specified for ExecuteHookPre") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *v1.ExecutedHook, plugin.ClientAPI) error); ok { + r0 = rf(_a0, _a1, _a2) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// PluginInterface_ExecuteHookPre_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ExecuteHookPre' +type PluginInterface_ExecuteHookPre_Call struct { + *mock.Call +} + +// ExecuteHookPre is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 *v1.ExecutedHook +// - _a2 plugin.ClientAPI +func (_e *PluginInterface_Expecter) ExecuteHookPre(_a0 interface{}, _a1 interface{}, _a2 interface{}) *PluginInterface_ExecuteHookPre_Call { + return &PluginInterface_ExecuteHookPre_Call{Call: _e.mock.On("ExecuteHookPre", _a0, _a1, _a2)} +} + +func (_c *PluginInterface_ExecuteHookPre_Call) Run(run func(_a0 context.Context, _a1 *v1.ExecutedHook, _a2 plugin.ClientAPI)) *PluginInterface_ExecuteHookPre_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*v1.ExecutedHook), args[2].(plugin.ClientAPI)) + }) + 
return _c +} + +func (_c *PluginInterface_ExecuteHookPre_Call) Return(_a0 error) *PluginInterface_ExecuteHookPre_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *PluginInterface_ExecuteHookPre_Call) RunAndReturn(run func(context.Context, *v1.ExecutedHook, plugin.ClientAPI) error) *PluginInterface_ExecuteHookPre_Call { + _c.Call.Return(run) + return _c +} + +// Manifest provides a mock function with given fields: _a0 +func (_m *PluginInterface) Manifest(_a0 context.Context) (*v1.Manifest, error) { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for Manifest") + } + + var r0 *v1.Manifest + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (*v1.Manifest, error)); ok { + return rf(_a0) + } + if rf, ok := ret.Get(0).(func(context.Context) *v1.Manifest); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*v1.Manifest) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PluginInterface_Manifest_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Manifest' +type PluginInterface_Manifest_Call struct { + *mock.Call +} + +// Manifest is a helper method to define mock.On call +// - _a0 context.Context +func (_e *PluginInterface_Expecter) Manifest(_a0 interface{}) *PluginInterface_Manifest_Call { + return &PluginInterface_Manifest_Call{Call: _e.mock.On("Manifest", _a0)} +} + +func (_c *PluginInterface_Manifest_Call) Run(run func(_a0 context.Context)) *PluginInterface_Manifest_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *PluginInterface_Manifest_Call) Return(_a0 *v1.Manifest, _a1 error) *PluginInterface_Manifest_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *PluginInterface_Manifest_Call) RunAndReturn(run func(context.Context) (*v1.Manifest, error)) *PluginInterface_Manifest_Call { + 
_c.Call.Return(run) + return _c +} + +// NewPluginInterface creates a new instance of PluginInterface. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewPluginInterface(t interface { + mock.TestingT + Cleanup(func()) +}) *PluginInterface { + mock := &PluginInterface{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/ignite/services/plugin/plugin.go b/ignite/services/plugin/plugin.go new file mode 100644 index 0000000..79a1185 --- /dev/null +++ b/ignite/services/plugin/plugin.go @@ -0,0 +1,432 @@ +// Package plugin implements ignite plugin management. +// An ignite plugin is a binary which communicates with the ignite binary +// via RPC thanks to the github.com/hashicorp/go-plugin library. +package plugin + +import ( + "context" + "fmt" + "io" + "io/fs" + "os" + "os/exec" + "path" + "path/filepath" + "strings" + "time" + + "github.com/hashicorp/go-hclog" + hplugin "github.com/hashicorp/go-plugin" + + "github.com/ignite/cli/v29/ignite/config" + pluginsconfig "github.com/ignite/cli/v29/ignite/config/plugins" + "github.com/ignite/cli/v29/ignite/pkg/cliui/icons" + "github.com/ignite/cli/v29/ignite/pkg/env" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/events" + "github.com/ignite/cli/v29/ignite/pkg/gocmd" + "github.com/ignite/cli/v29/ignite/pkg/xfilepath" + "github.com/ignite/cli/v29/ignite/pkg/xgit" + "github.com/ignite/cli/v29/ignite/pkg/xurl" +) + +// PluginsPath holds the plugin cache directory. +var PluginsPath = xfilepath.Mkdir(xfilepath.Join( + config.DirPath, + xfilepath.Path("apps"), +)) + +// Plugin represents a ignite plugin. +type Plugin struct { + // Embed the plugin configuration. + pluginsconfig.Plugin + + // Interface allows to communicate with the plugin via RPC. 
+ Interface Interface + + // If any error occurred during the plugin load, it's stored here. + Error error + + name string + repoPath string + cloneURL string + cloneDir string + reference string + srcPath string + + client *hplugin.Client + + // Holds a cache of the plugin manifest to prevent mant calls over the rpc boundary. + manifest *Manifest + + // If a plugin's ShareHost flag is set to true, isHost is used to discern if a + // plugin instance is controlling the rpc server. + isHost bool + isSharedHost bool + + ev events.Bus + + stdout io.Writer + stderr io.Writer +} + +// Option configures Plugin. +type Option func(*Plugin) + +// CollectEvents collects events from the chain. +func CollectEvents(ev events.Bus) Option { + return func(p *Plugin) { + p.ev = ev + } +} + +func RedirectStdout(w io.Writer) Option { + return func(p *Plugin) { + p.stdout = w + } +} + +func RedirectStderr(w io.Writer) Option { + return func(p *Plugin) { + p.stderr = w + } +} + +// Load loads the plugins found in the chain config. +// +// There's 2 kinds of plugins, local or remote. +// Local plugins have their path starting with a `/`, while remote plugins don't. +// Local plugins are useful for development purpose. +// Remote plugins require to be fetched first, in $HOME/.ignite/apps folder, +// then they are loaded from there. +// +// If an error occurs during a plugin load, it's not returned but rather stored in +// the `Plugin.Error` field. This prevents the loading of other plugins to be interrupted. +func Load(ctx context.Context, plugins []pluginsconfig.Plugin, options ...Option) ([]*Plugin, error) { + pluginsDir, err := PluginsPath() + if err != nil { + return nil, errors.WithStack(err) + } + var loaded []*Plugin + for _, cp := range plugins { + p := newPlugin(pluginsDir, cp, options...) + p.load(ctx) + + loaded = append(loaded, p) + } + return loaded, nil +} + +// Update removes the cache directory of plugins and fetch them again. 
+func Update(plugins ...*Plugin) error { + for _, p := range plugins { + if err := p.clean(); err != nil { + return err + } + p.fetch() + } + return nil +} + +// newPlugin creates a Plugin from configuration. +func newPlugin(pluginsDir string, cp pluginsconfig.Plugin, options ...Option) *Plugin { + var ( + p = &Plugin{ + Plugin: cp, + stdout: os.Stdout, + stderr: os.Stderr, + } + pluginPath = cp.Path + ) + if pluginPath == "" { + p.Error = errors.Errorf(`missing app property "path"`) + return p + } + + // Apply the options + for _, apply := range options { + apply(p) + } + + // This is a local plugin, check if the directory exists + if filepath.IsAbs(pluginPath) { + + st, err := os.Stat(pluginPath) + if err != nil { + p.Error = errors.Wrapf(err, "local app path %q not found", pluginPath) + return p + } + if !st.IsDir() { + p.Error = errors.Errorf("local app path %q is not a directory", pluginPath) + return p + } + p.srcPath = pluginPath + p.name = path.Base(pluginPath) + return p + } + + // This is a remote plugin, parse the URL + if i := strings.LastIndex(pluginPath, "@"); i != -1 { + // path contains a reference + p.reference = pluginPath[i+1:] + pluginPath = pluginPath[:i] + } + parts := strings.Split(pluginPath, "/") + if len(parts) < 3 { + p.Error = errors.Errorf("app path %q is not a valid repository URL", pluginPath) + return p + } + p.repoPath = path.Join(parts[:3]...) + p.cloneURL, _ = xurl.HTTPS(p.repoPath) + + if len(p.reference) > 0 { + ref := strings.ReplaceAll(p.reference, "/", "-") + p.cloneDir = path.Join(pluginsDir, fmt.Sprintf("%s-%s", p.repoPath, ref)) + p.repoPath += "@" + p.reference + } else { + p.cloneDir = path.Join(pluginsDir, p.repoPath) + } + + // Plugin can have a subpath within its repository. + // For example, "github.com/ignite/apps/app1" where "app1" is the subpath. + repoSubPath := path.Join(parts[3:]...) 
+ + p.srcPath = path.Join(p.cloneDir, repoSubPath) + p.name = path.Base(pluginPath) + + return p +} + +// KillClient kills the running plugin client. +func (p *Plugin) KillClient() { + if p.isSharedHost && !p.isHost { + // Don't send kill signal to a shared-host plugin when this process isn't + // the one who initiated it. + return + } + + if p.client != nil { + p.client.Kill() + } + + if p.isHost { + _ = deleteConfCache(p.Path) + p.isHost = false + } +} + +// Manifest returns plugin's manigest. +// The manifest is available after the plugin has been loaded. +func (p Plugin) Manifest() *Manifest { + return p.manifest +} + +func (p Plugin) binaryName() string { + return fmt.Sprintf("%s.ign", p.name) +} + +func (p Plugin) binaryPath() string { + return path.Join(p.srcPath, p.binaryName()) +} + +// load tries to fill p.Interface, ensuring the plugin is usable. +func (p *Plugin) load(ctx context.Context) { + if p.Error != nil { + return + } + _, err := os.Stat(p.srcPath) + if err != nil { + // srcPath found, need to fetch the plugin + p.fetch() + if p.Error != nil { + return + } + } + + if p.IsLocalPath() { + // trigger rebuild for local plugin if binary is outdated + if p.outdatedBinary() { + p.build(ctx) + } + } else { + // Check if binary is already build + _, err = os.Stat(p.binaryPath()) + if err != nil { + // binary not found, need to build it + p.build(ctx) + } + } + if p.Error != nil { + return + } + // pluginMap is the map of plugins we can dispense. 
+ pluginMap := map[string]hplugin.Plugin{ + p.name: NewGRPC(nil), + } + // Create an hclog.Logger + logLevel := hclog.Error + if env.IsDebug() { + logLevel = hclog.Trace + } + logger := hclog.New(&hclog.LoggerOptions{ + Name: fmt.Sprintf("app %s", p.Path), + Output: p.stderr, + Level: logLevel, + }) + + // Common plugin client configuration values + cfg := &hplugin.ClientConfig{ + HandshakeConfig: HandshakeConfig(), + Plugins: pluginMap, + Logger: logger, + SyncStdout: p.stdout, + SyncStderr: p.stderr, + AllowedProtocols: []hplugin.Protocol{hplugin.ProtocolGRPC}, + } + + if checkConfCache(p.Path) { + rconf, err := readConfigCache(p.Path) + if err != nil { + p.Error = err + return + } + + // Attach to an existing plugin process + cfg.Reattach = &rconf + p.client = hplugin.NewClient(cfg) + } else { + // Launch a new plugin process + cfg.Cmd = exec.Command(p.binaryPath()) //nolint:gosec + p.client = hplugin.NewClient(cfg) + } + + // Connect via gRPC + rpcClient, err := p.client.Client() + if err != nil { + p.Error = errors.Wrapf(err, "connecting") + return + } + + // Request the plugin + raw, err := rpcClient.Dispense(p.name) + if err != nil { + p.Error = errors.Wrapf(err, "dispensing") + return + } + + // We should have an Interface now! This feels like a normal interface + // implementation but is in fact over an gRPC connection. + p.Interface = raw.(Interface) + + m, err := p.Interface.Manifest(ctx) + if err != nil { + p.Error = errors.Wrapf(err, "manifest load") + return + } + + p.isSharedHost = m.SharedHost + + // Cache the manifest to avoid extra plugin requests + p.manifest = m + + // write the rpc context to cache if the plugin is declared as host. + // writing it to cache as lost operation within load to assure rpc client's reattach config + // is hydrated. 
+ if m.SharedHost && !checkConfCache(p.Path) { + err := writeConfigCache(p.Path, *p.client.ReattachConfig()) + if err != nil { + p.Error = err + return + } + + // set the plugin's rpc server as host so other plugin clients may share + p.isHost = true + } +} + +// fetch clones the plugin repository at the expected reference. +func (p *Plugin) fetch() { + if p.IsLocalPath() { + return + } + if p.Error != nil { + return + } + p.ev.Send(fmt.Sprintf("Fetching app %q", p.cloneURL), events.ProgressStart()) + defer p.ev.Send(fmt.Sprintf("%s App fetched %q", icons.OK, p.cloneURL), events.ProgressFinish()) + + urlref := strings.Join([]string{p.cloneURL, p.reference}, "@") + err := xgit.Clone(context.Background(), urlref, p.cloneDir) + if err != nil { + p.Error = errors.Wrapf(err, "cloning %q", p.repoPath) + } +} + +// build compiles the plugin binary. +func (p *Plugin) build(ctx context.Context) { + if p.Error != nil { + return + } + p.ev.Send(fmt.Sprintf("Building app %q", p.Path), events.ProgressStart()) + defer p.ev.Send(fmt.Sprintf("%s App built %q", icons.OK, p.Path), events.ProgressFinish()) + + if err := gocmd.ModTidy(ctx, p.srcPath); err != nil { + p.Error = errors.Wrapf(err, "go mod tidy") + return + } + if err := gocmd.Build(ctx, p.binaryName(), p.srcPath, nil); err != nil { + p.Error = errors.Wrapf(err, "go build") + return + } +} + +// clean removes the plugin cache (only for remote plugins). +func (p *Plugin) clean() error { + if p.Error != nil { + // Dont try to clean plugins with error + return nil + } + if p.IsLocalPath() { + // Not a remote plugin, nothing to clean + return nil + } + // Clean the cloneDir, next time the ignite command will be invoked, the + // plugin will be fetched again. + err := os.RemoveAll(p.cloneDir) + return errors.WithStack(err) +} + +// outdatedBinary returns true if the plugin binary is older than the other +// files in p.srcPath. +// Also returns true if the plugin binary is absent. 
+func (p *Plugin) outdatedBinary() bool { + var ( + binaryTime time.Time + mostRecent time.Time + ) + err := filepath.Walk(p.srcPath, func(path string, info fs.FileInfo, err error) error { + if err != nil { + return err + } + if info.IsDir() { + return nil + } + if path == p.binaryPath() { + binaryTime = info.ModTime() + return nil + } + t := info.ModTime() + if mostRecent.IsZero() || t.After(mostRecent) { + mostRecent = t + } + return nil + }) + if err != nil { + fmt.Printf("error while walking app source path %q\n", p.srcPath) + return false + } + // Rebuild when source files are newer OR have the same timestamp as the binary. + // In some environments (such as fresh CI checkouts), mtimes can be normalized + // to identical values, and strict "after" checks may incorrectly reuse stale binaries. + return !mostRecent.Before(binaryTime) +} diff --git a/ignite/services/plugin/plugin_test.go b/ignite/services/plugin/plugin_test.go new file mode 100644 index 0000000..fd07af3 --- /dev/null +++ b/ignite/services/plugin/plugin_test.go @@ -0,0 +1,611 @@ +package plugin + +import ( + "context" + "fmt" + "os" + "path" + "path/filepath" + "testing" + "time" + + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing" + "github.com/go-git/go-git/v5/plumbing/object" + hplugin "github.com/hashicorp/go-plugin" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + pluginsconfig "github.com/ignite/cli/v29/ignite/config/plugins" + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gocmd" + "github.com/ignite/cli/v29/ignite/pkg/gomodule" +) + +func TestNewPlugin(t *testing.T) { + wd, err := os.Getwd() + require.NoError(t, err) + + tests := []struct { + name string + pluginCfg pluginsconfig.Plugin + expectedPlugin Plugin + }{ + { + name: "fail: empty path", + expectedPlugin: Plugin{ + Error: errors.Errorf(`missing app property "path"`), + stdout: os.Stdout, + 
stderr: os.Stderr, + }, + }, + { + name: "fail: local plugin doesnt exists", + pluginCfg: pluginsconfig.Plugin{Path: "/xxx/yyy/app"}, + expectedPlugin: Plugin{ + Error: errors.Errorf(`local app path "/xxx/yyy/app" not found`), + stdout: os.Stdout, + stderr: os.Stderr, + }, + }, + { + name: "fail: local plugin is not a directory", + pluginCfg: pluginsconfig.Plugin{Path: path.Join(wd, "testdata/fakebin")}, + expectedPlugin: Plugin{ + Error: errors.Errorf(fmt.Sprintf("local app path %q is not a directory", path.Join(wd, "testdata/fakebin"))), + stdout: os.Stdout, + stderr: os.Stderr, + }, + }, + { + name: "ok: local plugin", + pluginCfg: pluginsconfig.Plugin{Path: path.Join(wd, "testdata")}, + expectedPlugin: Plugin{ + srcPath: path.Join(wd, "testdata"), + name: "testdata", + stdout: os.Stdout, + stderr: os.Stderr, + }, + }, + { + name: "fail: remote plugin with only domain", + pluginCfg: pluginsconfig.Plugin{Path: "github.com"}, + expectedPlugin: Plugin{ + Error: errors.Errorf(`app path "github.com" is not a valid repository URL`), + stdout: os.Stdout, + stderr: os.Stderr, + }, + }, + { + name: "fail: remote plugin with incomplete URL", + pluginCfg: pluginsconfig.Plugin{Path: "github.com/ignite"}, + expectedPlugin: Plugin{ + Error: errors.Errorf(`app path "github.com/ignite" is not a valid repository URL`), + stdout: os.Stdout, + stderr: os.Stderr, + }, + }, + { + name: "ok: remote app", + pluginCfg: pluginsconfig.Plugin{Path: "github.com/ignite/app"}, + expectedPlugin: Plugin{ + repoPath: "github.com/ignite/app", + cloneURL: "https://github.com/ignite/app", + cloneDir: ".ignite/apps/github.com/ignite/app", + reference: "", + srcPath: ".ignite/apps/github.com/ignite/app", + name: "app", + stdout: os.Stdout, + stderr: os.Stderr, + }, + }, + { + name: "ok: remote plugin with @ref", + pluginCfg: pluginsconfig.Plugin{Path: "github.com/ignite/app@develop"}, + expectedPlugin: Plugin{ + repoPath: "github.com/ignite/app@develop", + cloneURL: "https://github.com/ignite/app", 
+ cloneDir: ".ignite/apps/github.com/ignite/app-develop", + reference: "develop", + srcPath: ".ignite/apps/github.com/ignite/app-develop", + name: "app", + stdout: os.Stdout, + stderr: os.Stderr, + }, + }, + { + name: "ok: remote plugin with @ref containing slash", + pluginCfg: pluginsconfig.Plugin{Path: "github.com/ignite/app@package/v1.0.0"}, + expectedPlugin: Plugin{ + repoPath: "github.com/ignite/app@package/v1.0.0", + cloneURL: "https://github.com/ignite/app", + cloneDir: ".ignite/apps/github.com/ignite/app-package-v1.0.0", + reference: "package/v1.0.0", + srcPath: ".ignite/apps/github.com/ignite/app-package-v1.0.0", + name: "app", + stdout: os.Stdout, + stderr: os.Stderr, + }, + }, + { + name: "ok: remote plugin with subpath", + pluginCfg: pluginsconfig.Plugin{Path: "github.com/ignite/app/plugin1"}, + expectedPlugin: Plugin{ + repoPath: "github.com/ignite/app", + cloneURL: "https://github.com/ignite/app", + cloneDir: ".ignite/apps/github.com/ignite/app", + reference: "", + srcPath: ".ignite/apps/github.com/ignite/app/plugin1", + name: "plugin1", + stdout: os.Stdout, + stderr: os.Stderr, + }, + }, + { + name: "ok: remote plugin with subpath and @ref", + pluginCfg: pluginsconfig.Plugin{Path: "github.com/ignite/app/plugin1@develop"}, + expectedPlugin: Plugin{ + repoPath: "github.com/ignite/app@develop", + cloneURL: "https://github.com/ignite/app", + cloneDir: ".ignite/apps/github.com/ignite/app-develop", + reference: "develop", + srcPath: ".ignite/apps/github.com/ignite/app-develop/plugin1", + name: "plugin1", + stdout: os.Stdout, + stderr: os.Stderr, + }, + }, + { + name: "ok: remote plugin with subpath and @ref containing slash", + pluginCfg: pluginsconfig.Plugin{Path: "github.com/ignite/app/plugin1@package/v1.0.0"}, + expectedPlugin: Plugin{ + repoPath: "github.com/ignite/app@package/v1.0.0", + cloneURL: "https://github.com/ignite/app", + cloneDir: ".ignite/apps/github.com/ignite/app-package-v1.0.0", + reference: "package/v1.0.0", + srcPath: 
".ignite/apps/github.com/ignite/app-package-v1.0.0/plugin1", + name: "plugin1", + stdout: os.Stdout, + stderr: os.Stderr, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tt.expectedPlugin.Plugin = tt.pluginCfg + + p := newPlugin(".ignite/apps", tt.pluginCfg) + + assertPlugin(t, tt.expectedPlugin, *p) + }) + } +} + +// Helper to make a local git repository with gofile committed. +// Returns the repo directory and the git.Repository. +func makeGitRepo(t *testing.T, name string) (string, *git.Repository) { + t.Helper() + + require := require.New(t) + repoDir := t.TempDir() + scaffoldPlugin(t, repoDir, "github.com/ignite/"+name, false) + + repo, err := git.PlainInit(repoDir, false) + require.NoError(err) + + w, err := repo.Worktree() + require.NoError(err) + + _, err = w.Add(".") + require.NoError(err) + + _, err = w.Commit("msg", &git.CommitOptions{ + Author: &object.Signature{ + Name: "bob", + Email: "bob@example.com", + When: time.Now(), + }, + }) + require.NoError(err) + return repoDir, repo +} + +type TestClientAPI struct{ ClientAPI } + +func (TestClientAPI) GetChainInfo(context.Context) (*ChainInfo, error) { + return &ChainInfo{}, nil +} + +func (TestClientAPI) GetIgniteInfo(context.Context) (*IgniteInfo, error) { + return &IgniteInfo{}, nil +} + +func TestPluginLoad(t *testing.T) { + wd, err := os.Getwd() + require.NoError(t, err) + + clientAPI := &TestClientAPI{} + + tests := []struct { + name string + buildPlugin func(t *testing.T) Plugin + expectedError string + }{ + { + name: "fail: plugin is already in error", + buildPlugin: func(t *testing.T) Plugin { + t.Helper() + return Plugin{ + Error: errors.New("oups"), + } + }, + expectedError: `oups`, + }, + { + name: "fail: no go files in srcPath", + buildPlugin: func(t *testing.T) Plugin { + t.Helper() + return Plugin{ + srcPath: path.Join(wd, "testdata"), + name: "testdata", + } + }, + expectedError: `no Go files in`, + }, + { + name: "ok: from local", + buildPlugin: func(t 
*testing.T) Plugin { + t.Helper() + path := scaffoldPlugin(t, t.TempDir(), "github.com/foo/bar", false) + return Plugin{ + srcPath: path, + name: "bar", + } + }, + }, + { + name: "ok: from git repo", + buildPlugin: func(t *testing.T) Plugin { + t.Helper() + repoDir, _ := makeGitRepo(t, "remote") + cloneDir := t.TempDir() + + return Plugin{ + cloneURL: repoDir, + cloneDir: cloneDir, + srcPath: path.Join(cloneDir, "remote"), + name: "remote", + } + }, + }, + { + name: "fail: git repo doesnt exists", + buildPlugin: func(t *testing.T) Plugin { + t.Helper() + cloneDir := t.TempDir() + + return Plugin{ + repoPath: "/xxxx/yyyy", + cloneURL: "/xxxx/yyyy", + cloneDir: cloneDir, + srcPath: path.Join(cloneDir, "app"), + } + }, + expectedError: `cloning "/xxxx/yyyy": repository not found`, + }, + { + name: "ok: from git repo with tag", + buildPlugin: func(t *testing.T) Plugin { + t.Helper() + repoDir, repo := makeGitRepo(t, "remote-tag") + h, err := repo.Head() + require.NoError(t, err) + _, err = repo.CreateTag("v1", h.Hash(), &git.CreateTagOptions{ + Tagger: &object.Signature{Name: "me"}, + Message: "v1", + }) + require.NoError(t, err) + + cloneDir := t.TempDir() + + return Plugin{ + cloneURL: repoDir, + reference: "v1", + cloneDir: cloneDir, + srcPath: path.Join(cloneDir, "remote-tag"), + name: "remote-tag", + } + }, + }, + { + name: "ok: from git repo with branch", + buildPlugin: func(t *testing.T) Plugin { + t.Helper() + repoDir, repo := makeGitRepo(t, "remote-branch") + w, err := repo.Worktree() + require.NoError(t, err) + err = w.Checkout(&git.CheckoutOptions{ + Branch: plumbing.NewBranchReferenceName("branch1"), + Create: true, + }) + require.NoError(t, err) + + cloneDir := t.TempDir() + + return Plugin{ + cloneURL: repoDir, + reference: "branch1", + cloneDir: cloneDir, + srcPath: path.Join(cloneDir, "remote-branch"), + name: "remote-branch", + } + }, + }, + { + name: "ok: from git repo with hash", + buildPlugin: func(t *testing.T) Plugin { + t.Helper() + repoDir, repo 
:= makeGitRepo(t, "remote-hash") + h, err := repo.Head() + require.NoError(t, err) + + cloneDir := t.TempDir() + + return Plugin{ + cloneURL: repoDir, + reference: h.Hash().String(), + cloneDir: cloneDir, + srcPath: path.Join(cloneDir, "remote-hash"), + name: "remote-hash", + } + }, + }, + { + name: "fail: git ref not found", + buildPlugin: func(t *testing.T) Plugin { + t.Helper() + repoDir, _ := makeGitRepo(t, "remote-no-ref") + + cloneDir := t.TempDir() + + return Plugin{ + cloneURL: repoDir, + reference: "doesnt_exists", + cloneDir: cloneDir, + srcPath: path.Join(cloneDir, "remote-no-ref"), + name: "remote-no-ref", + } + }, + expectedError: `cloning ".*": reference not found`, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ctx := context.Background() + require := require.New(t) + assert := assert.New(t) + p := tt.buildPlugin(t) + defer p.KillClient() + + p.load(context.Background()) + + if tt.expectedError != "" { + require.Error(p.Error, "expected error %q", tt.expectedError) + require.Regexp(tt.expectedError, p.Error.Error()) + return + } + + require.NoError(p.Error) + require.NotNil(p.Interface) + manifest, err := p.Interface.Manifest(ctx) + require.NoError(err) + assert.Equal(p.name, manifest.Name) + assert.NoError(p.Interface.Execute(ctx, &ExecutedCommand{OsArgs: []string{"ignite", p.name, "hello"}}, clientAPI)) + assert.NoError(p.Interface.ExecuteHookPre(ctx, &ExecutedHook{}, clientAPI)) + assert.NoError(p.Interface.ExecuteHookPost(ctx, &ExecutedHook{}, clientAPI)) + assert.NoError(p.Interface.ExecuteHookCleanUp(ctx, &ExecutedHook{}, clientAPI)) + }) + } +} + +func TestPluginLoadSharedHost(t *testing.T) { + tests := []struct { + name string + instances int + sharesHost bool + }{ + { + name: "ok: from local sharedhost is on 1 instance", + instances: 1, + sharesHost: true, + }, + { + name: "ok: from local sharedhost is on 2 instances", + instances: 2, + sharesHost: true, + }, + { + name: "ok: from local sharedhost is on 4 
instances", + instances: 4, + sharesHost: true, + }, + { + name: "ok: from local sharedhost is off 4 instances", + instances: 4, + sharesHost: false, + }, + } + + for i, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var ( + require = require.New(t) + assert = assert.New(t) + // scaffold an unique plugin for all instances + path = scaffoldPlugin(t, t.TempDir(), + fmt.Sprintf("github.com/foo/bar-%d", i), tt.sharesHost) + plugins []*Plugin + ) + // Load one plugin per instance + for i := 0; i < tt.instances; i++ { + p := Plugin{ + Plugin: pluginsconfig.Plugin{Path: path}, + srcPath: path, + name: filepath.Base(path), + } + p.load(context.Background()) + require.NoError(p.Error) + plugins = append(plugins, &p) + } + // Ensure all plugins are killed at the end of test case + defer func() { + for i := len(plugins) - 1; i >= 0; i-- { + plugins[i].KillClient() + if tt.sharesHost && i > 0 { + assert.False(plugins[i].client.Exited(), "non host app can't kill host app") + assert.True(checkConfCache(plugins[i].Path), "non host app doesn't remove config cache when killed") + } else { + assert.True(plugins[i].client.Exited(), "app should be killed") + } + assert.False(plugins[i].isHost, "killed plugins are no longer host") + } + assert.False(checkConfCache(plugins[0].Path), "once host is killed the cache should be cleared") + }() + + var hostConf *hplugin.ReattachConfig + for i := 0; i < len(plugins); i++ { + if tt.sharesHost { + assert.True(checkConfCache(plugins[i].Path), "sharedHost must have a cache entry") + if i == 0 { + // first plugin is the host + assert.True(plugins[i].isHost, "first app is the host") + // Assert reattach config has been saved + hostConf = plugins[i].client.ReattachConfig() + ref, err := readConfigCache(plugins[i].Path) + if assert.NoError(err) { + assert.Equal(hostConf, &ref, "wrong cache entry for app host") + } + } else { + // plugins after first aren't host + assert.False(plugins[i].isHost, "app %d can't be host", i) + 
assert.Equal(hostConf, plugins[i].client.ReattachConfig(), "ReattachConfig different from host app") + } + } else { + assert.False(plugins[i].isHost, "app %d can't be host if sharedHost is disabled", i) + assert.False(checkConfCache(plugins[i].Path), "app %d can't have a cache entry if sharedHost is disabled", i) + } + } + }) + } +} + +func TestPluginClean(t *testing.T) { + tests := []struct { + name string + plugin *Plugin + expectRemove bool + }{ + { + name: "dont clean local app", + plugin: &Plugin{ + Plugin: pluginsconfig.Plugin{Path: "/local"}, + }, + }, + { + name: "dont clean plugin with errors", + plugin: &Plugin{Error: errors.New("oups")}, + }, + { + name: "ok", + plugin: &Plugin{ + cloneURL: "https://github.com/ignite/app", + }, + expectRemove: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tmp, err := os.MkdirTemp("", "cloneDir") + require.NoError(t, err) + tt.plugin.cloneDir = tmp + + err = tt.plugin.clean() + + require.NoError(t, err) + if tt.expectRemove { + _, err := os.Stat(tmp) + assert.True(t, os.IsNotExist(err), "cloneDir not removed") + } + }) + } +} + +func TestPluginOutdatedBinary(t *testing.T) { + t.Run("returns true when source and binary mtimes are equal", func(t *testing.T) { + tmp := t.TempDir() + srcFile := filepath.Join(tmp, "main.go") + binFile := filepath.Join(tmp, "app.ign") + + require.NoError(t, os.WriteFile(srcFile, []byte("package main\n"), 0o644)) + require.NoError(t, os.WriteFile(binFile, []byte("binary"), 0o755)) + + equalTime := time.Now().Add(-time.Minute).Truncate(time.Second) + require.NoError(t, os.Chtimes(srcFile, equalTime, equalTime)) + require.NoError(t, os.Chtimes(binFile, equalTime, equalTime)) + + p := Plugin{ + srcPath: tmp, + name: "app", + } + + require.True(t, p.outdatedBinary()) + }) +} + +// scaffoldPlugin runs Scaffold and updates the go.mod so it uses the +// current ignite/cli sources. 
+func scaffoldPlugin(t *testing.T, dir, name string, sharedHost bool) string { + t.Helper() + + require := require.New(t) + + session := cliui.New(cliui.WithoutUserInteraction(true)) + path, err := Scaffold(context.Background(), session, dir, name, sharedHost) + require.NoError(err) + + // We want the scaffolded plugin to use the current version of ignite/cli, + // for that we need to update the plugin go.mod and add a replace to target + // current ignite/cli + gomod, err := gomodule.ParseAt(path) + require.NoError(err) + + // use GOMOD env to get current directory module path + modpath, err := gocmd.Env(gocmd.EnvGOMOD) + require.NoError(err) + modpath = filepath.Dir(modpath) + err = gomod.AddReplace("github.com/ignite/cli/v29", "", modpath, "") + require.NoError(err) + // Save go.mod + data, err := gomod.Format() + require.NoError(err) + err = os.WriteFile(filepath.Join(path, "go.mod"), data, 0o644) + require.NoError(err) + return path +} + +func assertPlugin(t *testing.T, want, have Plugin) { + t.Helper() + + if want.Error != nil { + require.Errorf(t, have.Error, "expected error %q", want.Error) + assert.Regexp(t, want.Error.Error(), have.Error.Error()) + } else { + require.NoErrorf(t, have.Error, "expected no error, got %v", have.Error) + } + + // Errors aren't comparable with assert.Equal, because of the different stacks + want.Error = nil + have.Error = nil + assert.Equal(t, want, have) +} diff --git a/ignite/services/plugin/protocol.go b/ignite/services/plugin/protocol.go new file mode 100644 index 0000000..fa65c06 --- /dev/null +++ b/ignite/services/plugin/protocol.go @@ -0,0 +1,263 @@ +package plugin + +import ( + "context" + "sync" + + hplugin "github.com/hashicorp/go-plugin" + "google.golang.org/grpc" + + v1 "github.com/ignite/cli/v29/ignite/services/plugin/grpc/v1" +) + +var handshakeConfig = hplugin.HandshakeConfig{ + ProtocolVersion: 1, + MagicCookieKey: "IGNITE_APP", + MagicCookieValue: "ignite", +} + +// HandshakeConfig are used to just do a basic 
handshake between a plugin and host. +// If the handshake fails, a user friendly error is shown. This prevents users from +// executing bad plugins or executing a plugin directory. It is a UX feature, not a +// security feature. +func HandshakeConfig() hplugin.HandshakeConfig { + return handshakeConfig +} + +// NewGRPC returns a new gRPC plugin that implements the interface over gRPC. +func NewGRPC(impl Interface) hplugin.Plugin { + return grpcPlugin{impl: impl} +} + +type grpcPlugin struct { + hplugin.NetRPCUnsupportedPlugin + + impl Interface +} + +// GRPCServer returns a new server that implements the plugin interface over gRPC. +func (p grpcPlugin) GRPCServer(broker *hplugin.GRPCBroker, s *grpc.Server) error { + v1.RegisterInterfaceServiceServer(s, &server{ + impl: p.impl, + broker: broker, + }) + return nil +} + +// GRPCClient returns a new plugin client that allows calling the plugin interface over gRPC. +func (p grpcPlugin) GRPCClient(_ context.Context, broker *hplugin.GRPCBroker, c *grpc.ClientConn) (interface{}, error) { + return &client{ + grpc: v1.NewInterfaceServiceClient(c), + broker: broker, + }, nil +} + +type client struct { + grpc v1.InterfaceServiceClient + broker *hplugin.GRPCBroker +} + +func (c client) Manifest(ctx context.Context) (*Manifest, error) { + r, err := c.grpc.Manifest(ctx, &v1.ManifestRequest{}) + if err != nil { + return nil, err + } + + return r.Manifest, nil +} + +func (c client) Execute(ctx context.Context, cmd *ExecutedCommand, api ClientAPI) error { + brokerID, stopServer := c.startClientAPIServer(api) + _, err := c.grpc.Execute(ctx, &v1.ExecuteRequest{ + Cmd: cmd, + ClientApi: brokerID, + }) + stopServer() + return err +} + +func (c client) ExecuteHookPre(ctx context.Context, h *ExecutedHook, api ClientAPI) error { + brokerID, stopServer := c.startClientAPIServer(api) + _, err := c.grpc.ExecuteHookPre(ctx, &v1.ExecuteHookPreRequest{ + Hook: h, + ClientApi: brokerID, + }) + stopServer() + return err +} + +func (c client) 
ExecuteHookPost(ctx context.Context, h *ExecutedHook, api ClientAPI) error { + brokerID, stopServer := c.startClientAPIServer(api) + _, err := c.grpc.ExecuteHookPost(ctx, &v1.ExecuteHookPostRequest{ + Hook: h, + ClientApi: brokerID, + }) + stopServer() + return err +} + +func (c client) ExecuteHookCleanUp(ctx context.Context, h *ExecutedHook, api ClientAPI) error { + brokerID, stopServer := c.startClientAPIServer(api) + _, err := c.grpc.ExecuteHookCleanUp(ctx, &v1.ExecuteHookCleanUpRequest{ + Hook: h, + ClientApi: brokerID, + }) + stopServer() + return err +} + +func (c client) startClientAPIServer(api ClientAPI) (uint32, func()) { + var ( + srv *grpc.Server + m sync.Mutex + brokerID = c.broker.NextId() + ) + + go c.broker.AcceptAndServe(brokerID, func(opts []grpc.ServerOption) *grpc.Server { + m.Lock() + defer m.Unlock() + srv = grpc.NewServer(opts...) + v1.RegisterClientAPIServiceServer(srv, &clientAPIServer{impl: api}) + return srv + }) + + stop := func() { + m.Lock() + if srv != nil { + srv.Stop() + } + m.Unlock() + } + + return brokerID, stop +} + +type server struct { + v1.UnimplementedInterfaceServiceServer + + impl Interface + broker *hplugin.GRPCBroker +} + +func (s server) Manifest(ctx context.Context, _ *v1.ManifestRequest) (*v1.ManifestResponse, error) { + m, err := s.impl.Manifest(ctx) + if err != nil { + return nil, err + } + + return &v1.ManifestResponse{Manifest: m}, nil +} + +func (s server) Execute(ctx context.Context, r *v1.ExecuteRequest) (*v1.ExecuteResponse, error) { + conn, err := s.broker.Dial(r.ClientApi) + if err != nil { + return nil, err + } + + defer conn.Close() + + err = s.impl.Execute(ctx, r.GetCmd(), newClientAPIClient(conn)) + if err != nil { + return nil, err + } + + return &v1.ExecuteResponse{}, nil +} + +func (s server) ExecuteHookPre(ctx context.Context, r *v1.ExecuteHookPreRequest) (*v1.ExecuteHookPreResponse, error) { + conn, err := s.broker.Dial(r.ClientApi) + if err != nil { + return nil, err + } + + defer conn.Close() + + 
err = s.impl.ExecuteHookPre(ctx, r.GetHook(), newClientAPIClient(conn)) + if err != nil { + return nil, err + } + + return &v1.ExecuteHookPreResponse{}, nil +} + +func (s server) ExecuteHookPost(ctx context.Context, r *v1.ExecuteHookPostRequest) (*v1.ExecuteHookPostResponse, error) { + conn, err := s.broker.Dial(r.ClientApi) + if err != nil { + return nil, err + } + + defer conn.Close() + + err = s.impl.ExecuteHookPost(ctx, r.GetHook(), newClientAPIClient(conn)) + if err != nil { + return nil, err + } + + return &v1.ExecuteHookPostResponse{}, nil +} + +func (s server) ExecuteHookCleanUp(ctx context.Context, r *v1.ExecuteHookCleanUpRequest) (*v1.ExecuteHookCleanUpResponse, error) { + conn, err := s.broker.Dial(r.ClientApi) + if err != nil { + return nil, err + } + + defer conn.Close() + + err = s.impl.ExecuteHookCleanUp(ctx, r.GetHook(), newClientAPIClient(conn)) + if err != nil { + return nil, err + } + + return &v1.ExecuteHookCleanUpResponse{}, nil +} + +func newClientAPIClient(c *grpc.ClientConn) *clientAPIClient { + return &clientAPIClient{v1.NewClientAPIServiceClient(c)} +} + +type clientAPIClient struct { + grpc v1.ClientAPIServiceClient +} + +func (c clientAPIClient) GetChainInfo(ctx context.Context) (*ChainInfo, error) { + r, err := c.grpc.GetChainInfo(ctx, &v1.GetChainInfoRequest{}) + if err != nil { + return nil, err + } + + return r.ChainInfo, nil +} + +func (c clientAPIClient) GetIgniteInfo(ctx context.Context) (*IgniteInfo, error) { + r, err := c.grpc.GetIgniteInfo(ctx, &v1.GetIgniteInfoRequest{}) + if err != nil { + return nil, err + } + + return r.IgniteInfo, nil +} + +type clientAPIServer struct { + v1.UnimplementedClientAPIServiceServer + + impl ClientAPI +} + +func (s clientAPIServer) GetChainInfo(ctx context.Context, _ *v1.GetChainInfoRequest) (*v1.GetChainInfoResponse, error) { + chainInfo, err := s.impl.GetChainInfo(ctx) + if err != nil { + return nil, err + } + + return &v1.GetChainInfoResponse{ChainInfo: chainInfo}, nil +} + +func (s 
clientAPIServer) GetIgniteInfo(ctx context.Context, _ *v1.GetIgniteInfoRequest) (*v1.GetIgniteInfoResponse, error) { + igniteInfo, err := s.impl.GetIgniteInfo(ctx) + if err != nil { + return nil, err + } + + return &v1.GetIgniteInfoResponse{IgniteInfo: igniteInfo}, nil +} diff --git a/ignite/services/plugin/scaffold.go b/ignite/services/plugin/scaffold.go new file mode 100644 index 0000000..8be62bc --- /dev/null +++ b/ignite/services/plugin/scaffold.go @@ -0,0 +1,81 @@ +package plugin + +import ( + "context" + "embed" + "fmt" + "io/fs" + "os" + "path" + "path/filepath" + "strings" + + "github.com/gobuffalo/genny/v2" + "github.com/gobuffalo/plush/v4" + "golang.org/x/text/cases" + "golang.org/x/text/language" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gocmd" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" +) + +//go:embed template/* +var fsPluginSource embed.FS + +// Scaffold generates a plugin structure under dir/path.Base(appName). 
+func Scaffold(ctx context.Context, session *cliui.Session, dir, appName string, sharedHost bool) (string, error) { + subFs, err := fs.Sub(fsPluginSource, "template") + if err != nil { + return "", errors.WithStack(err) + } + + var ( + name = filepath.Base(appName) + title = toTitle(name) + finalDir = path.Join(dir, name) + ) + if _, err := os.Stat(finalDir); err == nil { + // finalDir already exists, don't overwrite stuff + return "", errors.Errorf("directory %q already exists, abort scaffolding", finalDir) + } + + g := genny.New() + if err := g.OnlyFS(subFs, nil, nil); err != nil { + return "", errors.WithStack(err) + } + + pctx := plush.NewContextWithContext(ctx) + pctx.Set("AppName", appName) + pctx.Set("Name", name) + pctx.Set("Title", title) + pctx.Set("SharedHost", sharedHost) + + g.Transformer(xgenny.Transformer(pctx)) + r := xgenny.NewRunner(ctx, finalDir) + _, err = r.RunAndApply(g, xgenny.ApplyPreRun(func(_, _, duplicated []string) error { + if len(duplicated) == 0 { + return nil + } + question := fmt.Sprintf("Do you want to overwrite the existing files? 
\n%s", strings.Join(duplicated, "\n")) + return session.AskConfirm(question) + })) + if err != nil { + return "", err + } + + if err := gocmd.ModTidy(ctx, finalDir); err != nil { + return "", errors.WithStack(err) + } + + if err := gocmd.Fmt(ctx, finalDir); err != nil { + return "", errors.WithStack(err) + } + + return finalDir, nil +} + +func toTitle(s string) string { + return strings.ReplaceAll(strings.ReplaceAll(cases.Title(language.English).String(s), "_", ""), "-", "") +} diff --git a/ignite/services/plugin/scaffold_test.go b/ignite/services/plugin/scaffold_test.go new file mode 100644 index 0000000..bac3b64 --- /dev/null +++ b/ignite/services/plugin/scaffold_test.go @@ -0,0 +1,82 @@ +package plugin_test + +import ( + "context" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/gocmd" + "github.com/ignite/cli/v29/ignite/services/plugin" +) + +const fooBarAppURI = "github.com/foo/bar" + +func TestScaffold(t *testing.T) { + // Arrange + tmp := t.TempDir() + ctx := context.Background() + + // Act + session := cliui.New(cliui.WithoutUserInteraction(true)) + path, err := plugin.Scaffold(ctx, session, tmp, fooBarAppURI, false) + + // Assert + require.NoError(t, err) + require.DirExists(t, path) + require.FileExists(t, filepath.Join(path, "go.mod")) + require.FileExists(t, filepath.Join(path, "main.go")) +} + +func TestScaffoldedConfig(t *testing.T) { + // Arrange + ctx := context.Background() + path := scaffoldApp(ctx, t, fooBarAppURI) + + // Act + cfg := readConfig(t, path) + + // Assert + require.EqualValues(t, 1, cfg.Version) + require.Len(t, cfg.Apps, 1) +} + +func TestScaffoldedTests(t *testing.T) { + // Arrange + ctx := context.Background() + path := scaffoldApp(ctx, t, fooBarAppURI) + path = filepath.Join(path, "integration") + + // Act + err := gocmd.Test(ctx, path, []string{ + "-timeout", + "10m", + "-run", + 
"^TestBar$", + }) + + // Assert + require.NoError(t, err) +} + +func scaffoldApp(ctx context.Context, t *testing.T, path string) string { + t.Helper() + + session := cliui.New(cliui.WithoutUserInteraction(true)) + path, err := plugin.Scaffold(ctx, session, t.TempDir(), path, false) + require.NoError(t, err) + return path +} + +func readConfig(t *testing.T, path string) (cfg plugin.AppsConfig) { + t.Helper() + + bz, err := os.ReadFile(filepath.Join(path, "app.ignite.yml")) + require.NoError(t, err) + require.NoError(t, yaml.Unmarshal(bz, &cfg)) + return +} diff --git a/ignite/services/plugin/template/.gitignore.plush b/ignite/services/plugin/template/.gitignore.plush new file mode 100644 index 0000000..6f1ccc9 --- /dev/null +++ b/ignite/services/plugin/template/.gitignore.plush @@ -0,0 +1,21 @@ +# Built with https://github.com/ignite/cli +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib +*.ign + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +# Go workspace file +go.work +go.work.sum diff --git a/ignite/services/plugin/template/app.ignite.yml.plush b/ignite/services/plugin/template/app.ignite.yml.plush new file mode 100644 index 0000000..6f1377e --- /dev/null +++ b/ignite/services/plugin/template/app.ignite.yml.plush @@ -0,0 +1,6 @@ +version: 1 +apps: + <%= Name %>: + description: <%= Name %> is an awesome Ignite application! + path: ./ + \ No newline at end of file diff --git a/ignite/services/plugin/template/cmd/cmd.go.plush b/ignite/services/plugin/template/cmd/cmd.go.plush new file mode 100644 index 0000000..e0f1a12 --- /dev/null +++ b/ignite/services/plugin/template/cmd/cmd.go.plush @@ -0,0 +1,19 @@ +package cmd + +import "github.com/ignite/cli/v29/ignite/services/plugin" + +// GetCommands returns the list of <%= Name %> app commands. 
+func GetCommands() []*plugin.Command { + return []*plugin.Command{ + { + Use: "<%= Name %> [command]", + Short: "<%= Name %> is an awesome Ignite application!", + Commands: []*plugin.Command{ + { + Use: "hello", + Short: "Say hello to the world of ignite!", + }, + }, + }, + } +} diff --git a/ignite/services/plugin/template/cmd/hello.go.plush b/ignite/services/plugin/template/cmd/hello.go.plush new file mode 100644 index 0000000..bc2351c --- /dev/null +++ b/ignite/services/plugin/template/cmd/hello.go.plush @@ -0,0 +1,14 @@ +package cmd + +import ( + "context" + "fmt" + + "github.com/ignite/cli/v29/ignite/services/plugin" +) + +// ExecuteHello executes the hello subcommand. +func ExecuteHello(ctx context.Context, cmd *plugin.ExecutedCommand) error { + fmt.Println("Hello, world!") + return nil +} diff --git a/ignite/services/plugin/template/go.mod.plush b/ignite/services/plugin/template/go.mod.plush new file mode 100644 index 0000000..2d6c669 --- /dev/null +++ b/ignite/services/plugin/template/go.mod.plush @@ -0,0 +1,13 @@ +module <%= AppName %> + +go 1.23 + +require ( + github.com/hashicorp/go-plugin v1.6.2 + github.com/ignite/cli/v29 v29.0.0 + github.com/stretchr/testify v1.8.4 +) + +replace github.com/ignite/cli/v29 => github.com/ignite/cli/v29 main +replace github.com/cosmos/cosmos-sdk => github.com/cosmos/cosmos-sdk v0.53.6 +replace github.com/bytedance/sonic => github.com/bytedance/sonic v1.15.0 // TODO: remove when cosmossdk.io/log is updated. 
diff --git a/ignite/services/plugin/template/integration/app_test.go.plush b/ignite/services/plugin/template/integration/app_test.go.plush new file mode 100644 index 0000000..8019345 --- /dev/null +++ b/ignite/services/plugin/template/integration/app_test.go.plush @@ -0,0 +1,73 @@ +package integration_test + +import ( + "bytes" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + pluginsconfig "github.com/ignite/cli/v29/ignite/config/plugins" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/services/plugin" + envtest "github.com/ignite/cli/v29/integration" +) + +func Test<%= Title %>(t *testing.T) { + var ( + require = require.New(t) + env = envtest.New(t) + app = env.ScaffoldApp("<%= AppName %>-app") + ) + + dir, err := os.Getwd() + require.NoError(err) + pluginPath := filepath.Join(filepath.Dir(filepath.Dir(dir)), "<%= Name %>") + + env.Must(env.Exec("install <%= Name %> app locally", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, "app", "install", pluginPath), + step.Workdir(app.SourcePath()), + )), + )) + + // One local plugin expected + assertLocalPlugins(t, app, []pluginsconfig.Plugin{ + { + Path: pluginPath, + }, + }) + assertGlobalPlugins(t, nil) + + buf := &bytes.Buffer{} + env.Must(env.Exec("run <%= Name %>", + step.NewSteps(step.New( + step.Exec( + envtest.IgniteApp, + "<%= Name %>", + "hello", + ), + step.Workdir(app.SourcePath()), + step.Stdout(buf), + step.Stderr(buf), + )), + )) + require.Equal("Hello, world!\n", buf.String()) +} + +func assertLocalPlugins(t *testing.T, app envtest.App, expectedPlugins []pluginsconfig.Plugin) { + t.Helper() + cfg, err := pluginsconfig.ParseDir(app.SourcePath()) + require.NoError(t, err) + require.ElementsMatch(t, expectedPlugins, cfg.Apps, "unexpected local apps") +} + +func assertGlobalPlugins(t *testing.T, expectedPlugins []pluginsconfig.Plugin) { + t.Helper() + cfgPath, err := plugin.PluginsPath() + require.NoError(t, err) + cfg, 
err := pluginsconfig.ParseDir(cfgPath) + require.NoError(t, err) + require.ElementsMatch(t, expectedPlugins, cfg.Apps, "unexpected global apps") +} diff --git a/ignite/services/plugin/template/main.go.plush b/ignite/services/plugin/template/main.go.plush new file mode 100644 index 0000000..2155ff5 --- /dev/null +++ b/ignite/services/plugin/template/main.go.plush @@ -0,0 +1,56 @@ +package main + +import ( + "context" + "fmt" + + hplugin "github.com/hashicorp/go-plugin" + + + "github.com/ignite/cli/v29/ignite/services/plugin" + "<%= AppName %>/cmd" +) + +type app struct{} + +func (app) Manifest(_ context.Context) (*plugin.Manifest, error) { + return &plugin.Manifest{ + Name: "<%= Name %>",<%= if (SharedHost) { %> + SharedHost: true,<% } %> + Commands: cmd.GetCommands(), + }, nil +} + +func (app) Execute(ctx context.Context, c *plugin.ExecutedCommand, _ plugin.ClientAPI) error { + // Remove the first two elements "ignite" and "<%= Name %>" from OsArgs. + args := c.OsArgs[2:] + + switch args[0] { + case "hello": + return cmd.ExecuteHello(ctx, c) + default: + return fmt.Errorf("unknown command: %s", c.Path) + } +} + +func (app) ExecuteHookPre(_ context.Context, _ *plugin.ExecutedHook, _ plugin.ClientAPI) error { + return nil +} + +func (app) ExecuteHookPost(_ context.Context, _ *plugin.ExecutedHook, _ plugin.ClientAPI) error { + return nil +} + +func (app) ExecuteHookCleanUp(_ context.Context, _ *plugin.ExecutedHook, _ plugin.ClientAPI) error { + return nil +} + +func main() { + hplugin.Serve(&hplugin.ServeConfig{ + HandshakeConfig: plugin.HandshakeConfig(), + Plugins: map[string]hplugin.Plugin{ + "<%= Name %>": plugin.NewGRPC(&app{}), + }, + GRPCServer: hplugin.DefaultGRPCServer, + }) +} diff --git a/ignite/services/plugin/testdata/fakebin b/ignite/services/plugin/testdata/fakebin new file mode 100644 index 0000000..473a0f4 diff --git a/ignite/services/scaffolder/chain_registry.go b/ignite/services/scaffolder/chain_registry.go new file mode 100644 index 
0000000..1ef6271 --- /dev/null +++ b/ignite/services/scaffolder/chain_registry.go @@ -0,0 +1,222 @@ +package scaffolder + +import ( + "bufio" + "fmt" + "os" + "path/filepath" + "strings" + + sdk "github.com/cosmos/cosmos-sdk/types" + + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + "github.com/ignite/cli/v29/ignite/pkg/chainregistry" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xgit" + "github.com/ignite/cli/v29/ignite/services/chain" +) + +const ( + chainFilename = "chain.json" + assetListFilename = "assetlist.json" +) + +// CreateChainRegistryFiles generates the chain registry files in the scaffolded chains. +func (s Scaffolder) CreateChainRegistryFiles(chain *chain.Chain, cfg *chainconfig.Config) error { + binaryName, err := chain.Binary() + if err != nil { + return errors.Wrap(err, "failed to get binary name") + } + + chainHome, err := chain.DefaultHome() + if err != nil { + return errors.Wrap(err, "failed to get default home directory") + } + + chainID, err := chain.ID() + if err != nil { + return errors.Wrap(err, "failed to get chain ID") + } + + chainGitURL, _ /* do not fail on non-existing git repo */ := xgit.RepositoryURL(chain.AppPath()) + + var ( + consensus chainregistry.CodebaseInfo + ibc chainregistry.CodebaseInfo + cosmwasm chainregistry.CodebaseInfoEnabled + ) + + consensusVersion, err := getVersionOfFromGoMod(chain, "github.com/cometbft/cometbft") + if err == nil { + consensus = chainregistry.CodebaseInfo{ + Type: "cometbft", + Version: consensusVersion, + } + } + + cosmwasmVersion, err := getVersionOfFromGoMod(chain, "github.com/CosmWasm/wasmd") + if err == nil { + cosmwasm = chainregistry.CodebaseInfoEnabled{ + Version: cosmwasmVersion, + Enabled: true, + } + } + + ibcVersion, err := getVersionOfFromGoMod(chain, "github.com/cosmos/ibc-go") + if err == nil { + ibc = chainregistry.CodebaseInfo{ + Type: "go", + Version: ibcVersion, + } + } + + // get validators from config and parse their 
coins + // we can assume it holds the base denom + defaultDenom := "stake" + if len(cfg.Validators) > 0 { + coin, err := sdk.ParseCoinNormalized(cfg.Validators[0].Bonded) + if err == nil { + defaultDenom = coin.Denom + } + } + + bech32Prefix, err := chain.Bech32Prefix() + if err != nil { + return errors.Wrap(err, "failed to get bech32 prefix") + } + + coinType, err := chain.CoinType() + if err != nil { + return errors.Wrap(err, "failed to get coin type") + } + + chainData := chainregistry.Chain{ + ChainName: chain.Name(), + PrettyName: chain.Name(), + ChainType: chainregistry.ChainTypeCosmos, + Status: chainregistry.ChainStatusUpcoming, + NetworkType: chainregistry.NetworkTypeDevnet, + Website: fmt.Sprintf("https://%s.zone", chain.Name()), + ChainID: chainID, + Bech32Prefix: bech32Prefix, + DaemonName: binaryName, + NodeHome: chainHome, + KeyAlgos: []chainregistry.KeyAlgos{chainregistry.KeyAlgoSecp256k1}, + Slip44: coinType, + Fees: chainregistry.Fees{ + FeeTokens: []chainregistry.FeeToken{ + { + Denom: defaultDenom, + FixedMinGasPrice: 0.025, + LowGasPrice: 0.01, + AverageGasPrice: 0.025, + HighGasPrice: 0.03, + }, + }, + }, + Staking: chainregistry.Staking{ + StakingTokens: []chainregistry.StakingToken{ + { + Denom: defaultDenom, + }, + }, + }, + Codebase: chainregistry.Codebase{ + GitRepo: chainGitURL, + RecommendedVersion: "v1.0.0", + CompatibleVersions: []string{"v1.0.0"}, + Sdk: chainregistry.CodebaseInfo{ + Type: "cosmos", + Version: chain.Version.String(), + }, + Consensus: consensus, + Ibc: ibc, + Cosmwasm: cosmwasm, + }, + APIs: chainregistry.APIs{ + RPC: []chainregistry.APIProvider{ + { + Address: "http://localhost:26657", + Provider: "localhost", + }, + }, + Rest: []chainregistry.APIProvider{ + { + Address: "http://localhost:1317", + Provider: "localhost", + }, + }, + Grpc: []chainregistry.APIProvider{ + { + Address: "localhost:9090", + Provider: "localhost", + }, + }, + }, + } + + assetListData := chainregistry.AssetList{ + ChainName: 
chainData.ChainName, + Assets: []chainregistry.Asset{ + { + Description: fmt.Sprintf("The native token of the %s chain", chainData.ChainName), + DenomUnits: []chainregistry.DenomUnit{ + { + Denom: defaultDenom, + Exponent: 0, + }, + }, + Base: defaultDenom, + Name: chainData.ChainName, + Symbol: strings.ToUpper(defaultDenom), + LogoURIs: chainregistry.LogoURIs{ + Png: "https://ignite.com/favicon.ico", + Svg: "https://ignite.com/favicon.ico", + }, + TypeAsset: "sdk.coin", + Socials: chainregistry.Socials{ + Website: "https://ignite.com", + Twitter: "https://x.com/ignite", + }, + }, + }, + } + + if err := chainData.SaveJSON(chainFilename); err != nil { + return err + } + + if err := assetListData.SaveJSON(assetListFilename); err != nil { + return err + } + + return nil +} + +func getVersionOfFromGoMod(chain *chain.Chain, pkg string) (string, error) { + chainPath := chain.AppPath() + + // get the version from the go.mod file + file, err := os.Open(filepath.Join(chainPath, "go.mod")) + if err != nil { + return "", err + } + defer file.Close() + + scanner := bufio.NewScanner(file) + for scanner.Scan() { + line := scanner.Text() + if strings.Contains(line, pkg) { + parts := strings.Fields(line) + if len(parts) > 1 { + return parts[len(parts)-1], nil + } + } + } + + if err := scanner.Err(); err != nil { + return "", err + } + + return "", errors.New("consensus version not found in go.mod") +} diff --git a/ignite/services/scaffolder/component.go b/ignite/services/scaffolder/component.go new file mode 100644 index 0000000..def421a --- /dev/null +++ b/ignite/services/scaffolder/component.go @@ -0,0 +1,274 @@ +package scaffolder + +import ( + "context" + "fmt" + "go/ast" + "go/parser" + "go/token" + "os" + "path/filepath" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis" + "github.com/ignite/cli/v29/ignite/templates/field/datatype" +) + +const ( + 
componentType = "type" + componentMessage = "message" + componentQuery = "query" + componentPacket = "packet" +) + +// checkComponentValidity performs various checks common to all components to verify if it can be scaffolded. +func checkComponentValidity(appPath, moduleName string, compName multiformatname.Name, noMessage bool) error { + ok, err := moduleExists(appPath, moduleName) + if err != nil { + return err + } + if !ok { + return errors.Errorf("the module %s doesn't exist", moduleName) + } + + // Ensure the name is valid, otherwise it would generate an incorrect code + if err := checkForbiddenComponentName(compName); err != nil { + return errors.Errorf("%s can't be used as a component name: %w", compName.LowerCamel, err) + } + + // Check component name is not already used + return checkComponentCreated(appPath, moduleName, compName, noMessage) +} + +// checkComponentCreated checks if the component has been already created with Ignite in the project. +func checkComponentCreated(appPath, moduleName string, compName multiformatname.Name, noMessage bool) (err error) { + // associate the type to check with the component that scaffold this type + typesToCheck := map[string]string{ + compName.UpperCamel: componentType, + fmt.Sprintf("queryall%srequest", compName.LowerCase): componentType, + fmt.Sprintf("queryall%sresponse", compName.LowerCase): componentType, + fmt.Sprintf("queryget%srequest", compName.LowerCase): componentType, + fmt.Sprintf("queryget%sresponse", compName.LowerCase): componentType, + fmt.Sprintf("query%srequest", compName.LowerCase): componentQuery, + fmt.Sprintf("query%sresponse", compName.LowerCase): componentQuery, + fmt.Sprintf("%spacketdata", compName.LowerCase): componentPacket, + } + + if !noMessage { + typesToCheck[fmt.Sprintf("msgcreate%s", compName.LowerCase)] = componentType + typesToCheck[fmt.Sprintf("msgupdate%s", compName.LowerCase)] = componentType + typesToCheck[fmt.Sprintf("msgdelete%s", compName.LowerCase)] = componentType + 
typesToCheck[fmt.Sprintf("msg%s", compName.LowerCase)] = componentMessage + typesToCheck[fmt.Sprintf("msgsend%s", compName.LowerCase)] = componentPacket + } + + absPath, err := filepath.Abs(filepath.Join(appPath, "x", moduleName, "types")) + if err != nil { + return err + } + fileSet := token.NewFileSet() + all, err := parser.ParseDir(fileSet, absPath, func(os.FileInfo) bool { return true }, parser.ParseComments) + if err != nil { + return err + } + + for _, pkg := range all { + for _, f := range pkg.Files { + ast.Inspect(f, func(x ast.Node) bool { + typeSpec, ok := x.(*ast.TypeSpec) + if !ok { + return true + } + + if _, ok := typeSpec.Type.(*ast.StructType); !ok { + return true + } + + // Check if the parsed type is from a scaffolded component with the name + if compType, ok := typesToCheck[strings.ToLower(typeSpec.Name.Name)]; ok { + err = errors.Errorf("component %s with name %s is already created (type %s exists)", + compType, + compName.Original, + typeSpec.Name.Name, + ) + return false + } + + return true + }) + if err != nil { + return err + } + } + } + return err +} + +// checkTypeProtoCreated checks if the proto type already exists in the module proto package. +func checkTypeProtoCreated( + ctx context.Context, + appPath, appName, protoDir, moduleName string, + compName multiformatname.Name, +) error { + path := filepath.Join(appPath, protoDir, appName, moduleName) + pkgs, err := protoanalysis.Parse(ctx, protoanalysis.NewCache(), path) + if err != nil { + return err + } + + for _, pkg := range pkgs { + for _, msg := range pkg.Messages { + if !strings.EqualFold(msg.Name, compName.PascalCase) { + continue + } + + return errors.Errorf("component %s with name %s is already created (type %s exists)", + componentType, + compName.Original, + msg.Name, + ) + } + } + + return nil +} + +// checkCustomTypes returns error if one of the types is invalid. 
+func checkCustomTypes(ctx context.Context, appPath, appName, protoDir, module string, fields []string) error { + path := filepath.Join(appPath, protoDir, appName, module) + customFieldTypes := make([]string, 0) + for _, field := range fields { + ft, ok := fieldType(field) + if !ok { + continue + } + + customType, ok := customFieldType(ft) + if ok { + customFieldTypes = append(customFieldTypes, customType) + } + } + return protoanalysis.HasMessages(ctx, path, customFieldTypes...) +} + +// checkForbiddenComponentName returns true if the name is forbidden as a component name. +func checkForbiddenComponentName(name multiformatname.Name) error { + // Check with names already used from the scaffolded code + switch name.LowerCase { + case + "logger", + "keeper", + "query", + "genesis", + "types", + "tx", + datatype.TypeCustom: + return errors.Errorf("%s is used by Ignite scaffolder", name.LowerCamel) + } + + if strings.HasSuffix(name.LowerCase, "test") { + return errors.New(`name cannot end with "test"`) + } + + return checkGoReservedWord(name.LowerCamel) +} + +// checkGoReservedWord checks if the name can't be used because it is a go reserved keyword. +func checkGoReservedWord(name string) error { + // Check keyword or literal + if token.Lookup(name).IsKeyword() { + return errors.Errorf("%s is a Go keyword", name) + } + + // Check with builtin identifier + switch name { + case + "panic", + "recover", + "append", + "bool", + "byte", + "cap", + "close", + "complex", + "complex64", + "complex128", + "uint16", + "copy", + "false", + "float32", + "float64", + "imag", + "int", + "int8", + "int16", + "uint32", + "int32", + "int64", + "iota", + "len", + "make", + "new", + "nil", + "uint64", + "print", + "println", + "real", + "string", + "true", + "uint", + "uint8", + "uintptr": + return errors.Errorf("%s is a Go built-in identifier", name) + } + return checkMaxLength(name) +} + +// containsCustomTypes returns true if the list of fields contains at least one custom type. 
+func containsCustomTypes(fields []string) bool { + for _, field := range fields { + ft, ok := fieldType(field) + if !ok { + continue + } + + if _, ok := customFieldType(ft); ok { + return true + } + } + return false +} + +// checks if a field is given. Returns type if true. +func fieldType(field string) (fieldType string, isCustom bool) { + fieldSplit := strings.Split(field, datatype.Separator) + if len(fieldSplit) <= 1 { + return "", false + } + + return fieldSplit[1], true +} + +// customFieldType checks whether a field type is a custom type and returns its normalized message name. +func customFieldType(fieldType string) (name string, isCustom bool) { + if _, ok := datatype.IsSupportedType(datatype.Name(fieldType)); ok { + return "", false + } + + if strings.HasPrefix(fieldType, datatype.ArrayPrefix) { + return normalizeCustomTypeName(strings.TrimPrefix(fieldType, datatype.ArrayPrefix)), true + } + + return normalizeCustomTypeName(fieldType), true +} + +func normalizeCustomTypeName(customType string) string { + name, err := multiformatname.NewName(customType) + if err != nil { + return customType + } + + return name.UpperCamel +} diff --git a/ignite/services/scaffolder/component_test.go b/ignite/services/scaffolder/component_test.go new file mode 100644 index 0000000..7bc8318 --- /dev/null +++ b/ignite/services/scaffolder/component_test.go @@ -0,0 +1,211 @@ +package scaffolder + +import ( + "context" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" +) + +func TestCheckForbiddenComponentName(t *testing.T) { + tests := []struct { + name string + compName string + shouldError bool + }{ + { + name: "should allow valid case", + compName: "valid", + shouldError: false, + }, + { + name: "should prevent forbidden name", + compName: "genesis", + shouldError: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + mfName, err := 
multiformatname.NewName(tc.compName) + require.NoError(t, err) + + err = checkForbiddenComponentName(mfName) + if tc.shouldError { + require.Error(t, err) + return + } + require.NoError(t, err) + }) + } +} + +func TestCheckGoReservedWord(t *testing.T) { + tests := []struct { + name string + word string + shouldError bool + }{ + { + name: "should allow valid case", + word: "valid", + shouldError: false, + }, + { + name: "should prevent forbidden go identifier", + word: "panic", + shouldError: true, + }, + { + name: "should prevent forbidden go keyword", + word: "for", + shouldError: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + err := checkGoReservedWord(tc.word) + if tc.shouldError { + require.Error(t, err) + return + } + require.NoError(t, err) + }) + } +} + +func TestContainsCustomTypes(t *testing.T) { + tests := []struct { + name string + fields []string + contains bool + }{ + { + name: "contains no custom types", + fields: []string{"foo", "bar"}, + contains: false, + }, + { + name: "contains one non-custom type", + fields: []string{"foo", "bar:coin"}, + contains: false, + }, + { + name: "contains one custom type", + fields: []string{"foo", "bar:CustomType"}, + contains: true, + }, + { + name: "contains one custom array type", + fields: []string{"foo", "bar:array.CustomType"}, + contains: true, + }, + { + name: "contains one built-in array type", + fields: []string{"foo", "bar:array.string"}, + contains: false, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + require.Equal(t, tc.contains, containsCustomTypes(tc.fields)) + }) + } +} + +func TestCheckTypeProtoCreated(t *testing.T) { + t.Run("should fail when proto type already exists", func(t *testing.T) { + tmp := t.TempDir() + protoFile := filepath.Join(tmp, "proto", "blog", "blog", "v1", "post.proto") + require.NoError(t, os.MkdirAll(filepath.Dir(protoFile), 0o755)) + + content := `syntax = "proto3"; +package blog.blog.v1; + +message Post {} 
+` + require.NoError(t, os.WriteFile(protoFile, []byte(content), 0o644)) + + name, err := multiformatname.NewName("post") + require.NoError(t, err) + + err = checkTypeProtoCreated(context.Background(), tmp, "blog", "proto", "blog", name) + require.EqualError(t, err, "component type with name post is already created (type Post exists)") + }) + + t.Run("should pass when proto type does not exist", func(t *testing.T) { + tmp := t.TempDir() + protoFile := filepath.Join(tmp, "proto", "blog", "blog", "v1", "comment.proto") + require.NoError(t, os.MkdirAll(filepath.Dir(protoFile), 0o755)) + + content := `syntax = "proto3"; +package blog.blog.v1; + +message Comment {} +` + require.NoError(t, os.WriteFile(protoFile, []byte(content), 0o644)) + + name, err := multiformatname.NewName("post") + require.NoError(t, err) + + require.NoError(t, checkTypeProtoCreated(context.Background(), tmp, "blog", "proto", "blog", name)) + }) +} + +func TestCustomFieldType(t *testing.T) { + tests := []struct { + name string + fieldType string + wantType string + isCustom bool + }{ + { + name: "built-in scalar type", + fieldType: "string", + isCustom: false, + }, + { + name: "built-in array type", + fieldType: "array.string", + isCustom: false, + }, + { + name: "custom scalar type", + fieldType: "ProductDetails", + wantType: "ProductDetails", + isCustom: true, + }, + { + name: "custom scalar type lower case", + fieldType: "productDetails", + wantType: "ProductDetails", + isCustom: true, + }, + { + name: "custom array type", + fieldType: "array.ProductDetails", + wantType: "ProductDetails", + isCustom: true, + }, + { + name: "custom array type lower case", + fieldType: "array.productDetails", + wantType: "ProductDetails", + isCustom: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + gotType, isCustom := customFieldType(tc.fieldType) + require.Equal(t, tc.isCustom, isCustom) + require.Equal(t, tc.wantType, gotType) + }) + } +} diff --git 
a/ignite/services/scaffolder/configs.go b/ignite/services/scaffolder/configs.go new file mode 100644 index 0000000..22edf58 --- /dev/null +++ b/ignite/services/scaffolder/configs.go @@ -0,0 +1,80 @@ +package scaffolder + +import ( + "path/filepath" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/goanalysis" + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/templates/field" + modulecreate "github.com/ignite/cli/v29/ignite/templates/module/create" +) + +// CreateConfigs creates a new configs in the scaffolded module. +func (s Scaffolder) CreateConfigs( + moduleName string, + configs ...string, +) error { + // If no module is provided, we add the type to the app's module + if moduleName == "" { + moduleName = s.modpath.Package + } + mfName, err := multiformatname.NewName(moduleName, multiformatname.NoNumber) + if err != nil { + return err + } + moduleName = mfName.LowerCase + + // Check if the module already exist + ok, err := moduleExists(s.appPath, moduleName) + if err != nil { + return err + } + if !ok { + return errors.Errorf("the module %v not exist", moduleName) + } + + if err := checkConfigCreated(s.appPath, moduleName, configs); err != nil { + return err + } + + // Parse config with the associated type + configsFields, err := field.ParseFields(configs, checkForbiddenTypeIndex) + if err != nil { + return err + } + + opts := modulecreate.ConfigsOptions{ + ModuleName: moduleName, + Configs: configsFields, + AppName: s.modpath.Package, + ProtoDir: s.protoDir, + ProtoVer: "v1", // TODO(@julienrbrt): possibly in the future add flag to specify custom proto version. + } + + g, err := modulecreate.NewModuleConfigs(opts) + if err != nil { + return err + } + + return s.Run(g) +} + +// checkConfigCreated checks if the config has been already created. 
+func checkConfigCreated(appPath, moduleName string, configs []string) (err error) { + path := filepath.Join(appPath, "x", moduleName, "module") + ok, err := goanalysis.HasAnyStructFieldsInPkg(path, "Module", configs) + if err != nil { + return err + } + + if ok { + return errors.Errorf( + "duplicated configs (%s) module %s", + strings.Join(configs, " "), + moduleName, + ) + } + return nil +} diff --git a/ignite/services/scaffolder/init.go b/ignite/services/scaffolder/init.go new file mode 100644 index 0000000..eade199 --- /dev/null +++ b/ignite/services/scaffolder/init.go @@ -0,0 +1,171 @@ +package scaffolder + +import ( + "context" + "fmt" + "path/filepath" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/cliui" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gomodulepath" + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/templates/app" + "github.com/ignite/cli/v29/ignite/templates/field" + modulecreate "github.com/ignite/cli/v29/ignite/templates/module/create" +) + +// Init initializes a new app with name and given options. 
// Init scaffolds a new blockchain app and returns the project path and its
// go module path. When root is empty, a folder named after the app is created.
func Init(
	ctx context.Context,
	root, name, addressPrefix string,
	coinType uint32,
	defaultDenom, protoDir string,
	noDefaultModule, minimal bool,
	params, moduleConfigs []string,
) (string, string, error) {
	pathInfo, err := gomodulepath.Parse(name)
	if err != nil {
		return "", "", err
	}

	// Check if the module name is valid (no numbers)
	for _, r := range pathInfo.Package {
		if r >= '0' && r <= '9' {
			return "", "", errors.Errorf("invalid app name %s: cannot contain numbers", pathInfo.Package)
		}
	}

	// Create a new folder named as the blockchain when a custom path is not specified
	var appFolder string
	if root == "" {
		appFolder = pathInfo.Root
	}

	if root, err = filepath.Abs(root); err != nil {
		return "", "", err
	}

	var (
		path     = filepath.Join(root, appFolder)
		gomodule = pathInfo.RawPath
	)
	// create the project
	_, err = generate(
		ctx,
		pathInfo,
		addressPrefix,
		coinType,
		defaultDenom,
		protoDir,
		path,
		noDefaultModule,
		minimal,
		params,
		moduleConfigs,
	)
	return path, gomodule, err
}

// generate renders the app template into absRoot and, unless noDefaultModule
// is set, scaffolds a default module named after the app package.
func generate(
	ctx context.Context,
	pathInfo gomodulepath.Path,
	addressPrefix string,
	coinType uint32,
	defaultDenom,
	protoDir,
	absRoot string,
	noDefaultModule, minimal bool,
	params, moduleConfigs []string,
) (xgenny.SourceModification, error) {
	// Parse params with the associated type
	paramsFields, err := field.ParseFields(params, checkForbiddenTypeIndex)
	if err != nil {
		return xgenny.SourceModification{}, err
	}

	// Parse configs with the associated type
	configsFields, err := field.ParseFields(moduleConfigs, checkForbiddenTypeIndex)
	if err != nil {
		return xgenny.SourceModification{}, err
	}

	githubPath := gomodulepath.ExtractAppPath(pathInfo.RawPath)
	if !strings.Contains(githubPath, "/") {
		// A username must be added when the app module appPath has a single element
		githubPath = fmt.Sprintf("username/%s", githubPath)
	}

	appGen, err := app.NewGenerator(&app.Options{
		// generate application template
		ModulePath:       pathInfo.RawPath,
		AppName:          pathInfo.Package,
		ProtoDir:         protoDir,
		GitHubPath:       githubPath,
		BinaryNamePrefix: pathInfo.Root,
		AddressPrefix:    addressPrefix,
		CoinType:         coinType,
		DefaultDenom:     defaultDenom,
		IsChainMinimal:   minimal,
	})
	if err != nil {
		return xgenny.SourceModification{}, err
	}

	// generate module template
	smc, err := xgenny.NewRunner(ctx, absRoot).RunAndApply(appGen)
	if err != nil {
		return smc, err
	}

	if !noDefaultModule {
		moduleName, err := multiformatname.NewName(pathInfo.Package, multiformatname.NoNumber)
		if err != nil {
			return smc, err
		}

		opts := &modulecreate.CreateOptions{
			ModuleName: moduleName.LowerCase, // App module
			ModulePath: pathInfo.RawPath,
			AppName:    pathInfo.Package,
			ProtoDir:   protoDir,
			ProtoVer:   "v1", // TODO(@julienrbrt): possibly in the future add flag to specify custom proto version.
			Params:     paramsFields,
			Configs:    configsFields,
			IsIBC:      false,
		}
		// Check if the module name is valid
		// NOTE(review): opts.AppPath is never assigned in the literal above, so the
		// zero value ("") is passed to checkModuleName here — confirm this is
		// intended (checkModuleName would then scan "x" relative to the cwd).
		if err := checkModuleName(opts.AppPath, opts.ModuleName); err != nil {
			return smc, err
		}

		moduleGen, err := modulecreate.NewGenerator(opts)
		if err != nil {
			return smc, err
		}

		runner := xgenny.NewRunner(ctx, absRoot)
		if err := runner.Run(moduleGen, modulecreate.NewAppModify(opts)); err != nil {
			return smc, err
		}
		// generate module template
		smm, err := runner.ApplyModifications()
		if err != nil {
			return smc, err
		}
		smc.Merge(smm)
	}

	return smc, err
}

// AskOverwriteFiles returns an apply hook that asks the user to confirm
// overwriting the listed duplicated files; it is a no-op when there are none.
func AskOverwriteFiles(session *cliui.Session) func(_, _, _ []string) error {
	return func(_, _, duplicated []string) error {
		if len(duplicated) == 0 {
			return nil
		}
		question := fmt.Sprintf("Do you want to overwrite the existing files? \n%s", strings.Join(duplicated, "\n"))
		return session.AskConfirm(question)
	}
}

diff --git a/ignite/services/scaffolder/message.go b/ignite/services/scaffolder/message.go
new file mode 100644
index 0000000..3f8bdbf
--- /dev/null
+++ b/ignite/services/scaffolder/message.go
@@ -0,0 +1,164 @@
package scaffolder

import (
	"context"
	"fmt"

	"github.com/gobuffalo/genny/v2"

	"github.com/ignite/cli/v29/ignite/pkg/errors"
	"github.com/ignite/cli/v29/ignite/pkg/multiformatname"
	"github.com/ignite/cli/v29/ignite/templates/field"
	"github.com/ignite/cli/v29/ignite/templates/field/datatype"
	"github.com/ignite/cli/v29/ignite/templates/message"
)

// messageOptions represents configuration for the message scaffolding.
type messageOptions struct {
	description       string // CLI command description for the message.
	signer            string // name of the message signer field.
	withoutSimulation bool   // when true, simulation code is not generated.
}

// newMessageOptions returns a messageOptions with default options.
func newMessageOptions(messageName string) messageOptions {
	return messageOptions{
		description: fmt.Sprintf("Broadcast message %s", messageName),
		signer:      "creator",
	}
}

// MessageOption configures the message scaffolding.
type MessageOption func(*messageOptions)

// WithDescription provides a custom description for the message CLI command.
func WithDescription(desc string) MessageOption {
	return func(m *messageOptions) {
		m.description = desc
	}
}

// WithSigner provides a custom signer name for the message.
func WithSigner(signer string) MessageOption {
	return func(m *messageOptions) {
		m.signer = signer
	}
}

// WithoutSimulation disables generating messages simulation.
func WithoutSimulation() MessageOption {
	return func(m *messageOptions) {
		m.withoutSimulation = true
	}
}

// AddMessage adds a new message to scaffolded app.
func (s Scaffolder) AddMessage(
	ctx context.Context,
	moduleName,
	msgName string,
	fields,
	resFields []string,
	options ...MessageOption,
) error {
	// Create the options
	scaffoldingOpts := newMessageOptions(msgName)
	for _, apply := range options {
		apply(&scaffoldingOpts)
	}

	// If no module is provided, we add the type to the app's module
	if moduleName == "" {
		moduleName = s.modpath.Package
	}
	mfName, err := multiformatname.NewName(moduleName, multiformatname.NoNumber)
	if err != nil {
		return err
	}
	moduleName = mfName.LowerCase

	name, err := multiformatname.NewName(msgName)
	if err != nil {
		return err
	}

	if err := checkComponentValidity(s.appPath, moduleName, name, false); err != nil {
		return err
	}

	// Check and parse provided fields
	if err := checkCustomTypes(
		ctx,
		s.appPath,
		s.modpath.Package,
		s.protoDir,
		moduleName,
		fields,
	); err != nil {
		return err
	}
	parsedMsgFields, err := field.ParseFields(fields, checkForbiddenMessageField, scaffoldingOpts.signer)
	if err != nil {
		return err
	}

	// Check and parse provided response fields
	if err := checkCustomTypes(
		ctx,
		s.appPath,
		s.modpath.Package,
		s.protoDir,
		moduleName,
		resFields,
	); err != nil {
		return err
	}
	parsedResFields, err := field.ParseFields(resFields, checkGoReservedWord, scaffoldingOpts.signer)
	if err != nil {
		return err
	}

	mfSigner, err := multiformatname.NewName(scaffoldingOpts.signer)
	if err != nil {
		return err
	}

	var (
		g    *genny.Generator
		opts = &message.Options{
			AppName:      s.modpath.Package,
			ProtoDir:     s.protoDir,
			ProtoVer:     "v1", // TODO(@julienrbrt): possibly in the future add flag to specify custom proto version.
			ModulePath:   s.modpath.RawPath,
			ModuleName:   moduleName,
			MsgName:      name,
			Fields:       parsedMsgFields,
			ResFields:    parsedResFields,
			MsgDesc:      scaffoldingOpts.description,
			MsgSigner:    mfSigner,
			NoSimulation: scaffoldingOpts.withoutSimulation,
		}
	)

	// Scaffold
	g, err = message.NewGenerator(opts)
	if err != nil {
		return err
	}

	return s.Run(g)
}

// checkForbiddenMessageField returns true if the name is forbidden as a message name.
func checkForbiddenMessageField(name string) error {
	mfName, err := multiformatname.NewName(name)
	if err != nil {
		return err
	}

	if mfName.LowerCase == datatype.TypeCustom {
		return errors.Errorf("%s is used by the message scaffolder", name)
	}

	return checkGoReservedWord(name)
}

diff --git a/ignite/services/scaffolder/migration.go b/ignite/services/scaffolder/migration.go
new file mode 100644
index 0000000..d691653
--- /dev/null
+++ b/ignite/services/scaffolder/migration.go
@@ -0,0 +1,59 @@
package scaffolder

import (
	"os"
	"path/filepath"

	"github.com/ignite/cli/v29/ignite/pkg/errors"
	"github.com/ignite/cli/v29/ignite/pkg/multiformatname"
	modulemigration "github.com/ignite/cli/v29/ignite/templates/module/migration"
)

// CreateModuleMigration scaffolds a new module migration inside an existing module.
func (s Scaffolder) CreateModuleMigration(moduleName string) error {
	mfModuleName, err := multiformatname.NewName(moduleName, multiformatname.NoNumber)
	if err != nil {
		return err
	}
	moduleName = mfModuleName.LowerCase

	ok, err := moduleExists(s.appPath, moduleName)
	if err != nil {
		return err
	}
	if !ok {
		return errors.Errorf("the module %s doesn't exist", moduleName)
	}

	// Read the module's module.go to discover its current consensus version.
	moduleFilePath := filepath.Join(s.appPath, moduleDir, moduleName, modulePkg, "module.go")
	content, err := os.ReadFile(moduleFilePath)
	if err != nil {
		return err
	}

	fromVersion, err := modulemigration.ConsensusVersion(string(content))
	if err != nil {
		return err
	}

	// Migrate from the detected consensus version to the next one.
	opts := &modulemigration.Options{
		ModuleName:  moduleName,
		ModulePath:  s.modpath.RawPath,
		FromVersion: fromVersion,
		ToVersion:   fromVersion + 1,
	}

	// Refuse to scaffold when the target migration directory already exists.
	versionDir := filepath.Join(s.appPath, opts.MigrationDir())
	if _, err := os.Stat(versionDir); err == nil {
		return errors.Errorf("migration version %s already exists for module %s", opts.MigrationVersion(), moduleName)
	} else if !os.IsNotExist(err) {
		return err
	}

	g, err := modulemigration.NewGenerator(opts)
	if err != nil {
		return err
	}

	return s.Run(g)
}

diff --git a/ignite/services/scaffolder/module.go b/ignite/services/scaffolder/module.go
new file mode 100644
index 0000000..20af263
--- /dev/null
+++ b/ignite/services/scaffolder/module.go
@@ -0,0 +1,327 @@
package scaffolder

import (
	"go/token"
	"os"
	"path/filepath"
	"strings"

	"github.com/gobuffalo/genny/v2"

	appanalysis "github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis/app"
	"github.com/ignite/cli/v29/ignite/pkg/errors"
	"github.com/ignite/cli/v29/ignite/pkg/multiformatname"
	"github.com/ignite/cli/v29/ignite/templates/field"
	"github.com/ignite/cli/v29/ignite/templates/module"
	modulecreate "github.com/ignite/cli/v29/ignite/templates/module/create"
)

const (
	moduleDir = "x"
	modulePkg = "module"
)

var (
	// reservedNames are either names from the default modules defined in a Cosmos-SDK app or names used in the default query and tx CLI namespace.
	// A new module's name can't be equal to a reserved name.
	// A map is used for direct comparing.
	reservedNames = map[string]struct{}{
		"account":            {},
		"block":              {},
		"broadcast":          {},
		"encode":             {},
		"multisign":          {},
		"sign":               {},
		"tx":                 {},
		"txs":                {},
		"consumer":           {}, // ICS consumer module
		"ccvconsumer":        {}, // ICS consumer module
		"CCV":                {}, // ICS consumer module
		"capability":         {},
		"auth":               {},
		"bank":               {},
		"distribution":       {},
		"staking":            {},
		"slashing":           {},
		"gov":                {},
		"mint":               {},
		"ibc":                {},
		"genutil":            {},
		"evidence":           {},
		"authz":              {},
		"transfer":           {}, // IBC transfer
		"interchainaccounts": {},
		"feeibc":             {},
		"feegrant":           {},
		"params":             {},
		"upgrade":            {},
		"vesting":            {},
		"circuit":            {},
		"nft":                {},
		"group":              {},
		"consensus":          {},
		"epochs":             {},
		"protocolpool":       {},
	}

	// defaultStoreKeys are the names of the default store keys defined in a Cosmos-SDK app.
	// A new module's name can't have a defined store key in its prefix because of potential store key collision.
	defaultStoreKeys = []string{
		"capability",
		"acc", // auth module
		"bank",
		"distribution",
		"staking",
		"slashing",
		"gov",
		"mint",
		"ibc",
		"transfer", // IBC transfer
		"feeibc",
		"evidence",
		"feegrant",
		"params",
		"upgrade",
		"circuit",
		"nft",
		"group",
		"consensus",
	}
)

// moduleCreationOptions holds options for creating a new module.
type moduleCreationOptions struct {
	// ibc true if the module is an ibc module.
	ibc bool

	// params list of parameters.
	params []string

	// moduleConfigs list of module configs.
	moduleConfigs []string

	// ibcChannelOrdering ibc channel ordering.
	ibcChannelOrdering string

	// dependencies list of module dependencies.
	dependencies []modulecreate.Dependency
}

// ModuleCreationOption configures Chain.
+type ModuleCreationOption func(*moduleCreationOptions) + +// WithIBC scaffolds a module with IBC enabled. +func WithIBC() ModuleCreationOption { + return func(m *moduleCreationOptions) { + m.ibc = true + } +} + +// WithParams scaffolds a module with params. +func WithParams(params []string) ModuleCreationOption { + return func(m *moduleCreationOptions) { + m.params = params + } +} + +// WithModuleConfigs scaffolds a module with module configs. +func WithModuleConfigs(moduleConfigs []string) ModuleCreationOption { + return func(m *moduleCreationOptions) { + m.moduleConfigs = moduleConfigs + } +} + +// WithIBCChannelOrdering configures channel ordering of the IBC module. +func WithIBCChannelOrdering(ordering string) ModuleCreationOption { + return func(m *moduleCreationOptions) { + switch ordering { + case "ordered": + m.ibcChannelOrdering = "ORDERED" + case "unordered": + m.ibcChannelOrdering = "UNORDERED" + default: + m.ibcChannelOrdering = "NONE" + } + } +} + +// WithDependencies specifies the name of the modules that the module depends on. +func WithDependencies(dependencies []modulecreate.Dependency) ModuleCreationOption { + return func(m *moduleCreationOptions) { + m.dependencies = dependencies + } +} + +// CreateModule creates a new empty module in the scaffolded app. 
func (s Scaffolder) CreateModule(
	moduleName string,
	options ...ModuleCreationOption,
) error {
	mfName, err := multiformatname.NewName(moduleName, multiformatname.NoNumber)
	if err != nil {
		return err
	}
	moduleName = mfName.LowerCase

	// Check if the module name is valid
	if err := checkModuleName(s.appPath, moduleName); err != nil {
		return err
	}

	// Check if the module already exist
	ok, err := moduleExists(s.appPath, moduleName)
	if err != nil {
		return err
	}
	if ok {
		return errors.Errorf("the module %v already exists", moduleName)
	}

	// Apply the options
	var creationOpts moduleCreationOptions
	for _, apply := range options {
		apply(&creationOpts)
	}

	// Parse params with the associated type
	params, err := field.ParseFields(creationOpts.params, checkForbiddenTypeIndex)
	if err != nil {
		return err
	}

	// Parse configs with the associated type
	configs, err := field.ParseFields(creationOpts.moduleConfigs, checkForbiddenTypeIndex)
	if err != nil {
		return err
	}

	// Check dependencies
	if err := checkDependencies(creationOpts.dependencies, s.appPath); err != nil {
		return err
	}

	opts := &modulecreate.CreateOptions{
		ModuleName:   moduleName,
		ModulePath:   s.modpath.RawPath,
		Params:       params,
		Configs:      configs,
		AppName:      s.modpath.Package,
		AppPath:      s.appPath,
		ProtoDir:     s.protoDir,
		ProtoVer:     "v1", // TODO(@julienrbrt): possibly in the future add flag to specify custom proto version.
		IsIBC:        creationOpts.ibc,
		IBCOrdering:  creationOpts.ibcChannelOrdering,
		Dependencies: creationOpts.dependencies,
	}

	g, err := modulecreate.NewGenerator(opts)
	if err != nil {
		return err
	}
	gens := []*genny.Generator{g}

	// Scaffold IBC module
	if opts.IsIBC {
		g, err = modulecreate.NewIBC(opts)
		if err != nil {
			return err
		}
		gens = append(gens, g)
	}
	gens = append(gens, modulecreate.NewAppModify(opts))

	err = s.Run(gens...)
	// Validation errors from the runner are tolerated; any other error aborts.
	var validationErr errors.ValidationError
	if err != nil && !errors.As(err, &validationErr) {
		return err
	}
	return nil
}

// moduleExists checks if the module exists in the app.
func moduleExists(appPath string, moduleName string) (bool, error) {
	absPath, err := filepath.Abs(filepath.Join(appPath, moduleDir, moduleName))
	if err != nil {
		return false, err
	}

	_, err = os.Stat(absPath)
	if os.IsNotExist(err) {
		// The module doesn't exist
		return false, nil
	}

	// Any other stat error is returned alongside a false result.
	return err == nil, err
}

// checkModuleName checks if the name can be used as a module name.
func checkModuleName(appPath, moduleName string) error {
	// go keyword
	if token.Lookup(moduleName).IsKeyword() {
		return errors.Errorf("%s is a Go keyword", moduleName)
	}

	// check if the name is a reserved name
	if _, ok := reservedNames[moduleName]; ok {
		return errors.Errorf("%s is a reserved name and can't be used as a module name", moduleName)
	}

	checkPrefix := func(name, prefix string) error {
		if strings.HasPrefix(name, prefix) {
			return errors.Errorf("the module name can't be prefixed with %s because of potential store key collision", prefix)
		}
		return nil
	}

	// check if the name can imply potential store key collision
	for _, defaultStoreKey := range defaultStoreKeys {
		if err := checkPrefix(moduleName, defaultStoreKey); err != nil {
			return err
		}
	}

	// check store key with user's defined modules
	// we consider all user's defined modules use the module name as the store key
	entries, err := os.ReadDir(filepath.Join(appPath, moduleDir))
	if os.IsNotExist(err) {
		return nil
	}
	if err != nil {
		return err
	}
	for _, entry := range entries {
		if !entry.IsDir() {
			continue
		}
		if err := checkPrefix(moduleName, entry.Name()); err != nil {
			return err
		}
	}

	return nil
}

// checkDependencies perform checks on the dependencies.
+func checkDependencies(dependencies []modulecreate.Dependency, appPath string) error { + depMap := make(map[string]struct{}) + for _, dep := range dependencies { + // check the dependency has been registered + path := filepath.Join(appPath, module.PathAppModule) + if err := appanalysis.CheckKeeper(path, dep.KeeperName()); err != nil { + return errors.Errorf( + "the module cannot have %s as a dependency: %w", + dep.Name, + err, + ) + } + + // check duplicated + _, ok := depMap[dep.Name] + if ok { + return errors.Errorf("%s is a duplicated dependency", dep) + } + depMap[dep.Name] = struct{}{} + } + + return nil +} diff --git a/ignite/services/scaffolder/packet.go b/ignite/services/scaffolder/packet.go new file mode 100644 index 0000000..60809cf --- /dev/null +++ b/ignite/services/scaffolder/packet.go @@ -0,0 +1,188 @@ +package scaffolder + +import ( + "context" + "os" + "path/filepath" + + "github.com/gobuffalo/genny/v2" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/templates/field" + "github.com/ignite/cli/v29/ignite/templates/field/datatype" + "github.com/ignite/cli/v29/ignite/templates/ibc" +) + +const ( + ibcModuleImplementation = "module_ibc.go" +) + +// packetOptions represents configuration for the packet scaffolding. +type packetOptions struct { + withoutMessage bool + signer string +} + +// newPacketOptions returns a packetOptions with default options. +func newPacketOptions() packetOptions { + return packetOptions{ + signer: "creator", + } +} + +// PacketOption configures the packet scaffolding. +type PacketOption func(*packetOptions) + +// PacketWithoutMessage disables generating sdk compatible messages and tx related APIs. +func PacketWithoutMessage() PacketOption { + return func(o *packetOptions) { + o.withoutMessage = true + } +} + +// PacketWithSigner provides a custom signer name for the packet. 
func PacketWithSigner(signer string) PacketOption {
	return func(m *packetOptions) {
		m.signer = signer
	}
}

// AddPacket adds a new type stype to scaffolded app by using optional type fields.
func (s Scaffolder) AddPacket(
	ctx context.Context,
	moduleName,
	packetName string,
	packetFields,
	ackFields []string,
	options ...PacketOption,
) error {
	// apply options.
	o := newPacketOptions()
	for _, apply := range options {
		apply(&o)
	}

	mfName, err := multiformatname.NewName(moduleName, multiformatname.NoNumber)
	if err != nil {
		return err
	}
	moduleName = mfName.LowerCase

	name, err := multiformatname.NewName(packetName)
	if err != nil {
		return err
	}

	if err := checkComponentValidity(s.appPath, moduleName, name, o.withoutMessage); err != nil {
		return err
	}

	mfSigner, err := multiformatname.NewName(o.signer)
	if err != nil {
		return err
	}

	// Module must implement IBC
	ok, err := isIBCModule(s.appPath, moduleName)
	if err != nil {
		return err
	}
	if !ok {
		return errors.Errorf("the module %s doesn't implement IBC module interface", moduleName)
	}

	// The signer is only attached to parsed fields when messages are generated.
	signer := ""
	if !o.withoutMessage {
		signer = o.signer
	}

	// Check and parse packet fields
	if err := checkCustomTypes(ctx, s.appPath, s.modpath.Package, s.protoDir, moduleName, packetFields); err != nil {
		return err
	}
	parsedPacketFields, err := field.ParseFields(packetFields, checkForbiddenPacketField, signer)
	if err != nil {
		return err
	}

	// check and parse acknowledgment fields
	if err := checkCustomTypes(ctx, s.appPath, s.modpath.Package, s.protoDir, moduleName, ackFields); err != nil {
		return err
	}
	parsedAcksFields, err := field.ParseFields(ackFields, checkGoReservedWord, signer)
	if err != nil {
		return err
	}

	// Generate the packet
	var (
		g    *genny.Generator
		opts = &ibc.PacketOptions{
			AppName:    s.modpath.Package,
			ProtoDir:   s.protoDir,
			ProtoVer:   "v1", // TODO(@julienrbrt): possibly in the future add flag to specify custom proto version.
			ModulePath: s.modpath.RawPath,
			ModuleName: moduleName,
			PacketName: name,
			Fields:     parsedPacketFields,
			AckFields:  parsedAcksFields,
			NoMessage:  o.withoutMessage,
			MsgSigner:  mfSigner,
		}
	)
	g, err = ibc.NewPacket(opts)
	if err != nil {
		return err
	}
	return s.Run(g)
}

// isIBCModule returns true if the provided module implements the IBC module interface
// we naively check the existence of module_ibc.go for this check.
func isIBCModule(appPath string, moduleName string) (bool, error) {
	absPath, err := filepath.Abs(filepath.Join(appPath, moduleDir, moduleName, modulePkg, ibcModuleImplementation))
	if err != nil {
		return false, err
	}

	_, err = os.Stat(absPath)
	if err != nil && !os.IsNotExist(err) {
		return false, err
	} else if err == nil {
		// Is an IBC module
		return true, err
	}

	// check the legacy Path
	absPathLegacy, err := filepath.Abs(filepath.Join(appPath, moduleDir, moduleName, ibcModuleImplementation))
	if err != nil {
		return false, err
	}
	_, err = os.Stat(absPathLegacy)
	if os.IsNotExist(err) {
		// Not an IBC module
		return false, nil
	}

	return true, err
}

// checkForbiddenPacketField returns true if the name is forbidden as a packet name.
func checkForbiddenPacketField(name string) error {
	mfName, err := multiformatname.NewName(name)
	if err != nil {
		return err
	}

	// Names used by the generated packet plumbing itself are rejected.
	switch mfName.LowerCase {
	case
		"sender",
		"port",
		"channelid",
		datatype.TypeCustom:
		return errors.Errorf("%s is used by the packet scaffolder", name)
	}

	return checkGoReservedWord(name)
}

diff --git a/ignite/services/scaffolder/params.go b/ignite/services/scaffolder/params.go
new file mode 100644
index 0000000..66a4354
--- /dev/null
+++ b/ignite/services/scaffolder/params.go
@@ -0,0 +1,80 @@
package scaffolder

import (
	"path/filepath"
	"strings"

	"github.com/ignite/cli/v29/ignite/pkg/errors"
	"github.com/ignite/cli/v29/ignite/pkg/goanalysis"
	"github.com/ignite/cli/v29/ignite/pkg/multiformatname"
	"github.com/ignite/cli/v29/ignite/templates/field"
	modulecreate "github.com/ignite/cli/v29/ignite/templates/module/create"
)

// CreateParams creates a new params in the scaffolded module.
func (s Scaffolder) CreateParams(
	moduleName string,
	params ...string,
) error {
	// If no module is provided, we add the type to the app's module
	if moduleName == "" {
		moduleName = s.modpath.Package
	}
	mfName, err := multiformatname.NewName(moduleName, multiformatname.NoNumber)
	if err != nil {
		return err
	}
	moduleName = mfName.LowerCase

	// Check if the module already exist
	ok, err := moduleExists(s.appPath, moduleName)
	if err != nil {
		return err
	}
	if !ok {
		// NOTE(review): message reads "the module %v not exist" — likely meant
		// "doesn't exist"; left unchanged here since it is a runtime string.
		return errors.Errorf("the module %v not exist", moduleName)
	}

	if err := checkParamCreated(s.appPath, moduleName, params); err != nil {
		return err
	}

	// Parse params with the associated type
	paramsFields, err := field.ParseFields(params, checkForbiddenTypeIndex)
	if err != nil {
		return err
	}

	opts := modulecreate.ParamsOptions{
		ModuleName: moduleName,
		Params:     paramsFields,
		AppName:    s.modpath.Package,
		ProtoDir:   s.protoDir,
		ProtoVer:   "v1", // TODO(@julienrbrt): possibly in the future add flag to specify custom proto version.
	}

	g, err := modulecreate.NewModuleParam(opts)
	if err != nil {
		return err
	}

	return s.Run(g)
}

// checkParamCreated checks if the parameter has been already created.
func checkParamCreated(appPath, moduleName string, params []string) error {
	path := filepath.Join(appPath, "x", moduleName, "types")
	ok, err := goanalysis.HasAnyStructFieldsInPkg(path, "Params", params)
	if err != nil {
		return err
	}

	if ok {
		return errors.Errorf(
			"duplicated params (%s) module %s",
			strings.Join(params, " "),
			moduleName,
		)
	}
	return nil
}

diff --git a/ignite/services/scaffolder/patch.go b/ignite/services/scaffolder/patch.go
new file mode 100644
index 0000000..e20b4f7
--- /dev/null
+++ b/ignite/services/scaffolder/patch.go
@@ -0,0 +1 @@
package scaffolder

diff --git a/ignite/services/scaffolder/query.go b/ignite/services/scaffolder/query.go
new file mode 100644
index 0000000..2af0301
--- /dev/null
+++ b/ignite/services/scaffolder/query.go
@@ -0,0 +1,84 @@
package scaffolder

import (
	"context"

	"github.com/gobuffalo/genny/v2"

	"github.com/ignite/cli/v29/ignite/pkg/errors"
	"github.com/ignite/cli/v29/ignite/pkg/multiformatname"
	"github.com/ignite/cli/v29/ignite/templates/field"
	"github.com/ignite/cli/v29/ignite/templates/query"
)

// AddQuery adds a new query to scaffolded app.
func (s Scaffolder) AddQuery(
	ctx context.Context,
	moduleName,
	queryName,
	description string,
	reqFields,
	resFields []string,
	paginated bool,
) error {
	// If no module is provided, we add the type to the app's module
	if moduleName == "" {
		moduleName = s.modpath.Package
	}
	mfName, err := multiformatname.NewName(moduleName, multiformatname.NoNumber)
	if err != nil {
		return err
	}
	moduleName = mfName.LowerCase

	name, err := multiformatname.NewName(queryName)
	if err != nil {
		return err
	}

	if err := checkComponentValidity(s.appPath, moduleName, name, true); err != nil {
		return err
	}

	// Check and parse provided request fields
	if ok := containsCustomTypes(reqFields); ok {
		return errors.New("query request params can't contain custom type")
	}
	parsedReqFields, err := field.ParseFields(reqFields, checkGoReservedWord)
	if err != nil {
		return err
	}

	// Check and parse provided response fields
	if err := checkCustomTypes(ctx, s.appPath, s.modpath.Package, s.protoDir, moduleName, resFields); err != nil {
		return err
	}
	parsedResFields, err := field.ParseFields(resFields, checkGoReservedWord)
	if err != nil {
		return err
	}

	var (
		g    *genny.Generator
		opts = &query.Options{
			AppName:     s.modpath.Package,
			ProtoDir:    s.protoDir,
			ProtoVer:    "v1", // TODO(@julienrbrt): possibly in the future add flag to specify custom proto version.
			ModulePath:  s.modpath.RawPath,
			ModuleName:  moduleName,
			QueryName:   name,
			ReqFields:   parsedReqFields,
			ResFields:   parsedResFields,
			Description: description,
			Paginated:   paginated,
		}
	)

	// Scaffold
	g, err = query.NewGenerator(opts)
	if err != nil {
		return err
	}

	return s.Run(g)
}

diff --git a/ignite/services/scaffolder/scaffolder.go b/ignite/services/scaffolder/scaffolder.go
new file mode 100644
index 0000000..aa7bf8a
--- /dev/null
+++ b/ignite/services/scaffolder/scaffolder.go
@@ -0,0 +1,192 @@
// Package scaffolder initializes Ignite CLI apps and modifies existing ones
// to add more features in a later time.
package scaffolder

import (
	"context"
	"os"
	"path/filepath"

	"github.com/gobuffalo/genny/v2"

	chainconfig "github.com/ignite/cli/v29/ignite/config/chain"
	"github.com/ignite/cli/v29/ignite/pkg/cache"
	"github.com/ignite/cli/v29/ignite/pkg/cosmosanalysis"
	"github.com/ignite/cli/v29/ignite/pkg/cosmosbuf"
	"github.com/ignite/cli/v29/ignite/pkg/cosmosgen"
	"github.com/ignite/cli/v29/ignite/pkg/cosmosver"
	"github.com/ignite/cli/v29/ignite/pkg/errors"
	"github.com/ignite/cli/v29/ignite/pkg/gocmd"
	"github.com/ignite/cli/v29/ignite/pkg/gomodulepath"
	"github.com/ignite/cli/v29/ignite/pkg/xgenny"
	"github.com/ignite/cli/v29/ignite/version"
)

// Scaffolder is Ignite CLI app scaffolder.
type Scaffolder struct {
	// Version of the chain
	Version cosmosver.Version

	// appPath path of the app.
	appPath string

	// protoDir path of the proto folder.
	protoDir string

	// modpath represents the go module Path of the app.
	modpath gomodulepath.Path

	// runner represents the scaffold xgenny runner.
	runner *xgenny.Runner
}

// New creates a new scaffold app.
func New(context context.Context, appPath, protoDir string) (Scaffolder, error) {
	path, err := filepath.Abs(appPath)
	if err != nil {
		return Scaffolder{}, err
	}

	modpath, path, err := gomodulepath.Find(path)
	if err != nil {
		return Scaffolder{}, err
	}

	ver, err := cosmosver.Detect(path)
	if err != nil {
		return Scaffolder{}, err
	}

	// Make sure that the app was scaffolded with a supported Cosmos SDK version
	if err := version.AssertSupportedCosmosSDKVersion(ver); err != nil {
		return Scaffolder{}, err
	}

	if err := cosmosanalysis.IsChainPath(path); err != nil {
		return Scaffolder{}, err
	}

	s := Scaffolder{
		Version:  ver,
		appPath:  path,
		protoDir: protoDir,
		modpath:  modpath,
		runner:   xgenny.NewRunner(context, path),
	}

	return s, nil
}

// ApplyModifications applies the pending modifications of the underlying runner.
func (s Scaffolder) ApplyModifications(options ...xgenny.ApplyOption) (xgenny.SourceModification, error) {
	return s.runner.ApplyModifications(options...)
}

// Run executes the given generators with the scaffolder's runner.
func (s Scaffolder) Run(gens ...*genny.Generator) error {
	return s.runner.Run(gens...)
}

// PostScaffold runs post-scaffolding steps (proto generation, go mod tidy, formatting).
func (s Scaffolder) PostScaffold(ctx context.Context, cacheStorage cache.Storage, skipProto bool) error {
	return PostScaffold(ctx, cacheStorage, s.appPath, s.protoDir, s.modpath.RawPath, skipProto)
}

func PostScaffold(ctx context.Context, cacheStorage cache.Storage, path, protoDir, goModPath string, skipProto bool) error {
	wd, err := os.Getwd()
	if err != nil {
		return errors.Errorf("failed to get current working directory: %w", err)
	}

	// go to the app path, in other to use the go tools
	if err := os.Chdir(path); err != nil {
		return errors.Errorf("failed to change directory to %s: %w", path, err)
	}

	if !skipProto {
		// go mod tidy prior and after the proto generation is required.
		if err := gocmd.ModTidy(ctx, path); err != nil {
			return err
		}

		if err := protoc(ctx, cacheStorage, path, protoDir, goModPath); err != nil {
			return err
		}
	}

	if err := gocmd.ModTidy(ctx, path); err != nil {
		return err
	}

	if err := gocmd.Fmt(ctx, path); err != nil {
		return err
	}

	if err := gocmd.GoImports(ctx, path); err != nil {
		return err
	}

	// return to the original working directory
	if err := os.Chdir(wd); err != nil {
		return errors.Errorf("failed to change directory to %s: %w", wd, err)
	}

	return nil
}

// protoc generates client/Go code from the chain's proto files based on the chain config.
func protoc(ctx context.Context, cacheStorage cache.Storage, projectPath, protoDir, goModPath string) error {
	confpath, err := chainconfig.LocateDefault(projectPath)
	if err != nil {
		return err
	}
	conf, err := chainconfig.ParseFile(confpath)
	if err != nil {
		return err
	}

	options := []cosmosgen.Option{
		cosmosgen.UpdateBufModule(),
		cosmosgen.WithGoGeneration(),
	}

	// Generate Typescript client code if it's enabled
	if conf.Client.Typescript.Path != "" { //nolint:staticcheck,nolintlint
		tsClientPath := chainconfig.TSClientPath(*conf)
		if !filepath.IsAbs(tsClientPath) {
			tsClientPath = filepath.Join(projectPath, tsClientPath)
		}

		options = append(options,
			cosmosgen.WithTSClientGeneration(
				cosmosgen.TypescriptModulePath(tsClientPath),
				tsClientPath,
				true,
			),
		)
	}

	if conf.Client.OpenAPI.Path != "" {
		openAPIPath := conf.Client.OpenAPI.Path
		if !filepath.IsAbs(openAPIPath) {
			openAPIPath = filepath.Join(projectPath, openAPIPath)
		}

		options = append(options, cosmosgen.WithOpenAPIGeneration(openAPIPath, conf.Client.OpenAPI.ExcludeList))
	}

	if err := cosmosgen.Generate(
		ctx,
		cacheStorage,
		projectPath,
		protoDir,
		goModPath,
		chainconfig.DefaultVuePath,
		options...,
	); err != nil {
		return err
	}

	buf, err := cosmosbuf.New(cacheStorage, goModPath)
	if err != nil {
		return err
	}

	return buf.Format(ctx, projectPath)
}

diff --git a/ignite/services/scaffolder/type.go b/ignite/services/scaffolder/type.go
new file mode 100644
index 0000000..1f905f0
--- /dev/null
+++ b/ignite/services/scaffolder/type.go
@@ -0,0 +1,288 @@
package scaffolder

import (
	"context"
	"strings"

	"github.com/gobuffalo/genny/v2"

	"github.com/ignite/cli/v29/ignite/pkg/errors"
	"github.com/ignite/cli/v29/ignite/pkg/multiformatname"
	"github.com/ignite/cli/v29/ignite/templates/field"
	"github.com/ignite/cli/v29/ignite/templates/field/datatype"
	"github.com/ignite/cli/v29/ignite/templates/typed"
	"github.com/ignite/cli/v29/ignite/templates/typed/dry"
	"github.com/ignite/cli/v29/ignite/templates/typed/list"
	maptype "github.com/ignite/cli/v29/ignite/templates/typed/map"
	"github.com/ignite/cli/v29/ignite/templates/typed/singleton"
)

const maxLength = 64

// AddTypeOption configures options for AddType.
type AddTypeOption func(*addTypeOptions)

// AddTypeKind configures the type kind option for AddType.
type AddTypeKind func(*addTypeOptions)

// addTypeOptions holds the resolved configuration for AddType.
type addTypeOptions struct {
	moduleName string
	fields     []string

	isList      bool
	isMap       bool
	isSingleton bool

	index string

	withoutMessage    bool
	withoutSimulation bool
	signer            string
}

// newAddTypeOptions returns a addTypeOptions with default options.
func newAddTypeOptions(moduleName string) addTypeOptions {
	return addTypeOptions{
		moduleName: moduleName,
		signer:     "creator",
	}
}

// ListType makes the type stored in a list convention in the storage.
func ListType() AddTypeKind {
	return func(o *addTypeOptions) {
		o.isList = true
	}
}

// MapType makes the type stored in a key-value convention in the storage with an index option.
func MapType(index string) AddTypeKind {
	return func(o *addTypeOptions) {
		o.isMap = true
		o.index = index
	}
}

// SingletonType makes the type stored in a fixed place as a single entry in the storage.
func SingletonType() AddTypeKind {
	return func(o *addTypeOptions) {
		o.isSingleton = true
	}
}

// DryType only creates a type with a basic definition.
func DryType() AddTypeKind {
	return func(o *addTypeOptions) {
		// Dry type scaffolding only adds a proto type definition and never generates CRUD messages.
		// Force this option so component validity checks don't treat existing Msg* types as conflicts.
		o.withoutMessage = true
	}
}

// TypeWithModule module to scaffold type into.
func TypeWithModule(name string) AddTypeOption {
	return func(o *addTypeOptions) {
		o.moduleName = name
	}
}

// TypeWithFields adds fields to the type to be scaffolded.
func TypeWithFields(fields ...string) AddTypeOption {
	return func(o *addTypeOptions) {
		o.fields = fields
	}
}

// TypeWithoutMessage disables generating sdk compatible messages and tx related APIs.
func TypeWithoutMessage() AddTypeOption {
	return func(o *addTypeOptions) {
		o.withoutMessage = true
	}
}

// TypeWithoutSimulation disables generating messages simulation.
func TypeWithoutSimulation() AddTypeOption {
	return func(o *addTypeOptions) {
		o.withoutSimulation = true
	}
}

// TypeWithSigner provides a custom signer name for the message.
func TypeWithSigner(signer string) AddTypeOption {
	return func(o *addTypeOptions) {
		o.signer = signer
	}
}

// AddType adds a new type to a scaffolded app.
// if none of the list, map or singleton given, a dry type without anything extra (like a storage layer, models, CLI etc.)
// will be scaffolded.
// if no module is given, the type will be scaffolded inside the app's default module.
func (s Scaffolder) AddType(
	ctx context.Context,
	typeName string,
	kind AddTypeKind,
	options ...AddTypeOption,
) error {
	// apply options.
	o := newAddTypeOptions(s.modpath.Package)
	for _, apply := range append(options, AddTypeOption(kind)) {
		apply(&o)
	}

	mfName, err := multiformatname.NewName(o.moduleName, multiformatname.NoNumber)
	if err != nil {
		return err
	}
	moduleName := mfName.LowerCase

	name, err := multiformatname.NewName(typeName)
	if err != nil {
		return err
	}

	if err := checkComponentValidity(s.appPath, moduleName, name, o.withoutMessage); err != nil {
		return err
	}
	if err := checkTypeProtoCreated(ctx, s.appPath, s.modpath.Package, s.protoDir, moduleName, name); err != nil {
		return err
	}

	// Check and parse provided fields
	if err := checkCustomTypes(ctx, s.appPath, s.modpath.Package, s.protoDir, moduleName, o.fields); err != nil {
		return err
	}
	tFields, err := parseTypeFields(o)
	if err != nil {
		return err
	}

	mfSigner, err := multiformatname.NewName(o.signer)
	if err != nil {
		return err
	}

	isIBC, err := isIBCModule(s.appPath, moduleName)
	if err != nil {
		return err
	}

	var (
		g    *genny.Generator
		opts = &typed.Options{
			AppName:      s.modpath.Package,
			ProtoDir:     s.protoDir,
			ProtoVer:     "v1", // TODO(@julienrbrt): possibly in the future add flag to specify custom proto version.
			ModulePath:   s.modpath.RawPath,
			ModuleName:   moduleName,
			TypeName:     name,
			Fields:       tFields,
			NoMessage:    o.withoutMessage,
			NoSimulation: o.withoutSimulation,
			MsgSigner:    mfSigner,
			IsIBC:        isIBC,
		}
		gens []*genny.Generator
	)

	// create the type generator depending on the model
	switch {
	case o.isList:
		g, err = list.NewGenerator(opts)
	case o.isMap:
		g, err = mapGenerator(opts, o.index)
	case o.isSingleton:
		g, err = singleton.NewGenerator(opts)
	default:
		g, err = dry.NewGenerator(opts)
	}
	if err != nil {
		return err
	}

	// run the generation
	return s.Run(append(gens, g)...)
}

// checkMaxLength checks if the index length exceeds the maximum allowed length.
+func checkMaxLength(name string) error { + if len(name) > maxLength { + return errors.Errorf("index exceeds maximum allowed length of %d characters", maxLength) + } + return nil +} + +// checkForbiddenTypeIndex returns true if the name is forbidden as an index name. +func checkForbiddenTypeIndex(index string) error { + indexSplit := strings.Split(index, datatype.Separator) + if len(indexSplit) > 1 { + index = indexSplit[0] + indexType := datatype.Name(indexSplit[1]) + if f, ok := datatype.IsSupportedType(indexType); !ok || f.NonIndex { + return errors.Errorf("invalid index type %s", indexType) + } + } + return checkForbiddenTypeField(index) +} + +// checkForbiddenTypeField returns true if the name is forbidden as a field name. +func checkForbiddenTypeField(field string) error { + mfName, err := multiformatname.NewName(field) + if err != nil { + return err + } + + switch mfName.LowerCase { + case + "id", + "params", + "appendedvalue", + datatype.TypeCustom: + return errors.Errorf("%s is used by type scaffolder", field) + } + + return checkGoReservedWord(field) +} + +// parseTypeFields validates the fields and returns an error if the validation fails. +func parseTypeFields(opts addTypeOptions) (field.Fields, error) { + signer := "" + if opts.isList || opts.isMap || opts.isSingleton { + if !opts.withoutMessage { + signer = opts.signer + } + return field.ParseFields(opts.fields, checkForbiddenTypeField, signer) + } + // For simple types, only check if it's a reserved keyword and don't pass a signer. + return field.ParseFields(opts.fields, checkGoReservedWord, signer) +} + +// mapGenerator returns the template generator for a map. 
+func mapGenerator(opts *typed.Options, index string) (*genny.Generator, error) { + // Parse indexes with the associated type + if strings.Contains(index, ",") { + return nil, errors.Errorf("multi-index map isn't supported") + } + + parsedIndexes, err := field.ParseFields([]string{index}, checkForbiddenTypeIndex) + if err != nil { + return nil, err + } + + if len(parsedIndexes) == 0 { + return nil, errors.Errorf("no index found, a valid map index must be provided") + } + + // Indexes and type fields must be disjoint + exists := make(map[string]struct{}) + for _, name := range opts.Fields { + exists[name.Name.LowerCamel] = struct{}{} + } + + if _, ok := exists[parsedIndexes[0].Name.LowerCamel]; ok { + return nil, errors.Errorf("%s cannot simultaneously be an index and a field", parsedIndexes[0].Name.Original) + } + + opts.Index = parsedIndexes[0] + return maptype.NewGenerator(opts) +} diff --git a/ignite/services/scaffolder/type_test.go b/ignite/services/scaffolder/type_test.go new file mode 100644 index 0000000..c9183b3 --- /dev/null +++ b/ignite/services/scaffolder/type_test.go @@ -0,0 +1,267 @@ +package scaffolder + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/pkg/randstr" + "github.com/ignite/cli/v29/ignite/templates/field" + "github.com/ignite/cli/v29/ignite/templates/field/datatype" +) + +func TestParseTypeFields(t *testing.T) { + const ( + testModuleName = "test" + testSigner = "creator" + ) + + tests := []struct { + name string + addKind AddTypeKind + addOptions []AddTypeOption + expectedOptions addTypeOptions + shouldError bool + expectedFields field.Fields + }{ + { + name: "list type with fields", + addKind: ListType(), + addOptions: []AddTypeOption{ + TypeWithFields("foo", "bar"), + }, + expectedOptions: addTypeOptions{ + moduleName: testModuleName, + fields: []string{"foo", "bar"}, + isList: true, + signer: testSigner, + }, + shouldError: 
false, + expectedFields: field.Fields{ + { + Name: multiformatname.Name{ + Original: "foo", + LowerCamel: "foo", + UpperCamel: "Foo", + PascalCase: "Foo", + LowerCase: "foo", + UpperCase: "FOO", + Kebab: "foo", + Snake: "foo", + }, + DatatypeName: "string", + Datatype: "", + }, + { + Name: multiformatname.Name{ + Original: "bar", + LowerCamel: "bar", + UpperCamel: "Bar", + PascalCase: "Bar", + LowerCase: "bar", + UpperCase: "BAR", + Kebab: "bar", + Snake: "bar", + }, + DatatypeName: "string", + Datatype: "", + }, + }, + }, + { + name: "singleton type with module", + addKind: SingletonType(), + addOptions: []AddTypeOption{ + TypeWithModule("module"), + }, + expectedOptions: addTypeOptions{ + moduleName: "module", + isSingleton: true, + signer: testSigner, + }, + shouldError: false, + expectedFields: nil, + }, + { + name: "map type without simulation", + addKind: MapType("foo"), + addOptions: []AddTypeOption{ + TypeWithoutSimulation(), + }, + expectedOptions: addTypeOptions{ + moduleName: testModuleName, + index: "foo", + isMap: true, + withoutSimulation: true, + signer: testSigner, + }, + shouldError: false, + expectedFields: nil, + }, + { + name: "dry type with signer, without message", + addKind: DryType(), + addOptions: []AddTypeOption{ + TypeWithoutMessage(), + TypeWithSigner("signer"), + TypeWithFields("FieldFoo"), + }, + expectedOptions: addTypeOptions{ + moduleName: testModuleName, + withoutMessage: true, + fields: []string{"FieldFoo"}, + signer: "signer", + }, + shouldError: false, + expectedFields: field.Fields{ + { + Name: multiformatname.Name{ + Original: "FieldFoo", + LowerCamel: "fieldFoo", + UpperCamel: "FieldFoo", + PascalCase: "FieldFoo", + LowerCase: "fieldfoo", + UpperCase: "FIELDFOO", + Kebab: "field-foo", + Snake: "field_foo", + }, + DatatypeName: "string", + Datatype: "", + }, + }, + }, + { + name: "dry type defaults to no message", + addKind: DryType(), + addOptions: []AddTypeOption{}, + expectedOptions: addTypeOptions{ + moduleName: 
testModuleName, + withoutMessage: true, + signer: testSigner, + }, + shouldError: false, + expectedFields: nil, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + o := newAddTypeOptions(testModuleName) + for _, apply := range append(tc.addOptions, AddTypeOption(tc.addKind)) { + apply(&o) + } + + require.Equal(t, tc.expectedOptions, o) + fields, err := parseTypeFields(o) + if tc.shouldError { + require.Error(t, err) + return + } + require.NoError(t, err) + require.Equal(t, tc.expectedFields, fields) + }) + } +} + +// indirectly tests checkForbiddenTypeField(). +func TestCheckForbiddenTypeIndexField(t *testing.T) { + tests := []struct { + name string + index string + shouldError bool + }{ + { + name: "should fail with empty index", + index: "", + shouldError: true, + }, + { + name: "should fail with reserved Go keyword", + index: "uint", + shouldError: true, + }, + { + name: "should fail with forbidden ignite keyword - id", + index: "id", + shouldError: true, + }, + { + name: "should fail with forbidden ignite keyword - ID", + index: "id", + shouldError: true, + }, + { + name: "should fail with forbidden ignite keyword - params", + index: "params", + shouldError: true, + }, + { + name: "should fail with forbidden ignite keyword - appendedvalue", + index: "appendedvalue", + shouldError: true, + }, + { + name: "should fail with forbidden ignite keyword - customtype keyword", + index: datatype.TypeCustom, + shouldError: true, + }, + { + name: "should pass - blog", + index: "blog", + }, + { + name: "should pass - post", + index: "post", + }, + { + name: "should pass - typed index", + index: "blogID:uint", + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + err := checkForbiddenTypeIndex(tc.index) + if tc.shouldError { + require.Error(t, err) + return + } + require.NoError(t, err) + }) + } +} + +func Test_checkMaxLength(t *testing.T) { + tests := []struct { + desc string + name string + shouldError bool + }{ + { + 
desc: "should pass with valid name", + name: "validName", + shouldError: false, + }, + { + desc: "should fail with name exceeding max length", + name: randstr.Runes(maxLength + 1), + shouldError: true, + }, + { + desc: "should pass with name at max length", + name: randstr.Runes(maxLength), + shouldError: false, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + err := checkMaxLength(tc.name) + if tc.shouldError { + require.Error(t, err) + return + } + require.NoError(t, err) + }) + } +} diff --git a/ignite/templates/app/app.go b/ignite/templates/app/app.go new file mode 100644 index 0000000..9a9bf70 --- /dev/null +++ b/ignite/templates/app/app.go @@ -0,0 +1,84 @@ +package app + +import ( + "embed" + "io/fs" + + "github.com/gobuffalo/genny/v2" + "github.com/gobuffalo/plush/v4" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosgen" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/templates/field/plushhelpers" +) + +var ( + //go:embed files/* files/**/* + files embed.FS + + //go:embed files-minimal/* files-minimal/**/* + filesMinimal embed.FS +) + +const ( + ibcConfig = "app/ibc.go" +) + +// NewGenerator returns the generator to scaffold a new Cosmos SDK app. 
+func NewGenerator(opts *Options) (*genny.Generator, error) { + // Remove "files/" prefix + subfs, err := fs.Sub(files, "files") + if err != nil { + return nil, errors.Errorf("generator sub: %w", err) + } + + var ( + includePrefix = opts.IncludePrefixes + excludePrefix []string + overridesFS = make(map[string]embed.FS) + ) + + if opts.IsChainMinimal { + // minimal chain does not have ibc + excludePrefix = append(excludePrefix, ibcConfig) + overridesFS["files-minimal"] = filesMinimal + } + + g := genny.New() + if err := g.SelectiveFS(subfs, includePrefix, nil, excludePrefix, nil); err != nil { + return g, errors.Errorf("generator fs: %w", err) + } + + for prefix, embed := range overridesFS { + // Remove prefix + subfs, err := fs.Sub(embed, prefix) + if err != nil { + return g, errors.Errorf("generator sub %s: %w", prefix, err) + } + // Override files from "files" with the ones from embed + if err := g.FS(subfs); err != nil { + return g, errors.Errorf("generator fs %s: %w", prefix, err) + } + } + + ctx := plush.NewContext() + ctx.Set("ModulePath", opts.ModulePath) + ctx.Set("AppName", opts.AppName) + ctx.Set("ProtoDir", opts.ProtoDir) + ctx.Set("GitHubPath", opts.GitHubPath) + ctx.Set("BinaryNamePrefix", opts.BinaryNamePrefix) + ctx.Set("AddressPrefix", opts.AddressPrefix) + ctx.Set("CoinType", opts.CoinType) + ctx.Set("DefaultDenom", opts.DefaultDenom) + ctx.Set("DepTools", cosmosgen.DepTools()) + ctx.Set("IsChainMinimal", opts.IsChainMinimal) + + plushhelpers.ExtendPlushContext(ctx) + g.Transformer(xgenny.Transformer(ctx)) + g.Transformer(genny.Replace("{{protoDir}}", opts.ProtoDir)) + g.Transformer(genny.Replace("{{appName}}", opts.AppName)) + g.Transformer(genny.Replace("{{binaryNamePrefix}}", opts.BinaryNamePrefix)) + + return g, nil +} diff --git a/ignite/templates/app/files-minimal/app/app.go.plush b/ignite/templates/app/files-minimal/app/app.go.plush new file mode 100644 index 0000000..f241a5a --- /dev/null +++ 
b/ignite/templates/app/files-minimal/app/app.go.plush @@ -0,0 +1,245 @@ +package app + +import ( + "io" + + clienthelpers "cosmossdk.io/client/v2/helpers" + "cosmossdk.io/core/appmodule" + "cosmossdk.io/depinject" + "cosmossdk.io/log" + storetypes "cosmossdk.io/store/types" + + dbm "github.com/cosmos/cosmos-db" + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/runtime" + "github.com/cosmos/cosmos-sdk/server" + "github.com/cosmos/cosmos-sdk/server/api" + "github.com/cosmos/cosmos-sdk/server/config" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" + bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" + distrkeeper "github.com/cosmos/cosmos-sdk/x/distribution/keeper" + "github.com/cosmos/cosmos-sdk/x/genutil" + genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" + stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper" + + "<%= ModulePath %>/docs" +) + +const ( + // Name is the name of the application. + Name = "<%= BinaryNamePrefix %>" + // AccountAddressPrefix is the prefix for accounts addresses. + AccountAddressPrefix = "<%= AddressPrefix %>" + // ChainCoinType is the coin type of the chain. + ChainCoinType = <%= CoinType %> +) + +// DefaultNodeHome default home directories for the application daemon +var DefaultNodeHome string + +var ( + _ runtime.AppI = (*App)(nil) + _ servertypes.Application = (*App)(nil) +) + +// App extends an ABCI application, but with most of its parameters exported. +// They are exported for convenience in creating helper functions, as object +// capabilities aren't needed for testing. 
+type App struct { + *runtime.App + legacyAmino *codec.LegacyAmino + appCodec codec.Codec + txConfig client.TxConfig + interfaceRegistry codectypes.InterfaceRegistry + + // keepers + AuthKeeper authkeeper.AccountKeeper + BankKeeper bankkeeper.Keeper + StakingKeeper *stakingkeeper.Keeper + DistrKeeper distrkeeper.Keeper + + // simulation manager + sm *module.SimulationManager +} + +func init() { + var err error + clienthelpers.EnvPrefix = Name + DefaultNodeHome, err = clienthelpers.GetNodeHomeDirectory("." + Name) + if err != nil { + panic(err) + } +} + +// AppConfig returns the default app config. +func AppConfig() depinject.Config { + return depinject.Configs( + appConfig, + depinject.Supply( + // supply custom module basics + map[string]module.AppModuleBasic{ + genutiltypes.ModuleName: genutil.NewAppModuleBasic(genutiltypes.DefaultMessageValidator), + }, + ), + ) +} + +// New returns a reference to an initialized App. +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + var ( + app = &App{} + appBuilder *runtime.AppBuilder + + // merge the AppConfig and other configuration in one config + appConfig = depinject.Configs( + AppConfig(), + depinject.Supply( + appOpts, // supply app options + logger, // supply logger + // here alternative options can be supplied to the DI container. + // those options can be used f.e to override the default behavior of some modules. + // for instance supplying a custom address codec for not using bech32 addresses. + // read the depinject documentation and depinject module wiring for more information + // on available options and how to use them. 
+ ), + ) + ) + + var appModules map[string]appmodule.AppModule + if err := depinject.Inject(appConfig, + &appBuilder, + &appModules, + &app.appCodec, + &app.legacyAmino, + &app.txConfig, + &app.interfaceRegistry, + &app.AuthKeeper, + &app.BankKeeper, + &app.StakingKeeper, + &app.DistrKeeper, + ); err != nil { + panic(err) + } + + // add to default baseapp options + // enable optimistic execution + baseAppOptions = append(baseAppOptions, baseapp.SetOptimisticExecution()) + + // build app + app.App = appBuilder.Build(db, traceStore, baseAppOptions...) + + /**** Module Options ****/ + + // create the simulation manager and define the order of the modules for deterministic simulations + app.sm = module.NewSimulationManagerFromAppModules(app.ModuleManager.Modules, make(map[string]module.AppModuleSimulation)) + app.sm.RegisterStoreDecoders() + + // A custom InitChainer can be set if extra pre-init-genesis logic is required. + // By default, when using app wiring enabled module, this is not required. + // For instance, the upgrade module will set automatically the module version map in its init genesis thanks to app wiring. + // However, when registering a module manually (i.e. that does not support app wiring), the module version map + // must be set manually as follow. The upgrade module will de-duplicate the module version map. + // + // app.SetInitChainer(func(ctx sdk.Context, req *abci.RequestInitChain) (*abci.ResponseInitChain, error) { + // app.UpgradeKeeper.SetModuleVersionMap(ctx, app.ModuleManager.GetVersionMap()) + // return app.App.InitChainer(ctx, req) + // }) + + if err := app.Load(loadLatest); err != nil { + panic(err) + } + + return app +} + +// LegacyAmino returns App's amino codec. +func (app *App) LegacyAmino() *codec.LegacyAmino { + return app.legacyAmino +} + +// AppCodec returns App's app codec. +// +// NOTE: This is solely to be used for testing purposes as it may be desirable +// for modules to register their own custom testing types. 
+func (app *App) AppCodec() codec.Codec { + return app.appCodec +} + +// InterfaceRegistry returns App's InterfaceRegistry. +func (app *App) InterfaceRegistry() codectypes.InterfaceRegistry { + return app.interfaceRegistry +} + +// TxConfig returns App's TxConfig +func (app *App) TxConfig() client.TxConfig { + return app.txConfig +} + +// GetKey returns the KVStoreKey for the provided store key. +func (app *App) GetKey(storeKey string) *storetypes.KVStoreKey { + kvStoreKey, ok := app.UnsafeFindStoreKey(storeKey).(*storetypes.KVStoreKey) + if !ok { + return nil + } + return kvStoreKey +} + +// SimulationManager implements the SimulationApp interface +func (app *App) SimulationManager() *module.SimulationManager { + return app.sm +} + +// RegisterAPIRoutes registers all application module routes with the provided +// API server. +func (app *App) RegisterAPIRoutes(apiSvr *api.Server, apiConfig config.APIConfig) { + app.App.RegisterAPIRoutes(apiSvr, apiConfig) + // register swagger API in app.go so that other applications can override easily + if err := server.RegisterSwaggerAPI(apiSvr.ClientCtx, apiSvr.Router, apiConfig.Swagger); err != nil { + panic(err) + } + + // register app's OpenAPI routes. + docs.RegisterOpenAPIService(Name, apiSvr.Router) +} + +// GetMaccPerms returns a copy of the module account permissions +// +// NOTE: This is solely to be used for testing purposes. +func GetMaccPerms() map[string][]string { + dup := make(map[string][]string) + for _, perms := range moduleAccPerms { + dup[perms.GetAccount()] = perms.GetPermissions() + } + + return dup +} + +// BlockedAddresses returns all the app's blocked account addresses. 
+func BlockedAddresses() map[string]bool { + result := make(map[string]bool) + + if len(blockAccAddrs) > 0 { + for _, addr := range blockAccAddrs { + result[addr] = true + } + } else { + for addr := range GetMaccPerms() { + result[addr] = true + } + } + + return result +} diff --git a/ignite/templates/app/files-minimal/app/app_config.go.plush b/ignite/templates/app/files-minimal/app/app_config.go.plush new file mode 100644 index 0000000..032cda0 --- /dev/null +++ b/ignite/templates/app/files-minimal/app/app_config.go.plush @@ -0,0 +1,139 @@ +package app + +import ( + runtimev1alpha1 "cosmossdk.io/api/cosmos/app/runtime/v1alpha1" + appv1alpha1 "cosmossdk.io/api/cosmos/app/v1alpha1" + authmodulev1 "cosmossdk.io/api/cosmos/auth/module/v1" + bankmodulev1 "cosmossdk.io/api/cosmos/bank/module/v1" + consensusmodulev1 "cosmossdk.io/api/cosmos/consensus/module/v1" + distrmodulev1 "cosmossdk.io/api/cosmos/distribution/module/v1" + genutilmodulev1 "cosmossdk.io/api/cosmos/genutil/module/v1" + stakingmodulev1 "cosmossdk.io/api/cosmos/staking/module/v1" + txconfigv1 "cosmossdk.io/api/cosmos/tx/config/v1" + "cosmossdk.io/depinject/appconfig" + _ "github.com/cosmos/cosmos-sdk/x/bank" // import for side-effects + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + _ "github.com/cosmos/cosmos-sdk/x/consensus" // import for side-effects + consensustypes "github.com/cosmos/cosmos-sdk/x/consensus/types" + _ "github.com/cosmos/cosmos-sdk/x/distribution" // import for side-effects + distrtypes "github.com/cosmos/cosmos-sdk/x/distribution/types" + minttypes "github.com/cosmos/cosmos-sdk/x/mint/types" + _ "github.com/cosmos/cosmos-sdk/x/staking" // import for side-effects + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + + "github.com/cosmos/cosmos-sdk/runtime" + _ "github.com/cosmos/cosmos-sdk/x/auth" // import for side-effects + _ "github.com/cosmos/cosmos-sdk/x/auth/tx/config" // import for side-effects + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + 
genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" +) + +var ( + moduleAccPerms = []*authmodulev1.ModuleAccountPermission{ + {Account: authtypes.FeeCollectorName}, + {Account: distrtypes.ModuleName}, + {Account: minttypes.ModuleName, Permissions: []string{authtypes.Minter}}, + {Account: stakingtypes.BondedPoolName, Permissions: []string{authtypes.Burner, stakingtypes.ModuleName}}, + {Account: stakingtypes.NotBondedPoolName, Permissions: []string{authtypes.Burner, stakingtypes.ModuleName}}, + } + + // blocked account addresses + blockAccAddrs = []string{ + authtypes.FeeCollectorName, + distrtypes.ModuleName, + stakingtypes.BondedPoolName, + stakingtypes.NotBondedPoolName, + // We allow the following module accounts to receive funds: + // govtypes.ModuleName + } + + // application configuration (used by depinject) + appConfig = appconfig.Compose(&appv1alpha1.Config{ + Modules: []*appv1alpha1.ModuleConfig{ + { + Name: runtime.ModuleName, + Config: appconfig.WrapAny(&runtimev1alpha1.Module{ + AppName: Name, + // NOTE: upgrade module is required to be prioritized + PreBlockers: []string{ + authtypes.ModuleName, + }, + // During begin block slashing happens after distr.BeginBlocker so that + // there is nothing left over in the validator fee pool, so as to keep the + // CanWithdrawInvariant invariant. + // NOTE: staking module is required if HistoricalEntries param > 0 + BeginBlockers: []string{ + distrtypes.ModuleName, + stakingtypes.ModuleName, + // chain modules + }, + EndBlockers: []string{ + stakingtypes.ModuleName, + // chain modules + }, + // The following is mostly only needed when ModuleName != StoreKey name. + OverrideStoreKeys: []*runtimev1alpha1.StoreKeyConfig{ + { + ModuleName: authtypes.ModuleName, + KvStoreKey: "acc", + }, + }, + // NOTE: The genutils module must occur after staking so that pools are + // properly initialized with tokens from genesis accounts. 
+ // NOTE: The genutils module must also occur after auth so that it can access the params from auth. + InitGenesis: []string{ + consensustypes.ModuleName, + authtypes.ModuleName, + banktypes.ModuleName, + distrtypes.ModuleName, + stakingtypes.ModuleName, + genutiltypes.ModuleName, + // chain modules + }, + }), + }, + { + Name: authtypes.ModuleName, + Config: appconfig.WrapAny(&authmodulev1.Module{ + Bech32Prefix: AccountAddressPrefix, + ModuleAccountPermissions: moduleAccPerms, + EnableUnorderedTransactions: false, + // By default modules authority is the governance module. This is configurable with the following: + // Authority: "group", // A custom module authority can be set using a module name + // Authority: "cosmos1cwwv22j5ca08ggdv9c2uky355k908694z577tv", // or a specific address + }), + }, + { + Name: banktypes.ModuleName, + Config: appconfig.WrapAny(&bankmodulev1.Module{ + BlockedModuleAccountsOverride: blockAccAddrs, + }), + }, + { + Name: stakingtypes.ModuleName, + Config: appconfig.WrapAny(&stakingmodulev1.Module{ + // NOTE: specifying a prefix is only necessary when using bech32 addresses + // If not specfied, the auth Bech32Prefix appended with "valoper" and "valcons" is used by default + Bech32PrefixValidator: AccountAddressPrefix + "valoper", + Bech32PrefixConsensus: AccountAddressPrefix + "valcons", + }), + }, + { + Name: "tx", + Config: appconfig.WrapAny(&txconfigv1.Config{}), + }, + { + Name: genutiltypes.ModuleName, + Config: appconfig.WrapAny(&genutilmodulev1.Module{}), + }, + { + Name: distrtypes.ModuleName, + Config: appconfig.WrapAny(&distrmodulev1.Module{}), + }, + { + Name: consensustypes.ModuleName, + Config: appconfig.WrapAny(&consensusmodulev1.Module{}), + }, + }, + }) +) diff --git a/ignite/templates/app/files/.github/workflows/go-unit.yml b/ignite/templates/app/files/.github/workflows/go-unit.yml new file mode 100644 index 0000000..a9e4a35 --- /dev/null +++ b/ignite/templates/app/files/.github/workflows/go-unit.yml @@ -0,0 +1,17 @@ 
+name: Unit tests +on: + pull_request: +jobs: + tests: + runs-on: ubuntu-latest + steps: + - name: Check out source + uses: actions/checkout@v4 + - name: Set up Go + uses: actions/setup-go@v5 + with: + go-version: "stable" + check-latest: true + - name: Tests + run: | + make test diff --git a/ignite/templates/app/files/.github/workflows/lint-pr.yml b/ignite/templates/app/files/.github/workflows/lint-pr.yml new file mode 100644 index 0000000..2863009 --- /dev/null +++ b/ignite/templates/app/files/.github/workflows/lint-pr.yml @@ -0,0 +1,19 @@ +name: Lint PR +on: + pull_request_target: + types: + - opened + - edited + - synchronize +permissions: + contents: read +jobs: + lint: + permissions: + pull-requests: read + statuses: write + runs-on: ubuntu-latest + steps: + - uses: amannn/action-semantic-pull-request@v5.5.2 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/ignite/templates/app/files/.github/workflows/lint.yml b/ignite/templates/app/files/.github/workflows/lint.yml new file mode 100644 index 0000000..e81e2af --- /dev/null +++ b/ignite/templates/app/files/.github/workflows/lint.yml @@ -0,0 +1,25 @@ +name: Lint +on: + pull_request: + paths: + - "**/*.go" + - "go.mod" + - "go.sum" + - "**/go.mod" + - "**/go.sum" + merge_group: +permissions: + contents: read +jobs: + golangci: + name: golangci-lint + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: "stable" + check-latest: true + - name: run linting + run: | + make lint diff --git a/ignite/templates/app/files/.github/workflows/release.yml b/ignite/templates/app/files/.github/workflows/release.yml new file mode 100644 index 0000000..d87bffa --- /dev/null +++ b/ignite/templates/app/files/.github/workflows/release.yml @@ -0,0 +1,56 @@ +# This workflow is useful if you want to automate the process of: +# +# a) Creating a new prelease when you push a new tag with a "v" prefix (version). 
+# +# This type of prerelease is meant to be used for production: alpha, beta, rc, etc. types of releases. +# After the prerelease is created, you need to make your changes on the release page at the relevant +# Github page and publish your release. +# +# b) Creating/updating the "latest" prerelease when you push to your default branch. +# +# This type of prelease is useful to make your bleeding-edge binaries available to advanced users. +# +# The workflow will not run if there is no tag pushed with a "v" prefix and no change pushed to your +# default branch. +on: push + +jobs: + might_release: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Prepare Release Variables + id: vars + uses: ignite/cli/actions/release/vars@main + + - name: Issue Release Assets + uses: ignite/cli/actions/cli@main + if: ${{ steps.vars.outputs.should_release == 'true' }} + with: + args: chain build --release --release.prefix ${{ steps.vars.outputs.tarball_prefix }} -t linux:amd64 -t darwin:amd64 -t darwin:arm64 -y + env: + DO_NOT_TRACK: 1 + GOFLAGS: "-buildvcs=false" + + - name: Delete the "latest" Release + uses: dev-drprasad/delete-tag-and-release@v0.2.1 + if: ${{ steps.vars.outputs.is_release_type_latest == 'true' }} + with: + tag_name: ${{ steps.vars.outputs.tag_name }} + delete_release: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Publish the Release + uses: softprops/action-gh-release@v1 + if: ${{ steps.vars.outputs.should_release == 'true' }} + with: + tag_name: ${{ steps.vars.outputs.tag_name }} + files: release/* + prerelease: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/ignite/templates/app/files/.gitignore b/ignite/templates/app/files/.gitignore new file mode 100644 index 0000000..5f8fd1d --- /dev/null +++ b/ignite/templates/app/files/.gitignore @@ -0,0 +1,9 @@ +vue/node_modules +vue/dist +release/ +.idea/ +.vscode/ +.DS_Store +*.dot +*.log +*.ign diff --git 
a/ignite/templates/app/files/Makefile.plush b/ignite/templates/app/files/Makefile.plush new file mode 100644 index 0000000..d0cb7af --- /dev/null +++ b/ignite/templates/app/files/Makefile.plush @@ -0,0 +1,105 @@ +BRANCH := $(shell git rev-parse --abbrev-ref HEAD) +COMMIT := $(shell git log -1 --format='%H') +APPNAME := <%= AppName %> + +# do not override user values +ifeq (,$(VERSION)) + VERSION := $(shell git describe --exact-match 2>/dev/null) + # if VERSION is empty, then populate it with branch name and raw commit hash + ifeq (,$(VERSION)) + VERSION := $(BRANCH)-$(COMMIT) + endif +endif + +# Update the ldflags with the app, client & server names +ldflags = -X github.com/cosmos/cosmos-sdk/version.Name=$(APPNAME) \ + -X github.com/cosmos/cosmos-sdk/version.AppName=$(APPNAME)d \ + -X github.com/cosmos/cosmos-sdk/version.Version=$(VERSION) \ + -X github.com/cosmos/cosmos-sdk/version.Commit=$(COMMIT) + +BUILD_FLAGS := -ldflags '$(ldflags)' + +############## +### Test ### +############## + +test-unit: + @echo Running unit tests... + @go test -mod=readonly -v -timeout 30m ./... + +test-race: + @echo Running unit tests with race condition reporting... + @go test -mod=readonly -v -race -timeout 30m ./... + +test-cover: + @echo Running unit tests and creating coverage report... + @go test -mod=readonly -v -timeout 30m -coverprofile=$(COVER_FILE) -covermode=atomic ./... + @go tool cover -html=$(COVER_FILE) -o $(COVER_HTML_FILE) + @rm $(COVER_FILE) + +bench: + @echo Running unit tests with benchmarking... + @go test -mod=readonly -v -timeout 30m -bench=. ./... 
+ +test: govet govulncheck test-unit + +.PHONY: test test-unit test-race test-cover bench + +################# +### Install ### +################# + +all: install + +install: + @echo "--> ensure dependencies have not been modified" + @go mod verify + @echo "--> installing $(APPNAME)d" + @go install $(BUILD_FLAGS) -mod=readonly ./cmd/$(APPNAME)d + +.PHONY: all install + +################## +### Protobuf ### +################## + +# Use this target if you do not want to use Ignite for generating proto files + +proto-deps: + @echo "Installing proto deps" + @echo "Proto deps present, run 'go tool' to see them" + +proto-gen: + @echo "Generating protobuf files..." + @ignite generate proto-go --yes + +.PHONY: proto-gen + +################# +### Linting ### +################# + +lint: + @echo "--> Running linter" + @go tool github.com/golangci/golangci-lint/cmd/golangci-lint run ./... --timeout 15m + +lint-fix: + @echo "--> Running linter and fixing issues" + @go tool github.com/golangci/golangci-lint/cmd/golangci-lint run ./... --fix --timeout 15m + +.PHONY: lint lint-fix + +################### +### Development ### +################### + +govet: + @echo Running go vet... + @go vet ./... + +govulncheck: + @echo Running govulncheck... + @go tool golang.org/x/vuln/cmd/govulncheck@latest + @govulncheck ./... 
+ +.PHONY: govet govulncheck \ No newline at end of file diff --git a/ignite/templates/app/files/app/app.go.plush b/ignite/templates/app/files/app/app.go.plush new file mode 100644 index 0000000..3f2a628 --- /dev/null +++ b/ignite/templates/app/files/app/app.go.plush @@ -0,0 +1,306 @@ +package app + +import ( + "fmt" + "io" + + clienthelpers "cosmossdk.io/client/v2/helpers" + "cosmossdk.io/core/appmodule" + "cosmossdk.io/depinject" + "cosmossdk.io/log" + circuitkeeper "cosmossdk.io/x/circuit/keeper" + upgradekeeper "cosmossdk.io/x/upgrade/keeper" + storetypes "cosmossdk.io/store/types" + + abci "github.com/cometbft/cometbft/abci/types" + dbm "github.com/cosmos/cosmos-db" + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/runtime" + "github.com/cosmos/cosmos-sdk/server" + "github.com/cosmos/cosmos-sdk/server/api" + "github.com/cosmos/cosmos-sdk/server/config" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + "github.com/cosmos/cosmos-sdk/x/auth" + authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" + authsims "github.com/cosmos/cosmos-sdk/x/auth/simulation" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + icacontrollerkeeper "github.com/cosmos/ibc-go/v10/modules/apps/27-interchain-accounts/controller/keeper" + icahostkeeper "github.com/cosmos/ibc-go/v10/modules/apps/27-interchain-accounts/host/keeper" + ibctransferkeeper "github.com/cosmos/ibc-go/v10/modules/apps/transfer/keeper" + ibckeeper "github.com/cosmos/ibc-go/v10/modules/core/keeper" + authzkeeper "github.com/cosmos/cosmos-sdk/x/authz/keeper" + bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" + consensuskeeper "github.com/cosmos/cosmos-sdk/x/consensus/keeper" + distrkeeper "github.com/cosmos/cosmos-sdk/x/distribution/keeper" + 
"github.com/cosmos/cosmos-sdk/x/genutil" + genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" + govkeeper "github.com/cosmos/cosmos-sdk/x/gov/keeper" + mintkeeper "github.com/cosmos/cosmos-sdk/x/mint/keeper" + paramskeeper "github.com/cosmos/cosmos-sdk/x/params/keeper" + paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" + slashingkeeper "github.com/cosmos/cosmos-sdk/x/slashing/keeper" + stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper" + + "<%= ModulePath %>/docs" +) + +const ( + // Name is the name of the application. + Name = "<%= BinaryNamePrefix %>" + // AccountAddressPrefix is the prefix for accounts addresses. + AccountAddressPrefix = "<%= AddressPrefix %>" + // ChainCoinType is the coin type of the chain. + ChainCoinType = <%= CoinType %> +) + +// DefaultNodeHome default home directories for the application daemon +var DefaultNodeHome string + +var ( + _ runtime.AppI = (*App)(nil) + _ servertypes.Application = (*App)(nil) +) + +// App extends an ABCI application, but with most of its parameters exported. +// They are exported for convenience in creating helper functions, as object +// capabilities aren't needed for testing. 
+type App struct { + *runtime.App + legacyAmino *codec.LegacyAmino + appCodec codec.Codec + txConfig client.TxConfig + interfaceRegistry codectypes.InterfaceRegistry + + // keepers + // only keepers required by the app are exposed + // the list of all modules is available in the app_config + AuthKeeper authkeeper.AccountKeeper + BankKeeper bankkeeper.Keeper + StakingKeeper *stakingkeeper.Keeper + SlashingKeeper slashingkeeper.Keeper + MintKeeper mintkeeper.Keeper + DistrKeeper distrkeeper.Keeper + GovKeeper *govkeeper.Keeper + UpgradeKeeper *upgradekeeper.Keeper + AuthzKeeper authzkeeper.Keeper + ConsensusParamsKeeper consensuskeeper.Keeper + CircuitBreakerKeeper circuitkeeper.Keeper + ParamsKeeper paramskeeper.Keeper + + // ibc keepers + IBCKeeper *ibckeeper.Keeper + ICAControllerKeeper icacontrollerkeeper.Keeper + ICAHostKeeper icahostkeeper.Keeper + TransferKeeper ibctransferkeeper.Keeper + + // simulation manager + sm *module.SimulationManager +} + +func init() { + var err error + clienthelpers.EnvPrefix = Name + DefaultNodeHome, err = clienthelpers.GetNodeHomeDirectory("." + Name) + if err != nil { + panic(err) + } +} + +// AppConfig returns the default app config. +func AppConfig() depinject.Config { + return depinject.Configs( + appConfig, + depinject.Supply( + // supply custom module basics + map[string]module.AppModuleBasic{ + genutiltypes.ModuleName: genutil.NewAppModuleBasic(genutiltypes.DefaultMessageValidator), + }, + ), + ) +} + +// New returns a reference to an initialized App. 
+func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) *App { + var ( + app = &App{} + appBuilder *runtime.AppBuilder + + // merge the AppConfig and other configuration in one config + appConfig = depinject.Configs( + AppConfig(), + depinject.Supply( + appOpts, // supply app options + logger, // supply logger + + // Supply with IBC keeper getter for the IBC modules with App Wiring. + // The IBC Keeper cannot be passed because it has not been initiated yet. + // Passing the getter, the app IBC Keeper will always be accessible. + // This needs to be removed after IBC supports App Wiring. + app.GetIBCKeeper, + + // here alternative options can be supplied to the DI container. + // those options can be used f.e to override the default behavior of some modules. + // for instance supplying a custom address codec for not using bech32 addresses. + // read the depinject documentation and depinject module wiring for more information + // on available options and how to use them. + ), + ) + ) + + var appModules map[string]appmodule.AppModule + if err := depinject.Inject(appConfig, + &appBuilder, + &appModules, + &app.appCodec, + &app.legacyAmino, + &app.txConfig, + &app.interfaceRegistry, + &app.AuthKeeper, + &app.BankKeeper, + &app.StakingKeeper, + &app.SlashingKeeper, + &app.MintKeeper, + &app.DistrKeeper, + &app.GovKeeper, + &app.UpgradeKeeper, + &app.AuthzKeeper, + &app.ConsensusParamsKeeper, + &app.CircuitBreakerKeeper, + &app.ParamsKeeper, + ); err != nil { + panic(err) + } + + // add to default baseapp options + // enable optimistic execution + baseAppOptions = append(baseAppOptions, baseapp.SetOptimisticExecution()) + + // build app + app.App = appBuilder.Build(db, traceStore, baseAppOptions...) 
+ + // register legacy modules + if err := app.registerIBCModules(appOpts); err != nil { + panic(err) + } + + /**** Module Options ****/ + + // create the simulation manager and define the order of the modules for deterministic simulations + overrideModules := map[string]module.AppModuleSimulation{ + authtypes.ModuleName: auth.NewAppModule(app.appCodec, app.AuthKeeper, authsims.RandomGenesisAccounts, nil), + } + app.sm = module.NewSimulationManagerFromAppModules(app.ModuleManager.Modules, overrideModules) + + app.sm.RegisterStoreDecoders() + + // A custom InitChainer sets if extra pre-init-genesis logic is required. + // This is necessary for manually registered modules that do not support app wiring. + // Manually set the module version map as shown below. + // The upgrade module will automatically handle de-duplication of the module version map. + app.SetInitChainer(func(ctx sdk.Context, req *abci.RequestInitChain) (*abci.ResponseInitChain, error) { + if err := app.UpgradeKeeper.SetModuleVersionMap(ctx, app.ModuleManager.GetVersionMap()); err != nil { + return nil, err + } + return app.App.InitChainer(ctx, req) + }) + + if err := app.Load(loadLatest); err != nil { + panic(err) + } + + return app +} + +// GetSubspace returns a param subspace for a given module name. +func (app *App) GetSubspace(moduleName string) paramstypes.Subspace { + subspace, _ := app.ParamsKeeper.GetSubspace(moduleName) + return subspace +} + +// LegacyAmino returns App's amino codec. +func (app *App) LegacyAmino() *codec.LegacyAmino { + return app.legacyAmino +} + +// AppCodec returns App's app codec. +func (app *App) AppCodec() codec.Codec { + return app.appCodec +} + +// InterfaceRegistry returns App's InterfaceRegistry. 
+func (app *App) InterfaceRegistry() codectypes.InterfaceRegistry { + return app.interfaceRegistry +} + +// TxConfig returns App's TxConfig +func (app *App) TxConfig() client.TxConfig { + return app.txConfig +} + +// GetKey returns the KVStoreKey for the provided store key. +func (app *App) GetKey(storeKey string) *storetypes.KVStoreKey { + kvStoreKey, ok := app.UnsafeFindStoreKey(storeKey).(*storetypes.KVStoreKey) + if !ok { + return nil + } + return kvStoreKey +} + +// SimulationManager implements the SimulationApp interface +func (app *App) SimulationManager() *module.SimulationManager { + return app.sm +} + +// RegisterAPIRoutes registers all application module routes with the provided +// API server. +func (app *App) RegisterAPIRoutes(apiSvr *api.Server, apiConfig config.APIConfig) { + app.App.RegisterAPIRoutes(apiSvr, apiConfig) + // register swagger API in app.go so that other applications can override easily + if err := server.RegisterSwaggerAPI(apiSvr.ClientCtx, apiSvr.Router, apiConfig.Swagger); err != nil { + panic(err) + } + + // register app's OpenAPI routes. + docs.RegisterOpenAPIService(Name, apiSvr.Router) +} + +// GetMaccPerms returns a copy of the module account permissions +// +// NOTE: This is solely to be used for testing purposes. +func GetMaccPerms() map[string][]string { + dup := make(map[string][]string) + for _, perms := range moduleAccPerms { + dup[perms.GetAccount()] = perms.GetPermissions() + } + + return dup +} + +// BlockedAddresses returns all the app's blocked account addresses. 
+func BlockedAddresses() map[string]bool { + result := make(map[string]bool) + + if len(blockAccAddrs) > 0 { + for _, addr := range blockAccAddrs { + result[addr] = true + } + } else { + for addr := range GetMaccPerms() { + result[addr] = true + } + } + + return result +} diff --git a/ignite/templates/app/files/app/app_config.go.plush b/ignite/templates/app/files/app/app_config.go.plush new file mode 100644 index 0000000..ddf180c --- /dev/null +++ b/ignite/templates/app/files/app/app_config.go.plush @@ -0,0 +1,264 @@ +package app + +import ( + "time" + + runtimev1alpha1 "cosmossdk.io/api/cosmos/app/runtime/v1alpha1" + appv1alpha1 "cosmossdk.io/api/cosmos/app/v1alpha1" + authmodulev1 "cosmossdk.io/api/cosmos/auth/module/v1" + authzmodulev1 "cosmossdk.io/api/cosmos/authz/module/v1" + bankmodulev1 "cosmossdk.io/api/cosmos/bank/module/v1" + circuitmodulev1 "cosmossdk.io/api/cosmos/circuit/module/v1" + consensusmodulev1 "cosmossdk.io/api/cosmos/consensus/module/v1" + distrmodulev1 "cosmossdk.io/api/cosmos/distribution/module/v1" + epochsmodulev1 "cosmossdk.io/api/cosmos/epochs/module/v1" + evidencemodulev1 "cosmossdk.io/api/cosmos/evidence/module/v1" + feegrantmodulev1 "cosmossdk.io/api/cosmos/feegrant/module/v1" + genutilmodulev1 "cosmossdk.io/api/cosmos/genutil/module/v1" + govmodulev1 "cosmossdk.io/api/cosmos/gov/module/v1" + groupmodulev1 "cosmossdk.io/api/cosmos/group/module/v1" + mintmodulev1 "cosmossdk.io/api/cosmos/mint/module/v1" + nftmodulev1 "cosmossdk.io/api/cosmos/nft/module/v1" + paramsmodulev1 "cosmossdk.io/api/cosmos/params/module/v1" + slashingmodulev1 "cosmossdk.io/api/cosmos/slashing/module/v1" + stakingmodulev1 "cosmossdk.io/api/cosmos/staking/module/v1" + txconfigv1 "cosmossdk.io/api/cosmos/tx/config/v1" + upgrademodulev1 "cosmossdk.io/api/cosmos/upgrade/module/v1" + vestingmodulev1 "cosmossdk.io/api/cosmos/vesting/module/v1" + "cosmossdk.io/depinject/appconfig" + _ "cosmossdk.io/x/circuit" // import for side-effects + circuittypes 
"cosmossdk.io/x/circuit/types" + _ "cosmossdk.io/x/evidence" // import for side-effects + evidencetypes "cosmossdk.io/x/evidence/types" + "cosmossdk.io/x/feegrant" + _ "cosmossdk.io/x/feegrant/module" // import for side-effects + "cosmossdk.io/x/nft" + _ "cosmossdk.io/x/nft/module" // import for side-effects + _ "cosmossdk.io/x/upgrade" // import for side-effects + upgradetypes "cosmossdk.io/x/upgrade/types" + "github.com/cosmos/cosmos-sdk/runtime" + _ "github.com/cosmos/cosmos-sdk/x/auth/tx/config" // import for side-effects + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + _ "github.com/cosmos/cosmos-sdk/x/auth/vesting" // import for side-effects + vestingtypes "github.com/cosmos/cosmos-sdk/x/auth/vesting/types" + "github.com/cosmos/cosmos-sdk/x/authz" + _ "github.com/cosmos/cosmos-sdk/x/authz/module" // import for side-effects + _ "github.com/cosmos/cosmos-sdk/x/bank" // import for side-effects + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + _ "github.com/cosmos/cosmos-sdk/x/consensus" // import for side-effects + consensustypes "github.com/cosmos/cosmos-sdk/x/consensus/types" + _ "github.com/cosmos/cosmos-sdk/x/distribution" // import for side-effects + distrtypes "github.com/cosmos/cosmos-sdk/x/distribution/types" + _ "github.com/cosmos/cosmos-sdk/x/epochs" // import for side-effects + epochstypes "github.com/cosmos/cosmos-sdk/x/epochs/types" + genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" + _ "github.com/cosmos/cosmos-sdk/x/gov" // import for side-effects + govtypes "github.com/cosmos/cosmos-sdk/x/gov/types" + "github.com/cosmos/cosmos-sdk/x/group" + _ "github.com/cosmos/cosmos-sdk/x/group/module" // import for side-effects + _ "github.com/cosmos/cosmos-sdk/x/mint" // import for side-effects + minttypes "github.com/cosmos/cosmos-sdk/x/mint/types" + _ "github.com/cosmos/cosmos-sdk/x/params" // import for side-effects + paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" + _ "github.com/cosmos/cosmos-sdk/x/slashing" // 
import for side-effects + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + _ "github.com/cosmos/cosmos-sdk/x/staking" // import for side-effects + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + icatypes "github.com/cosmos/ibc-go/v10/modules/apps/27-interchain-accounts/types" + ibctransfertypes "github.com/cosmos/ibc-go/v10/modules/apps/transfer/types" + ibcexported "github.com/cosmos/ibc-go/v10/modules/core/exported" + "google.golang.org/protobuf/types/known/durationpb" +) + +var ( + moduleAccPerms = []*authmodulev1.ModuleAccountPermission{ + {Account: authtypes.FeeCollectorName}, + {Account: distrtypes.ModuleName}, + {Account: minttypes.ModuleName, Permissions: []string{authtypes.Minter}}, + {Account: stakingtypes.BondedPoolName, Permissions: []string{authtypes.Burner, stakingtypes.ModuleName}}, + {Account: stakingtypes.NotBondedPoolName, Permissions: []string{authtypes.Burner, stakingtypes.ModuleName}}, + {Account: govtypes.ModuleName, Permissions: []string{authtypes.Burner}}, + {Account: nft.ModuleName}, + {Account: ibctransfertypes.ModuleName, Permissions: []string{authtypes.Minter, authtypes.Burner}}, + {Account: icatypes.ModuleName}, + } + + // blocked account addresses + blockAccAddrs = []string{ + authtypes.FeeCollectorName, + distrtypes.ModuleName, + minttypes.ModuleName, + stakingtypes.BondedPoolName, + stakingtypes.NotBondedPoolName, + nft.ModuleName, + // We allow the following module accounts to receive funds: + // govtypes.ModuleName + } + + // application configuration (used by depinject) + appConfig = appconfig.Compose(&appv1alpha1.Config{ + Modules: []*appv1alpha1.ModuleConfig{ + { + Name: runtime.ModuleName, + Config: appconfig.WrapAny(&runtimev1alpha1.Module{ + AppName: Name, + // NOTE: upgrade module is required to be prioritized + PreBlockers: []string{ + upgradetypes.ModuleName, + authtypes.ModuleName, + }, + // During begin block slashing happens after distr.BeginBlocker so that + // there is nothing left over in 
the validator fee pool, so as to keep the + // CanWithdrawInvariant invariant. + // NOTE: staking module is required if HistoricalEntries param > 0 + BeginBlockers: []string{ + minttypes.ModuleName, + distrtypes.ModuleName, + slashingtypes.ModuleName, + evidencetypes.ModuleName, + stakingtypes.ModuleName, + authz.ModuleName, + epochstypes.ModuleName, + // ibc modules + ibcexported.ModuleName, + // chain modules + }, + EndBlockers: []string{ + govtypes.ModuleName, + stakingtypes.ModuleName, + feegrant.ModuleName, + group.ModuleName, + // chain modules + }, + // The following is mostly only needed when ModuleName != StoreKey name. + OverrideStoreKeys: []*runtimev1alpha1.StoreKeyConfig{ + { + ModuleName: authtypes.ModuleName, + KvStoreKey: "acc", + }, + }, + // NOTE: The genutils module must occur after staking so that pools are + // properly initialized with tokens from genesis accounts. + // NOTE: The genutils module must also occur after auth so that it can access the params from auth. + InitGenesis: []string{ + consensustypes.ModuleName, + authtypes.ModuleName, + banktypes.ModuleName, + distrtypes.ModuleName, + stakingtypes.ModuleName, + slashingtypes.ModuleName, + govtypes.ModuleName, + minttypes.ModuleName, + genutiltypes.ModuleName, + evidencetypes.ModuleName, + authz.ModuleName, + feegrant.ModuleName, + vestingtypes.ModuleName, + nft.ModuleName, + group.ModuleName, + upgradetypes.ModuleName, + circuittypes.ModuleName, + epochstypes.ModuleName, + // ibc modules + ibcexported.ModuleName, + ibctransfertypes.ModuleName, + icatypes.ModuleName, + // chain modules + }, + }), + }, + { + Name: authtypes.ModuleName, + Config: appconfig.WrapAny(&authmodulev1.Module{ + Bech32Prefix: AccountAddressPrefix, + ModuleAccountPermissions: moduleAccPerms, + EnableUnorderedTransactions: true, + // By default modules authority is the governance module. 
This is configurable with the following: + // Authority: "group", // A custom module authority can be set using a module name + // Authority: "cosmos1cwwv22j5ca08ggdv9c2uky355k908694z577tv", // or a specific address + }), + }, + { + Name: vestingtypes.ModuleName, + Config: appconfig.WrapAny(&vestingmodulev1.Module{}), + }, + { + Name: banktypes.ModuleName, + Config: appconfig.WrapAny(&bankmodulev1.Module{ + BlockedModuleAccountsOverride: blockAccAddrs, + }), + }, + { + Name: stakingtypes.ModuleName, + Config: appconfig.WrapAny(&stakingmodulev1.Module{}), + }, + { + Name: slashingtypes.ModuleName, + Config: appconfig.WrapAny(&slashingmodulev1.Module{}), + }, + { + Name: "tx", + Config: appconfig.WrapAny(&txconfigv1.Config{}), + }, + { + Name: genutiltypes.ModuleName, + Config: appconfig.WrapAny(&genutilmodulev1.Module{}), + }, + { + Name: authz.ModuleName, + Config: appconfig.WrapAny(&authzmodulev1.Module{}), + }, + { + Name: upgradetypes.ModuleName, + Config: appconfig.WrapAny(&upgrademodulev1.Module{}), + }, + { + Name: distrtypes.ModuleName, + Config: appconfig.WrapAny(&distrmodulev1.Module{}), + }, + { + Name: evidencetypes.ModuleName, + Config: appconfig.WrapAny(&evidencemodulev1.Module{}), + }, + { + Name: minttypes.ModuleName, + Config: appconfig.WrapAny(&mintmodulev1.Module{}), + }, + { + Name: group.ModuleName, + Config: appconfig.WrapAny(&groupmodulev1.Module{ + MaxExecutionPeriod: durationpb.New(time.Second * 1209600), + MaxMetadataLen: 255, + }), + }, + { + Name: nft.ModuleName, + Config: appconfig.WrapAny(&nftmodulev1.Module{}), + }, + { + Name: feegrant.ModuleName, + Config: appconfig.WrapAny(&feegrantmodulev1.Module{}), + }, + { + Name: govtypes.ModuleName, + Config: appconfig.WrapAny(&govmodulev1.Module{}), + }, + { + Name: consensustypes.ModuleName, + Config: appconfig.WrapAny(&consensusmodulev1.Module{}), + }, + { + Name: circuittypes.ModuleName, + Config: appconfig.WrapAny(&circuitmodulev1.Module{}), + }, + { + Name: paramstypes.ModuleName, + 
Config: appconfig.WrapAny(¶msmodulev1.Module{}), + }, + { + Name: epochstypes.ModuleName, + Config: appconfig.WrapAny(&epochsmodulev1.Module{}), + }, + }, + }) +) diff --git a/ignite/templates/app/files/app/config.go.plush b/ignite/templates/app/files/app/config.go.plush new file mode 100644 index 0000000..ad6b4b8 --- /dev/null +++ b/ignite/templates/app/files/app/config.go.plush @@ -0,0 +1,25 @@ +package app + +import sdk "github.com/cosmos/cosmos-sdk/types" + +func init() { + // Set bond denom + <%= if (DefaultDenom) { %> + sdk.DefaultBondDenom = "<%= DefaultDenom %>" + <% } %> + + // Set address prefixes + accountPubKeyPrefix := AccountAddressPrefix + "pub" + validatorAddressPrefix := AccountAddressPrefix + "valoper" + validatorPubKeyPrefix := AccountAddressPrefix + "valoperpub" + consNodeAddressPrefix := AccountAddressPrefix + "valcons" + consNodePubKeyPrefix := AccountAddressPrefix + "valconspub" + + // Set and seal config + config := sdk.GetConfig() + config.SetCoinType(ChainCoinType) + config.SetBech32PrefixForAccount(AccountAddressPrefix, accountPubKeyPrefix) + config.SetBech32PrefixForValidator(validatorAddressPrefix, validatorPubKeyPrefix) + config.SetBech32PrefixForConsensusNode(consNodeAddressPrefix, consNodePubKeyPrefix) + config.Seal() +} diff --git a/ignite/templates/app/files/app/export.go.plush b/ignite/templates/app/files/app/export.go.plush new file mode 100644 index 0000000..d1f3f60 --- /dev/null +++ b/ignite/templates/app/files/app/export.go.plush @@ -0,0 +1,258 @@ +package app + +import ( + "encoding/json" + "fmt" + + cmtproto "github.com/cometbft/cometbft/proto/tendermint/types" + + "cosmossdk.io/collections" + storetypes "cosmossdk.io/store/types" + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + "github.com/cosmos/cosmos-sdk/x/staking" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + + servertypes "github.com/cosmos/cosmos-sdk/server/types" + sdk "github.com/cosmos/cosmos-sdk/types" +) + +// 
ExportAppStateAndValidators exports the state of the application for a genesis +// file. +func (app *App) ExportAppStateAndValidators(forZeroHeight bool, jailAllowedAddrs, modulesToExport []string) (servertypes.ExportedApp, error) { + // as if they could withdraw from the start of the next block + ctx := app.NewContextLegacy(true, cmtproto.Header{Height: app.LastBlockHeight()}) + + // We export at last height + 1, because that's the height at which + // CometBFT will start InitChain. + height := app.LastBlockHeight() + 1 + if forZeroHeight { + height = 0 + app.prepForZeroHeightGenesis(ctx, jailAllowedAddrs) + } + + genState, err := app.ModuleManager.ExportGenesisForModules(ctx, app.appCodec, modulesToExport) + if err != nil { + return servertypes.ExportedApp{}, err + } + + appState, err := json.MarshalIndent(genState, "", " ") + if err != nil { + return servertypes.ExportedApp{}, err + } + + validators, err := staking.WriteValidators(ctx, app.StakingKeeper) + + return servertypes.ExportedApp{ + AppState: appState, + Validators: validators, + Height: height, + ConsensusParams: app.BaseApp.GetConsensusParams(ctx), + }, err +} + +// prepForZeroHeightGenesis prepares for fresh start at zero height +// NOTE zero height genesis is a temporary feature which will be deprecated +// +// in favor of export at a block height +func (app *App) prepForZeroHeightGenesis(ctx sdk.Context, jailAllowedAddrs []string) { + applyAllowedAddrs := false + + // check if there is a allowed address list + if len(jailAllowedAddrs) > 0 { + applyAllowedAddrs = true + } + + allowedAddrsMap := make(map[string]bool) + + for _, addr := range jailAllowedAddrs { + _, err := app.InterfaceRegistry().SigningContext().ValidatorAddressCodec().StringToBytes(addr) + if err != nil { + panic(err) + } + allowedAddrsMap[addr] = true + } + + /* Handle fee distribution state. 
*/ + + // withdraw all validator commission + err := app.StakingKeeper.IterateValidators(ctx, func(_ int64, val stakingtypes.ValidatorI) (stop bool) { + valBz, err := app.StakingKeeper.ValidatorAddressCodec().StringToBytes(val.GetOperator()) + if err != nil { + panic(err) + } + _, _ = app.DistrKeeper.WithdrawValidatorCommission(ctx, valBz) + return false + }) + if err != nil { + panic(err) + } + + // withdraw all delegator rewards + dels, err := app.StakingKeeper.GetAllDelegations(ctx) + if err != nil { + panic(err) + } + + for _, delegation := range dels { + valAddr, err := app.InterfaceRegistry().SigningContext().ValidatorAddressCodec().StringToBytes(delegation.ValidatorAddress) + if err != nil { + panic(err) + } + + delAddr, err := app.InterfaceRegistry().SigningContext().AddressCodec().StringToBytes(delegation.DelegatorAddress) + if err != nil { + panic(err) + } + + _, _ = app.DistrKeeper.WithdrawDelegationRewards(ctx, delAddr, valAddr) + } + + // clear validator slash events + app.DistrKeeper.DeleteAllValidatorSlashEvents(ctx) + + // clear validator historical rewards + app.DistrKeeper.DeleteAllValidatorHistoricalRewards(ctx) + + // set context height to zero + height := ctx.BlockHeight() + ctx = ctx.WithBlockHeight(0) + + // reinitialize all validators + err = app.StakingKeeper.IterateValidators(ctx, func(_ int64, val stakingtypes.ValidatorI) (stop bool) { + valBz, err := app.StakingKeeper.ValidatorAddressCodec().StringToBytes(val.GetOperator()) + if err != nil { + panic(err) + } + // donate any unwithdrawn outstanding reward tokens to the community pool + rewards, err := app.DistrKeeper.GetValidatorOutstandingRewardsCoins(ctx, valBz) + if err != nil { + panic(err) + } + feePool, err := app.DistrKeeper.FeePool.Get(ctx) + if err != nil { + panic(err) + } + feePool.CommunityPool = feePool.CommunityPool.Add(rewards...) 
+ if err := app.DistrKeeper.FeePool.Set(ctx, feePool); err != nil { + panic(err) + } + + if err := app.DistrKeeper.Hooks().AfterValidatorCreated(ctx, valBz); err != nil { + panic(err) + } + return false + }) + if err != nil { + panic(err) + } + + // reinitialize all delegations + for _, del := range dels { + valAddr, err := app.InterfaceRegistry().SigningContext().ValidatorAddressCodec().StringToBytes(del.ValidatorAddress) + if err != nil { + panic(err) + } + delAddr, err := app.InterfaceRegistry().SigningContext().AddressCodec().StringToBytes(del.DelegatorAddress) + if err != nil { + panic(err) + } + + if err := app.DistrKeeper.Hooks().BeforeDelegationCreated(ctx, delAddr, valAddr); err != nil { + // never called as BeforeDelegationCreated always returns nil + panic(fmt.Errorf("error while incrementing period: %w", err)) + } + + if err := app.DistrKeeper.Hooks().AfterDelegationModified(ctx, delAddr, valAddr); err != nil { + // never called as AfterDelegationModified always returns nil + panic(fmt.Errorf("error while creating a new delegation period record: %w", err)) + } + } + + // reset context height + ctx = ctx.WithBlockHeight(height) + + /* Handle staking state. 
*/ + + // iterate through redelegations, reset creation height + err = app.StakingKeeper.IterateRedelegations(ctx, func(_ int64, red stakingtypes.Redelegation) (stop bool) { + for i := range red.Entries { + red.Entries[i].CreationHeight = 0 + } + err = app.StakingKeeper.SetRedelegation(ctx, red) + if err != nil { + panic(err) + } + return false + }) + if err != nil { + panic(err) + } + + // iterate through unbonding delegations, reset creation height + err = app.StakingKeeper.IterateUnbondingDelegations(ctx, func(_ int64, ubd stakingtypes.UnbondingDelegation) (stop bool) { + for i := range ubd.Entries { + ubd.Entries[i].CreationHeight = 0 + } + err = app.StakingKeeper.SetUnbondingDelegation(ctx, ubd) + if err != nil { + panic(err) + } + return false + }) + if err != nil { + panic(err) + } + + // Iterate through validators by power descending, reset bond heights, and + // update bond intra-tx counters. + store := ctx.KVStore(app.GetKey(stakingtypes.StoreKey)) + iter := storetypes.KVStoreReversePrefixIterator(store, stakingtypes.ValidatorsKey) + + for ; iter.Valid(); iter.Next() { + addr := sdk.ValAddress(stakingtypes.AddressFromValidatorsKey(iter.Key())) + validator, err := app.StakingKeeper.GetValidator(ctx, addr) + if err != nil { + panic("expected validator, not found") + } + + valAddr, err := app.StakingKeeper.ValidatorAddressCodec().BytesToString(addr) + if err != nil { + panic(err) + } + + validator.UnbondingHeight = 0 + if applyAllowedAddrs && !allowedAddrsMap[valAddr] { + validator.Jailed = true + } + + if err = app.StakingKeeper.SetValidator(ctx, validator); err != nil { + panic(err) + } + } + + if err := iter.Close(); err != nil { + app.Logger().Error("error while closing the key-value store reverse prefix iterator: ", err) + return + } + + _, err = app.StakingKeeper.ApplyAndReturnValidatorSetUpdates(ctx) + if err != nil { + panic(err) + } + +<%= if (!IsChainMinimal) { %> + /* Handle slashing state. 
*/ + + // reset start height on signing infos + if err := app.SlashingKeeper.IterateValidatorSigningInfos( + ctx, + func(addr sdk.ConsAddress, info slashingtypes.ValidatorSigningInfo) (stop bool) { + info.StartHeight = 0 + _ = app.SlashingKeeper.SetValidatorSigningInfo(ctx, addr, info) + return false + }, + ); err != nil { + panic(err) + } +<% } %> +} diff --git a/ignite/templates/app/files/app/genesis.go.plush b/ignite/templates/app/files/app/genesis.go.plush new file mode 100644 index 0000000..fa2bbe6 --- /dev/null +++ b/ignite/templates/app/files/app/genesis.go.plush @@ -0,0 +1,14 @@ +package app + +import ( + "encoding/json" +) + +// GenesisState of the blockchain is represented here as a map of raw json +// messages key'd by a identifier string. +// The identifier is used to determine which module genesis information belongs +// to so it may be appropriately routed during init chain. +// Within this application default genesis information is retrieved from +// the ModuleBasicManager which populates json from each BasicModule +// object provided to it during init. +type GenesisState map[string]json.RawMessage diff --git a/ignite/templates/app/files/app/genesis_account.go.plush b/ignite/templates/app/files/app/genesis_account.go.plush new file mode 100644 index 0000000..d68a0fd --- /dev/null +++ b/ignite/templates/app/files/app/genesis_account.go.plush @@ -0,0 +1,47 @@ +package app + +import ( + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" +) + +var _ authtypes.GenesisAccount = (*GenesisAccount)(nil) + +// GenesisAccount defines a type that implements the GenesisAccount interface +// to be used for simulation accounts in the genesis state. 
+type GenesisAccount struct { + *authtypes.BaseAccount + + // vesting account fields + OriginalVesting sdk.Coins `json:"original_vesting" yaml:"original_vesting"` // total vesting coins upon initialization + DelegatedFree sdk.Coins `json:"delegated_free" yaml:"delegated_free"` // delegated vested coins at time of delegation + DelegatedVesting sdk.Coins `json:"delegated_vesting" yaml:"delegated_vesting"` // delegated vesting coins at time of delegation + StartTime int64 `json:"start_time" yaml:"start_time"` // vesting start time (UNIX Epoch time) + EndTime int64 `json:"end_time" yaml:"end_time"` // vesting end time (UNIX Epoch time) + + // module account fields + ModuleName string `json:"module_name" yaml:"module_name"` // name of the module account + ModulePermissions []string `json:"module_permissions" yaml:"module_permissions"` // permissions of module account +} + +// Validate checks for errors on the vesting and module account parameters +func (sga GenesisAccount) Validate() error { + if !sga.OriginalVesting.IsZero() { + if sga.StartTime >= sga.EndTime { + return errors.New("vesting start-time cannot be before end-time") + } + } + + if sga.ModuleName != "" { + ma := authtypes.ModuleAccount{ + BaseAccount: sga.BaseAccount, Name: sga.ModuleName, Permissions: sga.ModulePermissions, + } + if err := ma.Validate(); err != nil { + return err + } + } + + return sga.BaseAccount.Validate() +} diff --git a/ignite/templates/app/files/app/ibc.go.plush b/ignite/templates/app/files/app/ibc.go.plush new file mode 100644 index 0000000..291cce9 --- /dev/null +++ b/ignite/templates/app/files/app/ibc.go.plush @@ -0,0 +1,172 @@ +package app + +import ( + "cosmossdk.io/core/appmodule" + storetypes "cosmossdk.io/store/types" + "github.com/cosmos/cosmos-sdk/codec" + cdctypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/runtime" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + "github.com/cosmos/cosmos-sdk/types/module" + authtypes 
"github.com/cosmos/cosmos-sdk/x/auth/types" + govtypes "github.com/cosmos/cosmos-sdk/x/gov/types" + icamodule "github.com/cosmos/ibc-go/v10/modules/apps/27-interchain-accounts" + icacontroller "github.com/cosmos/ibc-go/v10/modules/apps/27-interchain-accounts/controller" + icacontrollerkeeper "github.com/cosmos/ibc-go/v10/modules/apps/27-interchain-accounts/controller/keeper" + icacontrollertypes "github.com/cosmos/ibc-go/v10/modules/apps/27-interchain-accounts/controller/types" + icahost "github.com/cosmos/ibc-go/v10/modules/apps/27-interchain-accounts/host" + icahostkeeper "github.com/cosmos/ibc-go/v10/modules/apps/27-interchain-accounts/host/keeper" + icahosttypes "github.com/cosmos/ibc-go/v10/modules/apps/27-interchain-accounts/host/types" + icatypes "github.com/cosmos/ibc-go/v10/modules/apps/27-interchain-accounts/types" + ibctransfer "github.com/cosmos/ibc-go/v10/modules/apps/transfer" + ibctransferv2 "github.com/cosmos/ibc-go/v10/modules/apps/transfer/v2" + ibctransferkeeper "github.com/cosmos/ibc-go/v10/modules/apps/transfer/keeper" + ibctransfertypes "github.com/cosmos/ibc-go/v10/modules/apps/transfer/types" + ibc "github.com/cosmos/ibc-go/v10/modules/core" + ibcapi "github.com/cosmos/ibc-go/v10/modules/core/api" + ibcclienttypes "github.com/cosmos/ibc-go/v10/modules/core/02-client/types" // nolint:staticcheck // Deprecated: params key table is needed for params migration + ibcconnectiontypes "github.com/cosmos/ibc-go/v10/modules/core/03-connection/types" + porttypes "github.com/cosmos/ibc-go/v10/modules/core/05-port/types" + ibcexported "github.com/cosmos/ibc-go/v10/modules/core/exported" + ibckeeper "github.com/cosmos/ibc-go/v10/modules/core/keeper" + solomachine "github.com/cosmos/ibc-go/v10/modules/light-clients/06-solomachine" + ibctm "github.com/cosmos/ibc-go/v10/modules/light-clients/07-tendermint" +) + +// registerIBCModules register IBC keepers and non dependency inject modules. 
+func (app *App) registerIBCModules(appOpts servertypes.AppOptions) error { + // set up non depinject support modules store keys + if err := app.RegisterStores( + storetypes.NewKVStoreKey(ibcexported.StoreKey), + storetypes.NewKVStoreKey(ibctransfertypes.StoreKey), + storetypes.NewKVStoreKey(icahosttypes.StoreKey), + storetypes.NewKVStoreKey(icacontrollertypes.StoreKey), + ); err != nil { + return err + } + + // register the key tables for legacy param subspaces + keyTable := ibcclienttypes.ParamKeyTable() + keyTable.RegisterParamSet(&ibcconnectiontypes.Params{}) + app.ParamsKeeper.Subspace(ibcexported.ModuleName).WithKeyTable(keyTable) + app.ParamsKeeper.Subspace(ibctransfertypes.ModuleName).WithKeyTable(ibctransfertypes.ParamKeyTable()) + app.ParamsKeeper.Subspace(icacontrollertypes.SubModuleName).WithKeyTable(icacontrollertypes.ParamKeyTable()) + app.ParamsKeeper.Subspace(icahosttypes.SubModuleName).WithKeyTable(icahosttypes.ParamKeyTable()) + + govModuleAddr, _ := app.AuthKeeper.AddressCodec().BytesToString(authtypes.NewModuleAddress(govtypes.ModuleName)) + + // Create IBC keeper + app.IBCKeeper = ibckeeper.NewKeeper( + app.appCodec, + runtime.NewKVStoreService(app.GetKey(ibcexported.StoreKey)), + app.GetSubspace(ibcexported.ModuleName), + app.UpgradeKeeper, + govModuleAddr, + ) + + // Create IBC transfer keeper + app.TransferKeeper = ibctransferkeeper.NewKeeper( + app.appCodec, + runtime.NewKVStoreService(app.GetKey(ibctransfertypes.StoreKey)), + app.GetSubspace(ibctransfertypes.ModuleName), + app.IBCKeeper.ChannelKeeper, + app.IBCKeeper.ChannelKeeper, + app.MsgServiceRouter(), + app.AuthKeeper, + app.BankKeeper, + govModuleAddr, + ) + + // Create interchain account keepers + app.ICAHostKeeper = icahostkeeper.NewKeeper( + app.appCodec, + runtime.NewKVStoreService(app.GetKey(icahosttypes.StoreKey)), + app.GetSubspace(icahosttypes.SubModuleName), + app.IBCKeeper.ChannelKeeper, // ICS4Wrapper + app.IBCKeeper.ChannelKeeper, + app.AuthKeeper, + 
app.MsgServiceRouter(), + app.GRPCQueryRouter(), + govModuleAddr, + ) + + app.ICAControllerKeeper = icacontrollerkeeper.NewKeeper( + app.appCodec, + runtime.NewKVStoreService(app.GetKey(icacontrollertypes.StoreKey)), + app.GetSubspace(icacontrollertypes.SubModuleName), + app.IBCKeeper.ChannelKeeper, + app.IBCKeeper.ChannelKeeper, + app.MsgServiceRouter(), + govModuleAddr, + ) + + // create IBC module from bottom to top of stack + var ( + transferStack porttypes.IBCModule = ibctransfer.NewIBCModule(app.TransferKeeper) + transferStackV2 ibcapi.IBCModule = ibctransferv2.NewIBCModule(app.TransferKeeper) + icaControllerStack porttypes.IBCModule = icacontroller.NewIBCMiddleware(app.ICAControllerKeeper) + icaHostStack porttypes.IBCModule = icahost.NewIBCModule(app.ICAHostKeeper) + ) + + // create IBC v1 router, add transfer route, then set it on the keeper + ibcRouter := porttypes.NewRouter(). + AddRoute(ibctransfertypes.ModuleName, transferStack). + AddRoute(icacontrollertypes.SubModuleName, icaControllerStack). + AddRoute(icahosttypes.SubModuleName, icaHostStack) + + // create IBC v2 router, add transfer route, then set it on the keeper + ibcv2Router := ibcapi.NewRouter(). 
+ AddRoute(ibctransfertypes.PortID, transferStackV2) + + app.IBCKeeper.SetRouter(ibcRouter) + app.IBCKeeper.SetRouterV2(ibcv2Router) + + clientKeeper := app.IBCKeeper.ClientKeeper + storeProvider := clientKeeper.GetStoreProvider() + + tmLightClientModule := ibctm.NewLightClientModule(app.appCodec, storeProvider) + clientKeeper.AddRoute(ibctm.ModuleName, &tmLightClientModule) + + soloLightClientModule := solomachine.NewLightClientModule(app.appCodec, storeProvider) + clientKeeper.AddRoute(solomachine.ModuleName, &soloLightClientModule) + + // register IBC modules + if err := app.RegisterModules( + ibc.NewAppModule(app.IBCKeeper), + ibctransfer.NewAppModule(app.TransferKeeper), + icamodule.NewAppModule(&app.ICAControllerKeeper, &app.ICAHostKeeper), + ibctm.NewAppModule(tmLightClientModule), + solomachine.NewAppModule(soloLightClientModule), + ); err != nil { + return err + } + + return nil +} + +// RegisterIBC Since the IBC modules don't support dependency injection, +// we need to manually register the modules on the client side. +// This needs to be removed after IBC supports App Wiring. +func RegisterIBC(cdc codec.Codec) map[string]appmodule.AppModule { + modules := map[string]appmodule.AppModule{ + ibcexported.ModuleName: ibc.NewAppModule(&ibckeeper.Keeper{}), + ibctransfertypes.ModuleName: ibctransfer.NewAppModule(ibctransferkeeper.Keeper{}), + icatypes.ModuleName: icamodule.NewAppModule(&icacontrollerkeeper.Keeper{}, &icahostkeeper.Keeper{}), + ibctm.ModuleName: ibctm.NewAppModule(ibctm.NewLightClientModule(cdc, ibcclienttypes.StoreProvider{})), + solomachine.ModuleName: solomachine.NewAppModule(solomachine.NewLightClientModule(cdc, ibcclienttypes.StoreProvider{})), + } + + for _, m := range modules { + if mr, ok := m.(module.AppModuleBasic); ok { + mr.RegisterInterfaces(cdc.InterfaceRegistry()) + } + } + + return modules +} + +// GetIBCKeeper returns the IBC keeper. +// Used for supply with IBC keeper getter for the IBC modules with App Wiring. 
+func (app *App) GetIBCKeeper() *ibckeeper.Keeper { + return app.IBCKeeper +} diff --git a/ignite/templates/app/files/app/sim_bench_test.go.plush b/ignite/templates/app/files/app/sim_bench_test.go.plush new file mode 100644 index 0000000..4355324 --- /dev/null +++ b/ignite/templates/app/files/app/sim_bench_test.go.plush @@ -0,0 +1,79 @@ +package app + +import ( + "os" + "testing" + + "github.com/spf13/viper" + "github.com/stretchr/testify/require" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client/flags" + simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims" + simtypes "github.com/cosmos/cosmos-sdk/types/simulation" + "github.com/cosmos/cosmos-sdk/x/simulation" + simcli "github.com/cosmos/cosmos-sdk/x/simulation/client/cli" +) + +// Profile with: +// `go test -benchmem -run=^$ ./app -bench ^BenchmarkFullAppSimulation$ -Commit=true -cpuprofile cpu.out` +func BenchmarkFullAppSimulation(b *testing.B) { + b.ReportAllocs() + + config := simcli.NewConfigFromFlags() + config.ChainID = SimAppChainID + + db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "goleveldb-app-sim", "Simulation", simcli.FlagVerboseValue, simcli.FlagEnabledValue) + if err != nil { + b.Fatalf("simulation setup failed: %s", err.Error()) + } + + if skip { + b.Skip("skipping benchmark application simulation") + } + + defer func() { + require.NoError(b, db.Close()) + require.NoError(b, os.RemoveAll(dir)) + }() + + appOptions := viper.New() + if FlagEnableStreamingValue { + m := make(map[string]interface{}) + m["streaming.abci.keys"] = []string{"*"} + m["streaming.abci.plugin"] = "abci_v1" + m["streaming.abci.stop-node-on-err"] = true + for key, value := range m { + appOptions.SetDefault(key, value) + } + } + appOptions.SetDefault(flags.FlagHome, DefaultNodeHome) + + app := New(logger, db, nil, true, appOptions, interBlockCacheOpt(), baseapp.SetChainID(SimAppChainID)) + + // run randomized simulation + _, simParams, simErr := simulation.SimulateFromSeed( + 
b, + os.Stdout, + app.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), + simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + simtestutil.BuildSimulationOperations(app, app.AppCodec(), config, app.TxConfig()), + BlockedAddresses(), + config, + app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + if err = simtestutil.CheckExportSimulation(app, config, simParams); err != nil { + b.Fatal(err) + } + + if simErr != nil { + b.Fatal(simErr) + } + + if config.Commit { + simtestutil.PrintStats(db) + } +} diff --git a/ignite/templates/app/files/app/sim_test.go.plush b/ignite/templates/app/files/app/sim_test.go.plush new file mode 100644 index 0000000..b8bed05 --- /dev/null +++ b/ignite/templates/app/files/app/sim_test.go.plush @@ -0,0 +1,465 @@ +package app + +import ( + "encoding/json" + "flag" + "fmt" + "math/rand" + "os" + "runtime/debug" + "strings" + "testing" + "time" + + "cosmossdk.io/log" + "cosmossdk.io/store" + storetypes "cosmossdk.io/store/types" + "cosmossdk.io/x/feegrant" + upgradetypes "cosmossdk.io/x/upgrade/types" + abci "github.com/cometbft/cometbft/abci/types" + cmtproto "github.com/cometbft/cometbft/proto/tendermint/types" + dbm "github.com/cosmos/cosmos-db" + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/server" + simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims" + simulationtypes "github.com/cosmos/cosmos-sdk/types/simulation" + authzkeeper "github.com/cosmos/cosmos-sdk/x/authz/keeper" + "github.com/cosmos/cosmos-sdk/x/simulation" + simcli "github.com/cosmos/cosmos-sdk/x/simulation/client/cli" + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + "github.com/spf13/viper" + "github.com/stretchr/testify/require" +) + +const ( + SimAppChainID = "<%= BinaryNamePrefix 
%>-simapp" +) + +var FlagEnableStreamingValue bool + +// Get flags every time the simulator is run +func init() { + simcli.GetSimulatorFlags() + flag.BoolVar(&FlagEnableStreamingValue, "EnableStreaming", false, "Enable streaming service") +} + +// fauxMerkleModeOpt returns a BaseApp option to use a dbStoreAdapter instead of +// an IAVLStore for faster simulation speed. +func fauxMerkleModeOpt(bapp *baseapp.BaseApp) { + bapp.SetFauxMerkleMode() +} + +// interBlockCacheOpt returns a BaseApp option function that sets the persistent +// inter-block write-through cache. +func interBlockCacheOpt() func(*baseapp.BaseApp) { + return baseapp.SetInterBlockCache(store.NewCommitKVStoreCacheManager()) +} + +// BenchmarkSimulation run the chain simulation +// Running using ignite command: +// `ignite chain simulate -v --numBlocks 200 --blockSize 50` +// Running as go benchmark test: +// `go test -benchmem -run=^$ -bench ^BenchmarkSimulation ./app -NumBlocks=200 -BlockSize 50 -Commit=true -Verbose=true -Enabled=true` +func BenchmarkSimulation(b *testing.B) { + simcli.FlagSeedValue = time.Now().Unix() + simcli.FlagVerboseValue = true + simcli.FlagCommitValue = true + simcli.FlagEnabledValue = true + + config := simcli.NewConfigFromFlags() + config.ChainID = SimAppChainID + + db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-sim", "Simulation", simcli.FlagVerboseValue, simcli.FlagEnabledValue) + if skip { + b.Skip("skipping application simulation") + } + require.NoError(b, err, "simulation setup failed") + + defer func() { + require.NoError(b, db.Close()) + require.NoError(b, os.RemoveAll(dir)) + }() + + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = DefaultNodeHome + + bApp := New(logger, db, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID)) + require.Equal(b, Name, bApp.Name()) + + // run randomized simulation + _, simParams, simErr := simulation.SimulateFromSeed( + b, + os.Stdout, + 
bApp.BaseApp, + simtestutil.AppStateFn(bApp.AppCodec(), bApp.SimulationManager(), bApp.DefaultGenesis()), + simulationtypes.RandomAccounts, + simtestutil.BuildSimulationOperations(bApp, bApp.AppCodec(), config, bApp.TxConfig()), + BlockedAddresses(), + config, + bApp.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + err = simtestutil.CheckExportSimulation(bApp, config, simParams) + require.NoError(b, err) + require.NoError(b, simErr) + + if config.Commit { + simtestutil.PrintStats(db) + } +} + +func TestFullAppSimulation(t *testing.T) { + config := simcli.NewConfigFromFlags() + config.ChainID = SimAppChainID + + db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-sim", "Simulation", simcli.FlagVerboseValue, simcli.FlagEnabledValue) + if skip { + t.Skip("skipping application simulation") + } + require.NoError(t, err, "simulation setup failed") + + defer func() { + require.NoError(t, db.Close()) + require.NoError(t, os.RemoveAll(dir)) + }() + + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = DefaultNodeHome + + app := New(logger, db, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID)) + if !simcli.FlagSigverifyTxValue { + app.SetNotSigverifyTx() + } + require.Equal(t, "<%= BinaryNamePrefix %>", app.Name()) + + // run randomized simulation + _, simParams, simErr := simulation.SimulateFromSeed( + t, + os.Stdout, + app.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), + simulationtypes.RandomAccounts, + simtestutil.BuildSimulationOperations(app, app.AppCodec(), config, app.TxConfig()), + BlockedAddresses(), + config, + app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + err = simtestutil.CheckExportSimulation(app, config, simParams) + require.NoError(t, err) + require.NoError(t, simErr) + + if config.Commit { + simtestutil.PrintStats(db) + } +} + +func 
TestAppImportExport(t *testing.T) { + config := simcli.NewConfigFromFlags() + config.ChainID = SimAppChainID + + db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-sim", "Simulation", simcli.FlagVerboseValue, simcli.FlagEnabledValue) + if skip { + t.Skip("skipping application import/export simulation") + } + require.NoError(t, err, "simulation setup failed") + + defer func() { + require.NoError(t, db.Close()) + require.NoError(t, os.RemoveAll(dir)) + }() + + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = DefaultNodeHome + + bApp := New(logger, db, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID)) + require.Equal(t, Name, bApp.Name()) + + // Run randomized simulation + _, simParams, simErr := simulation.SimulateFromSeed( + t, + os.Stdout, + bApp.BaseApp, + simtestutil.AppStateFn(bApp.AppCodec(), bApp.SimulationManager(), bApp.DefaultGenesis()), + simulationtypes.RandomAccounts, + simtestutil.BuildSimulationOperations(bApp, bApp.AppCodec(), config, bApp.TxConfig()), + BlockedAddresses(), + config, + bApp.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + err = simtestutil.CheckExportSimulation(bApp, config, simParams) + require.NoError(t, err) + require.NoError(t, simErr) + + if config.Commit { + simtestutil.PrintStats(db) + } + + fmt.Printf("exporting genesis...\n") + + exported, err := bApp.ExportAppStateAndValidators(false, []string{}, []string{}) + require.NoError(t, err) + + fmt.Printf("importing genesis...\n") + + newDB, newDir, _, _, err := simtestutil.SetupSimulation(config, "leveldb-app-sim-2", "Simulation-2", simcli.FlagVerboseValue, simcli.FlagEnabledValue) + require.NoError(t, err, "simulation setup failed") + + defer func() { + require.NoError(t, newDB.Close()) + require.NoError(t, os.RemoveAll(newDir)) + }() + + newApp := New(log.NewNopLogger(), newDB, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID)) 
+ require.Equal(t, Name, newApp.Name()) + + var genesisState GenesisState + err = json.Unmarshal(exported.AppState, &genesisState) + require.NoError(t, err) + + ctxA := bApp.NewContextLegacy(true, cmtproto.Header{Height: bApp.LastBlockHeight()}) + ctxB := newApp.NewContextLegacy(true, cmtproto.Header{Height: bApp.LastBlockHeight()}) + _, err = newApp.ModuleManager.InitGenesis(ctxB, bApp.AppCodec(), genesisState) + + if err != nil { + if strings.Contains(err.Error(), "validator set is empty after InitGenesis") { + logger.Info("Skipping simulation as all validators have been unbonded") + logger.Info("err", err, "stacktrace", string(debug.Stack())) + return + } + } + require.NoError(t, err) + err = newApp.StoreConsensusParams(ctxB, exported.ConsensusParams) + require.NoError(t, err) + fmt.Printf("comparing stores...\n") + + // skip certain prefixes + skipPrefixes := map[string][][]byte{ + upgradetypes.StoreKey: { + []byte{upgradetypes.VersionMapByte}, + }, + stakingtypes.StoreKey: { + stakingtypes.UnbondingQueueKey, stakingtypes.RedelegationQueueKey, stakingtypes.ValidatorQueueKey, + stakingtypes.HistoricalInfoKey, stakingtypes.UnbondingIDKey, stakingtypes.UnbondingIndexKey, + stakingtypes.UnbondingTypeKey, stakingtypes.ValidatorUpdatesKey, + }, + authzkeeper.StoreKey: {authzkeeper.GrantQueuePrefix}, + feegrant.StoreKey: {feegrant.FeeAllowanceQueueKeyPrefix}, + slashingtypes.StoreKey: {slashingtypes.ValidatorMissedBlockBitmapKeyPrefix}, + } + + storeKeys := bApp.GetStoreKeys() + require.NotEmpty(t, storeKeys) + + for _, appKeyA := range storeKeys { + // only compare kvstores + if _, ok := appKeyA.(*storetypes.KVStoreKey); !ok { + continue + } + + keyName := appKeyA.Name() + appKeyB := newApp.GetKey(keyName) + + storeA := ctxA.KVStore(appKeyA) + storeB := ctxB.KVStore(appKeyB) + + failedKVAs, failedKVBs := simtestutil.DiffKVStores(storeA, storeB, skipPrefixes[keyName]) + require.Equal(t, len(failedKVAs), len(failedKVBs), "unequal sets of key-values to compare %s, key 
stores %s and %s", keyName, appKeyA, appKeyB) + + t.Logf("compared %d different key/value pairs between %s and %s\n", len(failedKVAs), appKeyA, appKeyB) + + require.Equal(t, 0, len(failedKVAs), simtestutil.GetSimulationLog(keyName, bApp.SimulationManager().StoreDecoders, failedKVAs, failedKVBs)) + } +} + +func TestAppSimulationAfterImport(t *testing.T) { + config := simcli.NewConfigFromFlags() + config.ChainID = SimAppChainID + + db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-sim", "Simulation", simcli.FlagVerboseValue, simcli.FlagEnabledValue) + if skip { + t.Skip("skipping application simulation after import") + } + require.NoError(t, err, "simulation setup failed") + + defer func() { + require.NoError(t, db.Close()) + require.NoError(t, os.RemoveAll(dir)) + }() + + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = DefaultNodeHome + + bApp := New(logger, db, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID)) + require.Equal(t, Name, bApp.Name()) + + // Run randomized simulation + stopEarly, simParams, simErr := simulation.SimulateFromSeed( + t, + os.Stdout, + bApp.BaseApp, + simtestutil.AppStateFn(bApp.AppCodec(), bApp.SimulationManager(), bApp.DefaultGenesis()), + simulationtypes.RandomAccounts, + simtestutil.BuildSimulationOperations(bApp, bApp.AppCodec(), config, bApp.TxConfig()), + BlockedAddresses(), + config, + bApp.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + err = simtestutil.CheckExportSimulation(bApp, config, simParams) + require.NoError(t, err) + require.NoError(t, simErr) + + if config.Commit { + simtestutil.PrintStats(db) + } + + if stopEarly { + fmt.Println("can't export or import a zero-validator genesis, exiting test...") + return + } + + fmt.Printf("exporting genesis...\n") + + exported, err := bApp.ExportAppStateAndValidators(true, []string{}, []string{}) + require.NoError(t, err) + + fmt.Printf("importing 
genesis...\n") + + newDB, newDir, _, _, err := simtestutil.SetupSimulation(config, "leveldb-app-sim-2", "Simulation-2", simcli.FlagVerboseValue, simcli.FlagEnabledValue) + require.NoError(t, err, "simulation setup failed") + + defer func() { + require.NoError(t, newDB.Close()) + require.NoError(t, os.RemoveAll(newDir)) + }() + + newApp := New(log.NewNopLogger(), newDB, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID)) + require.Equal(t, Name, newApp.Name()) + + _, err = newApp.InitChain(&abci.RequestInitChain{ + AppStateBytes: exported.AppState, + ChainId: SimAppChainID, + }) + require.NoError(t, err) + + _, _, err = simulation.SimulateFromSeed( + t, + os.Stdout, + newApp.BaseApp, + simtestutil.AppStateFn(bApp.AppCodec(), bApp.SimulationManager(), bApp.DefaultGenesis()), + simulationtypes.RandomAccounts, + simtestutil.BuildSimulationOperations(newApp, newApp.AppCodec(), config, newApp.TxConfig()), + BlockedAddresses(), + config, + bApp.AppCodec(), + ) + require.NoError(t, err) +} + +func TestAppStateDeterminism(t *testing.T) { + if !simcli.FlagEnabledValue { + t.Skip("skipping application simulation") + } + + config := simcli.NewConfigFromFlags() + config.InitialBlockHeight = 1 + config.ExportParamsPath = "" + config.OnOperation = true + config.AllInvariants = true + config.ChainID = SimAppChainID + + numSeeds := 3 + numTimesToRunPerSeed := 3 // This used to be set to 5, but we've temporarily reduced it to 3 for the sake of faster CI. 
+ appHashList := make([]json.RawMessage, numTimesToRunPerSeed) + + // We will be overriding the random seed and just run a single simulation on the provided seed value + if config.Seed != simcli.DefaultSeedValue { + numSeeds = 1 + } + + appOptions := viper.New() + if FlagEnableStreamingValue { + m := make(map[string]interface{}) + m["streaming.abci.keys"] = []string{"*"} + m["streaming.abci.plugin"] = "abci_v1" + m["streaming.abci.stop-node-on-err"] = true + for key, value := range m { + appOptions.SetDefault(key, value) + } + } + appOptions.SetDefault(flags.FlagHome, DefaultNodeHome) + if simcli.FlagVerboseValue { + appOptions.SetDefault(flags.FlagLogLevel, "debug") + } + + for i := 0; i < numSeeds; i++ { + if config.Seed == simcli.DefaultSeedValue { + config.Seed = rand.Int63() + } + fmt.Println("config.Seed: ", config.Seed) + + for j := 0; j < numTimesToRunPerSeed; j++ { + var logger log.Logger + if simcli.FlagVerboseValue { + logger = log.NewTestLogger(t) + } else { + logger = log.NewNopLogger() + } + + db := dbm.NewMemDB() + bApp := New( + logger, + db, + nil, + true, + appOptions, + interBlockCacheOpt(), + baseapp.SetChainID(SimAppChainID), + ) + + fmt.Printf( + "running non-determinism simulation; seed %d: %d/%d, attempt: %d/%d\n", + config.Seed, i+1, numSeeds, j+1, numTimesToRunPerSeed, + ) + + _, _, err := simulation.SimulateFromSeed( + t, + os.Stdout, + bApp.BaseApp, + simtestutil.AppStateFn( + bApp.AppCodec(), + bApp.SimulationManager(), + bApp.DefaultGenesis(), + ), + simulationtypes.RandomAccounts, + simtestutil.BuildSimulationOperations(bApp, bApp.AppCodec(), config, bApp.TxConfig()), + BlockedAddresses(), + config, + bApp.AppCodec(), + ) + require.NoError(t, err) + + if config.Commit { + simtestutil.PrintStats(db) + } + + appHash := bApp.LastCommitID().Hash + appHashList[j] = appHash + + if j != 0 { + require.Equal( + t, string(appHashList[0]), string(appHashList[j]), + "non-determinism in seed %d: %d/%d, attempt: %d/%d\n", config.Seed, i+1, 
numSeeds, j+1, numTimesToRunPerSeed, + ) + } + } + } +} diff --git a/ignite/templates/app/files/buf.lock b/ignite/templates/app/files/buf.lock new file mode 100644 index 0000000..ff558d6 --- /dev/null +++ b/ignite/templates/app/files/buf.lock @@ -0,0 +1,24 @@ +# Generated by buf. DO NOT EDIT. +version: v2 +deps: + - name: buf.build/cosmos/cosmos-proto + commit: 04467658e59e44bbb22fe568206e1f70 + digest: b5:8058c0aadbee8c9af67a9cefe86492c6c0b0bd5b4526b0ec820507b91fc9b0b5efbebca97331854576d2d279b0b3f5ed6a7abb0640cb640c4186532239c48fc4 + - name: buf.build/cosmos/cosmos-sdk + commit: 05419252bcc241ea8023acf1ed4cadc5 + digest: b5:bec474e46596bf183fa85eb5c33106d432992ae696785c1c5fc1ce2a8f8819cab80c89d0b11557f3e916fd65133451fca4471a05f75ed163c688a8964ecb97b8 + - name: buf.build/cosmos/gogo-proto + commit: 88ef6483f90f478fb938c37dde52ece3 + digest: b5:f0c69202c9bca9672dc72a9737ea9bc83744daaed2b3da77e3a95b0e53b86dee76b5a7405b993181d6c863fd64afaca0976a302f700d6c4912eb1692a1782c0a + - name: buf.build/cosmos/ics23 + commit: dc427cb4519143d8996361c045a29ad7 + digest: b5:8693e72e230bfaf58a88a47a4093ba99f6252c1957a45582567959b38a8563e2abd11443372283d75f4f2306a7e3cc9bf63604d284a016c11966fca4b74b7a28 + - name: buf.build/googleapis/googleapis + commit: acd896313c55464b993332136ded1b6e + digest: b5:025d83e25193feb8dac5e5576113c8737006218b3b09fbc0d0ff652614da5424b336edb15bea139eb90d14eba656774a979d1fbdae81cbab2013932b84b98f53 + - name: buf.build/protocolbuffers/wellknowntypes + commit: 384f8deef6ae4110b57d996aad0032c4 + digest: b5:8b023f5c2a872028738eef7ca2323d17379d05332f95fad1d3db3d356ad29f0644bf5868a14069f350bb967a7c8f4bd010228d2deb482e05f56ffd1a5bfc79b3 + - name: buf.build/tendermint/tendermint + commit: 33ed361a90514289beabf3189e1d7665 + digest: b5:72e7b167e6a474c8ed7763e3fc811d756d48dd0e70d897c1d3b661656aa4ad3cf2adabadf1fa9a8fd644567678a1acd27bec139895b0469258cfa4c3ebae7aab diff --git a/ignite/templates/app/files/buf.yaml.plush b/ignite/templates/app/files/buf.yaml.plush new 
file mode 100644 index 0000000..b0fe9d4 --- /dev/null +++ b/ignite/templates/app/files/buf.yaml.plush @@ -0,0 +1,29 @@ +version: v2 +modules: + - path: <%= ProtoDir %> +deps: + - buf.build/cosmos/cosmos-proto + - buf.build/cosmos/cosmos-sdk + - buf.build/cosmos/gogo-proto + - buf.build/cosmos/ics23 + - buf.build/googleapis/googleapis + - buf.build/protocolbuffers/wellknowntypes +lint: + use: + - COMMENTS + - STANDARD + - FILE_LOWER_SNAKE_CASE + except: + - COMMENT_FIELD + - RPC_REQUEST_STANDARD_NAME + - RPC_RESPONSE_STANDARD_NAME + - SERVICE_SUFFIX + ignore: + - proto/tendermint + disallow_comment_ignores: true +breaking: + use: + - FILE + except: + - EXTENSION_NO_DELETE + - FIELD_SAME_DEFAULT diff --git a/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/cmd/commands.go.plush b/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/cmd/commands.go.plush new file mode 100644 index 0000000..b89f819 --- /dev/null +++ b/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/cmd/commands.go.plush @@ -0,0 +1,163 @@ +package cmd + +import ( + "errors" + "io" + + "github.com/spf13/cobra" + "github.com/spf13/viper" + + "cosmossdk.io/log" + confixcmd "cosmossdk.io/tools/confix/cmd" + dbm "github.com/cosmos/cosmos-db" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/debug" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/client/keys" + "github.com/cosmos/cosmos-sdk/client/pruning" + "github.com/cosmos/cosmos-sdk/client/rpc" + "github.com/cosmos/cosmos-sdk/client/snapshot" + "github.com/cosmos/cosmos-sdk/server" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + "github.com/cosmos/cosmos-sdk/types/module" + authcmd "github.com/cosmos/cosmos-sdk/x/auth/client/cli" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + genutilcli "github.com/cosmos/cosmos-sdk/x/genutil/client/cli" + + "<%= ModulePath %>/app" +) + +func initRootCmd( + rootCmd *cobra.Command, + txConfig client.TxConfig, + basicManager 
module.BasicManager, +) { + rootCmd.AddCommand( + genutilcli.InitCmd(basicManager, app.DefaultNodeHome), + NewInPlaceTestnetCmd(), + NewTestnetMultiNodeCmd(basicManager, banktypes.GenesisBalancesIterator{}), + debug.Cmd(), + confixcmd.ConfigCommand(), + pruning.Cmd(newApp, app.DefaultNodeHome), + snapshot.Cmd(newApp), + ) + + server.AddCommandsWithStartCmdOptions(rootCmd, app.DefaultNodeHome, newApp, appExport, server.StartCmdOptions{ + AddFlags: addModuleInitFlags, + }) + + // add keybase, auxiliary RPC, query, genesis, and tx child commands + rootCmd.AddCommand( + server.StatusCommand(), + genutilcli.Commands(txConfig, basicManager, app.DefaultNodeHome), + queryCommand(), + txCommand(), + keys.Commands(), + ) +} + +// addModuleInitFlags adds more flags to the start command. +func addModuleInitFlags(startCmd *cobra.Command) { +} + +func queryCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "query", + Aliases: []string{"q"}, + Short: "Querying subcommands", + DisableFlagParsing: false, + SuggestionsMinimumDistance: 2, + RunE: client.ValidateCmd, + } + + cmd.AddCommand( + rpc.WaitTxCmd(), + rpc.ValidatorCommand(), + server.QueryBlockCmd(), + authcmd.QueryTxsByEventsCmd(), + server.QueryBlocksCmd(), + authcmd.QueryTxCmd(), + server.QueryBlockResultsCmd(), + ) + + return cmd +} + +func txCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "tx", + Short: "Transactions subcommands", + DisableFlagParsing: false, + SuggestionsMinimumDistance: 2, + RunE: client.ValidateCmd, + } + + cmd.AddCommand( + authcmd.GetSignCommand(), + authcmd.GetSignBatchCommand(), + authcmd.GetMultiSignCommand(), + authcmd.GetMultiSignBatchCmd(), + authcmd.GetValidateSignaturesCommand(), + flags.LineBreak, + authcmd.GetBroadcastCommand(), + authcmd.GetEncodeCommand(), + authcmd.GetDecodeCommand(), + authcmd.GetSimulateCmd(), + ) + + return cmd +} + +// newApp creates the application +func newApp( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + appOpts 
servertypes.AppOptions, +) servertypes.Application { + baseappOptions := server.DefaultBaseappOptions(appOpts) + + return app.New( + logger, db, traceStore, true, + appOpts, + baseappOptions..., + ) +} + +// appExport creates a new app (optionally at a given height) and exports state. +func appExport( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + height int64, + forZeroHeight bool, + jailAllowedAddrs []string, + appOpts servertypes.AppOptions, + modulesToExport []string, +) (servertypes.ExportedApp, error) { + var bApp *app.App + + // this check is necessary as we use the flag in x/upgrade. + // we can exit more gracefully by checking the flag here. + homePath, ok := appOpts.Get(flags.FlagHome).(string) + if !ok || homePath == "" { + return servertypes.ExportedApp{}, errors.New("application home not set") + } + + viperAppOpts, ok := appOpts.(*viper.Viper) + if !ok { + return servertypes.ExportedApp{}, errors.New("appOpts is not viper.Viper") + } + + appOpts = viperAppOpts + if height != -1 { + bApp = app.New(logger, db, traceStore, false, appOpts) + if err := bApp.LoadHeight(height); err != nil { + return servertypes.ExportedApp{}, err + } + } else { + bApp = app.New(logger, db, traceStore, true, appOpts) + } + + return bApp.ExportAppStateAndValidators(forZeroHeight, jailAllowedAddrs, modulesToExport) +} diff --git a/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/cmd/config.go.plush b/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/cmd/config.go.plush new file mode 100644 index 0000000..fea778c --- /dev/null +++ b/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/cmd/config.go.plush @@ -0,0 +1,61 @@ +package cmd + +import ( + cmtcfg "github.com/cometbft/cometbft/config" + serverconfig "github.com/cosmos/cosmos-sdk/server/config" +) + +// initCometBFTConfig helps to override default CometBFT Config values. +// return cmtcfg.DefaultConfig if no custom configuration is required for the application. 
+func initCometBFTConfig() *cmtcfg.Config { + cfg := cmtcfg.DefaultConfig() + + // these values put a higher strain on node memory + // cfg.P2P.MaxNumInboundPeers = 100 + // cfg.P2P.MaxNumOutboundPeers = 40 + + return cfg +} + +// initAppConfig helps to override default appConfig template and configs. +// return "", nil if no custom configuration is required for the application. +func initAppConfig() (string, interface{}) { + // The following code snippet is just for reference. + type CustomAppConfig struct { + serverconfig.Config `mapstructure:",squash"` + } + + // Optionally allow the chain developer to overwrite the SDK's default + // server config. + srvCfg := serverconfig.DefaultConfig() + // The SDK's default minimum gas price is set to "" (empty value) inside + // app.toml. If left empty by validators, the node will halt on startup. + // However, the chain developer can set a default app.toml value for their + // validators here. + // + // In summary: + // - if you leave srvCfg.MinGasPrices = "", all validators MUST tweak their + // own app.toml config, + // - if you set srvCfg.MinGasPrices non-empty, validators CAN tweak their + // own app.toml to override, or use this default value. + // + // In tests, we set the min gas prices to 0. 
+ // srvCfg.MinGasPrices = "0stake" + + customAppConfig := CustomAppConfig{ + Config: *srvCfg, + } + + customAppTemplate := serverconfig.DefaultConfigTemplate + // Edit the default template file + // + // customAppTemplate := serverconfig.DefaultConfigTemplate + ` + // [wasm] + // # This is the maximum sdk gas (wasm and storage) that we allow for any x/wasm "smart" queries + // query_gas_limit = 300000 + // # This is the number of wasm vm instances we keep cached in memory for speed-up + // # Warning: this is currently unstable and may lead to crashes, best to keep for 0 unless testing locally + // lru_size = 0` + + return customAppTemplate, customAppConfig +} diff --git a/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/cmd/root.go.plush b/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/cmd/root.go.plush new file mode 100644 index 0000000..48d0158 --- /dev/null +++ b/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/cmd/root.go.plush @@ -0,0 +1,124 @@ +package cmd + +import ( + "os" + + "cosmossdk.io/client/v2/autocli" + "cosmossdk.io/depinject" + "cosmossdk.io/log" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/config" + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/server" + "github.com/cosmos/cosmos-sdk/types/module" + "github.com/cosmos/cosmos-sdk/x/auth/tx" + authtxconfig "github.com/cosmos/cosmos-sdk/x/auth/tx/config" + "github.com/cosmos/cosmos-sdk/x/auth/types" + "github.com/spf13/cobra" + + "<%= ModulePath %>/app" +) + +// NewRootCmd creates a new root command for <%= BinaryNamePrefix %>d. It is called once in the main function. 
+func NewRootCmd() *cobra.Command { + var ( + autoCliOpts autocli.AppOptions + moduleBasicManager module.BasicManager + clientCtx client.Context + ) + + if err := depinject.Inject( + depinject.Configs(app.AppConfig(), + depinject.Supply(log.NewNopLogger()), + depinject.Provide( + ProvideClientContext, + ), + ), + &autoCliOpts, + &moduleBasicManager, + &clientCtx, + ); err != nil { + panic(err) + } + + rootCmd := &cobra.Command{ + Use: app.Name + "d", + Short: "<%= AppName %> node", + SilenceErrors: true, + PersistentPreRunE: func(cmd *cobra.Command, _ []string) error { + // set the default command outputs + cmd.SetOut(cmd.OutOrStdout()) + cmd.SetErr(cmd.ErrOrStderr()) + + clientCtx = clientCtx.WithCmdContext(cmd.Context()).WithViper(app.Name) + clientCtx, err := client.ReadPersistentCommandFlags(clientCtx, cmd.Flags()) + if err != nil { + return err + } + + clientCtx, err = config.ReadFromClientConfig(clientCtx) + if err != nil { + return err + } + + if err := client.SetCmdClientContextHandler(clientCtx, cmd); err != nil { + return err + } + + customAppTemplate, customAppConfig := initAppConfig() + customCMTConfig := initCometBFTConfig() + + return server.InterceptConfigsPreRunHandler(cmd, customAppTemplate, customAppConfig, customCMTConfig) + }, + } + +<%= if (!IsChainMinimal) { %> + // Since the IBC modules don't support dependency injection, we need to + // manually register the modules on the client side. + // This needs to be removed after IBC supports App Wiring. 
+ ibcModules := app.RegisterIBC(clientCtx.Codec) + for name, mod := range ibcModules { + moduleBasicManager[name] = module.CoreAppModuleBasicAdaptor(name, mod) + autoCliOpts.Modules[name] = mod + } +<% } %> + initRootCmd(rootCmd, clientCtx.TxConfig, moduleBasicManager) + + if err := autoCliOpts.EnhanceRootCommand(rootCmd); err != nil { + panic(err) + } + + return rootCmd +} + +// ProvideClientContext creates and provides a fully initialized client.Context, +// allowing it to be used for dependency injection and CLI operations. +func ProvideClientContext( + appCodec codec.Codec, + interfaceRegistry codectypes.InterfaceRegistry, + txConfigOpts tx.ConfigOptions, + legacyAmino *codec.LegacyAmino, +) client.Context { + clientCtx := client.Context{}. + WithCodec(appCodec). + WithInterfaceRegistry(interfaceRegistry). + WithLegacyAmino(legacyAmino). + WithInput(os.Stdin). + WithAccountRetriever(types.AccountRetriever{}). + WithHomeDir(app.DefaultNodeHome). + WithViper(app.Name) // env variable prefix + + // Read the config again to overwrite the default values with the values from the config file + clientCtx, _ = config.ReadFromClientConfig(clientCtx) + + // textual is enabled by default, we need to re-create the tx config grpc instead of bank keeper. 
+ txConfigOpts.TextualCoinMetadataQueryFn = authtxconfig.NewGRPCCoinMetadataQueryFn(clientCtx) + txConfig, err := tx.NewTxConfigWithOptions(clientCtx.Codec, txConfigOpts) + if err != nil { + panic(err) + } + clientCtx = clientCtx.WithTxConfig(txConfig) + + return clientCtx +} diff --git a/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/cmd/testnet.go.plush b/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/cmd/testnet.go.plush new file mode 100644 index 0000000..e6c2115 --- /dev/null +++ b/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/cmd/testnet.go.plush @@ -0,0 +1,244 @@ +package cmd + +import ( + "errors" + "fmt" + "io" + "os" + "strings" + + "cosmossdk.io/log" + "cosmossdk.io/math" + storetypes "cosmossdk.io/store/types" + "github.com/cometbft/cometbft/crypto" + "github.com/cometbft/cometbft/libs/bytes" + cmtproto "github.com/cometbft/cometbft/proto/tendermint/types" + dbm "github.com/cosmos/cosmos-db" + "github.com/cosmos/cosmos-sdk/client/flags" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" + "github.com/cosmos/cosmos-sdk/server" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + sdk "github.com/cosmos/cosmos-sdk/types" + distrtypes "github.com/cosmos/cosmos-sdk/x/distribution/types" + minttypes "github.com/cosmos/cosmos-sdk/x/mint/types" + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + "github.com/spf13/cast" + "github.com/spf13/cobra" + + "<%= ModulePath %>/app" +) + +const valVotingPower int64 = 900000000000000 + +var flagAccountsToFund = "accounts-to-fund" + +type valArgs struct { + newValAddr bytes.HexBytes + newOperatorAddress string + newValPubKey crypto.PubKey + accountsToFund []string + upgradeToTrigger string + homeDir string +} + +func NewInPlaceTestnetCmd() *cobra.Command { + cmd := server.InPlaceTestnetCreator(newTestnetApp) + cmd.Short = "Updates chain's application and consensus 
state with provided validator info and starts the node" + cmd.Long = `The test command modifies both application and consensus stores within a local mainnet node and starts the node, +with the aim of facilitating testing procedures. This command replaces existing validator data with updated information, +thereby removing the old validator set and introducing a new set suitable for local testing purposes. By altering the state extracted from the mainnet node, +it enables developers to configure their local environments to reflect mainnet conditions more accurately.` + + cmd.Example = fmt.Sprintf(`%sd in-place-testnet testing-1 cosmosvaloper1w7f3xx7e75p4l7qdym5msqem9rd4dyc4mq79dm --home $HOME/.%sd/validator1 --validator-privkey=6dq+/KHNvyiw2TToCgOpUpQKIzrLs69Rb8Az39xvmxPHNoPxY1Cil8FY+4DhT9YwD6s0tFABMlLcpaylzKKBOg== --accounts-to-fund="cosmos1f7twgcq4ypzg7y24wuywy06xmdet8pc4473tnq,cosmos1qvuhm5m644660nd8377d6l7yz9e9hhm9evmx3x"`, "<%= ModulePath %>", "<%= ModulePath %>") + + cmd.Flags().String(flagAccountsToFund, "", "Comma-separated list of account addresses that will be funded for testing purposes") + return cmd +} + +// newTestnetApp starts by running the normal newApp method. From there, the app interface returned is modified in order +// for a testnet to be created from the provided app. 
+func newTestnetApp(logger log.Logger, db dbm.DB, traceStore io.Writer, appOpts servertypes.AppOptions) servertypes.Application { + // Create an app and type cast to an App + newApp := newApp(logger, db, traceStore, appOpts) + testApp, ok := newApp.(*app.App) + if !ok { + panic("app created from newApp is not of type App") + } + + // Get command args + args, err := getCommandArgs(appOpts) + if err != nil { + panic(err) + } + + return initAppForTestnet(testApp, args) +} + +func initAppForTestnet(app *app.App, args valArgs) *app.App { + // Required Changes: + // + ctx := app.App.NewUncachedContext(true, cmtproto.Header{}) + + pubkey := &ed25519.PubKey{Key: args.newValPubKey.Bytes()} + pubkeyAny, err := codectypes.NewAnyWithValue(pubkey) + handleErr(err) + + // STAKING + // + + // Create Validator struct for our new validator. + newVal := stakingtypes.Validator{ + OperatorAddress: args.newOperatorAddress, + ConsensusPubkey: pubkeyAny, + Jailed: false, + Status: stakingtypes.Bonded, + Tokens: math.NewInt(valVotingPower), + DelegatorShares: math.LegacyMustNewDecFromStr("10000000"), + Description: stakingtypes.Description{ + Moniker: "Testnet Validator", + }, + Commission: stakingtypes.Commission{ + CommissionRates: stakingtypes.CommissionRates{ + Rate: math.LegacyMustNewDecFromStr("0.05"), + MaxRate: math.LegacyMustNewDecFromStr("0.1"), + MaxChangeRate: math.LegacyMustNewDecFromStr("0.05"), + }, + }, + MinSelfDelegation: math.OneInt(), + } + + validator, err := app.StakingKeeper.ValidatorAddressCodec().StringToBytes(newVal.GetOperator()) + handleErr(err) + + // Remove all validators from power store + stakingKey := app.GetKey(stakingtypes.ModuleName) + stakingStore := ctx.KVStore(stakingKey) + iterator, err := app.StakingKeeper.ValidatorsPowerStoreIterator(ctx) + handleErr(err) + + for ; iterator.Valid(); iterator.Next() { + stakingStore.Delete(iterator.Key()) + } + iterator.Close() + + // Remove all validators from last validators store + iterator, err = 
app.StakingKeeper.LastValidatorsIterator(ctx) + handleErr(err) + + for ; iterator.Valid(); iterator.Next() { + stakingStore.Delete(iterator.Key()) + } + iterator.Close() + + // Remove all validators from validators store + iterator = stakingStore.Iterator(stakingtypes.ValidatorsKey, storetypes.PrefixEndBytes(stakingtypes.ValidatorsKey)) + for ; iterator.Valid(); iterator.Next() { + stakingStore.Delete(iterator.Key()) + } + iterator.Close() + + // Remove all validators from unbonding queue + iterator = stakingStore.Iterator(stakingtypes.ValidatorQueueKey, storetypes.PrefixEndBytes(stakingtypes.ValidatorQueueKey)) + for ; iterator.Valid(); iterator.Next() { + stakingStore.Delete(iterator.Key()) + } + iterator.Close() + + // Add our validator to power and last validators store + handleErr(app.StakingKeeper.SetValidator(ctx, newVal)) + handleErr(app.StakingKeeper.SetValidatorByConsAddr(ctx, newVal)) + handleErr(app.StakingKeeper.SetValidatorByPowerIndex(ctx, newVal)) + handleErr(app.StakingKeeper.SetLastValidatorPower(ctx, validator, 0)) + handleErr(app.StakingKeeper.Hooks().AfterValidatorCreated(ctx, validator)) + + // DISTRIBUTION + // + + // Initialize records for this validator across all distribution stores + handleErr(app.DistrKeeper.SetValidatorHistoricalRewards(ctx, validator, 0, distrtypes.NewValidatorHistoricalRewards(sdk.DecCoins{}, 1))) + handleErr(app.DistrKeeper.SetValidatorCurrentRewards(ctx, validator, distrtypes.NewValidatorCurrentRewards(sdk.DecCoins{}, 1))) + handleErr(app.DistrKeeper.SetValidatorAccumulatedCommission(ctx, validator, distrtypes.InitialValidatorAccumulatedCommission())) + handleErr(app.DistrKeeper.SetValidatorOutstandingRewards(ctx, validator, distrtypes.ValidatorOutstandingRewards{Rewards: sdk.DecCoins{}})) + +<%= if (!IsChainMinimal) { %> + // SLASHING + // + + // Set validator signing info for our new validator. 
+ newConsAddr := sdk.ConsAddress(args.newValAddr.Bytes()) + newValidatorSigningInfo := slashingtypes.ValidatorSigningInfo{ + Address: newConsAddr.String(), + StartHeight: app.App.LastBlockHeight() - 1, + Tombstoned: false, + } + _ = app.SlashingKeeper.SetValidatorSigningInfo(ctx, newConsAddr, newValidatorSigningInfo) +<% } %> + + // BANK + // + bondDenom, err := app.StakingKeeper.BondDenom(ctx) + handleErr(err) + + defaultCoins := sdk.NewCoins(sdk.NewInt64Coin(bondDenom, 1000000000)) + + // Fund local accounts + for _, accountStr := range args.accountsToFund { + handleErr(app.BankKeeper.MintCoins(ctx, minttypes.ModuleName, defaultCoins)) + + account, err := app.AuthKeeper.AddressCodec().StringToBytes(accountStr) + handleErr(err) + + handleErr(app.BankKeeper.SendCoinsFromModuleToAccount(ctx, minttypes.ModuleName, account, defaultCoins)) + } + + return app +} + +// parse the input flags and returns valArgs +func getCommandArgs(appOpts servertypes.AppOptions) (valArgs, error) { + args := valArgs{} + + newValAddr, ok := appOpts.Get(server.KeyNewValAddr).(bytes.HexBytes) + if !ok { + return args, errors.New("newValAddr is not of type bytes.HexBytes") + } + args.newValAddr = newValAddr + newValPubKey, ok := appOpts.Get(server.KeyUserPubKey).(crypto.PubKey) + if !ok { + return args, errors.New("newValPubKey is not of type crypto.PubKey") + } + args.newValPubKey = newValPubKey + newOperatorAddress, ok := appOpts.Get(server.KeyNewOpAddr).(string) + if !ok { + return args, errors.New("newOperatorAddress is not of type string") + } + args.newOperatorAddress = newOperatorAddress + upgradeToTrigger, ok := appOpts.Get(server.KeyTriggerTestnetUpgrade).(string) + if !ok { + return args, errors.New("upgradeToTrigger is not of type string") + } + args.upgradeToTrigger = upgradeToTrigger + + // parsing and set accounts to fund + accountsString := cast.ToString(appOpts.Get(flagAccountsToFund)) + args.accountsToFund = append(args.accountsToFund, strings.Split(accountsString, ",")...) 
+ + // home dir + homeDir := cast.ToString(appOpts.Get(flags.FlagHome)) + if homeDir == "" { + return args, errors.New("invalid home dir") + } + args.homeDir = homeDir + + return args, nil +} + +// handleErr prints the error and exits the program if the error is not nil +func handleErr(err error) { + if err != nil { + fmt.Fprintln(os.Stderr, err.Error()) + os.Exit(1) + } +} diff --git a/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/cmd/testnet_multi_node.go.plush b/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/cmd/testnet_multi_node.go.plush new file mode 100644 index 0000000..eeca0d5 --- /dev/null +++ b/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/cmd/testnet_multi_node.go.plush @@ -0,0 +1,540 @@ +package cmd + +import ( + "bufio" + "encoding/json" + "fmt" + "io" + "math/rand" + "os" + "path/filepath" + "strconv" + "strings" + "time" + + cmtconfig "github.com/cometbft/cometbft/config" + types "github.com/cometbft/cometbft/types" + tmtime "github.com/cometbft/cometbft/types/time" + "github.com/spf13/cobra" + "github.com/spf13/pflag" + + "cosmossdk.io/math" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/client/tx" + "github.com/cosmos/cosmos-sdk/crypto/hd" + "github.com/cosmos/cosmos-sdk/crypto/keyring" + cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" + "github.com/cosmos/cosmos-sdk/server" + srvconfig "github.com/cosmos/cosmos-sdk/server/config" + "github.com/cosmos/cosmos-sdk/testutil" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/cosmos/cosmos-sdk/x/genutil" + genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + + runtime "github.com/cosmos/cosmos-sdk/runtime" +) + +var ( + flagNodeDirPrefix = "node-dir-prefix" + flagPorts = 
"list-ports" + flagNumValidators = "v" + flagOutputDir = "output-dir" + flagValidatorsStakeAmount = "validators-stake-amount" + flagStartingIPAddress = "starting-ip-address" +) + +const nodeDirPerm = 0o755 + +type initArgs struct { + algo string + chainID string + keyringBackend string + minGasPrices string + nodeDirPrefix string + numValidators int + outputDir string + startingIPAddress string + validatorsStakesAmount map[int]sdk.Coin + ports map[int]string +} + +// NewTestnetMultiNodeCmd returns a cmd to initialize all files for tendermint testnet and application +func NewTestnetMultiNodeCmd(mbm module.BasicManager, genBalIterator banktypes.GenesisBalancesIterator) *cobra.Command { + cmd := &cobra.Command{ + Use: "multi-node", + Short: "Initialize config directories & files for a multi-validator testnet running locally via separate processes (e.g. Docker Compose or similar)", + Long: `multi-node will setup "v" number of directories and populate each with +necessary files (private validator, genesis, config, etc.) for running "v" validator nodes. + +Booting up a network with these validator folders is intended to be used with Docker Compose, +or a similar setup where each node has a manually configurable IP address. + +Note, strict routability for addresses is turned off in the config file. 
+ +Example: + <%= AppName %>d multi-node --v 4 --output-dir ./.testnets --validators-stake-amount 1000000,200000,300000,400000 --list-ports 47222,50434,52851,44210 + `, + RunE: func(cmd *cobra.Command, _ []string) error { + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } + + serverCtx := server.GetServerContextFromCmd(cmd) + config := serverCtx.Config + + args := initArgs{} + args.outputDir, _ = cmd.Flags().GetString(flagOutputDir) + args.keyringBackend, _ = cmd.Flags().GetString(flags.FlagKeyringBackend) + args.chainID, _ = cmd.Flags().GetString(flags.FlagChainID) + args.minGasPrices, _ = cmd.Flags().GetString(server.FlagMinGasPrices) + args.nodeDirPrefix, _ = cmd.Flags().GetString(flagNodeDirPrefix) + args.startingIPAddress, _ = cmd.Flags().GetString(flagStartingIPAddress) + args.numValidators, _ = cmd.Flags().GetInt(flagNumValidators) + args.algo, _ = cmd.Flags().GetString(flags.FlagKeyType) + + args.ports = map[int]string{} + args.validatorsStakesAmount = make(map[int]sdk.Coin) + top := 0 + // If the flag string is invalid, the amount will default to 100000000. + if s, err := cmd.Flags().GetString(flagValidatorsStakeAmount); err == nil { + for _, amount := range strings.Split(s, ",") { + a, ok := math.NewIntFromString(amount) + if !ok { + continue + } + args.validatorsStakesAmount[top] = sdk.NewCoin(sdk.DefaultBondDenom, a) + top += 1 + } + + } + top = 0 + if s, err := cmd.Flags().GetString(flagPorts); err == nil { + if s == "" { + for i := 0; i < args.numValidators; i++ { + args.ports[top] = strconv.Itoa(26657 - 3*i) + top += 1 + } + } else { + for _, port := range strings.Split(s, ",") { + args.ports[top] = port + top += 1 + } + } + } + + return initTestnetFiles(clientCtx, cmd, config, mbm, genBalIterator, args) + }, + } + + addTestnetFlagsToCmd(cmd) + cmd.Flags().String(flagPorts, "", "Ports of nodes (default 26657,26654,26651,26648.. 
)") + cmd.Flags().String(flagNodeDirPrefix, "validator", "Prefix the directory name for each node with (node results in node0, node1, ...)") + cmd.Flags().String(flagValidatorsStakeAmount, "100000000,100000000,100000000,100000000", "Amount of stake for each validator") + cmd.Flags().String(flagStartingIPAddress, "localhost", "Starting IP address (192.168.0.1 results in persistent peers list ID0@192.168.0.1:46656, ID1@192.168.0.2:46656, ...)") + cmd.Flags().String(flags.FlagKeyringBackend, "test", "Select keyring's backend (os|file|test)") + + return cmd +} + +func addTestnetFlagsToCmd(cmd *cobra.Command) { + cmd.Flags().Int(flagNumValidators, 4, "Number of validators to initialize the testnet with") + cmd.Flags().StringP(flagOutputDir, "o", "./.testnets", "Directory to store initialization data for the testnet") + cmd.Flags().String(flags.FlagChainID, "", "genesis file chain-id, if left blank will be randomly created") + cmd.Flags().String(server.FlagMinGasPrices, fmt.Sprintf("0.0001%s", sdk.DefaultBondDenom), "Minimum gas prices to accept for transactions; All fees in a tx must meet this minimum (e.g. 
0.01photino,0.001stake)") + cmd.Flags().String(flags.FlagKeyType, string(hd.Secp256k1Type), "Key signing algorithm to generate keys for") + + // support old flags name for backwards compatibility + cmd.Flags().SetNormalizeFunc(func(f *pflag.FlagSet, name string) pflag.NormalizedName { + if name == "algo" { + name = flags.FlagKeyType + } + + return pflag.NormalizedName(name) + }) +} + +// initTestnetFiles initializes testnet files for a testnet to be run in a separate process +func initTestnetFiles( + clientCtx client.Context, + cmd *cobra.Command, + nodeConfig *cmtconfig.Config, + mbm module.BasicManager, + genBalIterator banktypes.GenesisBalancesIterator, + args initArgs, +) error { + if args.chainID == "" { + args.chainID = "chain-" + generateRandomString(6) + } + nodeIDs := make([]string, args.numValidators) + valPubKeys := make([]cryptotypes.PubKey, args.numValidators) + + appConfig := srvconfig.DefaultConfig() + appConfig.MinGasPrices = args.minGasPrices + appConfig.API.Enable = false + appConfig.BaseConfig.MinGasPrices = "0.0001" + sdk.DefaultBondDenom + appConfig.Telemetry.EnableHostnameLabel = false + appConfig.Telemetry.Enabled = false + appConfig.Telemetry.PrometheusRetentionTime = 0 + + var ( + genAccounts []authtypes.GenesisAccount + genBalances []banktypes.Balance + genFiles []string + persistentPeers string + gentxsFiles []string + ) + + inBuf := bufio.NewReader(cmd.InOrStdin()) + for i := 0; i < args.numValidators; i++ { + nodeDirName := fmt.Sprintf("%s%d", args.nodeDirPrefix, i) + nodeDir := filepath.Join(args.outputDir, nodeDirName) + gentxsDir := filepath.Join(args.outputDir, nodeDirName, "config", "gentx") + + nodeConfig.SetRoot(nodeDir) + nodeConfig.Moniker = nodeDirName + nodeConfig.RPC.ListenAddress = "tcp://0.0.0.0:" + args.ports[i] + + var err error + if err := os.MkdirAll(filepath.Join(nodeDir, "config"), nodeDirPerm); err != nil { + _ = os.RemoveAll(args.outputDir) + return err + } + + nodeIDs[i], valPubKeys[i], err = 
genutil.InitializeNodeValidatorFiles(nodeConfig) + if err != nil { + _ = os.RemoveAll(args.outputDir) + return err + } + + memo := fmt.Sprintf("%s@%s:"+strconv.Itoa(26656-3*i), nodeIDs[i], args.startingIPAddress) + + if persistentPeers == "" { + persistentPeers = memo + } else { + persistentPeers = persistentPeers + "," + memo + } + + genFiles = append(genFiles, nodeConfig.GenesisFile()) + + kb, err := keyring.New(sdk.KeyringServiceName(), args.keyringBackend, nodeDir, inBuf, clientCtx.Codec) + if err != nil { + return err + } + + keyringAlgos, _ := kb.SupportedAlgorithms() + algo, err := keyring.NewSigningAlgoFromString(args.algo, keyringAlgos) + if err != nil { + return err + } + + addr, secret, err := testutil.GenerateSaveCoinKey(kb, nodeDirName, "", true, algo) + if err != nil { + _ = os.RemoveAll(args.outputDir) + return err + } + + info := map[string]string{"secret": secret} + + cliPrint, err := json.Marshal(info) + if err != nil { + return err + } + + // save private key seed words + file := filepath.Join(nodeDir, fmt.Sprintf("%v.json", "key_seed")) + if err := writeFile(file, nodeDir, cliPrint); err != nil { + return err + } + + accTokens := sdk.TokensFromConsensusPower(1000, sdk.DefaultPowerReduction) + accStakingTokens := sdk.TokensFromConsensusPower(500, sdk.DefaultPowerReduction) + coins := sdk.Coins{ + sdk.NewCoin("testtoken", accTokens), + sdk.NewCoin(sdk.DefaultBondDenom, accStakingTokens), + } + + genBalances = append(genBalances, banktypes.Balance{Address: addr.String(), Coins: coins.Sort()}) + genAccounts = append(genAccounts, authtypes.NewBaseAccount(addr, nil, 0, 0)) + + var valTokens sdk.Coin + valTokens, ok := args.validatorsStakesAmount[i] + if !ok { + valTokens = sdk.NewCoin(sdk.DefaultBondDenom, sdk.TokensFromConsensusPower(100, sdk.DefaultPowerReduction)) + } + createValMsg, err := stakingtypes.NewMsgCreateValidator( + sdk.ValAddress(addr).String(), + valPubKeys[i], + valTokens, + stakingtypes.NewDescription(nodeDirName, "", "", "", ""), + 
stakingtypes.NewCommissionRates(math.LegacyOneDec(), math.LegacyOneDec(), math.LegacyOneDec()), + math.OneInt(), + ) + if err != nil { + return err + } + + txBuilder := clientCtx.TxConfig.NewTxBuilder() + if err := txBuilder.SetMsgs(createValMsg); err != nil { + return err + } + + txBuilder.SetMemo(memo) + + txFactory := tx.Factory{} + txFactory = txFactory. + WithChainID(args.chainID). + WithMemo(memo). + WithKeybase(kb). + WithTxConfig(clientCtx.TxConfig) + + if err := tx.Sign(cmd.Context(), txFactory, nodeDirName, txBuilder, true); err != nil { + return err + } + + txBz, err := clientCtx.TxConfig.TxJSONEncoder()(txBuilder.GetTx()) + if err != nil { + return err + } + file = filepath.Join(gentxsDir, fmt.Sprintf("%v.json", "gentx-"+nodeIDs[i])) + gentxsFiles = append(gentxsFiles, file) + if err := writeFile(file, gentxsDir, txBz); err != nil { + return err + } + + appConfig.GRPC.Address = args.startingIPAddress + ":" + strconv.Itoa(9090-2*i) + appConfig.API.Address = "tcp://localhost:" + strconv.Itoa(1317-i) + srvconfig.WriteConfigFile(filepath.Join(nodeDir, "config", "app.toml"), appConfig) + } + + if err := initGenFiles(clientCtx, mbm, args.chainID, genAccounts, genBalances, genFiles, args.numValidators); err != nil { + return err + } + // copy gentx file + for i := 0; i < args.numValidators; i++ { + for _, file := range gentxsFiles { + nodeDirName := fmt.Sprintf("%s%d", args.nodeDirPrefix, i) + nodeDir := filepath.Join(args.outputDir, nodeDirName) + gentxsDir := filepath.Join(nodeDir, "config", "gentx") + + yes, err := isSubDir(file, gentxsDir) + if err != nil || yes { + continue + } + _, err = copyFile(file, gentxsDir) + if err != nil { + return err + } + } + } + err := collectGenFiles( + clientCtx, nodeConfig, nodeIDs, valPubKeys, + genBalIterator, + clientCtx.TxConfig.SigningContext().ValidatorAddressCodec(), + persistentPeers, args, + ) + if err != nil { + return err + } + + cmd.PrintErrf("Successfully initialized %d node directories\n", args.numValidators) 
+ return nil +} + +func writeFile(file, dir string, contents []byte) error { + if err := os.MkdirAll(dir, 0o755); err != nil { + return fmt.Errorf("could not create directory %q: %w", dir, err) + } + + if err := os.WriteFile(file, contents, 0o644); err != nil { + return err + } + + return nil +} + +func initGenFiles( + clientCtx client.Context, mbm module.BasicManager, chainID string, + genAccounts []authtypes.GenesisAccount, genBalances []banktypes.Balance, + genFiles []string, numValidators int, +) error { + appGenState := mbm.DefaultGenesis(clientCtx.Codec) + + // set the accounts in the genesis state + var authGenState authtypes.GenesisState + clientCtx.Codec.MustUnmarshalJSON(appGenState[authtypes.ModuleName], &authGenState) + + accounts, err := authtypes.PackAccounts(genAccounts) + if err != nil { + return err + } + + authGenState.Accounts = accounts + appGenState[authtypes.ModuleName] = clientCtx.Codec.MustMarshalJSON(&authGenState) + + // set the balances in the genesis state + var bankGenState banktypes.GenesisState + clientCtx.Codec.MustUnmarshalJSON(appGenState[banktypes.ModuleName], &bankGenState) + + bankGenState.Balances = banktypes.SanitizeGenesisBalances(genBalances) + for _, bal := range bankGenState.Balances { + bankGenState.Supply = bankGenState.Supply.Add(bal.Coins...) 
+ } + appGenState[banktypes.ModuleName] = clientCtx.Codec.MustMarshalJSON(&bankGenState) + + appGenStateJSON, err := json.MarshalIndent(appGenState, "", " ") + if err != nil { + return err + } + + genDoc := types.GenesisDoc{ + ChainID: chainID, + AppState: appGenStateJSON, + Validators: nil, + } + + // generate empty genesis files for each validator and save + for i := 0; i < numValidators; i++ { + if err := genDoc.SaveAs(genFiles[i]); err != nil { + return err + } + } + return nil +} + +func collectGenFiles( + clientCtx client.Context, nodeConfig *cmtconfig.Config, + nodeIDs []string, valPubKeys []cryptotypes.PubKey, + genBalIterator banktypes.GenesisBalancesIterator, + valAddrCodec runtime.ValidatorAddressCodec, persistentPeers string, + args initArgs, +) error { + chainID := args.chainID + numValidators := args.numValidators + outputDir := args.outputDir + nodeDirPrefix := args.nodeDirPrefix + + var appState json.RawMessage + genTime := tmtime.Now() + + for i := 0; i < numValidators; i++ { + nodeDirName := fmt.Sprintf("%s%d", nodeDirPrefix, i) + nodeDir := filepath.Join(outputDir, nodeDirName) + gentxsDir := filepath.Join(nodeDir, "config", "gentx") + nodeConfig.Moniker = nodeDirName + + nodeConfig.SetRoot(nodeDir) + + nodeID, valPubKey := nodeIDs[i], valPubKeys[i] + initCfg := genutiltypes.NewInitConfig(chainID, gentxsDir, nodeID, valPubKey) + + appGenesis, err := genutiltypes.AppGenesisFromFile(nodeConfig.GenesisFile()) + if err != nil { + return err + } + + nodeAppState, err := genutil.GenAppStateFromConfig(clientCtx.Codec, clientCtx.TxConfig, nodeConfig, initCfg, appGenesis, genBalIterator, genutiltypes.DefaultMessageValidator, + valAddrCodec) + if err != nil { + return err + } + + nodeConfig.P2P.PersistentPeers = persistentPeers + nodeConfig.P2P.AllowDuplicateIP = true + nodeConfig.P2P.ListenAddress = "tcp://0.0.0.0:" + strconv.Itoa(26656-3*i) + nodeConfig.RPC.ListenAddress = "tcp://127.0.0.1:" + args.ports[i] + nodeConfig.BaseConfig.ProxyApp = 
"tcp://127.0.0.1:" + strconv.Itoa(26658-3*i) + nodeConfig.Instrumentation.PrometheusListenAddr = ":" + strconv.Itoa(26660+i) + nodeConfig.Instrumentation.Prometheus = true + cmtconfig.WriteConfigFile(filepath.Join(nodeConfig.RootDir, "config", "config.toml"), nodeConfig) + if appState == nil { + // set the canonical application state (they should not differ) + appState = nodeAppState + } + + genFile := nodeConfig.GenesisFile() + + // overwrite each validator's genesis file to have a canonical genesis time + if err := genutil.ExportGenesisFileWithTime(genFile, chainID, nil, appState, genTime); err != nil { + return err + } + } + + return nil +} + +func copyFile(src, dstDir string) (int64, error) { + // Extract the file name from the source path + fileName := filepath.Base(src) + + // Create the full destination path (directory + file name) + dst := filepath.Join(dstDir, fileName) + + // Open the source file + sourceFile, err := os.Open(src) + if err != nil { + return 0, err + } + defer sourceFile.Close() + + // Create the destination file + destinationFile, err := os.Create(dst) + if err != nil { + return 0, err + } + defer destinationFile.Close() + + // Copy content from the source file to the destination file + bytesCopied, err := io.Copy(destinationFile, sourceFile) + if err != nil { + return 0, err + } + + // Ensure the content is written to the destination file + err = destinationFile.Sync() + if err != nil { + return 0, err + } + + return bytesCopied, nil +} + +// isSubDir checks if dstDir is a parent directory of src +func isSubDir(src, dstDir string) (bool, error) { + // Get the absolute path of src and dstDir + absSrc, err := filepath.Abs(src) + if err != nil { + return false, err + } + absDstDir, err := filepath.Abs(dstDir) + if err != nil { + return false, err + } + + // Check if absSrc is within absDstDir + relativePath, err := filepath.Rel(absDstDir, absSrc) + if err != nil { + return false, err + } + + // If the relative path doesn't go up the 
directory tree (doesn't contain ".."), it is inside dstDir + isInside := !strings.HasPrefix(relativePath, "..") && !filepath.IsAbs(relativePath) + return isInside, nil +} + +// generateRandomString generates a random string of the specified length. +func generateRandomString(length int) string { + const charset = "abcdefghijklmnopqrstuvwxyz0123456789" + var seededRand *rand.Rand = rand.New(rand.NewSource(time.Now().UnixNano())) + + b := make([]byte, length) + for i := range b { + b[i] = charset[seededRand.Intn(len(charset))] + } + return string(b) +} diff --git a/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/main.go.plush b/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/main.go.plush new file mode 100644 index 0000000..8616bed --- /dev/null +++ b/ignite/templates/app/files/cmd/{{binaryNamePrefix}}d/main.go.plush @@ -0,0 +1,20 @@ +package main + +import ( + "fmt" + "os" + + clienthelpers "cosmossdk.io/client/v2/helpers" + svrcmd "github.com/cosmos/cosmos-sdk/server/cmd" + + "<%= ModulePath %>/app" + "<%= ModulePath %>/cmd/<%= BinaryNamePrefix %>d/cmd" +) + +func main() { + rootCmd := cmd.NewRootCmd() + if err := svrcmd.Execute(rootCmd, clienthelpers.EnvPrefix, app.DefaultNodeHome); err != nil { + fmt.Fprintln(rootCmd.OutOrStderr(), err) + os.Exit(1) + } +} diff --git a/ignite/templates/app/files/config.yml.plush b/ignite/templates/app/files/config.yml.plush new file mode 100644 index 0000000..4be9328 --- /dev/null +++ b/ignite/templates/app/files/config.yml.plush @@ -0,0 +1,27 @@ +version: 1 +validation: sovereign +default_denom: <%= DefaultDenom %> +accounts: +- name: alice + coins: + - 20000token + - 200000000<%= DefaultDenom %> +- name: bob + coins: + - 10000token + - 100000000<%= DefaultDenom %> +client: + openapi: + path: docs/static/openapi.json +faucet: + name: bob + coins: + - 5token + - 100000stake +validators: +- name: alice + bonded: 100000000<%= DefaultDenom %> +- name: validator1 + bonded: 200000000<%= DefaultDenom %> +- name: validator2 + 
bonded: 100000000<%= DefaultDenom %> \ No newline at end of file diff --git a/ignite/templates/app/files/docs/docs.go.plush b/ignite/templates/app/files/docs/docs.go.plush new file mode 100644 index 0000000..7e7c577 --- /dev/null +++ b/ignite/templates/app/files/docs/docs.go.plush @@ -0,0 +1,41 @@ +package docs + +import ( + "embed" + httptemplate "html/template" + "net/http" + + "github.com/gorilla/mux" +) + +const ( + apiFile = "/static/openapi.json" + indexFile = "template/index.tpl" +) + + +//go:embed static +var Static embed.FS + +//go:embed template +var template embed.FS + +func RegisterOpenAPIService(appName string, rtr *mux.Router) { + rtr.Handle(apiFile, http.FileServer(http.FS(Static))) + rtr.HandleFunc("/", handler(appName)) +} + +// handler returns an http handler that servers OpenAPI console for an OpenAPI spec at specURL. +func handler(title string) http.HandlerFunc { + t, _ := httptemplate.ParseFS(template, indexFile) + + return func(w http.ResponseWriter, req *http.Request) { + _ = t.Execute(w, struct { + Title string + URL string + }{ + title, + apiFile, + }) + } +} diff --git a/ignite/templates/app/files/docs/static/openapi.json.plush b/ignite/templates/app/files/docs/static/openapi.json.plush new file mode 100644 index 0000000..6ee1e1a --- /dev/null +++ b/ignite/templates/app/files/docs/static/openapi.json.plush @@ -0,0 +1,41 @@ +{ + "id": "<%= ModulePath %>", + "consumes": ["application/json"], + "produces": ["application/json"], + "swagger": "2.0", + "info": + { + "description": "Chain <%= ModulePath %> REST API", + "title": "HTTP API Console", + "contact": { "name": "<%= ModulePath %>" }, + "version": "version not set", + }, + "paths": {}, + "definitions": + { + "google.protobuf.Any": + { + "type": "object", + "properties": { "@type": { "type": "string" } }, + "additionalProperties": {}, + }, + "google.rpc.Status": + { + "type": "object", + "properties": + { + "code": { "type": "integer", "format": "int32" }, + "details": + { + "type": 
"array", + "items": + { + "type": "object", + "$ref": "#/definitions/google.protobuf.Any", + }, + }, + "message": { "type": "string" }, + }, + }, + }, +} \ No newline at end of file diff --git a/ignite/templates/app/files/docs/template/index.tpl b/ignite/templates/app/files/docs/template/index.tpl new file mode 100644 index 0000000..2d07904 --- /dev/null +++ b/ignite/templates/app/files/docs/template/index.tpl @@ -0,0 +1,28 @@ + + + + + {{ .Title }} + + + + +
+ + + + + +Footer +© 2022 GitHub, Inc. +Footer navigation diff --git a/ignite/templates/app/files/go.mod.plush b/ignite/templates/app/files/go.mod.plush new file mode 100644 index 0000000..e857f75 --- /dev/null +++ b/ignite/templates/app/files/go.mod.plush @@ -0,0 +1,54 @@ +module <%= ModulePath %> + +go 1.24.1 + +replace ( + // fix upstream GHSA-h395-qcrw-5vmq vulnerability. + github.com/gin-gonic/gin => github.com/gin-gonic/gin v1.9.1 + // replace broken goleveldb + github.com/syndtr/goleveldb => github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 + // replace broken vanity url + nhooyr.io/websocket => github.com/coder/websocket v1.8.7 + // support for go 1.26 (remove when cosmossdk.io/log is updated) + github.com/bytedance/sonic => github.com/bytedance/sonic v1.15.0 +) + +require ( + cosmossdk.io/api v0.9.2 + cosmossdk.io/client/v2 v2.0.0-beta.11 + cosmossdk.io/collections v1.3.1 + cosmossdk.io/core v0.11.3 + cosmossdk.io/depinject v1.2.1 + cosmossdk.io/errors v1.0.2 + cosmossdk.io/log v1.6.1 + cosmossdk.io/math v1.5.3 + cosmossdk.io/store v1.1.2 + cosmossdk.io/tools/confix v0.1.2 + cosmossdk.io/x/circuit v0.1.1 + cosmossdk.io/x/evidence v0.1.1 + cosmossdk.io/x/feegrant v0.1.1 + cosmossdk.io/x/nft v0.1.0 + cosmossdk.io/x/upgrade v0.2.0 + github.com/cometbft/cometbft v0.38.21 + github.com/cosmos/cosmos-db v1.1.1 + github.com/cosmos/cosmos-proto v1.0.0-beta.5 + github.com/cosmos/cosmos-sdk v0.53.6 + github.com/cosmos/gogoproto v1.7.2 + github.com/cosmos/ibc-go/v10 v10.4.0 + github.com/golang/protobuf v1.5.4 + github.com/gorilla/mux v1.8.1 + github.com/grpc-ecosystem/grpc-gateway v1.16.0 + github.com/spf13/cast v1.10.0 + github.com/spf13/cobra v1.9.1 + github.com/spf13/pflag v1.0.6 + github.com/spf13/viper v1.21.0 + github.com/stretchr/testify v1.10.0 + google.golang.org/genproto/googleapis/api v0.0.0-20250528174236-200df99c418a + google.golang.org/grpc v1.72.2 + google.golang.org/protobuf v1.36.6 +) + +tool ( + <%= for (depTool) in DepTools { %> + 
"<%= depTool %>"<% } %> +) diff --git a/ignite/templates/app/files/readme.md.plush b/ignite/templates/app/files/readme.md.plush new file mode 100644 index 0000000..38c3471 --- /dev/null +++ b/ignite/templates/app/files/readme.md.plush @@ -0,0 +1,50 @@ +# <%= AppName %> +**<%= AppName %>** is a blockchain built using Cosmos SDK and Tendermint and created with [Ignite CLI](https://ignite.com/cli). + +## Get started + +``` +ignite chain serve +``` + +`serve` command installs dependencies, builds, initializes, and starts your blockchain in development. + +### Configure + +Your blockchain in development can be configured with `config.yml`. To learn more, see the [Ignite CLI docs](https://docs.ignite.com). + +### Web Frontend + +Additionally, Ignite CLI offers a frontend scaffolding feature (based on Vue) to help you quickly build a web frontend for your blockchain: + +Use: `ignite scaffold vue` +This command can be run within your scaffolded blockchain project. + + +For more information see the [monorepo for Ignite front-end development](https://github.com/ignite/web). + +## Release +To release a new version of your blockchain, create and push a new tag with `v` prefix. A new draft release with the configured targets will be created. + +``` +git tag v0.1 +git push origin v0.1 +``` + +After a draft release is created, make your final changes from the release page and publish it. + +### Install +To install the latest version of your blockchain node's binary, execute the following command on your machine: + +``` +curl https://get.ignite.com/<%= GitHubPath %>@latest! | sudo bash +``` +`<%= GitHubPath %>` should match the `username` and `repo_name` of the Github repository to which the source code was pushed. Learn more about [the install process](https://github.com/ignite/installer). 
+ +## Learn more + +- [Ignite CLI](https://ignite.com/cli) +- [Tutorials](https://docs.ignite.com/guide) +- [Ignite CLI docs](https://docs.ignite.com) +- [Cosmos SDK docs](https://docs.cosmos.network) +- [Developer Chat](https://discord.com/invite/ignitecli) diff --git a/ignite/templates/app/files/testutil/sample/sample.go.plush b/ignite/templates/app/files/testutil/sample/sample.go.plush new file mode 100644 index 0000000..ec8b1df --- /dev/null +++ b/ignite/templates/app/files/testutil/sample/sample.go.plush @@ -0,0 +1,13 @@ +package sample + +import ( + "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" + sdk "github.com/cosmos/cosmos-sdk/types" +) + +// AccAddress returns a sample account address +func AccAddress() string { + pk := ed25519.GenPrivKey().PubKey() + addr := pk.Address() + return sdk.AccAddress(addr).String() +} diff --git a/ignite/templates/app/files/{{protoDir}}/buf.gen.gogo.yaml b/ignite/templates/app/files/{{protoDir}}/buf.gen.gogo.yaml new file mode 100644 index 0000000..b29cf6e --- /dev/null +++ b/ignite/templates/app/files/{{protoDir}}/buf.gen.gogo.yaml @@ -0,0 +1,24 @@ +# This file is auto-generated by Ignite. You can edit +# the file content but do not change the file name or path. +# +# buf.gen.gogo.yaml +# +version: v2 +plugins: + - local: ["go", "tool", "github.com/cosmos/gogoproto/protoc-gen-gocosmos"] + out: . + opt: + - plugins=grpc + - Mgoogle/protobuf/any.proto=github.com/cosmos/gogoproto/types/any + - Mcosmos/orm/v1/orm.proto=cosmossdk.io/orm + - Mcosmos/app/v1alpha1/module.proto=cosmossdk.io/api/cosmos/app/v1alpha1 + - local: + [ + "go", + "tool", + "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway", + ] + out: . 
+ opt: + - logtostderr=true + - allow_colon_final_segments=true diff --git a/ignite/templates/app/files/{{protoDir}}/buf.gen.sta.yaml b/ignite/templates/app/files/{{protoDir}}/buf.gen.sta.yaml new file mode 100644 index 0000000..bad3722 --- /dev/null +++ b/ignite/templates/app/files/{{protoDir}}/buf.gen.sta.yaml @@ -0,0 +1,20 @@ +# This file is auto-generated by Ignite. You can edit +# the file content but do not change the file name or path. +# +# buf.gen.sta.yaml +# +version: v2 +plugins: + - local: + [ + "go", + "tool", + "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2", + ] + out: . + opt: + - logtostderr=true + - openapi_naming_strategy=simple + - ignore_comments=true + - simple_operation_ids=false + - json_names_for_fields=false diff --git a/ignite/templates/app/files/{{protoDir}}/buf.gen.swagger.yaml b/ignite/templates/app/files/{{protoDir}}/buf.gen.swagger.yaml new file mode 100644 index 0000000..373a48a --- /dev/null +++ b/ignite/templates/app/files/{{protoDir}}/buf.gen.swagger.yaml @@ -0,0 +1,19 @@ +# This file is auto-generated by Ignite. You can edit +# the file content but do not change the file name or path. +# +# buf.gen.swagger.yaml +# +version: v2 +plugins: + - local: + [ + "go", + "tool", + "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2", + ] + out: . + opt: + - logtostderr=true + - openapi_naming_strategy=fqn + - json_names_for_fields=false + - generate_unbound_methods=false diff --git a/ignite/templates/app/files/{{protoDir}}/buf.gen.ts.yaml b/ignite/templates/app/files/{{protoDir}}/buf.gen.ts.yaml new file mode 100644 index 0000000..76d7d67 --- /dev/null +++ b/ignite/templates/app/files/{{protoDir}}/buf.gen.ts.yaml @@ -0,0 +1,18 @@ +# This file is auto-generated by Ignite. You can edit +# the file content but do not change the file name or path. +# +# buf.gen.ts.yaml +# +version: v2 +managed: + enabled: true +plugins: + - remote: buf.build/community/stephenh-ts-proto + out: . 
+ opt: + - logtostderr=true + - allow_merge=true + - json_names_for_fields=false + - ts_proto_opt=snakeToCamel=true + - ts_proto_opt=esModuleInterop=true + - ts_proto_out=. diff --git a/ignite/templates/app/options.go b/ignite/templates/app/options.go new file mode 100644 index 0000000..1c34da9 --- /dev/null +++ b/ignite/templates/app/options.go @@ -0,0 +1,17 @@ +package app + +// Options ... +type Options struct { + AppName string + AppPath string + ProtoDir string + GitHubPath string + BinaryNamePrefix string + ModulePath string + AddressPrefix string + CoinType uint32 + DefaultDenom string + // IncludePrefixes is used to filter the files to include from the generator + IncludePrefixes []string + IsChainMinimal bool +} diff --git a/ignite/templates/app/proto.go b/ignite/templates/app/proto.go new file mode 100644 index 0000000..a9aed5d --- /dev/null +++ b/ignite/templates/app/proto.go @@ -0,0 +1,58 @@ +package app + +import ( + "embed" + "fmt" + "io/fs" + "strings" + + "github.com/gobuffalo/genny/v2" + "github.com/gobuffalo/plush/v4" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xembed" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/templates/field/plushhelpers" +) + +//go:embed files/{{protoDir}}/* files/buf.lock files/buf.yaml.plush +var fsProto embed.FS + +// NewBufGenerator returns the generator to buf build files. 
+func NewBufGenerator(protoDir string) (*genny.Generator, error) { + subFs, err := fs.Sub(fsProto, "files") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + + g := genny.New() + if err := g.OnlyFS(subFs, nil, nil); err != nil { + return nil, err + } + + ctx := plush.NewContext() + ctx.Set("ProtoDir", protoDir) + + plushhelpers.ExtendPlushContext(ctx) + g.Transformer(xgenny.Transformer(ctx)) + g.Transformer(genny.Replace("{{protoDir}}", protoDir)) + + return g, nil +} + +func CutTemplatePrefix(name string) (string, bool) { + return strings.CutPrefix(name, fmt.Sprintf("%s/", "{{protoDir}}")) +} + +// BufFiles returns a list of Buf.Build files. +func BufFiles() ([]string, error) { + files, err := xembed.FileList(fsProto, "files") + if err != nil { + return nil, err + } + // remove all .plush extensions. + for i, file := range files { + files[i] = strings.TrimSuffix(file, ".plush") + } + return files, nil +} diff --git a/ignite/templates/app/proto_test.go b/ignite/templates/app/proto_test.go new file mode 100644 index 0000000..2449c30 --- /dev/null +++ b/ignite/templates/app/proto_test.go @@ -0,0 +1,58 @@ +package app + +import ( + "os" + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestBufFiles(t *testing.T) { + want := []string{"buf.lock", "buf.yaml"} + protoDir, err := os.ReadDir("files/{{protoDir}}") + require.NoError(t, err) + for _, e := range protoDir { + want = append(want, filepath.Join("{{protoDir}}", strings.TrimSuffix(e.Name(), ".plush"))) + } + + got, err := BufFiles() + require.NoError(t, err) + require.ElementsMatch(t, want, got) +} + +func TestCutTemplatePrefix(t *testing.T) { + tests := []struct { + name string + arg string + want string + ok bool + }{ + { + name: "with prefix", + arg: "{{protoDir}}/myvalue", + want: "myvalue", + ok: true, + }, + { + name: "with 2 prefix", + arg: "{{protoDir}}/{{protoDir}}/myvalue", + want: "{{protoDir}}/myvalue", + ok: true, + }, + { + 
name: "without prefix", + arg: "myvalue", + want: "myvalue", + ok: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, ok := CutTemplatePrefix(tt.arg) + require.Equal(t, tt.ok, ok) + require.Equal(t, tt.want, got) + }) + } +} diff --git a/ignite/templates/field/datatype/address.go b/ignite/templates/field/datatype/address.go new file mode 100644 index 0000000..2dfabb0 --- /dev/null +++ b/ignite/templates/field/datatype/address.go @@ -0,0 +1,43 @@ +package datatype + +import ( + "fmt" + + "github.com/emicklei/proto" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" +) + +// DataAddress address (string) data type definition. +var DataAddress = DataType{ + Name: Address, + DataType: func(string) string { return "string" }, + CollectionsKeyValueName: func(string) string { return "collections.StringKey" }, + DefaultTestValue: "cosmos1abcdefghijklmnopqrstuvwxyz0123456", + ValueLoop: "fmt.Sprintf(`cosmos1abcdef%d`, i)", + ValueIndex: "`cosmos1abcdefghijklmnopqrstuvwxyz0123456`", + ValueInvalidIndex: "`cosmos1invalid`", + ProtoType: func(_, name string, index int) string { + return fmt.Sprintf("string %s = %d", name, index) + }, + GenesisArgs: func(name multiformatname.Name, value int) string { + return fmt.Sprintf("%s: \"%d\",\n", name.UpperCamel, value) + }, + CLIArgs: func(name multiformatname.Name, _, prefix string, argIndex int) string { + return fmt.Sprintf("%s%s := args[%d]", prefix, name.UpperCamel, argIndex) + }, + ToBytes: func(name string) string { + return fmt.Sprintf("%[1]vBytes := []byte(%[1]v)", name) + }, + ToString: func(name string) string { + return name + }, + ToProtoField: func(_, name string, index int) *proto.NormalField { + field := protoutil.NewField(name, "string", index) + option := protoutil.NewOption("cosmos_proto.scalar", "cosmos.AddressString", protoutil.Custom()) + field.Options = append(field.Options, option) + return field + }, + 
ProtoImports: []string{"cosmos_proto/cosmos.proto"}, +} diff --git a/ignite/templates/field/datatype/bool.go b/ignite/templates/field/datatype/bool.go new file mode 100644 index 0000000..295b083 --- /dev/null +++ b/ignite/templates/field/datatype/bool.go @@ -0,0 +1,47 @@ +package datatype + +import ( + "fmt" + + "github.com/emicklei/proto" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" +) + +// DataBool bool data type definition. +var DataBool = DataType{ + Name: Bool, + DataType: func(string) string { return "bool" }, + CollectionsKeyValueName: func(string) string { return "collections.BoolKey" }, + DefaultTestValue: "true", + ValueLoop: "true", + ValueIndex: "true", + ValueInvalidIndex: "true", + ProtoType: func(_, name string, index int) string { + return fmt.Sprintf("bool %s = %d", name, index) + }, + GenesisArgs: func(name multiformatname.Name, value int) string { + return fmt.Sprintf("%s: %t,\n", name.UpperCamel, value%2 == 0) + }, + CLIArgs: func(name multiformatname.Name, _, prefix string, argIndex int) string { + return fmt.Sprintf(`%s%s, err := cast.ToBoolE(args[%d]) + if err != nil { + return err + }`, + prefix, name.UpperCamel, argIndex) + }, + ToBytes: func(name string) string { + return fmt.Sprintf(`%[1]vBytes := []byte{0} + if %[1]v { + %[1]vBytes = []byte{1} + }`, name) + }, + ToString: func(name string) string { + return fmt.Sprintf("strconv.FormatBool(%s)", name) + }, + ToProtoField: func(_, name string, index int) *proto.NormalField { + return protoutil.NewField(name, "bool", index) + }, + GoCLIImports: []GoImport{{Name: "github.com/spf13/cast"}}, +} diff --git a/ignite/templates/field/datatype/bytes.go b/ignite/templates/field/datatype/bytes.go new file mode 100644 index 0000000..00b8338 --- /dev/null +++ b/ignite/templates/field/datatype/bytes.go @@ -0,0 +1,38 @@ +package datatype + +import ( + "fmt" + + "github.com/emicklei/proto" + + 
"github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" +) + +// DataBytes is a string data type definition. +var DataBytes = DataType{ + Name: Bytes, + DataType: func(string) string { return "[]byte" }, + CollectionsKeyValueName: func(string) string { return "collections.BytesKey" }, + DefaultTestValue: "3,2,3,5", + ValueLoop: "[]byte{1+i%1, 2+i%2, 3+i%3}", + ProtoType: func(_, name string, index int) string { + return fmt.Sprintf("bytes %s = %d", name, index) + }, + GenesisArgs: func(name multiformatname.Name, value int) string { + return fmt.Sprintf("%s: []byte(\"%d\"),\n", name.UpperCamel, value) + }, + CLIArgs: func(name multiformatname.Name, _, prefix string, argIndex int) string { + return fmt.Sprintf("%s%s := []byte(args[%d])", prefix, name.UpperCamel, argIndex) + }, + ToBytes: func(name string) string { + return name + }, + ToString: func(name string) string { + return fmt.Sprintf("string(%s)", name) + }, + ToProtoField: func(_, name string, index int) *proto.NormalField { + return protoutil.NewField(name, "bytes", index) + }, + NonIndex: true, +} diff --git a/ignite/templates/field/datatype/coin.go b/ignite/templates/field/datatype/coin.go new file mode 100644 index 0000000..40e860c --- /dev/null +++ b/ignite/templates/field/datatype/coin.go @@ -0,0 +1,131 @@ +package datatype + +import ( + "fmt" + + "github.com/emicklei/proto" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" +) + +var ( + // DataCoin coin data type definition. 
+ DataCoin = DataType{ + Name: Coin, + DataType: func(string) string { return "sdk.Coin" }, + CollectionsKeyValueName: func(string) string { return collectionValueComment }, + DefaultTestValue: "10token", + ValueLoop: "sdk.NewInt64Coin(`token`, int64(i+100))", + ProtoType: func(_, name string, index int) string { + return fmt.Sprintf("cosmos.base.v1beta1.Coin %s = %d [(gogoproto.nullable) = false]", + name, index) + }, + GenesisArgs: func(multiformatname.Name, int) string { return "" }, + CLIArgs: func(name multiformatname.Name, _, prefix string, argIndex int) string { + return fmt.Sprintf(`%s%s, err := sdk.ParseCoinNormalized(args[%d]) + if err != nil { + return err + }`, prefix, name.UpperCamel, argIndex) + }, + GoCLIImports: []GoImport{{Name: "github.com/cosmos/cosmos-sdk/types", Alias: "sdk"}}, + ProtoImports: []string{"gogoproto/gogo.proto", "cosmos/base/v1beta1/coin.proto"}, + NonIndex: true, + ToProtoField: func(_, name string, index int) *proto.NormalField { + option := protoutil.NewOption("gogoproto.nullable", "false", protoutil.Custom()) + return protoutil.NewField( + name, "cosmos.base.v1beta1.Coin", index, protoutil.WithFieldOptions(option), + ) + }, + } + + // DataCoinSlice is a coin array data type definition. 
+ DataCoinSlice = DataType{ + Name: Coins, + DataType: func(string) string { return "sdk.Coins" }, + CollectionsKeyValueName: func(string) string { return collectionValueComment }, + DefaultTestValue: "20stake", + ValueLoop: "sdk.NewCoins(sdk.NewInt64Coin(`token`, int64(i%1+100)), sdk.NewInt64Coin(`stake`, int64(i%2+100)))", + ProtoType: func(_, name string, index int) string { + return fmt.Sprintf(`repeated cosmos.base.v1beta1.Coin %s = %d [(gogoproto.nullable) = false]`, + name, index) + }, + GenesisArgs: func(multiformatname.Name, int) string { return "" }, + CLIArgs: func(name multiformatname.Name, _, prefix string, argIndex int) string { + return fmt.Sprintf(`%s%s, err := sdk.ParseCoinsNormalized(args[%d]) + if err != nil { + return err + }`, prefix, name.UpperCamel, argIndex) + }, + GoCLIImports: []GoImport{{Name: "github.com/cosmos/cosmos-sdk/types", Alias: "sdk"}}, + ProtoImports: []string{"gogoproto/gogo.proto", "cosmos/base/v1beta1/coin.proto"}, + NonIndex: true, + ToProtoField: func(_, name string, index int) *proto.NormalField { + option := protoutil.NewOption("gogoproto.nullable", "false", protoutil.Custom()) + return protoutil.NewField( + name, "cosmos.base.v1beta1.Coin", index, protoutil.WithFieldOptions(option), protoutil.Repeated(), + ) + }, + } + + // DataDecCoin decimal coin data type definition. 
+ DataDecCoin = DataType{ + Name: DecCoin, + DataType: func(string) string { return "sdk.DecCoin" }, + CollectionsKeyValueName: func(string) string { return collectionValueComment }, + DefaultTestValue: "100001token", + ValueLoop: "sdk.NewInt64DecCoin(`token`, int64(i+100))", + ProtoType: func(_, name string, index int) string { + return fmt.Sprintf("cosmos.base.v1beta1.DecCoin %s = %d [(gogoproto.nullable) = false]", + name, index) + }, + GenesisArgs: func(multiformatname.Name, int) string { return "" }, + CLIArgs: func(name multiformatname.Name, _, prefix string, argIndex int) string { + return fmt.Sprintf(`%s%s, err := sdk.ParseDecCoins(args[%d]) + if err != nil { + return err + }`, prefix, name.UpperCamel, argIndex) + }, + GoCLIImports: []GoImport{{Name: "github.com/cosmos/cosmos-sdk/types", Alias: "sdk"}}, + ProtoImports: []string{"gogoproto/gogo.proto", "cosmos/base/v1beta1/coin.proto"}, + NonIndex: true, + ToProtoField: func(_, name string, index int) *proto.NormalField { + option := protoutil.NewOption("gogoproto.nullable", "false", protoutil.Custom()) + return protoutil.NewField( + name, "cosmos.base.v1beta1.DecCoin", index, protoutil.WithFieldOptions(option), + ) + }, + } + + // DataDecCoinSlice is a decimal coin array data type definition. 
+ DataDecCoinSlice = DataType{ + Name: DecCoins, + DataType: func(string) string { return "sdk.DecCoins" }, + CollectionsKeyValueName: func(string) string { return collectionValueComment }, + DefaultTestValue: "20000002stake", + ValueLoop: "sdk.NewDecCoins(sdk.NewInt64DecCoin(`token`, int64(i%1+100)), sdk.NewInt64DecCoin(`stake`, int64(i%2+100)))", + ProtoType: func(_, name string, index int) string { + return fmt.Sprintf(`repeated cosmos.base.v1beta1.DecCoin %s = %d [(gogoproto.nullable) = false]`, + name, index) + }, + GenesisArgs: func(multiformatname.Name, int) string { return "" }, + CLIArgs: func(name multiformatname.Name, _, prefix string, argIndex int) string { + return fmt.Sprintf(`%s%s, err := sdk.ParseDecCoins(args[%d]) + if err != nil { + return err + }`, prefix, name.UpperCamel, argIndex) + }, + GoCLIImports: []GoImport{{Name: "github.com/cosmos/cosmos-sdk/types", Alias: "sdk"}}, + ProtoImports: []string{"gogoproto/gogo.proto", "cosmos/base/v1beta1/coin.proto"}, + NonIndex: true, + ToProtoField: func(_, name string, index int) *proto.NormalField { + optionNullable := protoutil.NewOption("gogoproto.nullable", "false", protoutil.Custom()) + optionCast := protoutil.NewOption("gogoproto.castrepeated", "github.com/cosmos/cosmos-sdk/types.DecCoins", protoutil.Custom()) + return protoutil.NewField(name, "cosmos.base.v1beta1.DecCoin", index, + protoutil.WithFieldOptions(optionNullable), + protoutil.WithFieldOptions(optionCast), + protoutil.Repeated(), + ) + }, + } +) diff --git a/ignite/templates/field/datatype/custom.go b/ignite/templates/field/datatype/custom.go new file mode 100644 index 0000000..87e7d99 --- /dev/null +++ b/ignite/templates/field/datatype/custom.go @@ -0,0 +1,64 @@ +package datatype + +import ( + "fmt" + + "github.com/emicklei/proto" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" +) + +// DataCustom is a custom data type definition. 
+var DataCustom = DataType{ + Name: Custom, + DataType: func(datatype string) string { return fmt.Sprintf("*%s", datatype) }, + CollectionsKeyValueName: func(string) string { return collectionValueComment }, + DefaultTestValue: "{}", + ValueLoop: "nil", + ProtoType: func(datatype, name string, index int) string { + return fmt.Sprintf("%s %s = %d", datatype, name, index) + }, + GenesisArgs: func(name multiformatname.Name, _ int) string { + return fmt.Sprintf("%s: new(types.%s),\n", name.UpperCamel, name.UpperCamel) + }, + CLIArgs: func(name multiformatname.Name, datatype, prefix string, argIndex int) string { + return fmt.Sprintf(`%[1]v%[2]v := new(types.%[3]v) + err = json.Unmarshal([]byte(args[%[4]v]), %[1]v%[2]v) + if err != nil { + return err + }`, prefix, name.UpperCamel, datatype, argIndex) + }, + ToProtoField: func(datatype, name string, index int) *proto.NormalField { + return protoutil.NewField(name, datatype, index) + }, + GoCLIImports: []GoImport{{Name: "encoding/json"}}, + NonIndex: true, +} + +// DataCustomSlice is a custom array data type definition. 
+var DataCustomSlice = DataType{ + Name: CustomSlice, + DataType: func(datatype string) string { return fmt.Sprintf("[]*%s", datatype) }, + CollectionsKeyValueName: func(string) string { return collectionValueComment }, + DefaultTestValue: "[]", + ValueLoop: "nil", + ProtoType: func(datatype, name string, index int) string { + return fmt.Sprintf("repeated %s %s = %d", datatype, name, index) + }, + GenesisArgs: func(name multiformatname.Name, _ int) string { + return fmt.Sprintf("%s: []*types.%s{},\n", name.UpperCamel, name.UpperCamel) + }, + CLIArgs: func(name multiformatname.Name, datatype, prefix string, argIndex int) string { + return fmt.Sprintf(`var %[1]v%[2]v []*types.%[3]v + err = json.Unmarshal([]byte(args[%[4]v]), &%[1]v%[2]v) + if err != nil { + return err + }`, prefix, name.UpperCamel, datatype, argIndex) + }, + ToProtoField: func(datatype, name string, index int) *proto.NormalField { + return protoutil.NewField(name, datatype, index, protoutil.Repeated()) + }, + GoCLIImports: []GoImport{{Name: "encoding/json"}}, + NonIndex: true, +} diff --git a/ignite/templates/field/datatype/int.go b/ignite/templates/field/datatype/int.go new file mode 100644 index 0000000..b49d11b --- /dev/null +++ b/ignite/templates/field/datatype/int.go @@ -0,0 +1,78 @@ +package datatype + +import ( + "fmt" + + "github.com/emicklei/proto" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" +) + +var ( + // DataInt is an int data type definition. 
+ DataInt = DataType{ + Name: Int, + DataType: func(string) string { return "int64" }, + CollectionsKeyValueName: func(string) string { return "collections.Int64Key" }, + DefaultTestValue: "111", + ValueLoop: "int64(i)", + ValueIndex: "0", + ValueInvalidIndex: "100000", + ProtoType: func(_, name string, index int) string { + return fmt.Sprintf("int64 %s = %d", name, index) + }, + GenesisArgs: func(name multiformatname.Name, value int) string { + return fmt.Sprintf("%s: %d,\n", name.UpperCamel, value) + }, + CLIArgs: func(name multiformatname.Name, _, prefix string, argIndex int) string { + return fmt.Sprintf(`%s%s, err := cast.ToInt64E(args[%d]) + if err != nil { + return err + }`, + prefix, name.UpperCamel, argIndex) + }, + ToBytes: func(name string) string { + return fmt.Sprintf(`%[1]vBytes := make([]byte, 4) + binary.BigEndian.PutUint64(%[1]vBytes, uint64(%[1]v))`, name) + }, + ToString: func(name string) string { + return fmt.Sprintf("strconv.FormatInt(%s, 10)", name) + }, + ToProtoField: func(_, name string, index int) *proto.NormalField { + return protoutil.NewField(name, "int64", index) + }, + GoCLIImports: []GoImport{{Name: "github.com/spf13/cast"}}, + } + + // DataIntSlice is an int array data type definition. 
+ DataIntSlice = DataType{ + Name: IntSlice, + DataType: func(string) string { return "[]int64" }, + CollectionsKeyValueName: func(string) string { return collectionValueComment }, + DefaultTestValue: "5,4,3,2,1", + ValueLoop: "[]int64{int64(i+i%1), int64(i+i%2), int64(i+i%3)}", + ProtoType: func(_, name string, index int) string { + return fmt.Sprintf("repeated int64 %s = %d", name, index) + }, + GenesisArgs: func(name multiformatname.Name, value int) string { + return fmt.Sprintf("%s: []int64{%d},\n", name.UpperCamel, value) + }, + CLIArgs: func(name multiformatname.Name, _, prefix string, argIndex int) string { + return fmt.Sprintf(`%[1]vCast%[2]v := strings.Split(args[%[3]v], listSeparator) + %[1]v%[2]v := make([]int64, len(%[1]vCast%[2]v)) + for i, arg := range %[1]vCast%[2]v { + value, err := cast.ToInt64E(arg) + if err != nil { + return err + } + %[1]v%[2]v[i] = value + }`, prefix, name.UpperCamel, argIndex) + }, + ToProtoField: func(_, name string, index int) *proto.NormalField { + return protoutil.NewField(name, "int64", index, protoutil.Repeated()) + }, + GoCLIImports: []GoImport{{Name: "github.com/spf13/cast"}, {Name: "strings"}}, + NonIndex: true, + } +) diff --git a/ignite/templates/field/datatype/string.go b/ignite/templates/field/datatype/string.go new file mode 100644 index 0000000..ec7342c --- /dev/null +++ b/ignite/templates/field/datatype/string.go @@ -0,0 +1,65 @@ +package datatype + +import ( + "fmt" + + "github.com/emicklei/proto" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" +) + +var ( + // DataString is a string data type definition. 
+ DataString = DataType{ + Name: String, + DataType: func(string) string { return "string" }, + CollectionsKeyValueName: func(string) string { return "collections.StringKey" }, + DefaultTestValue: "xyz", + ValueLoop: "strconv.Itoa(i)", + ValueIndex: "strconv.Itoa(0)", + ValueInvalidIndex: "strconv.Itoa(100000)", + ProtoType: func(_, name string, index int) string { + return fmt.Sprintf("string %s = %d", name, index) + }, + GenesisArgs: func(name multiformatname.Name, value int) string { + return fmt.Sprintf("%s: \"%d\",\n", name.UpperCamel, value) + }, + CLIArgs: func(name multiformatname.Name, _, prefix string, argIndex int) string { + return fmt.Sprintf("%s%s := args[%d]", prefix, name.UpperCamel, argIndex) + }, + ToBytes: func(name string) string { + return fmt.Sprintf("%[1]vBytes := []byte(%[1]v)", name) + }, + ToString: func(name string) string { + return name + }, + ToProtoField: func(_, name string, index int) *proto.NormalField { + return protoutil.NewField(name, "string", index) + }, + } + + // DataStringSlice is a string array data type definition. 
+ DataStringSlice = DataType{ + Name: StringSlice, + DataType: func(string) string { return "[]string" }, + CollectionsKeyValueName: func(string) string { return collectionValueComment }, + DefaultTestValue: "abc,xyz", + ValueLoop: "[]string{`abc`+strconv.Itoa(i), `xyz`+strconv.Itoa(i)}", + ProtoType: func(_, name string, index int) string { + return fmt.Sprintf("repeated string %s = %d", name, index) + }, + GenesisArgs: func(name multiformatname.Name, value int) string { + return fmt.Sprintf("%s: []string{\"%d\"},\n", name.UpperCamel, value) + }, + CLIArgs: func(name multiformatname.Name, _, prefix string, argIndex int) string { + return fmt.Sprintf(`%[1]v%[2]v := strings.Split(args[%[3]v], listSeparator)`, + prefix, name.UpperCamel, argIndex) + }, + GoCLIImports: []GoImport{{Name: "strings"}}, + ToProtoField: func(_, name string, index int) *proto.NormalField { + return protoutil.NewField(name, "string", index, protoutil.Repeated()) + }, + NonIndex: true, + } +) diff --git a/ignite/templates/field/datatype/types.go b/ignite/templates/field/datatype/types.go new file mode 100644 index 0000000..09f0394 --- /dev/null +++ b/ignite/templates/field/datatype/types.go @@ -0,0 +1,194 @@ +package datatype + +import ( + "fmt" + "io" + "sort" + + "github.com/emicklei/proto" + + "github.com/ignite/cli/v29/ignite/pkg/cliui/entrywriter" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" +) + +const ( + // Separator represents the type separator. + Separator = ":" + // ArrayPrefix represents the prefix used by array field types. + ArrayPrefix = "array." + + // String represents the string type name. + String Name = "string" + // StringSlice represents the string array type name. + StringSlice Name = "array.string" + // Bool represents the bool type name. + Bool Name = "bool" + // Int represents the int type name. + Int Name = "int" + // Int64 represents the int64 type name. 
+	Int64 Name = "int64"
+	// IntSlice represents the int array type name.
+	IntSlice Name = "array.int"
+	// Uint represents the uint type name.
+	Uint Name = "uint"
+	// Uint64 represents the uint64 type name.
+	Uint64 Name = "uint64"
+	// UintSlice represents the uint array type name.
+	UintSlice Name = "array.uint"
+	// Coin represents the coin type name.
+	Coin Name = "coin"
+	// Coins represents the coin array type name.
+	Coins Name = "array.coin"
+	// DecCoin represents the decimal coin type name.
+	DecCoin Name = "dec.coin"
+	// DecCoins represents the decimal coin array type name.
+	DecCoins Name = "array.dec.coin"
+	// Bytes represents the bytes type name.
+	Bytes Name = "bytes"
+	// Address represents the address type name.
+	Address Name = "address"
+	// Custom represents the custom type name.
+	Custom Name = Name(TypeCustom)
+	// CustomSlice represents the custom array type name.
+	CustomSlice Name = Name(TypeCustomSlice)
+
+	// StringSliceAlias represents the string array type name alias.
+	StringSliceAlias Name = "strings"
+	// IntSliceAlias represents the int array type name alias.
+	IntSliceAlias Name = "ints"
+	// UintSliceAlias represents the uint array type name alias.
+	UintSliceAlias Name = "uints"
+	// CoinSliceAlias represents the coin array type name alias.
+	CoinSliceAlias Name = "coins"
+	// DecCoinSliceAlias represents the decimal coin array type name alias.
+	DecCoinSliceAlias Name = "dec.coins"
+
+	// TypeCustom represents the custom type name id.
+	TypeCustom = "customignitetype"
+	// TypeCustomSlice represents the custom array type name id.
+	TypeCustomSlice = "customignitetypearray"
+
+	collectionValueComment = "/* Add collection key value */"
+)
+
+// supportedTypes holds all supported data types and their definitions.
+var supportedTypes = map[Name]DataType{ + Bytes: DataBytes, + String: DataString, + StringSlice: DataStringSlice, + StringSliceAlias: DataStringSlice, + Bool: DataBool, + Int: DataInt, + Int64: DataInt, + IntSlice: DataIntSlice, + IntSliceAlias: DataIntSlice, + Uint: DataUint, + Uint64: DataUint, + UintSlice: DataUintSlice, + UintSliceAlias: DataUintSlice, + Coin: DataCoin, + Coins: DataCoinSlice, + CoinSliceAlias: DataCoinSlice, + DecCoin: DataDecCoin, + DecCoins: DataDecCoinSlice, + DecCoinSliceAlias: DataDecCoinSlice, + Address: DataAddress, + Custom: DataCustom, + CustomSlice: DataCustomSlice, +} + +// Name represents the Alias Name for the data type. +type Name string + +// DataType represents the data types for code replacement. +type DataType struct { + Name Name + DataType func(datatype string) string + ProtoType func(datatype, name string, index int) string + CollectionsKeyValueName func(datatype string) string + GenesisArgs func(name multiformatname.Name, value int) string + ProtoImports []string + GoCLIImports GoImports + DefaultTestValue string + ValueLoop string + ValueIndex string + ValueInvalidIndex string + ToBytes func(name string) string + ToString func(name string) string + ToProtoField func(datatype, name string, index int) *proto.NormalField + CLIArgs func(name multiformatname.Name, datatype, prefix string, argIndex int) string + NonIndex bool +} + +// Usage returns the usage of the data type. +// It provides a description of how to use the data type in scaffolding. +func (t DataType) Usage() string { + if t.Name == Custom || t.Name == CustomSlice { + return "use the custom type to scaffold already created chain types." 
+ } + usage := fmt.Sprintf("use ':%s' to scaffold %s types (eg: %s).", t.Name, t.DataType(""), t.DefaultTestValue) + if t.Name == Coins || t.Name == DecCoins || + t.Name == CoinSliceAlias || t.Name == DecCoinSliceAlias { + return usage + " Disclaimer: Only one `coins` or `dec.coins` field can accept multiple CLI values per command due to AutoCLI limitations." + } + return usage +} + +// GoImports represents a list of go import. +type GoImports []GoImport + +// GoImport represents the go import repo name with the alias. +type GoImport struct { + Name string + Alias string +} + +// IsSupportedType type checks if the given typename is supported by ignite scaffolding. +// Returns corresponding Datatype if supported. +func IsSupportedType(typename Name) (dt DataType, ok bool) { + dt, ok = supportedTypes[typename] + return +} + +// SupportedTypes return a list of supported types. +func SupportedTypes() map[string]string { + supported := make(map[string]string) + for name, dataType := range supportedTypes { + if dataType.Name == CustomSlice { + continue + } + if dataType.Name == Custom { + name = "custom" + } + supported[string(name)] = dataType.Usage() + } + return supported +} + +// PrintScaffoldTypeList prints the list of supported scaffold types to the given writer. 
+func PrintScaffoldTypeList(writer io.Writer) error { + supported := SupportedTypes() + entries := make([][]string, 0, len(supported)) + for name, usage := range supported { + entries = append(entries, []string{name, usage}) + } + + sort.Slice(entries, func(i, j int) bool { + return entries[i][0] < entries[j][0] + }) + + if err := entrywriter.MustWrite(writer, []string{"types", "usage"}, entries...); err != nil { + return errors.Errorf("failed to write scaffold types: %w", err) + } + + const footer = `Field Usage: + - fieldName + - fieldName:fieldType + +If no :fieldType, default (string) is used +` + + _, err := fmt.Fprint(writer, footer) + return err +} diff --git a/ignite/templates/field/datatype/types_test.go b/ignite/templates/field/datatype/types_test.go new file mode 100644 index 0000000..a1dd38c --- /dev/null +++ b/ignite/templates/field/datatype/types_test.go @@ -0,0 +1,115 @@ +package datatype_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/templates/field/datatype" +) + +func TestIsSupportedType(t *testing.T) { + tests := []struct { + name string + typename datatype.Name + ok bool + }{ + { + name: "string", + typename: datatype.String, + ok: true, + }, + { + name: "string slice", + typename: datatype.StringSlice, + ok: true, + }, + { + name: "bool", + typename: datatype.Bool, + ok: true, + }, + { + name: "int", + typename: datatype.Int, + ok: true, + }, + { + name: "int slice", + typename: datatype.IntSlice, + ok: true, + }, + { + name: "uint", + typename: datatype.Uint, + ok: true, + }, + { + name: "uint slice", + typename: datatype.UintSlice, + ok: true, + }, + { + name: "coin", + typename: datatype.Coin, + ok: true, + }, + { + name: "coin slice", + typename: datatype.Coins, + ok: true, + }, + { + name: "bytes", + typename: datatype.Bytes, + ok: true, + }, + { + name: "custom", + typename: datatype.Custom, + ok: true, + }, + { + name: "custom slice", + typename: datatype.CustomSlice, + ok: 
true, + }, + { + name: "string slice alias", + typename: datatype.StringSliceAlias, + ok: true, + }, + { + name: "int slice alias", + typename: datatype.IntSliceAlias, + ok: true, + }, + { + name: "uint slice alias", + typename: datatype.UintSliceAlias, + ok: true, + }, + { + name: "coin slice alias", + typename: datatype.CoinSliceAlias, + ok: true, + }, + { + name: "address", + typename: datatype.Address, + ok: true, + }, + { + name: "invalid type name", + typename: datatype.Name("invalid"), + ok: false, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + _, ok := datatype.IsSupportedType(tc.typename) + require.Equal(t, tc.ok, ok) + }) + } +} diff --git a/ignite/templates/field/datatype/uint.go b/ignite/templates/field/datatype/uint.go new file mode 100644 index 0000000..e85935d --- /dev/null +++ b/ignite/templates/field/datatype/uint.go @@ -0,0 +1,79 @@ +package datatype + +import ( + "fmt" + + "github.com/emicklei/proto" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" +) + +var ( + // DataUint uint data type definition. 
+ DataUint = DataType{ + Name: Uint, + DataType: func(string) string { return "uint64" }, + CollectionsKeyValueName: func(string) string { return "collections.Uint64Key" }, + DefaultTestValue: "111", + ValueLoop: "uint64(i)", + ValueIndex: "0", + ValueInvalidIndex: "100000", + ProtoType: func(_, name string, index int) string { + return fmt.Sprintf("uint64 %s = %d", name, index) + }, + GenesisArgs: func(name multiformatname.Name, value int) string { + return fmt.Sprintf("%s: %d,\n", name.UpperCamel, value) + }, + CLIArgs: func(name multiformatname.Name, _, prefix string, argIndex int) string { + return fmt.Sprintf(`%s%s, err := cast.ToUint64E(args[%d]) + if err != nil { + return err + }`, + prefix, name.UpperCamel, argIndex) + }, + ToBytes: func(name string) string { + return fmt.Sprintf(`%[1]vBytes := make([]byte, 8) + binary.BigEndian.PutUint64(%[1]vBytes, %[1]v)`, name) + }, + ToString: func(name string) string { + return fmt.Sprintf("strconv.Itoa(int(%s))", name) + }, + ToProtoField: func(_, name string, index int) *proto.NormalField { + return protoutil.NewField(name, "uint64", index) + }, + GoCLIImports: []GoImport{{Name: "github.com/spf13/cast"}}, + } + + // DataUintSlice uint array data type definition. 
+ DataUintSlice = DataType{ + Name: UintSlice, + DataType: func(string) string { return "[]uint64" }, + CollectionsKeyValueName: func(string) string { return collectionValueComment }, + DefaultTestValue: "13,26,31,40", + ValueLoop: "[]uint64{uint64(i+i%1), uint64(i+i%2), uint64(i+i%3)}", + ProtoType: func(_, name string, index int) string { + return fmt.Sprintf("repeated uint64 %s = %d", name, index) + }, + GenesisArgs: func(name multiformatname.Name, value int) string { + return fmt.Sprintf("%s: []uint64{%d},\n", name.UpperCamel, value) + }, + CLIArgs: func(name multiformatname.Name, _, prefix string, argIndex int) string { + return fmt.Sprintf(`%[1]vCast%[2]v := strings.Split(args[%[3]v], listSeparator) + %[1]v%[2]v := make([]uint64, len(%[1]vCast%[2]v)) + for i, arg := range %[1]vCast%[2]v { + value, err := cast.ToUint64E(arg) + if err != nil { + return err + } + %[1]v%[2]v[i] = value + }`, + prefix, name.UpperCamel, argIndex) + }, + ToProtoField: func(_, name string, index int) *proto.NormalField { + return protoutil.NewField(name, "uint64", index, protoutil.Repeated()) + }, + GoCLIImports: []GoImport{{Name: "github.com/spf13/cast"}, {Name: "strings"}}, + NonIndex: true, + } +) diff --git a/ignite/templates/field/field.go b/ignite/templates/field/field.go new file mode 100644 index 0000000..df32877 --- /dev/null +++ b/ignite/templates/field/field.go @@ -0,0 +1,216 @@ +// Package field provides methods to parse a field provided in a command with the format name:type +package field + +import ( + "fmt" + + "github.com/emicklei/proto" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/templates/field/datatype" +) + +// Field represents a field inside a structure for a component +// it can be a field contained in a type or inside the response of a query, etc... +type Field struct { + Name multiformatname.Name + DatatypeName datatype.Name + Datatype string +} + +// DataType returns the field Datatype. 
+func (f Field) DataType() string { + dt, ok := datatype.IsSupportedType(f.DatatypeName) + if !ok { + panic(fmt.Sprintf("unknown type %s", f.DatatypeName)) + } + return dt.DataType(f.Datatype) +} + +// IsSlice returns true if the field is a slice. +func (f Field) IsSlice() bool { + dt, ok := datatype.IsSupportedType(f.DatatypeName) + if !ok { + panic(fmt.Sprintf("unknown type %s", f.DatatypeName)) + } + + switch f.DatatypeName { + case datatype.StringSlice, + datatype.IntSlice, + datatype.UintSlice, + datatype.Coins, + datatype.DecCoins, + datatype.DecCoinSliceAlias, + datatype.StringSliceAlias, + datatype.IntSliceAlias, + datatype.UintSliceAlias, + datatype.CoinSliceAlias, + datatype.CustomSlice, + datatype.Bytes: + return true + case + datatype.String, + datatype.Address, + datatype.Bool, + datatype.Int, + datatype.Int64, + datatype.Uint, + datatype.Uint64, + datatype.DecCoin, + datatype.Coin, + datatype.Custom: + return false + default: + // For other types, we assume that it is a slice if non indexable. + return dt.NonIndex + } +} + +// ProtoFieldName returns the field name used in proto. +func (f Field) ProtoFieldName() string { + return f.Name.Snake +} + +// CLIUsage returns the field name used in CLI usage. +func (f Field) CLIUsage() string { + return f.Name.Kebab +} + +// ProtoType returns the field proto Datatype. +func (f Field) ProtoType(index int) string { + dt, ok := datatype.IsSupportedType(f.DatatypeName) + if !ok { + panic(fmt.Sprintf("unknown type %s", f.DatatypeName)) + } + return dt.ProtoType(f.Datatype, f.ProtoFieldName(), index) +} + +// CollectionsKeyValueType returns the field collections key value type. +func (f Field) CollectionsKeyValueType() string { + dt, ok := datatype.IsSupportedType(f.DatatypeName) + if !ok { + panic(fmt.Sprintf("unknown type %s", f.DatatypeName)) + } + return dt.CollectionsKeyValueName(f.Datatype) +} + +// DefaultTestValue returns the Datatype value default. 
+func (f Field) DefaultTestValue() string { + dt, ok := datatype.IsSupportedType(f.DatatypeName) + if !ok { + panic(fmt.Sprintf("unknown type %s", f.DatatypeName)) + } + return dt.DefaultTestValue +} + +// ValueLoop returns the Datatype value for loop iteration. +func (f Field) ValueLoop() string { + dt, ok := datatype.IsSupportedType(f.DatatypeName) + if !ok { + panic(fmt.Sprintf("unknown type %s", f.DatatypeName)) + } + return dt.ValueLoop +} + +// ValueIndex returns the Datatype value for indexes. +func (f Field) ValueIndex() string { + dt, ok := datatype.IsSupportedType(f.DatatypeName) + if !ok { + panic(fmt.Sprintf("unknown type %s", f.DatatypeName)) + } + if dt.NonIndex { + panic(fmt.Sprintf("non index type %s", f.DatatypeName)) + } + return dt.ValueIndex +} + +// ValueInvalidIndex returns the Datatype value for invalid indexes. +func (f Field) ValueInvalidIndex() string { + dt, ok := datatype.IsSupportedType(f.DatatypeName) + if !ok { + panic(fmt.Sprintf("unknown type %s", f.DatatypeName)) + } + if dt.NonIndex { + panic(fmt.Sprintf("non index type %s", f.DatatypeName)) + } + return dt.ValueInvalidIndex +} + +// GenesisArgs returns the Datatype genesis args. +func (f Field) GenesisArgs(value int) string { + dt, ok := datatype.IsSupportedType(f.DatatypeName) + if !ok { + panic(fmt.Sprintf("unknown type %s", f.DatatypeName)) + } + return dt.GenesisArgs(f.Name, value) +} + +// CLIArgs returns the Datatype CLI args. +// TODO(@julienrbrt): Once unused and fully replaced by AutoCLI, remove CLIArgs from DataType. +func (f Field) CLIArgs(prefix string, argIndex int) string { + dt, ok := datatype.IsSupportedType(f.DatatypeName) + if !ok { + panic(fmt.Sprintf("unknown type %s", f.DatatypeName)) + } + return dt.CLIArgs(f.Name, f.Datatype, prefix, argIndex) +} + +// ToBytes returns the Datatype byte array cast. 
+func (f Field) ToBytes(name string) string {
+	dt, ok := datatype.IsSupportedType(f.DatatypeName)
+	if !ok {
+		panic(fmt.Sprintf("unknown type %s", f.DatatypeName))
+	}
+	if dt.NonIndex {
+		panic(fmt.Sprintf("non index type %s", f.DatatypeName))
+	}
+	return dt.ToBytes(name)
+}
+
+// ToString returns the Datatype string cast.
+func (f Field) ToString(name string) string {
+	dt, ok := datatype.IsSupportedType(f.DatatypeName)
+	if !ok {
+		panic(fmt.Sprintf("unknown type %s", f.DatatypeName))
+	}
+	if dt.NonIndex {
+		panic(fmt.Sprintf("non index type %s", f.DatatypeName))
+	}
+	return dt.ToString(name)
+}
+
+// ToProtoField returns the Datatype as a *proto.Field node.
+func (f Field) ToProtoField(index int) *proto.NormalField {
+	// TODO: should this reject non-index types like ToBytes/ToString do?
+	dt, ok := datatype.IsSupportedType(f.DatatypeName)
+	if !ok {
+		panic(fmt.Sprintf("unknown type %s", f.DatatypeName))
+	}
+	return dt.ToProtoField(f.Datatype, f.ProtoFieldName(), index)
+}
+
+// GoCLIImports returns the Datatype imports for CLI package.
+func (f Field) GoCLIImports() []datatype.GoImport {
+	dt, ok := datatype.IsSupportedType(f.DatatypeName)
+	if !ok {
+		panic(fmt.Sprintf("unknown type %s", f.DatatypeName))
+	}
+	return dt.GoCLIImports
+}
+
+// ProtoImports returns the Datatype imports for proto files.
+func (f Field) ProtoImports() []string {
+	dt, ok := datatype.IsSupportedType(f.DatatypeName)
+	if !ok {
+		panic(fmt.Sprintf("unknown type %s", f.DatatypeName))
+	}
+	return dt.ProtoImports
+}
+
+// Value returns the field assign value.
+func (f Field) Value() string { + if f.DataType() == "string" { + return fmt.Sprintf(`"%s"`, f.Name.Snake) + } + return f.ValueIndex() +} diff --git a/ignite/templates/field/field_test.go b/ignite/templates/field/field_test.go new file mode 100644 index 0000000..f4a177f --- /dev/null +++ b/ignite/templates/field/field_test.go @@ -0,0 +1,89 @@ +package field + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/templates/field/datatype" +) + +// TestField_IsSlice tests the IsSlice method of Field struct. +func TestField_IsSlice(t *testing.T) { + testCases := []struct { + name string + field Field + expected bool + }{ + { + name: "array type should be slice", + field: Field{ + Name: multiformatname.Name{}, + DatatypeName: datatype.IntSlice, + Datatype: string(datatype.IntSlice), + }, + expected: true, + }, + { + name: "array type should be slice", + field: Field{ + Name: multiformatname.Name{}, + DatatypeName: datatype.Bytes, + Datatype: string(datatype.Bytes), + }, + expected: true, + }, + { + name: "array type should be slice", + field: Field{ + Name: multiformatname.Name{}, + DatatypeName: datatype.CoinSliceAlias, + Datatype: string(datatype.CoinSliceAlias), + }, + expected: true, + }, + { + name: "coin type should not be slice", + field: Field{ + Name: multiformatname.Name{}, + DatatypeName: datatype.Coin, + Datatype: string(datatype.Coin), + }, + expected: false, + }, + { + name: "string type should not be slice", + field: Field{ + Name: multiformatname.Name{}, + DatatypeName: datatype.String, + Datatype: "", + }, + expected: false, + }, + { + name: "int type should not be slice", + field: Field{ + Name: multiformatname.Name{}, + DatatypeName: datatype.Int, + Datatype: "", + }, + expected: false, + }, + { + name: "custom array type should be slice", + field: Field{ + Name: multiformatname.Name{}, + DatatypeName: datatype.CustomSlice, + Datatype: "Bar", + 
}, + expected: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + require.Equal(t, tc.expected, tc.field.IsSlice()) + }) + } +} diff --git a/ignite/templates/field/fields.go b/ignite/templates/field/fields.go new file mode 100644 index 0000000..1ca456f --- /dev/null +++ b/ignite/templates/field/fields.go @@ -0,0 +1,85 @@ +package field + +import ( + "fmt" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/templates/field/datatype" +) + +// Fields represents a Field slice. +type Fields []Field + +// GoCLIImports returns all go CLI imports. +func (f Fields) GoCLIImports() []datatype.GoImport { + allImports := make([]datatype.GoImport, 0) + exist := make(map[string]struct{}) + for _, fields := range f { + for _, goImport := range fields.GoCLIImports() { + if _, ok := exist[goImport.Name]; ok { + continue + } + exist[goImport.Name] = struct{}{} + allImports = append(allImports, goImport) + } + } + return allImports +} + +// ProtoImports returns all proto imports. +func (f Fields) ProtoImports() []string { + allImports := make([]string, 0) + exist := make(map[string]struct{}) + for _, fields := range f { + for _, protoImport := range fields.ProtoImports() { + if _, ok := exist[protoImport]; ok { + continue + } + exist[protoImport] = struct{}{} + allImports = append(allImports, protoImport) + } + } + return allImports +} + +// ProtoFieldNameAutoCLI returns all inline fields args for name used in proto. +// It should be used in AutoCLI to generate the field name. 
+func (f Fields) ProtoFieldNameAutoCLI() string { + args := "" + for i, field := range f { + // only the last field can be a variadic field + if i == len(f)-1 && field.IsSlice() { + args += fmt.Sprintf(`{ProtoField: "%s", Varargs: true}, `, field.ProtoFieldName()) + continue + } + + args += fmt.Sprintf(`{ProtoField: "%s"}, `, field.ProtoFieldName()) + } + args = strings.TrimSpace(args) + return strings.Trim(args, ",") +} + +// CLIUsage returns all inline fields args for CLI command usage. +func (f Fields) CLIUsage() string { + args := "" + for _, field := range f { + args += fmt.Sprintf(" [%s]", field.CLIUsage()) + } + return strings.TrimSpace(args) +} + +// Custom returns a list of custom fields. +func (f Fields) Custom() []string { + fields := make([]string, 0) + for _, field := range f { + if field.DatatypeName == datatype.Custom || field.DatatypeName == datatype.CustomSlice { + dataType, err := multiformatname.NewName(field.Datatype) + if err != nil { + panic(err) + } + fields = append(fields, dataType.Snake) + } + } + return fields +} diff --git a/ignite/templates/field/fields_test.go b/ignite/templates/field/fields_test.go new file mode 100644 index 0000000..56ec002 --- /dev/null +++ b/ignite/templates/field/fields_test.go @@ -0,0 +1,38 @@ +package field + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/templates/field/datatype" +) + +func TestFieldsCustom(t *testing.T) { + nameA, err := multiformatname.NewName("customA") + require.NoError(t, err) + nameB, err := multiformatname.NewName("customB") + require.NoError(t, err) + nameC, err := multiformatname.NewName("customC") + require.NoError(t, err) + + fields := Fields{ + { + Name: nameA, + DatatypeName: datatype.Custom, + Datatype: "ProductDetails", + }, + { + Name: nameB, + DatatypeName: datatype.CustomSlice, + Datatype: "LineItem", + }, + { + Name: nameC, + DatatypeName: datatype.String, + }, + } 
+ + require.Equal(t, []string{"product_details", "line_item"}, fields.Custom()) +} diff --git a/ignite/templates/field/parse.go b/ignite/templates/field/parse.go new file mode 100644 index 0000000..7c99967 --- /dev/null +++ b/ignite/templates/field/parse.go @@ -0,0 +1,128 @@ +package field + +import ( + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/templates/field/datatype" +) + +// validateField validates the field Name and type, and checks the name is not forbidden by Ignite CLI. +func validateField(field string, isForbiddenField func(string) error) (multiformatname.Name, datatype.Name, error) { + name, dataTypeName, err := parseField(field) + if err != nil { + return name, "", err + } + + // Ensure the field Name is not a Go reserved Name, it would generate an incorrect code + if err := isForbiddenField(name.LowerCamel); err != nil { + return name, "", errors.Errorf("%s can't be used as a field Name: %w", name, err) + } + + return name, dataTypeName, nil +} + +// parseField parses the field string and returns the multiformat name and datatype name. +func parseField(field string) (multiformatname.Name, datatype.Name, error) { + fieldSplit := strings.Split(field, datatype.Separator) + if len(fieldSplit) > 2 { + return multiformatname.Name{}, "", errors.Errorf("invalid field format: %s, should be 'Name' or 'Name:type'", field) + } + + name, err := multiformatname.NewName(fieldSplit[0]) + if err != nil { + return name, "", err + } + + // Check if the object has an explicit type. The default is a string + dataTypeName := datatype.String + isTypeSpecified := len(fieldSplit) == 2 + if isTypeSpecified { + dataTypeName = datatype.Name(fieldSplit[1]) + } + return name, dataTypeName, nil +} + +// MultipleCoins checks if the provided fields contain more than one coin type. 
+func MultipleCoins(fields []string) (bool, error) { + coinsCount := 0 + for _, field := range fields { + _, datatypeName, err := parseField(field) + if err != nil { + return false, err + } + if datatypeName == datatype.Coins || datatypeName == datatype.DecCoins || + datatypeName == datatype.CoinSliceAlias || datatypeName == datatype.DecCoinSliceAlias { + coinsCount++ + } + } + return coinsCount > 1, nil +} + +// ParseFields parses the provided fields, analyses the types +// and checks there is no duplicated field. +func ParseFields( + fields []string, + isForbiddenField func(string) error, + forbiddenFieldNames ...string, +) (Fields, error) { + // Used to check duplicated field + existingFields := make(map[string]struct{}) + for _, name := range forbiddenFieldNames { + if name != "" { + existingFields[name] = struct{}{} + } + } + + var parsedFields Fields + for _, field := range fields { + name, datatypeName, err := validateField(field, isForbiddenField) + if err != nil { + return parsedFields, err + } + + // Ensure the field is not duplicated + if _, exists := existingFields[name.LowerCamel]; exists { + return parsedFields, errors.Errorf("the field %s is duplicated", name.Original) + } + existingFields[name.LowerCamel] = struct{}{} + + // Check if is a static type + if _, ok := datatype.IsSupportedType(datatypeName); ok { + parsedFields = append(parsedFields, Field{ + Name: name, + DatatypeName: datatypeName, + }) + continue + } + + if strings.HasPrefix(string(datatypeName), datatype.ArrayPrefix) { + customArrayType := strings.TrimPrefix(string(datatypeName), datatype.ArrayPrefix) + if _, ok := datatype.IsSupportedType(datatype.Name(customArrayType)); !ok { + parsedFields = append(parsedFields, Field{ + Name: name, + Datatype: normalizeCustomTypeName(customArrayType), + DatatypeName: datatype.CustomSlice, + }) + continue + } + } + + parsedFields = append(parsedFields, Field{ + Name: name, + Datatype: normalizeCustomTypeName(string(datatypeName)), + DatatypeName: 
datatype.TypeCustom, + }) + } + return parsedFields, nil +} + +func normalizeCustomTypeName(customType string) string { + name, err := multiformatname.NewName(customType) + if err != nil { + return customType + } + + return name.UpperCamel +} diff --git a/ignite/templates/field/parse_test.go b/ignite/templates/field/parse_test.go new file mode 100644 index 0000000..b735fdf --- /dev/null +++ b/ignite/templates/field/parse_test.go @@ -0,0 +1,293 @@ +package field + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/templates/field/datatype" +) + +var ( + noCheck = func(string) error { return nil } + alwaysInvalid = func(string) error { return errors.New("invalid Name") } +) + +func TestForbiddenParseFields(t *testing.T) { + // check doesn't pass + _, err := ParseFields([]string{"foo"}, alwaysInvalid) + require.Error(t, err) + + // duplicated field + _, err = ParseFields([]string{"foo", "foo:int"}, noCheck) + require.Error(t, err) + + // invalid type + _, err = ParseFields([]string{"foo:invalid"}, alwaysInvalid) + require.Error(t, err) + + // invalid field Name + _, err = ParseFields([]string{"foo@bar:int"}, alwaysInvalid) + require.Error(t, err) + + // invalid format + _, err = ParseFields([]string{"foo:int:int"}, alwaysInvalid) + require.Error(t, err) +} + +func TestParseFields1(t *testing.T) { + name1, err := multiformatname.NewName("foo") + require.NoError(t, err) + name2, err := multiformatname.NewName("fooBar") + require.NoError(t, err) + name3, err := multiformatname.NewName("bar-foo") + require.NoError(t, err) + name4, err := multiformatname.NewName("foo_foo") + require.NoError(t, err) + + tests := []struct { + name string + fields []string + want Fields + err error + }{ + { + name: "test string types", + fields: []string{ + name1.Original, + name2.Original + ":string", + }, + want: Fields{ + { + Name: 
name1, + DatatypeName: datatype.String, + }, + { + Name: name2, + DatatypeName: datatype.String, + }, + }, + }, + { + name: "test number types", + fields: []string{ + name1.Original + ":uint", + name2.Original + ":int", + name3.Original + ":bool", + }, + want: Fields{ + { + Name: name1, + DatatypeName: datatype.Uint, + }, + { + Name: name2, + DatatypeName: datatype.Int, + }, + { + Name: name3, + DatatypeName: datatype.Bool, + }, + }, + }, + { + name: "test list types", + fields: []string{ + name1.Original + ":array.uint", + name2.Original + ":array.int", + name3.Original + ":array.string", + }, + want: Fields{ + { + Name: name1, + DatatypeName: datatype.UintSlice, + }, + { + Name: name2, + DatatypeName: datatype.IntSlice, + }, + { + Name: name3, + DatatypeName: datatype.StringSlice, + }, + }, + }, + { + name: "test mixed types", + fields: []string{ + name1.Original + ":uint", + name2.Original + ":array.coin", + name3.Original, + name4.Original + ":strings", + }, + want: Fields{ + { + Name: name1, + DatatypeName: datatype.Uint, + }, + { + Name: name2, + DatatypeName: datatype.Coins, + }, + { + Name: name3, + DatatypeName: datatype.String, + }, + { + Name: name4, + DatatypeName: datatype.StringSliceAlias, + }, + }, + }, + { + name: "test custom types", + fields: []string{ + name1.Original + ":Bla", + name2.Original + ":Test", + name4.Original + ":array.ProductDetails", + name3.Original, + }, + want: Fields{ + { + Name: name1, + DatatypeName: datatype.Custom, + Datatype: "Bla", + }, + { + Name: name2, + DatatypeName: datatype.Custom, + Datatype: "Test", + }, + { + Name: name4, + DatatypeName: datatype.CustomSlice, + Datatype: "ProductDetails", + }, + { + Name: name3, + DatatypeName: datatype.String, + }, + }, + }, + { + name: "test lowercase custom types", + fields: []string{ + name1.Original + ":employee", + name2.Original + ":array.employee", + }, + want: Fields{ + { + Name: name1, + DatatypeName: datatype.Custom, + Datatype: "Employee", + }, + { + Name: name2, + 
DatatypeName: datatype.CustomSlice, + Datatype: "Employee", + }, + }, + }, + { + name: "test sdk.Coin types", + fields: []string{ + name1.Original + ":coin", + name2.Original + ":array.coin", + }, + want: Fields{ + { + Name: name1, + DatatypeName: datatype.Coin, + }, + { + Name: name2, + DatatypeName: datatype.Coins, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := ParseFields(tt.fields, noCheck) + if tt.err != nil { + require.ErrorIs(t, err, tt.err) + return + } + require.NoError(t, err) + require.EqualValues(t, tt.want, got) + }) + } +} + +func TestMultipleCoins(t *testing.T) { + tests := []struct { + name string + fields []string + want bool + err error + }{ + { + name: "single coin field", + fields: []string{"amount:coin"}, + want: false, + }, + { + name: "multiple coin fields", + fields: []string{"amount:coin", "price:coin"}, + want: false, + }, + { + name: "coin and coins fields", + fields: []string{"amount:coin", "price:coins"}, + want: false, + }, + { + name: "multiple coins and decimal coins fields", + fields: []string{"amount:array.coin", "price:array.dec.coin"}, + want: true, + }, + { + name: "single coins field", + fields: []string{"amount:array.coin"}, + want: false, + }, + { + name: "multiple coins fields", + fields: []string{"amount:array.coin", "price:coins"}, + want: true, + }, + { + name: "mixed coin and coins fields", + fields: []string{"amount:coin", "price:dec.coins"}, + want: false, + }, + { + name: "no coin fields", + fields: []string{"name:string", "age:int"}, + want: false, + }, + { + name: "mixed types with single coin", + fields: []string{"name:string", "amount:coin", "age:int"}, + want: false, + }, + { + name: "mixed types with multiple coins", + fields: []string{"name:string", "amount:array.coin", "price:dec.coins", "age:int"}, + want: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := MultipleCoins(tt.fields) + if tt.err != nil { + 
require.ErrorIs(t, err, tt.err) + return + } + require.NoError(t, err) + require.EqualValues(t, tt.want, got) + }) + } +} diff --git a/ignite/templates/field/plushhelpers/plushhelpers.go b/ignite/templates/field/plushhelpers/plushhelpers.go new file mode 100644 index 0000000..ae11099 --- /dev/null +++ b/ignite/templates/field/plushhelpers/plushhelpers.go @@ -0,0 +1,70 @@ +package plushhelpers + +import ( + "strings" + + "github.com/gobuffalo/plush/v4" + + "github.com/ignite/cli/v29/ignite/pkg/xstrings" + "github.com/ignite/cli/v29/ignite/templates/field" + "github.com/ignite/cli/v29/ignite/templates/field/datatype" +) + +// ExtendPlushContext sets available field helpers on the provided context. +func ExtendPlushContext(ctx *plush.Context) { + ctx.Set("mergeGoImports", mergeGoImports) + ctx.Set("mergeProtoImports", mergeProtoImports) + ctx.Set("mergeCustomImports", mergeCustomImports) + ctx.Set("appendFieldsAndMergeCustomImports", appendFieldsAndMergeCustomImports) + ctx.Set("title", xstrings.Title) + ctx.Set("toLower", strings.ToLower) +} + +func appendFieldsAndMergeCustomImports(f field.Field, fields ...field.Fields) []string { + return mergeCustomImports(append(fields, field.Fields{f})...) 
+} + +func mergeCustomImports(fields ...field.Fields) []string { + allImports := make([]string, 0) + exist := make(map[string]struct{}) + for _, field := range fields { + for _, customImport := range field.Custom() { + if _, ok := exist[customImport]; ok { + continue + } + exist[customImport] = struct{}{} + allImports = append(allImports, customImport) + } + } + return allImports +} + +func mergeGoImports(fields ...field.Fields) []datatype.GoImport { + allImports := make([]datatype.GoImport, 0) + exist := make(map[string]struct{}) + for _, fields := range fields { + for _, goImport := range fields.GoCLIImports() { + if _, ok := exist[goImport.Name]; ok { + continue + } + exist[goImport.Name] = struct{}{} + allImports = append(allImports, goImport) + } + } + return allImports +} + +func mergeProtoImports(fields ...field.Fields) []string { + allImports := make([]string, 0) + exist := make(map[string]struct{}) + for _, fields := range fields { + for _, protoImport := range fields.ProtoImports() { + if _, ok := exist[protoImport]; ok { + continue + } + exist[protoImport] = struct{}{} + allImports = append(allImports, protoImport) + } + } + return allImports +} diff --git a/ignite/templates/ibc/files/packet/component/x/{{moduleName}}/keeper/{{packetName}}.go.plush b/ignite/templates/ibc/files/packet/component/x/{{moduleName}}/keeper/{{packetName}}.go.plush new file mode 100644 index 0000000..6394aff --- /dev/null +++ b/ignite/templates/ibc/files/packet/component/x/{{moduleName}}/keeper/{{packetName}}.go.plush @@ -0,0 +1,76 @@ +package keeper + +import ( + "context" + "errors" + + errorsmod "cosmossdk.io/errors" + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "<%= ModulePath %>/x/<%= moduleName %>/types" + clienttypes "github.com/cosmos/ibc-go/v10/modules/core/02-client/types" + channeltypes "github.com/cosmos/ibc-go/v10/modules/core/04-channel/types" +) + +// Transmit<%= packetName.PascalCase %>Packet transmits the 
packet over IBC with the specified source port and source channel +func (k Keeper) Transmit<%= packetName.PascalCase %>Packet( + ctx context.Context, + packetData types.<%= packetName.PascalCase %>PacketData, + sourcePort, + sourceChannel string, + timeoutHeight clienttypes.Height, + timeoutTimestamp uint64, +) (uint64, error) { + packetBytes, err := packetData.GetBytes() + if err != nil { + return 0, errorsmod.Wrapf(sdkerrors.ErrJSONMarshal, "cannot marshal the packet: %s", err) + } + + sdkCtx := sdk.UnwrapSDKContext(ctx) + return k.ibcKeeperFn().ChannelKeeper.SendPacket(sdkCtx, sourcePort, sourceChannel, timeoutHeight, timeoutTimestamp, packetBytes) +} + +// OnRecv<%= packetName.PascalCase %>Packet processes packet reception +func (k Keeper) OnRecv<%= packetName.PascalCase %>Packet(ctx context.Context, packet channeltypes.Packet, data types.<%= packetName.PascalCase %>PacketData) (packetAck types.<%= packetName.PascalCase %>PacketAck, err error) { + // validate packet data upon receiving + + // TODO: packet reception logic + + return packetAck, nil +} + +// OnAcknowledgement<%= packetName.PascalCase %>Packet responds to the success or failure of a packet +// acknowledgement written on the receiving chain. 
+func (k Keeper) OnAcknowledgement<%= packetName.PascalCase %>Packet(ctx context.Context, packet channeltypes.Packet, data types.<%= packetName.PascalCase %>PacketData, ack channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + + // TODO: failed acknowledgement logic + _ = dispatchedAck.Error + + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.<%= packetName.PascalCase %>PacketAck + + if err := k.cdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + // TODO: successful acknowledgement logic + + return nil + default: + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("invalid acknowledgment format") + } +} + +// OnTimeout<%= packetName.PascalCase %>Packet responds to the case where a packet has not been transmitted because of a timeout +func (k Keeper) OnTimeout<%= packetName.PascalCase %>Packet(ctx context.Context, packet channeltypes.Packet, data types.<%= packetName.UpperCamel %>PacketData) error { + + // TODO: packet timeout logic + + return nil +} diff --git a/ignite/templates/ibc/files/packet/component/x/{{moduleName}}/types/packet_{{packetName}}.go.plush b/ignite/templates/ibc/files/packet/component/x/{{moduleName}}/types/packet_{{packetName}}.go.plush new file mode 100644 index 0000000..f7d6dfe --- /dev/null +++ b/ignite/templates/ibc/files/packet/component/x/{{moduleName}}/types/packet_{{packetName}}.go.plush @@ -0,0 +1,10 @@ +package types + +// GetBytes is a helper for serialising +func (p <%= packetName.PascalCase %>PacketData) GetBytes() ([]byte, error) { + var modulePacket <%= title(moduleName) %>PacketData + + modulePacket.Packet = &<%= title(moduleName) %>PacketData_<%= packetName.PascalCase %>Packet{&p} 
+ + return modulePacket.Marshal() +} \ No newline at end of file diff --git a/ignite/templates/ibc/files/packet/messages/x/{{moduleName}}/client/cli/tx_{{packetName}}.go.plush b/ignite/templates/ibc/files/packet/messages/x/{{moduleName}}/client/cli/tx_{{packetName}}.go.plush new file mode 100644 index 0000000..0475923 --- /dev/null +++ b/ignite/templates/ibc/files/packet/messages/x/{{moduleName}}/client/cli/tx_{{packetName}}.go.plush @@ -0,0 +1,62 @@ +package cli + +import ( + <%= for (goImport) in mergeGoImports(fields) { %> + <%= goImport.Alias %> "<%= goImport.Name %>"<% } %> + "time" + + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/client/tx" + "github.com/spf13/cobra" + + "<%= ModulePath %>/x/<%= moduleName %>/types" +) + +// CmdSend<%= packetName.PascalCase %>() returns the <%= packetName.PascalCase %> send packet command. +// This command does not use AutoCLI because it gives a better UX to do not. +func CmdSend<%= packetName.PascalCase %>() *cobra.Command { + flagPacketTimeoutTimestamp := "packet-timeout-timestamp" + + cmd := &cobra.Command{ + Use: "send-<%= packetName.Kebab %> [src-port] [src-channel] <%= fields.CLIUsage() %>", + Short: "Send a <%= packetName.Original %> over IBC", + Args: cobra.ExactArgs(<%= len(fields) + 2 %>), + RunE: func(cmd *cobra.Command, args []string) error { + clientCtx, err := client.GetClientTxContext(cmd) + if err != nil { + return err + } + + <%= MsgSigner.LowerCamel %> := clientCtx.GetFromAddress().String() + srcPort := args[0] + srcChannel := args[1] + + <%= for (i, field) in fields { %> <%= field.CLIArgs("arg", i+2) %> + <% } %> + + // Get the relative timeout timestamp + timeoutTimestamp, err := cmd.Flags().GetUint64(flagPacketTimeoutTimestamp) + if err != nil { + return err + } + + if timeoutTimestamp != 0 { + now := time.Now().UnixNano() + if now <= 0 { + return err + } + timeoutTimestamp = uint64(now) + timeoutTimestamp + } + + msg := 
types.NewMsgSend<%= packetName.PascalCase %>(<%= MsgSigner.LowerCamel %>, srcPort, srcChannel, timeoutTimestamp<%= for (i, field) in fields { %>, arg<%= field.Name.PascalCase %><% } %>) + + return tx.GenerateOrBroadcastTxCLI(clientCtx, cmd.Flags(), msg) + }, + } + + cmd.Flags().Uint64(flagPacketTimeoutTimestamp, DefaultRelativePacketTimeoutTimestamp, "Packet timeout timestamp in nanoseconds. Default is 10 minutes.") + flags.AddTxFlagsToCmd(cmd) + + return cmd +} diff --git a/ignite/templates/ibc/files/packet/messages/x/{{moduleName}}/keeper/msg_server_{{packetName}}.go.plush b/ignite/templates/ibc/files/packet/messages/x/{{moduleName}}/keeper/msg_server_{{packetName}}.go.plush new file mode 100644 index 0000000..cdeec0a --- /dev/null +++ b/ignite/templates/ibc/files/packet/messages/x/{{moduleName}}/keeper/msg_server_{{packetName}}.go.plush @@ -0,0 +1,54 @@ +package keeper + +import ( + "fmt" + "context" + + errorsmod "cosmossdk.io/errors" + "<%= ModulePath %>/x/<%= moduleName %>/types" + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + clienttypes "github.com/cosmos/ibc-go/v10/modules/core/02-client/types" +) + + +func (k msgServer) Send<%= packetName.PascalCase %>(ctx context.Context, msg *types.MsgSend<%= packetName.PascalCase %>) (*types.MsgSend<%= packetName.PascalCase %>Response, error) { + // validate incoming message + if _, err := k.addressCodec.StringToBytes(msg.<%= MsgSigner.PascalCase %>); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidAddress, fmt.Sprintf("invalid address: %s", err)) + } + + if msg.Port == "" { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "invalid packet port") + } + + if msg.ChannelID == "" { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "invalid packet channel") + } + + if msg.TimeoutTimestamp == 0 { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "invalid packet timeout") + } + + // TODO: logic before transmitting the packet + + // 
Construct the packet + var packet types.<%= packetName.PascalCase %>PacketData + <%= for (field) in fields { %> + packet.<%= field.Name.UpperCamel %> = msg.<%= field.Name.UpperCamel %><% } %> + + // Transmit the packet + _, err := k.Transmit<%= packetName.PascalCase %>Packet( + ctx, + packet, + msg.Port, + msg.ChannelID, + clienttypes.ZeroHeight(), + msg.TimeoutTimestamp, + ) + if err != nil { + return nil, err + } + + return &types.MsgSend<%= packetName.PascalCase %>Response{}, nil +} \ No newline at end of file diff --git a/ignite/templates/ibc/files/packet/messages/x/{{moduleName}}/keeper/msg_server_{{packetName}}_test.go.plush b/ignite/templates/ibc/files/packet/messages/x/{{moduleName}}/keeper/msg_server_{{packetName}}_test.go.plush new file mode 100644 index 0000000..3744bcf --- /dev/null +++ b/ignite/templates/ibc/files/packet/messages/x/{{moduleName}}/keeper/msg_server_{{packetName}}_test.go.plush @@ -0,0 +1,84 @@ +package keeper_test + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/require" + + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + channeltypes "github.com/cosmos/ibc-go/v10/modules/core/04-channel/types" + + "<%= ModulePath %>/x/<%= moduleName %>/keeper" + "<%= ModulePath %>/x/<%= moduleName %>/types" +) + +func TestMsgServerSend<%= packetName.UpperCamel %>(t *testing.T) { + f := initFixture(t) + srv := keeper.NewMsgServerImpl(f.keeper) + <%= MsgSigner.LowerCamel %>, err := f.addressCodec.BytesToString([]byte("signerAddr__________________")) + require.NoError(t, err) + + tests := []struct { + name string + msg types.MsgSend<%= packetName.UpperCamel %> + err error + }{ + { + name: "invalid address", + msg: types.MsgSend<%= packetName.UpperCamel %>{ + <%= MsgSigner.UpperCamel %>: "invalid address", + Port: "port", + ChannelID: "channel-0", + TimeoutTimestamp: 100, + }, + err: sdkerrors.ErrInvalidAddress, + }, { + name: "invalid port", + msg: types.MsgSend<%= packetName.UpperCamel %>{ + <%= MsgSigner.UpperCamel %>: <%= 
MsgSigner.LowerCamel %>, + Port: "", + ChannelID: "channel-0", + TimeoutTimestamp: 100, + }, + err: sdkerrors.ErrInvalidRequest, + }, { + name: "invalid channel", + msg: types.MsgSend<%= packetName.UpperCamel %>{ + <%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>, + Port: "port", + ChannelID: "", + TimeoutTimestamp: 100, + }, + err: sdkerrors.ErrInvalidRequest, + }, { + name: "invalid timeout", + msg: types.MsgSend<%= packetName.UpperCamel %>{ + <%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>, + Port: "port", + ChannelID: "channel-0", + TimeoutTimestamp: 0, + }, + err: sdkerrors.ErrInvalidRequest, + }, { + name: "valid message", + msg: types.MsgSend<%= packetName.UpperCamel %>{ + <%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>, + Port: "port", + ChannelID: "channel-0", + TimeoutTimestamp: 100, + }, + err: errors.New("channel not found"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err = srv.Send<%= packetName.UpperCamel %>(f.ctx, &tt.msg) + if tt.err != nil { + require.ErrorContains(t, err, tt.err.Error()) + return + } + require.NoError(t, err) + }) + } +} diff --git a/ignite/templates/ibc/files/packet/messages/x/{{moduleName}}/types/messages_{{packetName}}.go.plush b/ignite/templates/ibc/files/packet/messages/x/{{moduleName}}/types/messages_{{packetName}}.go.plush new file mode 100644 index 0000000..733505e --- /dev/null +++ b/ignite/templates/ibc/files/packet/messages/x/{{moduleName}}/types/messages_{{packetName}}.go.plush @@ -0,0 +1,17 @@ +package types + +func NewMsgSend<%= packetName.PascalCase %>( + <%= MsgSigner.LowerCamel %> string, + port string, + channelID string, + timeoutTimestamp uint64,<%= for (field) in fields { %> + <%= field.Name.LowerCamel %> <%= field.DataType() %>,<% } %> +) *MsgSend<%= packetName.PascalCase %> { + return &MsgSend<%= packetName.PascalCase %>{ + <%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>, + Port: port, + ChannelID: channelID, + TimeoutTimestamp: 
timeoutTimestamp,<%= for (field) in fields { %> + <%= field.Name.UpperCamel %>: <%= field.Name.LowerCamel %>,<% } %> + } +} \ No newline at end of file diff --git a/ignite/templates/ibc/packet.go b/ignite/templates/ibc/packet.go new file mode 100644 index 0000000..1973396 --- /dev/null +++ b/ignite/templates/ibc/packet.go @@ -0,0 +1,439 @@ +package ibc + +import ( + "embed" + "fmt" + "io/fs" + "path/filepath" + + "github.com/emicklei/proto" + "github.com/gobuffalo/genny/v2" + "github.com/gobuffalo/plush/v4" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" + "github.com/ignite/cli/v29/ignite/pkg/xast" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/pkg/xstrings" + "github.com/ignite/cli/v29/ignite/templates/field" + "github.com/ignite/cli/v29/ignite/templates/field/plushhelpers" + "github.com/ignite/cli/v29/ignite/templates/typed" +) + +var ( + //go:embed files/packet/component/* files/packet/component/**/* + fsPacketComponent embed.FS + + //go:embed files/packet/messages/* files/packet/messages/**/* + fsPacketMessages embed.FS +) + +// PacketOptions are options to scaffold a packet in a IBC module. +type PacketOptions struct { + AppName string + ProtoDir string + ProtoVer string + ModuleName string + ModulePath string + PacketName multiformatname.Name + MsgSigner multiformatname.Name + Fields field.Fields + AckFields field.Fields + NoMessage bool +} + +// ProtoFile returns the path to the proto folder. +func (opts *PacketOptions) ProtoFile(fname string) string { + return filepath.Join(opts.ProtoDir, opts.AppName, opts.ModuleName, opts.ProtoVer, fname) +} + +// NewPacket returns the generator to scaffold a packet in an IBC module. 
+func NewPacket(opts *PacketOptions) (*genny.Generator, error) { + subPacketComponent, err := fs.Sub(fsPacketComponent, "files/packet/component") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + subPacketMessages, err := fs.Sub(fsPacketMessages, "files/packet/messages") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + + // Add the component + g := genny.New() + g.RunFn(moduleModify(opts)) + g.RunFn(protoModify(opts)) + g.RunFn(eventModify(opts)) + if err := g.OnlyFS(subPacketComponent, nil, nil); err != nil { + return g, err + } + + // Add the send message + if !opts.NoMessage { + g.RunFn(protoTxModify(opts)) + g.RunFn(clientCliTxModify(opts)) + g.RunFn(codecModify(opts)) + if err := g.OnlyFS(subPacketMessages, nil, nil); err != nil { + return g, err + } + } + + ctx := plush.NewContext() + ctx.Set("moduleName", opts.ModuleName) + ctx.Set("ModulePath", opts.ModulePath) + ctx.Set("appName", opts.AppName) + ctx.Set("protoVer", opts.ProtoVer) + ctx.Set("packetName", opts.PacketName) + ctx.Set("MsgSigner", opts.MsgSigner) + ctx.Set("fields", opts.Fields) + ctx.Set("ackFields", opts.AckFields) + + plushhelpers.ExtendPlushContext(ctx) + g.Transformer(xgenny.Transformer(ctx)) + g.Transformer(genny.Replace("{{protoDir}}", opts.ProtoDir)) + g.Transformer(genny.Replace("{{appName}}", opts.AppName)) + g.Transformer(genny.Replace("{{moduleName}}", opts.ModuleName)) + g.Transformer(genny.Replace("{{protoVer}}", opts.ProtoVer)) + g.Transformer(genny.Replace("{{packetName}}", opts.PacketName.Snake)) + + return g, nil +} + +func moduleModify(opts *PacketOptions) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "module/module_ibc.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + // Recv packet dispatch + templateRecv := `packetAck, err := im.keeper.OnRecv%[1]vPacket(ctx, modulePacket, *packet.%[1]vPacket) + if err != nil { + ack = 
channeltypes.NewErrorAcknowledgement(err) + } else { + // Encode packet acknowledgment + packetAckBytes, err := im.cdc.MarshalJSON(&packetAck) + if err != nil { + return channeltypes.NewErrorAcknowledgement(errorsmod.Wrap(sdkerrors.ErrJSONMarshal, err.Error())) + } + ack = channeltypes.NewResultAcknowledgement(packetAckBytes) + } + + + sdkCtx := sdk.UnwrapSDKContext(ctx) + sdkCtx.EventManager().EmitEvent( + sdk.NewEvent( + types.EventType%[1]vPacket, + sdk.NewAttribute(sdk.AttributeKeyModule, types.ModuleName), + sdk.NewAttribute(types.AttributeKeyAckSuccess, fmt.Sprintf("%%t", err != nil)), + ), + )` + replacementRecv := fmt.Sprintf( + templateRecv, + opts.PacketName.UpperCamel, + ) + content, err := xast.ModifyFunction( + f.String(), + "OnRecvPacket", + xast.AppendSwitchCase( + "packet := modulePacketData.Packet.(type)", + fmt.Sprintf("*types.%[1]vPacketData_%[2]vPacket", xstrings.Title(opts.ModuleName), opts.PacketName.UpperCamel), + replacementRecv, + ), + ) + if err != nil { + return err + } + + // Ack packet dispatch + templateAck := `err := im.keeper.OnAcknowledgement%[1]vPacket(ctx, modulePacket, *packet.%[1]vPacket, ack) + if err != nil { + return err + } + eventType = types.EventType%[1]vPacket` + replacementAck := fmt.Sprintf( + templateAck, + opts.PacketName.UpperCamel, + ) + content, err = xast.ModifyFunction( + content, + "OnAcknowledgementPacket", + xast.AppendSwitchCase( + "packet := modulePacketData.Packet.(type)", + fmt.Sprintf("*types.%[1]vPacketData_%[2]vPacket", xstrings.Title(opts.ModuleName), opts.PacketName.UpperCamel), + replacementAck, + ), + ) + if err != nil { + return err + } + + // Timeout packet dispatch + templateTimeout := `err := im.keeper.OnTimeout%[1]vPacket(ctx, modulePacket, *packet.%[1]vPacket) + if err != nil { + return err + }` + replacementTimeout := fmt.Sprintf( + templateTimeout, + opts.PacketName.UpperCamel, + ) + content, err = xast.ModifyFunction( + content, + "OnTimeoutPacket", + xast.AppendSwitchCase( + "packet := 
modulePacketData.Packet.(type)", + fmt.Sprintf("*types.%[1]vPacketData_%[2]vPacket", xstrings.Title(opts.ModuleName), opts.PacketName.UpperCamel), + replacementTimeout, + ), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +// Modifies packet.proto to add a field on the oneof element of the message created and +// add a couple of messages. +// +// What it depends on: +// - Existence of a Oneof field named 'packet'. +func protoModify(opts *PacketOptions) genny.RunFn { + return func(r *genny.Runner) error { + path := opts.ProtoFile("packet.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + name := fmt.Sprintf("%sPacketData", xstrings.Title(opts.ModuleName)) + message, err := protoutil.GetMessageByName(protoFile, name) + if err != nil { + return errors.Errorf("failed while looking up '%s' message in %s: %w", name, path, err) + } + // Use a directly Apply call here, modifying oneofs isn't common enough to warrant a separate function. + var packet *proto.Oneof + protoutil.Apply(message, nil, func(c *protoutil.Cursor) bool { + if o, ok := c.Node().(*proto.Oneof); ok { + if o.Name == "packet" { + packet = o + return false + } + } + // continue traversing. + return true + }) + if packet == nil { + return errors.Errorf("could not find 'oneof packet' in message '%s' of file %s", name, path) + } + // Count fields of oneof: + maximum := 1 + protoutil.Apply(packet, nil, func(c *protoutil.Cursor) bool { + if o, ok := c.Node().(*proto.OneOfField); ok { + if o.Sequence > maximum { + maximum = o.Sequence + } + } + return true + }) + // Add it to Oneof. 
+ typenamePascal, typenameSnake := opts.PacketName.PascalCase, opts.PacketName.Snake + packetField := protoutil.NewOneofField(typenameSnake+"_packet", typenamePascal+"PacketData", maximum+1) + protoutil.Append(packet, packetField) + + // Add the message definition for packet and acknowledgment + var packetFields []*proto.NormalField + for i, f := range opts.Fields { + packetFields = append(packetFields, f.ToProtoField(i+1)) + } + packetData := protoutil.NewMessage(typenamePascal+"PacketData", protoutil.WithFields(packetFields...)) + protoutil.AttachComment(packetData, typenamePascal+"PacketData defines a struct for the packet payload") + var ackFields []*proto.NormalField + for i, f := range opts.AckFields { + ackFields = append(ackFields, f.ToProtoField(i+1)) + } + packetAck := protoutil.NewMessage(typenamePascal+"PacketAck", protoutil.WithFields(ackFields...)) + protoutil.AttachComment(packetAck, typenamePascal+"PacketAck defines a struct for the packet acknowledgment") + protoutil.Append(protoFile, packetData, packetAck) + + // Add any custom imports. + var protoImports []*proto.Import + for _, imp := range append(opts.Fields.ProtoImports(), opts.AckFields.ProtoImports()...) { + protoImports = append(protoImports, protoutil.NewImport(imp)) + } + for _, f := range append(opts.Fields.Custom(), opts.AckFields.Custom()...) 
{ + protoPath := fmt.Sprintf("%[1]v/%[2]v/%[3]v/%[4]v.proto", opts.AppName, opts.ModuleName, opts.ProtoVer, f) + protoImports = append(protoImports, protoutil.NewImport(protoPath)) + } + if err := protoutil.AddImports(protoFile, true, protoImports...); err != nil { + return errors.Errorf("failed while adding imports to %s: %w", path, err) + } + + newFile := genny.NewFileS(path, protoutil.Print(protoFile)) + return r.File(newFile) + } +} + +func eventModify(opts *PacketOptions) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "types/events_ibc.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + // Keeper declaration + content, err := xast.InsertGlobal( + f.String(), + xast.GlobalTypeConst, + xast.WithGlobal( + fmt.Sprintf("EventType%[1]vPacket", opts.PacketName.UpperCamel), + "", + fmt.Sprintf(`"%[1]v_packet"`, opts.PacketName.LowerCamel), + ), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +// Modifies tx.proto to add a new RPC and the required messages. +// +// What it depends on: +// - Existence of a service named 'Msg'. The other elements don't depend on already existing +// elements in the file. +func protoTxModify(opts *PacketOptions) genny.RunFn { + return func(r *genny.Runner) error { + path := opts.ProtoFile("tx.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + + // Add RPC to service Msg. 
+ serviceMsg, err := protoutil.GetServiceByName(protoFile, "Msg") + if err != nil { + return errors.Errorf("failed while looking up service 'Msg' in %s: %w", path, err) + } + typenamePascal := opts.PacketName.PascalCase + send := protoutil.NewRPC( + fmt.Sprintf("Send%s", typenamePascal), + fmt.Sprintf("MsgSend%s", typenamePascal), + fmt.Sprintf("MsgSend%sResponse", typenamePascal), + ) + protoutil.Append(serviceMsg, send) + + // Create fields for MsgSend. + var sendFields []*proto.NormalField + for i, field := range opts.Fields { + sendFields = append(sendFields, field.ToProtoField(i+5)) + } + + // set address options on signer field + signerField := protoutil.NewField(opts.MsgSigner.Snake, "string", 1) + signerField.Options = append(signerField.Options, protoutil.NewOption("cosmos_proto.scalar", "cosmos.AddressString", protoutil.Custom())) + + sendFields = append(sendFields, + signerField, + protoutil.NewField("port", "string", 2), + protoutil.NewField("channelID", "string", 3), + protoutil.NewField("timeoutTimestamp", "uint64", 4), + ) + creatorOpt := protoutil.NewOption(typed.MsgSignerOption, opts.MsgSigner.Snake) + + // Create MsgSend, MsgSendResponse and add to file. 
+ msgSend := protoutil.NewMessage( + "MsgSend"+typenamePascal, + protoutil.WithFields(sendFields...), + protoutil.WithMessageOptions(creatorOpt), + ) + msgSendResponse := protoutil.NewMessage("MsgSend" + typenamePascal + "Response") + protoutil.Append(protoFile, msgSend, msgSendResponse) + + // Ensure custom types are imported + var protoImports []*proto.Import + for _, imp := range opts.Fields.ProtoImports() { + protoImports = append(protoImports, protoutil.NewImport(imp)) + } + for _, f := range opts.Fields.Custom() { + protopath := fmt.Sprintf("%[1]v/%[2]v/%[3]v/%[4]v.proto", opts.AppName, opts.ModuleName, opts.ProtoVer, f) + protoImports = append(protoImports, protoutil.NewImport(protopath)) + } + if err := protoutil.AddImports(protoFile, true, protoImports...); err != nil { + return errors.Errorf("error while processing %s: %w", path, err) + } + + newFile := genny.NewFileS(path, protoutil.Print(protoFile)) + return r.File(newFile) + } +} + +// clientCliTxModify does not use AutoCLI here, because it as a better UX as it is. 
+func clientCliTxModify(opts *PacketOptions) genny.RunFn { + return func(r *genny.Runner) error { + filePath := filepath.Join("x", opts.ModuleName, "client/cli/tx.go") + f, err := r.Disk.Find(filePath) + if err != nil { + return err + } + replacement := fmt.Sprintf("cmd.AddCommand(CmdSend%[1]v())", opts.PacketName.UpperCamel) + content, err := xast.ModifyFunction( + f.String(), + "GetTxCmd", + xast.AppendFuncCode(replacement), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(filePath, content) + return r.File(newFile) + } +} + +func codecModify(opts *PacketOptions) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "types/codec.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + // Set import if not set yet + content, err := xast.AppendImports(f.String(), xast.WithNamedImport("sdk", "github.com/cosmos/cosmos-sdk/types")) + if err != nil { + return err + } + + // Register the module packet interface + templateInterface := `registrar.RegisterImplementations((*sdk.Msg)(nil), + &MsgSend%[1]v{}, +)` + replacementInterface := fmt.Sprintf(templateInterface, opts.PacketName.PascalCase) + content, err = xast.ModifyFunction( + content, + "RegisterInterfaces", + xast.AppendFuncAtLine(replacementInterface, 0), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} diff --git a/ignite/templates/message/files/message/x/{{moduleName}}/keeper/msg_server_{{msgName}}.go.plush b/ignite/templates/message/files/message/x/{{moduleName}}/keeper/msg_server_{{msgName}}.go.plush new file mode 100644 index 0000000..81ed319 --- /dev/null +++ b/ignite/templates/message/files/message/x/{{moduleName}}/keeper/msg_server_{{msgName}}.go.plush @@ -0,0 +1,19 @@ +package keeper + +import ( + "context" + + "<%= ModulePath %>/x/<%= ModuleName %>/types" + sdk "github.com/cosmos/cosmos-sdk/types" +) + + +func (k msgServer) <%= MsgName.PascalCase %>(ctx 
context.Context, msg *types.Msg<%= MsgName.PascalCase %>) (*types.Msg<%= MsgName.PascalCase %>Response, error) { + if _, err := k.addressCodec.StringToBytes(msg.<%= MsgSigner.PascalCase %>); err != nil { + return nil, errorsmod.Wrap(err, "invalid authority address") + } + + // TODO: Handle the message + + return &types.Msg<%= MsgName.PascalCase %>Response{}, nil +} diff --git a/ignite/templates/message/files/simapp/x/{{moduleName}}/simulation/{{msgName}}.go.plush b/ignite/templates/message/files/simapp/x/{{moduleName}}/simulation/{{msgName}}.go.plush new file mode 100644 index 0000000..2017520 --- /dev/null +++ b/ignite/templates/message/files/simapp/x/{{moduleName}}/simulation/{{msgName}}.go.plush @@ -0,0 +1,31 @@ +package simulation + +import ( + "math/rand" + + "github.com/cosmos/cosmos-sdk/baseapp" + sdk "github.com/cosmos/cosmos-sdk/types" + simtypes "github.com/cosmos/cosmos-sdk/types/simulation" + + "<%= ModulePath %>/x/<%= ModuleName %>/keeper" + "<%= ModulePath %>/x/<%= ModuleName %>/types" +) + +func SimulateMsg<%= MsgName.PascalCase %>( + ak types.AuthKeeper, + bk types.BankKeeper, + k keeper.Keeper, + txGen client.TxConfig, +) simtypes.Operation { + return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, + ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { + simAccount, _ := simtypes.RandomAcc(r, accs) + msg := &types.Msg<%= MsgName.PascalCase %>{ + <%= MsgSigner.UpperCamel %>: simAccount.Address.String(), + } + + // TODO: Handle the <%= MsgName.PascalCase %> simulation + + return simtypes.NoOpMsg(types.ModuleName, sdk.MsgTypeURL(msg), "<%= MsgName.PascalCase %> simulation not implemented"), nil, nil + } +} diff --git a/ignite/templates/message/message.go b/ignite/templates/message/message.go new file mode 100644 index 0000000..0e467b4 --- /dev/null +++ b/ignite/templates/message/message.go @@ -0,0 +1,255 @@ +package message + +import ( + "embed" + "fmt" + "io/fs" + "path/filepath" + + 
"github.com/emicklei/proto" + "github.com/gobuffalo/genny/v2" + "github.com/gobuffalo/plush/v4" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" + "github.com/ignite/cli/v29/ignite/pkg/xast" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/templates/field/plushhelpers" + "github.com/ignite/cli/v29/ignite/templates/typed" +) + +var ( + //go:embed files/message/* files/message/**/* + fsMessage embed.FS + + //go:embed files/simapp/* files/simapp/**/* + fsSimapp embed.FS +) + +func Box(box fs.FS, opts *Options, g *genny.Generator) error { + if err := g.OnlyFS(box, nil, nil); err != nil { + return err + } + ctx := plush.NewContext() + ctx.Set("ModuleName", opts.ModuleName) + ctx.Set("ProtoVer", opts.ProtoVer) + ctx.Set("AppName", opts.AppName) + ctx.Set("MsgName", opts.MsgName) + ctx.Set("MsgDesc", opts.MsgDesc) + ctx.Set("MsgSigner", opts.MsgSigner) + ctx.Set("ModulePath", opts.ModulePath) + ctx.Set("Fields", opts.Fields) + ctx.Set("ResFields", opts.ResFields) + + plushhelpers.ExtendPlushContext(ctx) + g.Transformer(xgenny.Transformer(ctx)) + g.Transformer(genny.Replace("{{protoDir}}", opts.ProtoDir)) + g.Transformer(genny.Replace("{{appName}}", opts.AppName)) + g.Transformer(genny.Replace("{{moduleName}}", opts.ModuleName)) + g.Transformer(genny.Replace("{{protoVer}}", opts.ProtoVer)) + g.Transformer(genny.Replace("{{msgName}}", opts.MsgName.Snake)) + + return nil +} + +// NewGenerator returns the generator to scaffold a empty message in a module. 
+func NewGenerator(opts *Options) (*genny.Generator, error) { + g := genny.New() + + g.RunFn(protoTxRPCModify(opts)) + g.RunFn(protoTxMessageModify(opts)) + g.RunFn(typesCodecModify(opts)) + g.RunFn(clientCliTxModify(opts)) + + subMessage, err := fs.Sub(fsMessage, "files/message") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + + if !opts.NoSimulation { + g.RunFn(moduleSimulationModify(opts)) + subSimapp, err := fs.Sub(fsSimapp, "files/simapp") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + if err := Box(subSimapp, opts, g); err != nil { + return nil, err + } + } + return g, Box(subMessage, opts, g) +} + +// protoTxRPCModify modifies the tx.proto file to add the required RPCs and messages. +// +// What it expects: +// - A service named "Msg" to exist in the proto file, it appends the RPCs inside it. +func protoTxRPCModify(opts *Options) genny.RunFn { + return func(r *genny.Runner) error { + path := opts.ProtoFile("tx.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + // = Add new rpc to Msg. 
+ serviceMsg, err := protoutil.GetServiceByName(protoFile, "Msg") + if err != nil { + return errors.Errorf("failed while looking up service 'Msg' in %s: %w", path, err) + } + typenamePascal := opts.MsgName.PascalCase + protoutil.Append( + serviceMsg, + protoutil.NewRPC( + typenamePascal, + fmt.Sprintf("Msg%s", typenamePascal), + fmt.Sprintf("Msg%sResponse", typenamePascal), + ), + ) + + newFile := genny.NewFileS(path, protoutil.Print(protoFile)) + return r.File(newFile) + } +} + +func protoTxMessageModify(opts *Options) genny.RunFn { + return func(r *genny.Runner) error { + path := opts.ProtoFile("tx.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + // Prepare the fields and create the messages. + creator := protoutil.NewField(opts.MsgSigner.Snake, "string", 1) + creator.Options = append(creator.Options, protoutil.NewOption("cosmos_proto.scalar", "cosmos.AddressString", protoutil.Custom())) // set the scalar annotation + creatorOpt := protoutil.NewOption(typed.MsgSignerOption, opts.MsgSigner.Snake) + msgFields := []*proto.NormalField{creator} + for i, field := range opts.Fields { + msgFields = append(msgFields, field.ToProtoField(i+2)) + } + var resFields []*proto.NormalField + for i, field := range opts.ResFields { + resFields = append(resFields, field.ToProtoField(i+1)) + } + + typenamePascal := opts.MsgName.PascalCase + msg := protoutil.NewMessage( + "Msg"+typenamePascal, + protoutil.WithFields(msgFields...), + protoutil.WithMessageOptions(creatorOpt), + ) + msgResp := protoutil.NewMessage("Msg"+typenamePascal+"Response", protoutil.WithFields(resFields...)) + protoutil.Append(protoFile, msg, msgResp) + + // Ensure custom types are imported + var protoImports []*proto.Import + for _, imp := range append(opts.ResFields.ProtoImports(), opts.Fields.ProtoImports()...) 
{ + protoImports = append(protoImports, protoutil.NewImport(imp)) + } + for _, f := range append(opts.ResFields.Custom(), opts.Fields.Custom()...) { + protoPath := fmt.Sprintf("%[1]v/%[2]v/%[3]v/%[4]v.proto", opts.AppName, opts.ModuleName, opts.ProtoVer, f) + protoImports = append(protoImports, protoutil.NewImport(protoPath)) + } + if err = protoutil.AddImports(protoFile, true, protoImports...); err != nil { + return errors.Errorf("failed to add imports to %s: %w", path, err) + } + + newFile := genny.NewFileS(path, protoutil.Print(protoFile)) + return r.File(newFile) + } +} + +func typesCodecModify(opts *Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "types/codec.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + // Import + content, err := xast.AppendImports(f.String(), xast.WithNamedImport("sdk", "github.com/cosmos/cosmos-sdk/types")) + if err != nil { + return err + } + + templateRegisterImplementations := `registrar.RegisterImplementations((*sdk.Msg)(nil), + &Msg%[1]v{}, +)` + replacementRegisterImplementations := fmt.Sprintf( + templateRegisterImplementations, + opts.MsgName.PascalCase, + ) + + content, err = xast.ModifyFunction( + content, + "RegisterInterfaces", + xast.AppendFuncAtLine(replacementRegisterImplementations, 0), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func clientCliTxModify(opts *Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "module/autocli.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + option := fmt.Sprintf( + `{ + RpcMethod: "%[1]v", + Use: "%[2]v", + Short: "Send a %[3]v tx", + PositionalArgs: []*autocliv1.PositionalArgDescriptor{%[4]s}, + }`, + opts.MsgName.PascalCase, + fmt.Sprintf("%s %s", opts.MsgName.Kebab, opts.Fields.CLIUsage()), + opts.MsgName.Original, + 
opts.Fields.ProtoFieldNameAutoCLI(), + ) + content, err := typed.AppendAutoCLITxOptions(f.String(), option) + if err != nil { + return err + } + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func moduleSimulationModify(opts *Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "module/simulation.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + content, err := typed.ModuleSimulationMsgModify( + f.String(), + opts.ModulePath, + opts.ModuleName, + opts.MsgName, + opts.MsgSigner, + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} diff --git a/ignite/templates/message/options.go b/ignite/templates/message/options.go new file mode 100644 index 0000000..418b398 --- /dev/null +++ b/ignite/templates/message/options.go @@ -0,0 +1,28 @@ +package message + +import ( + "path/filepath" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/templates/field" +) + +// Options ... +type Options struct { + AppName string + ProtoDir string + ProtoVer string + ModuleName string + ModulePath string + MsgName multiformatname.Name + MsgSigner multiformatname.Name + MsgDesc string + Fields field.Fields + ResFields field.Fields + NoSimulation bool +} + +// ProtoFile returns the path to the proto folder. 
+func (opts *Options) ProtoFile(fname string) string { + return filepath.Join(opts.ProtoDir, opts.AppName, opts.ModuleName, opts.ProtoVer, fname) +} diff --git a/ignite/templates/module/const.go b/ignite/templates/module/const.go new file mode 100644 index 0000000..cdc7105 --- /dev/null +++ b/ignite/templates/module/const.go @@ -0,0 +1,8 @@ +package module + +const ( + PathAppModule = "app" + PathAppGo = "app/app.go" + PathIBCConfigGo = "app/ibc.go" + PathAppConfigGo = "app/app_config.go" +) diff --git a/ignite/templates/module/create/app_config_ast.go b/ignite/templates/module/create/app_config_ast.go new file mode 100644 index 0000000..0d01801 --- /dev/null +++ b/ignite/templates/module/create/app_config_ast.go @@ -0,0 +1,445 @@ +package modulecreate + +import ( + "bytes" + "fmt" + "go/ast" + "go/format" + "go/parser" + "go/token" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +type addModuleAppConfigOptions struct { + skipConfig bool + runtimeFields []string +} + +type AddModuleAppConfigOption func(*addModuleAppConfigOptions) + +func SkipConfigEntry() AddModuleAppConfigOption { + return func(opts *addModuleAppConfigOptions) { + opts.skipConfig = true + } +} + +// SpecifyModuleEntry allows to define to which field the module should be added in the app config. +// E.g. "PreBlockers", "InitGenesis", "BeginBlockers", "EndBlockers". +func SpecifyModuleEntry(fields ...string) AddModuleAppConfigOption { + return func(opts *addModuleAppConfigOptions) { + opts.runtimeFields = fields + } +} + +// AddModuleToAppConfig appends a given module to the chain app config. +func AddModuleToAppConfig(content, moduleName string, opts ...AddModuleAppConfigOption) (string, error) { + options := addModuleAppConfigOptions{} + for _, opt := range opts { + opt(&options) + } + return AddModuleToAppConfigWithOptions(content, moduleName, options) +} + +// AddModuleToAppConfigWithOptions appends a given module to the chain app config with options. 
+func AddModuleToAppConfigWithOptions(content, moduleName string, opts addModuleAppConfigOptions) (string, error) { + fileSet := token.NewFileSet() + file, err := parser.ParseFile(fileSet, "", content, parser.ParseComments) + if err != nil { + return "", err + } + commentMap := ast.NewCommentMap(fileSet, file, file.Comments) + + appConfigLit, err := findAppConfigCompositeLiteral(file) + if err != nil { + return "", err + } + + modulesField, err := findKeyValueByName(appConfigLit, "Modules") + if err != nil { + return "", err + } + + runtimeModuleLit, err := findRuntimeModuleCompositeLiteral(file, modulesField.Value, fileSet) + if err != nil { + return "", err + } + + fields := opts.runtimeFields + if len(fields) == 0 { + fields = []string{"InitGenesis", "BeginBlockers", "EndBlockers"} + } + + for _, fieldName := range fields { + if err := appendModuleNameToRuntimeField(file, runtimeModuleLit, fieldName, moduleName, fileSet); err != nil { + return "", err + } + } + + if !opts.skipConfig { + if err := appendModuleConfigEntry(file, modulesField.Value, moduleName, fileSet); err != nil { + return "", err + } + } + + file.Comments = commentMap.Filter(file).Comments() + + var buf bytes.Buffer + if err := format.Node(&buf, fileSet, file); err != nil { + return "", err + } + + formatted, err := format.Source(buf.Bytes()) + if err != nil { + return "", err + } + + return string(formatted), nil +} + +func findAppConfigCompositeLiteral(file *ast.File) (*ast.CompositeLit, error) { + for _, decl := range file.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok || genDecl.Tok != token.VAR { + continue + } + + for _, spec := range genDecl.Specs { + valueSpec, ok := spec.(*ast.ValueSpec) + if !ok { + continue + } + + for i, name := range valueSpec.Names { + if name.Name != "appConfig" && name.Name != "AppConfig" { + continue + } + if len(valueSpec.Values) == 0 { + return nil, errors.Errorf("%s has no value", name.Name) + } + + valueIdx := i + if valueIdx >= len(valueSpec.Values) 
{ + valueIdx = 0 + } + + return findCompositeLiteralByType(file, valueSpec.Values[valueIdx], "appv1alpha1", "Config") + } + } + } + + return nil, errors.New("app config variable not found") +} + +func findRuntimeModuleCompositeLiteral( + file *ast.File, + modulesExpr ast.Expr, + fileSet *token.FileSet, +) (*ast.CompositeLit, error) { + modulesLit, err := resolveCompositeLiteral(file, modulesExpr) + if err != nil { + return nil, errors.Errorf("resolve modules list: %w", err) + } + + for _, elt := range modulesLit.Elts { + moduleConfigLit, err := resolveCompositeLiteral(file, elt) + if err != nil { + continue + } + + nameField, err := findKeyValueByName(moduleConfigLit, "Name") + if err != nil { + continue + } + + nameValue, err := exprString(fileSet, nameField.Value) + if err != nil { + return nil, err + } + if nameValue != "runtime.ModuleName" { + continue + } + + configField, err := findKeyValueByName(moduleConfigLit, "Config") + if err != nil { + return nil, errors.Errorf("runtime module config field not found: %w", err) + } + + return findCompositeLiteralByType(file, configField.Value, "runtimev1alpha1", "Module") + } + + return nil, errors.New("runtime module not found in app config") +} + +func appendModuleNameToRuntimeField( + file *ast.File, + runtimeModuleLit *ast.CompositeLit, + fieldName, moduleName string, + fileSet *token.FileSet, +) error { + field, err := findKeyValueByName(runtimeModuleLit, fieldName) + if err != nil { + return errors.Errorf("%s field not found in runtime module: %w", fieldName, err) + } + + listLit, err := resolveCompositeLiteral(file, field.Value) + if err != nil { + return errors.Errorf("resolve %s list: %w", fieldName, err) + } + + moduleExprText := fmt.Sprintf("%smoduletypes.ModuleName", moduleName) + normalizedModuleExpr := normalizedExpr(moduleExprText) + + for _, elt := range listLit.Elts { + existing, err := exprString(fileSet, elt) + if err != nil { + return err + } + if normalizedExpr(existing) == normalizedModuleExpr { + 
return nil + } + } + + appendCompositeLiteralElement(fileSet, listLit, moduleExprText) + return nil +} + +func appendModuleConfigEntry( + file *ast.File, + modulesExpr ast.Expr, + moduleName string, + fileSet *token.FileSet, +) error { + modulesLit, err := resolveCompositeLiteral(file, modulesExpr) + if err != nil { + return errors.Errorf("resolve modules list: %w", err) + } + + moduleNameText := fmt.Sprintf("%smoduletypes.ModuleName", moduleName) + moduleNamePattern := normalizedExpr(fmt.Sprintf("Name:%s", moduleNameText)) + + for _, elt := range modulesLit.Elts { + existingExpr, err := exprString(fileSet, elt) + if err == nil && strings.Contains(normalizedExpr(existingExpr), moduleNamePattern) { + return nil + } + + moduleConfigLit, err := resolveCompositeLiteral(file, elt) + if err != nil { + continue + } + + nameField, err := findKeyValueByName(moduleConfigLit, "Name") + if err != nil { + continue + } + + existingName, err := exprString(fileSet, nameField.Value) + if err != nil { + return err + } + if existingName == moduleNameText { + return nil + } + } + + newEntry := ast.NewIdent(fmt.Sprintf( + `{ + Name: %smoduletypes.ModuleName, + Config: appconfig.WrapAny(&%smoduletypes.Module{}), +}`, + moduleName, + moduleName, + )) + + appendCompositeLiteralElement(fileSet, modulesLit, newEntry.Name) + return nil +} + +func findCompositeLiteralByType( + file *ast.File, + expr ast.Expr, + pkgName, typeName string, +) (*ast.CompositeLit, error) { + lit := findCompositeLiteralByTypeExpr(file, expr, pkgName, typeName, map[string]struct{}{}) + if lit == nil { + return nil, errors.Errorf("composite literal %s.%s not found", pkgName, typeName) + } + return lit, nil +} + +func findCompositeLiteralByTypeExpr( + file *ast.File, + expr ast.Expr, + pkgName, typeName string, + visited map[string]struct{}, +) *ast.CompositeLit { + switch typedExpr := expr.(type) { + case *ast.ParenExpr: + return findCompositeLiteralByTypeExpr(file, typedExpr.X, pkgName, typeName, visited) + case 
*ast.UnaryExpr: + return findCompositeLiteralByTypeExpr(file, typedExpr.X, pkgName, typeName, visited) + case *ast.CallExpr: + for _, arg := range typedExpr.Args { + if lit := findCompositeLiteralByTypeExpr(file, arg, pkgName, typeName, visited); lit != nil { + return lit + } + } + case *ast.CompositeLit: + if isSelectorType(typedExpr.Type, pkgName, typeName) { + return typedExpr + } + for _, elt := range typedExpr.Elts { + keyValue, ok := elt.(*ast.KeyValueExpr) + if !ok { + continue + } + if lit := findCompositeLiteralByTypeExpr(file, keyValue.Value, pkgName, typeName, visited); lit != nil { + return lit + } + } + case *ast.Ident: + if _, ok := visited[typedExpr.Name]; ok { + return nil + } + visited[typedExpr.Name] = struct{}{} + + valueExpr, err := findGlobalValueExpr(file, typedExpr.Name) + if err != nil { + return nil + } + + return findCompositeLiteralByTypeExpr(file, valueExpr, pkgName, typeName, visited) + } + + return nil +} + +func resolveCompositeLiteral(file *ast.File, expr ast.Expr) (*ast.CompositeLit, error) { + switch typedExpr := expr.(type) { + case *ast.CompositeLit: + return typedExpr, nil + case *ast.ParenExpr: + return resolveCompositeLiteral(file, typedExpr.X) + case *ast.UnaryExpr: + return resolveCompositeLiteral(file, typedExpr.X) + case *ast.Ident: + valueExpr, err := findGlobalValueExpr(file, typedExpr.Name) + if err != nil { + return nil, err + } + return resolveCompositeLiteral(file, valueExpr) + default: + return nil, errors.Errorf("unsupported composite literal expression %T", expr) + } +} + +func findGlobalValueExpr(file *ast.File, name string) (ast.Expr, error) { + for _, decl := range file.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok || genDecl.Tok != token.VAR { + continue + } + + for _, spec := range genDecl.Specs { + valueSpec, ok := spec.(*ast.ValueSpec) + if !ok { + continue + } + + for i, varName := range valueSpec.Names { + if varName.Name != name { + continue + } + + if len(valueSpec.Values) == 0 { + return nil, 
errors.Errorf("global variable %q has no value", name) + } + + valueIdx := i + if valueIdx >= len(valueSpec.Values) { + valueIdx = 0 + } + + return valueSpec.Values[valueIdx], nil + } + } + } + + return nil, errors.Errorf("global variable %q not found", name) +} + +func findKeyValueByName(compLit *ast.CompositeLit, name string) (*ast.KeyValueExpr, error) { + for _, elt := range compLit.Elts { + keyValue, ok := elt.(*ast.KeyValueExpr) + if !ok { + continue + } + + key, ok := keyValue.Key.(*ast.Ident) + if ok && key.Name == name { + return keyValue, nil + } + } + + return nil, errors.Errorf("field %q not found", name) +} + +func isSelectorType(expr ast.Expr, pkgName, typeName string) bool { + selector, ok := expr.(*ast.SelectorExpr) + if !ok { + return false + } + + pkgIdent, ok := selector.X.(*ast.Ident) + if !ok { + return false + } + + return pkgIdent.Name == pkgName && selector.Sel.Name == typeName +} + +func exprString(fileSet *token.FileSet, expr ast.Expr) (string, error) { + var buf bytes.Buffer + if err := format.Node(&buf, fileSet, expr); err != nil { + return "", err + } + + return buf.String(), nil +} + +func normalizedExpr(expr string) string { + expr = strings.ReplaceAll(expr, " ", "") + expr = strings.ReplaceAll(expr, "\n", "") + expr = strings.ReplaceAll(expr, "\t", "") + return expr +} + +func appendCompositeLiteralElement(fileSet *token.FileSet, compLit *ast.CompositeLit, code string) { + file := fileSet.File(compLit.Pos()) + maxOffset := file.Offset(compLit.Rbrace) + for _, elt := range compLit.Elts { + if pos := elt.End(); pos.IsValid() { + offset := file.Offset(pos) + if offset > maxOffset { + maxOffset = offset + } + } + } + + insertPos := file.Pos(maxOffset) + value := ast.NewIdent(code) + value.NamePos = insertPos + + compLit.Elts = append(compLit.Elts, value) + compLit.Rbrace += token.Pos(1) + + if len(compLit.Elts) > 0 { + last := compLit.Elts[len(compLit.Elts)-1] + if file.Line(compLit.Rbrace) == file.Line(last.End())-1 { + 
file.AddLine(file.Offset(compLit.Rbrace)) + compLit.Rbrace += token.Pos(1) + } + } +} diff --git a/ignite/templates/module/create/app_config_ast_test.go b/ignite/templates/module/create/app_config_ast_test.go new file mode 100644 index 0000000..3306b9c --- /dev/null +++ b/ignite/templates/module/create/app_config_ast_test.go @@ -0,0 +1,40 @@ +package modulecreate + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestAddModuleToAppConfig(t *testing.T) { + content := readFixture(t, "../../app/files/app/app_config.go.plush") + + modified, err := AddModuleToAppConfig(content, "blog") + require.NoError(t, err) + normalized := normalizedExpr(modified) + require.Equal(t, 4, strings.Count(normalized, "blogmoduletypes.ModuleName")) + require.Contains(t, normalized, "Config:appconfig.WrapAny(&blogmoduletypes.Module{}),") + + modified, err = AddModuleToAppConfig(modified, "blog") + require.NoError(t, err) + require.Equal(t, 4, strings.Count(normalizedExpr(modified), "blogmoduletypes.ModuleName")) +} + +func TestAddModuleToLegacyAppConfig(t *testing.T) { + content := readFixture(t, "../../../pkg/cosmosanalysis/module/testdata/earth/app/app_config.go") + + modified, err := AddModuleToAppConfig(content, "venus") + require.NoError(t, err) + require.Equal(t, 4, strings.Count(normalizedExpr(modified), "venusmoduletypes.ModuleName")) +} + +func TestAddModuleToAppConfigWithSkipConfig(t *testing.T) { + content := readFixture(t, "../../app/files/app/app_config.go.plush") + + modified, err := AddModuleToAppConfig(content, "blog", SkipConfigEntry()) + require.NoError(t, err) + normalized := normalizedExpr(modified) + require.Equal(t, 3, strings.Count(normalized, "blogmoduletypes.ModuleName")) + require.NotContains(t, normalized, "Config:appconfig.WrapAny(&blogmoduletypes.Module{}),") +} diff --git a/ignite/templates/module/create/base.go b/ignite/templates/module/create/base.go new file mode 100644 index 0000000..576ba7d --- /dev/null +++ 
b/ignite/templates/module/create/base.go @@ -0,0 +1,173 @@ +package modulecreate + +import ( + "fmt" + "io/fs" + + "github.com/gobuffalo/genny/v2" + "github.com/gobuffalo/plush/v4" + "github.com/iancoleman/strcase" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gomodulepath" + "github.com/ignite/cli/v29/ignite/pkg/xast" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/pkg/xstrings" + "github.com/ignite/cli/v29/ignite/templates/field/plushhelpers" + "github.com/ignite/cli/v29/ignite/templates/module" +) + +// NewGenerator returns the generator to scaffold a module inside an app. +func NewGenerator(opts *CreateOptions) (*genny.Generator, error) { + subBase, err := fs.Sub(fsBase, "files/base") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + + g := genny.New() + if err := g.OnlyFS(subBase, nil, nil); err != nil { + return g, err + } + + appModulePath := gomodulepath.ExtractAppPath(opts.ModulePath) + + ctx := plush.NewContext() + ctx.Set("moduleName", opts.ModuleName) + ctx.Set("modulePath", opts.ModulePath) + ctx.Set("appName", opts.AppName) + ctx.Set("protoVer", opts.ProtoVer) + ctx.Set("dependencies", opts.Dependencies) + ctx.Set("params", opts.Params) + ctx.Set("configs", opts.Configs) + ctx.Set("isIBC", opts.IsIBC) + ctx.Set("apiPath", fmt.Sprintf("/%s/%s/%s", appModulePath, opts.ModuleName, opts.ProtoVer)) + ctx.Set("protoPkgName", module.ProtoPackageName(appModulePath, opts.ModuleName, opts.ProtoVer)) + ctx.Set("protoModulePkgName", module.ProtoModulePackageName(appModulePath, opts.ModuleName, opts.ProtoVer)) + ctx.Set("toVariableName", strcase.ToLowerCamel) + + plushhelpers.ExtendPlushContext(ctx) + g.Transformer(xgenny.Transformer(ctx)) + g.Transformer(genny.Replace("{{protoDir}}", opts.ProtoDir)) + g.Transformer(genny.Replace("{{appName}}", opts.AppName)) + g.Transformer(genny.Replace("{{moduleName}}", opts.ModuleName)) + 
g.Transformer(genny.Replace("{{protoVer}}", opts.ProtoVer)) + + return g, nil +} + +// NewAppModify returns generator with modifications required to register a module in the app. +func NewAppModify(opts *CreateOptions) *genny.Generator { + g := genny.New() + g.RunFn(appModify(opts)) + g.RunFn(appConfigModify(opts)) + if opts.IsIBC { + g.RunFn(appIBCModify(opts)) + } + return g +} + +func appConfigModify(opts *CreateOptions) genny.RunFn { + return func(r *genny.Runner) error { + configPath := module.PathAppConfigGo + fConfig, err := r.Disk.Find(configPath) + if err != nil { + return err + } + + // Import + content, err := xast.AppendImports( + fConfig.String(), + xast.WithNamedImport( + "_", + fmt.Sprintf("%[1]v/x/%[2]v/module", opts.ModulePath, opts.ModuleName), + ), + xast.WithNamedImport( + fmt.Sprintf("%[1]vmoduletypes", opts.ModuleName), + fmt.Sprintf("%[1]v/x/%[2]v/types", opts.ModulePath, opts.ModuleName), + ), + ) + if err != nil { + return err + } + + content, err = AddModuleToAppConfig(content, opts.ModuleName) + if err != nil { + return err + } + + // Module dependencies + for _, dep := range opts.Dependencies { + // If bank is a dependency, add account permissions to the module + if dep.Name == "Bank" { + replacement := fmt.Sprintf( + "{Account: %[1]vmoduletypes.ModuleName, Permissions: []string{authtypes.Minter, authtypes.Burner, authtypes.Staking}}", + opts.ModuleName, + ) + + // Keeper definition + content, err = xast.ModifyGlobalArrayVar(content, "moduleAccPerms", xast.AppendGlobalArrayValue(replacement)) + if err != nil { + return err + } + + } + } + + newFile := genny.NewFileS(configPath, content) + + return r.File(newFile) + } +} + +// app.go modification when creating a module. 
+func appModify(opts *CreateOptions) genny.RunFn { + return func(r *genny.Runner) error { + appPath := module.PathAppGo + f, err := r.Disk.Find(appPath) + if err != nil { + return err + } + + // Import + content, err := xast.AppendImports( + f.String(), + xast.WithNamedImport( + fmt.Sprintf("%[1]vmodulekeeper", opts.ModuleName), + fmt.Sprintf("%[1]v/x/%[2]v/keeper", opts.ModulePath, opts.ModuleName), + ), + ) + if err != nil { + return err + } + + // Keeper declaration + content, err = xast.ModifyStruct( + content, + "App", + xast.AppendStructValue( + fmt.Sprintf("%[1]vKeeper", xstrings.Title(opts.ModuleName)), + fmt.Sprintf("%[1]vmodulekeeper.Keeper", opts.ModuleName), + ), + ) + if err != nil { + return err + } + + // Keeper definition + content, err = xast.ModifyFunction( + content, + "New", + xast.AppendInsideFuncCall( + "Inject", + fmt.Sprintf("\n&app.%[1]vKeeper", xstrings.Title(opts.ModuleName)), + -1, + ), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(appPath, content) + return r.File(newFile) + } +} diff --git a/ignite/templates/module/create/configs.go b/ignite/templates/module/create/configs.go new file mode 100644 index 0000000..7d66b88 --- /dev/null +++ b/ignite/templates/module/create/configs.go @@ -0,0 +1,54 @@ +package modulecreate + +import ( + "path/filepath" + + "github.com/gobuffalo/genny/v2" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" +) + +// NewModuleConfigs returns the generator to scaffold a new configs inside a module. 
+func NewModuleConfigs(opts ConfigsOptions) (*genny.Generator, error) { + g := genny.New() + g.RunFn(configsProtoModify(opts)) + return g, nil +} + +func configsProtoModify(opts ConfigsOptions) genny.RunFn { + return func(r *genny.Runner) error { + // here we do not use opts.ProtoFile as it will append an extra opts.ProtoVer in the path + path := filepath.Join(opts.ProtoDir, opts.AppName, opts.ModuleName, "module", opts.ProtoVer, "module.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + + params, err := protoutil.GetMessageByName(protoFile, "Module") + if err != nil { + return errors.Errorf("couldn't find message 'Module' in %s: %w", path, err) + } + + for _, paramField := range opts.Configs { + _, err := protoutil.GetFieldByName(params, paramField.ProtoFieldName()) + if err == nil { + return errors.Errorf("duplicate field %s in %s", paramField.ProtoFieldName(), params.Name) + } + + param := protoutil.NewField( + paramField.ProtoFieldName(), + paramField.DataType(), + protoutil.NextUniqueID(params), + ) + protoutil.Append(params, param) + } + + newFile := genny.NewFileS(path, protoutil.Print(protoFile)) + return r.File(newFile) + } +} diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/genesis.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/genesis.go.plush new file mode 100644 index 0000000..cf9db47 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/genesis.go.plush @@ -0,0 +1,26 @@ +package keeper + +import ( + "context" + + "<%= modulePath %>/x/<%= moduleName %>/types" +) + +// InitGenesis initializes the module's state from a provided genesis state. +func (k Keeper) InitGenesis(ctx context.Context, genState types.GenesisState) error { + return k.Params.Set(ctx, genState.Params) +} + +// ExportGenesis returns the module's exported genesis. 
+func (k Keeper) ExportGenesis(ctx context.Context) (*types.GenesisState, error) { + var err error + + genesis := types.DefaultGenesis() + genesis.Params, err = k.Params.Get(ctx) + if err != nil { + return nil, err + } + + + return genesis, nil +} diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/genesis_test.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/genesis_test.go.plush new file mode 100644 index 0000000..a85d758 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/genesis_test.go.plush @@ -0,0 +1,26 @@ +package keeper_test + +import ( + "testing" + + "<%= modulePath %>/x/<%= moduleName %>/types" + + "github.com/stretchr/testify/require" +) + +func TestGenesis(t *testing.T) { + genesisState := types.GenesisState{ + Params: types.DefaultParams(), + <%= if (isIBC) { %>PortId: types.PortID,<% } %> + } + + f := initFixture(t) + err := f.keeper.InitGenesis(f.ctx, genesisState) + require.NoError(t, err) + got, err := f.keeper.ExportGenesis(f.ctx) + require.NoError(t, err) + require.NotNil(t, got) + + <%= if (isIBC) { %>require.Equal(t, genesisState.PortId, got.PortId)<% } %> + require.EqualExportedValues(t, genesisState.Params, got.Params) +} \ No newline at end of file diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/keeper.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/keeper.go.plush new file mode 100644 index 0000000..5c76ccf --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/keeper.go.plush @@ -0,0 +1,72 @@ +package keeper + +import ( + "fmt" + + "cosmossdk.io/collections" + "cosmossdk.io/core/address" + corestore "cosmossdk.io/core/store" + "github.com/cosmos/cosmos-sdk/codec" + <%= if (isIBC) { %> ibckeeper "github.com/cosmos/ibc-go/v10/modules/core/keeper" <% } %> + + "<%= modulePath %>/x/<%= moduleName %>/types" +) + +type Keeper struct { + storeService 
corestore.KVStoreService + cdc codec.Codec + addressCodec address.Codec + // Address capable of executing a MsgUpdateParams message. + // Typically, this should be the x/gov module account. + authority []byte + + Schema collections.Schema + Params collections.Item[types.Params] + <%= if (isIBC) { %> + Port collections.Item[string] + + ibcKeeperFn func() *ibckeeper.Keeper <% } %> + <%= for (dependency) in dependencies { %> + <%= toVariableName(dependency.KeeperName()) %> types.<%= dependency.KeeperName() %><% } %> +} + +func NewKeeper( + storeService corestore.KVStoreService, + cdc codec.Codec, + addressCodec address.Codec, + authority []byte,<%= if (isIBC) { %> + ibcKeeperFn func() *ibckeeper.Keeper,<% } %> + <%= for (dependency) in dependencies { %> + <%= toVariableName(dependency.KeeperName()) %> types.<%= dependency.KeeperName() %>,<% } %> +) Keeper { + if _, err := addressCodec.BytesToString(authority); err != nil { + panic(fmt.Sprintf("invalid authority address %s: %s", authority, err)) + } + + sb := collections.NewSchemaBuilder(storeService) + + k := Keeper{ + storeService: storeService, + cdc: cdc, + addressCodec: addressCodec, + authority: authority, + <%= for (dependency) in dependencies { %> + <%= toVariableName(dependency.KeeperName()) %>: <%= toVariableName(dependency.KeeperName()) %>,<% } %><%= if (isIBC) { %> + ibcKeeperFn: ibcKeeperFn, + Port: collections.NewItem(sb, types.PortKey, "port", collections.StringValue),<% } %> + Params: collections.NewItem(sb, types.ParamsKey, "params", codec.CollValue[types.Params](cdc)), + } + + schema, err := sb.Build() + if err != nil { + panic(err) + } + k.Schema = schema + + return k +} + +// GetAuthority returns the module's authority. 
+func (k Keeper) GetAuthority() []byte { + return k.authority +} diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/keeper_test.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/keeper_test.go.plush new file mode 100644 index 0000000..ea1a377 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/keeper_test.go.plush @@ -0,0 +1,59 @@ +package keeper_test + +import ( + "context" + "testing" + + "cosmossdk.io/core/address" + "cosmossdk.io/log" + storetypes "cosmossdk.io/store/types" + addresscodec "github.com/cosmos/cosmos-sdk/codec/address" + codectestutil "github.com/cosmos/cosmos-sdk/codec/testutil" + "github.com/cosmos/cosmos-sdk/runtime" + "github.com/cosmos/cosmos-sdk/testutil" + sdk "github.com/cosmos/cosmos-sdk/types" + moduletestutil "github.com/cosmos/cosmos-sdk/types/module/testutil" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + + "<%= modulePath %>/x/<%= moduleName %>/keeper" + module "<%= modulePath %>/x/<%= moduleName %>/module" + "<%= modulePath %>/x/<%= moduleName %>/types" +) + +type fixture struct { + ctx context.Context + keeper keeper.Keeper + addressCodec address.Codec +} + +func initFixture(t *testing.T) *fixture { + t.Helper() + + encCfg := moduletestutil.MakeTestEncodingConfig(module.AppModule{}) + addressCodec := addresscodec.NewBech32Codec(sdk.GetConfig().GetBech32AccountAddrPrefix()) + storeKey := storetypes.NewKVStoreKey(types.StoreKey) + + storeService := runtime.NewKVStoreService(storeKey) + ctx := testutil.DefaultContextWithDB(t, storeKey, storetypes.NewTransientStoreKey("transient_test")).Ctx + + authority := authtypes.NewModuleAddress(types.GovModuleName) + + k := keeper.NewKeeper( + storeService, + encCfg.Codec, + addressCodec, + authority,<%= for (dependency) in dependencies { %> + nil,<% } %> + ) + + // Initialize params + if err := k.Params.Set(ctx, types.DefaultParams()); err != nil { + t.Fatalf("failed to set params: %v", err) + } + 
+ return &fixture{ + ctx: ctx, + keeper: k, + addressCodec: addressCodec, + } +} \ No newline at end of file diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/msg_server.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/msg_server.go.plush new file mode 100644 index 0000000..b0c2dc5 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/msg_server.go.plush @@ -0,0 +1,17 @@ +package keeper + +import ( + "<%= modulePath %>/x/<%= moduleName %>/types" +) + +type msgServer struct { + Keeper +} + +// NewMsgServerImpl returns an implementation of the MsgServer interface +// for the provided Keeper. +func NewMsgServerImpl(keeper Keeper) types.MsgServer { + return &msgServer{Keeper: keeper} +} + +var _ types.MsgServer = msgServer{} diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/msg_update_params.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/msg_update_params.go.plush new file mode 100644 index 0000000..f93e8eb --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/msg_update_params.go.plush @@ -0,0 +1,32 @@ +package keeper + +import ( + "bytes" + "context" + + errorsmod "cosmossdk.io/errors" + + "<%= modulePath %>/x/<%= moduleName %>/types" +) + +func (k msgServer) UpdateParams(ctx context.Context, req *types.MsgUpdateParams) (*types.MsgUpdateParamsResponse, error) { + authority, err := k.addressCodec.StringToBytes(req.Authority); + if err != nil { + return nil, errorsmod.Wrap(err, "invalid authority address") + } + + if !bytes.Equal(k.GetAuthority(), authority) { + expectedAuthorityStr, _ := k.addressCodec.BytesToString(k.GetAuthority()) + return nil, errorsmod.Wrapf(types.ErrInvalidSigner, "invalid authority; expected %s, got %s", expectedAuthorityStr, req.Authority) + } + + if err := req.Params.Validate(); err != nil { + return nil, err + } + + if err := k.Params.Set(ctx, req.Params); err != 
nil { + return nil, err + } + + return &types.MsgUpdateParamsResponse{}, nil +} diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/msg_update_params_test.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/msg_update_params_test.go.plush new file mode 100644 index 0000000..e6c7639 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/msg_update_params_test.go.plush @@ -0,0 +1,69 @@ +package keeper_test + +import ( + "testing" + + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/stretchr/testify/require" + + "<%= modulePath %>/x/<%= moduleName %>/keeper" + "<%= modulePath %>/x/<%= moduleName %>/types" +) + +func TestMsgUpdateParams(t *testing.T) { + f := initFixture(t) + ms := keeper.NewMsgServerImpl(f.keeper) + + params := types.DefaultParams() + require.NoError(t, f.keeper.Params.Set(f.ctx, params)) + + authorityStr, err := f.addressCodec.BytesToString(f.keeper.GetAuthority()) + require.NoError(t, err) + + // default params + testCases := []struct { + name string + input *types.MsgUpdateParams + expErr bool + expErrMsg string + }{ + { + name: "invalid authority", + input: &types.MsgUpdateParams{ + Authority: "invalid", + Params: params, + }, + expErr: true, + expErrMsg: "invalid authority", + }, + { + name: "send enabled param", + input: &types.MsgUpdateParams{ + Authority: authorityStr, + Params: types.Params{}, + }, + expErr: false, + }, + { + name: "all good", + input: &types.MsgUpdateParams{ + Authority: authorityStr, + Params: params, + }, + expErr: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + _, err := ms.UpdateParams(f.ctx, tc.input) + + if tc.expErr { + require.Error(t, err) + require.Contains(t, err.Error(), tc.expErrMsg) + } else { + require.NoError(t, err) + } + }) + } +} diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/query.go.plush 
b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/query.go.plush new file mode 100644 index 0000000..6ba6ecb --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/query.go.plush @@ -0,0 +1,17 @@ +package keeper + +import ( + "<%= modulePath %>/x/<%= moduleName %>/types" +) + +var _ types.QueryServer = queryServer{} + +// NewQueryServerImpl returns an implementation of the QueryServer interface +// for the provided Keeper. +func NewQueryServerImpl(k Keeper) types.QueryServer { + return queryServer{k} +} + +type queryServer struct { + k Keeper +} \ No newline at end of file diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/query_params.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/query_params.go.plush new file mode 100644 index 0000000..f3b31a1 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/query_params.go.plush @@ -0,0 +1,26 @@ +package keeper + +import ( + "context" + "errors" + + "cosmossdk.io/collections" + + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "<%= modulePath %>/x/<%= moduleName %>/types" +) + +func (q queryServer) Params(ctx context.Context, req *types.QueryParamsRequest) (*types.QueryParamsResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + params, err := q.k.Params.Get(ctx) + if err != nil && !errors.Is(err, collections.ErrNotFound) { + return nil, status.Error(codes.Internal, "internal error") + } + + return &types.QueryParamsResponse{Params: params}, nil +} diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/query_params_test.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/query_params_test.go.plush new file mode 100644 index 0000000..cf317c5 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/keeper/query_params_test.go.plush @@ -0,0 
+1,22 @@ +package keeper_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "<%= modulePath %>/x/<%= moduleName %>/keeper" + "<%= modulePath %>/x/<%= moduleName %>/types" +) + +func TestParamsQuery(t *testing.T) { + f := initFixture(t) + + qs := keeper.NewQueryServerImpl(f.keeper) + params := types.DefaultParams() + require.NoError(t, f.keeper.Params.Set(f.ctx, params)) + + response, err := qs.Params(f.ctx, &types.QueryParamsRequest{}) + require.NoError(t, err) + require.Equal(t, &types.QueryParamsResponse{Params: params}, response) +} diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/module/autocli.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/module/autocli.go.plush new file mode 100644 index 0000000..85e4647 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/module/autocli.go.plush @@ -0,0 +1,33 @@ +package <%= moduleName %> + +import ( + autocliv1 "cosmossdk.io/api/cosmos/autocli/v1" + + "<%= modulePath %>/x/<%= moduleName %>/types" +) + +// AutoCLIOptions implements the autocli.HasAutoCLIConfig interface. 
+func (am AppModule) AutoCLIOptions() *autocliv1.ModuleOptions { + return &autocliv1.ModuleOptions{ + Query: &autocliv1.ServiceCommandDescriptor{ + Service: types.Query_serviceDesc.ServiceName, + RpcCommandOptions: []*autocliv1.RpcCommandOptions{ + { + RpcMethod: "Params", + Use: "params", + Short: "Shows the parameters of the module", + }, + }, + }, + Tx: &autocliv1.ServiceCommandDescriptor{ + Service: types.Msg_serviceDesc.ServiceName, + EnhanceCustomCommand: true, // only required if you want to use the custom command + RpcCommandOptions: []*autocliv1.RpcCommandOptions{ + { + RpcMethod: "UpdateParams", + Skip: true, // skipped because authority gated + }, + }, + }, + } +} diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/module/depinject.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/module/depinject.go.plush new file mode 100644 index 0000000..c9bc8a0 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/module/depinject.go.plush @@ -0,0 +1,69 @@ +package <%= moduleName %> + +import ( + "cosmossdk.io/core/address" + "cosmossdk.io/core/appmodule" + "cosmossdk.io/core/store" + "cosmossdk.io/depinject" + "cosmossdk.io/depinject/appconfig" + "cosmossdk.io/log" + "github.com/cosmos/cosmos-sdk/codec" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + ibckeeper "github.com/cosmos/ibc-go/v10/modules/core/keeper"<% } %> + + "<%= modulePath %>/x/<%= moduleName %>/keeper" + "<%= modulePath %>/x/<%= moduleName %>/types" +) + +var _ depinject.OnePerModuleType = AppModule{} + +// IsOnePerModuleType implements the depinject.OnePerModuleType interface. 
+func (AppModule) IsOnePerModuleType() {} + +func init() { + appconfig.Register( + &types.Module{}, + appconfig.Provide(ProvideModule), + ) +} + +type ModuleInputs struct { + depinject.In + + Config *types.Module + StoreService store.KVStoreService + Cdc codec.Codec + AddressCodec address.Codec + + AuthKeeper types.AuthKeeper + BankKeeper types.BankKeeper<%= for (dependency) in dependencies { %><%= if (dependency.Name != "Bank" && dependency.Name != "Auth") { %> + <%= dependency.KeeperName() %> types.<%= dependency.KeeperName() %><% } %><% } %> + + <%= if (isIBC) { %>IBCKeeperFn func() *ibckeeper.Keeper `optional:"true"` <% } %> +} + +type ModuleOutputs struct { + depinject.Out + + <%= title(moduleName) %>Keeper keeper.Keeper + Module appmodule.AppModule +} + +func ProvideModule(in ModuleInputs) ModuleOutputs { + // default to governance authority if not provided + authority := authtypes.NewModuleAddress(types.GovModuleName) + if in.Config.Authority != "" { + authority = authtypes.NewModuleAddressOrBech32Address(in.Config.Authority) + } + k := keeper.NewKeeper( + in.StoreService, + in.Cdc, + in.AddressCodec, + authority, <%= if (isIBC) { %> + in.IBCKeeperFn,<% } %><%= for (dependency) in dependencies { %> + in.<%= dependency.KeeperName() %>,<% } %> + ) + m := NewAppModule(in.Cdc, k, in.AuthKeeper, in.BankKeeper) + + return ModuleOutputs{<%= title(moduleName) %>Keeper: k, Module: m} +} diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/module/module.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/module/module.go.plush new file mode 100644 index 0000000..994dda2 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/module/module.go.plush @@ -0,0 +1,155 @@ +package <%= moduleName %> + +import ( + "context" + "encoding/json" + "fmt" + + "cosmossdk.io/core/appmodule" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + 
sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + "github.com/grpc-ecosystem/grpc-gateway/runtime" + "google.golang.org/grpc" + <%= if (isIBC) { %>porttypes "github.com/cosmos/ibc-go/v10/modules/core/05-port/types" + "github.com/spf13/cobra"<% } %> + + "<%= modulePath %>/x/<%= moduleName %>/keeper" + "<%= modulePath %>/x/<%= moduleName %>/types" + <%= if (isIBC) { %>"<%= modulePath %>/x/<%= moduleName %>/client/cli"<% } %> +) + +var ( + _ module.AppModuleBasic = (*AppModule)(nil) + _ module.AppModule = (*AppModule)(nil) + _ module.HasGenesis = (*AppModule)(nil) + + _ appmodule.AppModule = (*AppModule)(nil) + _ appmodule.HasBeginBlocker = (*AppModule)(nil) + _ appmodule.HasEndBlocker = (*AppModule)(nil) + <%= if (isIBC) { %>_ porttypes.IBCModule = (*IBCModule)(nil)<% } %> +) + +// AppModule implements the AppModule interface that defines the inter-dependent methods that modules need to implement +type AppModule struct { + cdc codec.Codec + keeper keeper.Keeper + authKeeper types.AuthKeeper + bankKeeper types.BankKeeper +} + +func NewAppModule( + cdc codec.Codec, + keeper keeper.Keeper, + authKeeper types.AuthKeeper, + bankKeeper types.BankKeeper, +) AppModule { + return AppModule{ + cdc: cdc, + keeper: keeper, + authKeeper: authKeeper, + bankKeeper: bankKeeper, + } +} + +// IsAppModule implements the appmodule.AppModule interface. +func (AppModule) IsAppModule() {} + +// Name returns the name of the module as a string. +func (AppModule) Name() string { + return types.ModuleName +} + +// RegisterLegacyAminoCodec registers the amino codec +func (AppModule) RegisterLegacyAminoCodec(*codec.LegacyAmino) {} + +// RegisterGRPCGatewayRoutes registers the gRPC Gateway routes for the module. 
+func (AppModule) RegisterGRPCGatewayRoutes(clientCtx client.Context, mux *runtime.ServeMux) { + if err := types.RegisterQueryHandlerClient(clientCtx.CmdContext, mux, types.NewQueryClient(clientCtx)); err != nil { + panic(err) + } +} + +// RegisterInterfaces registers a module's interface types and their concrete implementations as proto.Message. +func (AppModule) RegisterInterfaces(registrar codectypes.InterfaceRegistry) { + types.RegisterInterfaces(registrar) +} + +// RegisterServices registers a gRPC query service to respond to the module-specific gRPC queries +func (am AppModule) RegisterServices(registrar grpc.ServiceRegistrar) error { + types.RegisterMsgServer(registrar, keeper.NewMsgServerImpl(am.keeper)) + types.RegisterQueryServer(registrar, keeper.NewQueryServerImpl(am.keeper)) + + return nil +} + +// DefaultGenesis returns a default GenesisState for the module, marshalled to json.RawMessage. +// The default GenesisState need to be defined by the module developer and is primarily used for testing. +func (am AppModule) DefaultGenesis(codec.JSONCodec) json.RawMessage { + return am.cdc.MustMarshalJSON(types.DefaultGenesis()) +} + +// ValidateGenesis used to validate the GenesisState, given in its json.RawMessage form. +func (am AppModule) ValidateGenesis(_ codec.JSONCodec, _ client.TxEncodingConfig, bz json.RawMessage) error { + var genState types.GenesisState + if err := am.cdc.UnmarshalJSON(bz, &genState); err != nil { + return fmt.Errorf("failed to unmarshal %s genesis state: %w", types.ModuleName, err) + } + + return genState.Validate() +} + +// InitGenesis performs the module's genesis initialization. It returns no validator updates. 
+func (am AppModule) InitGenesis(ctx sdk.Context, _ codec.JSONCodec, gs json.RawMessage) { + var genState types.GenesisState + // Initialize global index to index in genesis state + if err := am.cdc.UnmarshalJSON(gs, &genState); err != nil { + panic(fmt.Errorf("failed to unmarshal %s genesis state: %w", types.ModuleName, err)) + } + + if err := am.keeper.InitGenesis(ctx, genState); err != nil { + panic(fmt.Errorf("failed to initialize %s genesis state: %w", types.ModuleName, err)) + } +} + +// ExportGenesis returns the module's exported genesis state as raw JSON bytes. +func (am AppModule) ExportGenesis(ctx sdk.Context, _ codec.JSONCodec) json.RawMessage { + genState, err := am.keeper.ExportGenesis(ctx) + if err != nil { + panic(fmt.Errorf("failed to export %s genesis state: %w", types.ModuleName, err)) + } + + bz, err := am.cdc.MarshalJSON(genState) + if err != nil { + panic(fmt.Errorf("failed to marshal %s genesis state: %w", types.ModuleName, err)) + } + + return bz +} + +// ConsensusVersion is a sequence number for state-breaking change of the module. +// It should be incremented on each consensus-breaking change introduced by the module. +// To avoid wrong/empty versions, the initial version should be set to 1. +func (AppModule) ConsensusVersion() uint64 { return 1 } + +// BeginBlock contains the logic that is automatically triggered at the beginning of each block. +// The begin block implementation is optional. +func (am AppModule) BeginBlock(_ context.Context) error { + return nil +} + +// EndBlock contains the logic that is automatically triggered at the end of each block. +// The end block implementation is optional. +func (am AppModule) EndBlock(_ context.Context) error { + return nil +} + +<%= if (isIBC) { %> +// GetTxCmd returns the root Tx command for the module. +// These commands enrich the AutoCLI tx commands. 
+func (AppModule) GetTxCmd() *cobra.Command { + return cli.GetTxCmd() +} +<% } %> \ No newline at end of file diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/module/simulation.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/module/simulation.go.plush new file mode 100644 index 0000000..eefed25 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/module/simulation.go.plush @@ -0,0 +1,38 @@ +package <%= moduleName %> + +import ( + "cosmossdk.io/core/address" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + simtypes "github.com/cosmos/cosmos-sdk/types/simulation" + + "<%= modulePath %>/x/<%= moduleName %>/types" +) + +// GenerateGenesisState creates a randomized GenState of the module. +func (AppModule) GenerateGenesisState(simState *module.SimulationState) { + accs := make([]string, len(simState.Accounts)) + for i, acc := range simState.Accounts { + accs[i] = acc.Address.String() + } + <%= moduleName %>Genesis := types.GenesisState{ + Params: types.DefaultParams(),<%= if (isIBC) { %> + PortId: types.PortID,<% } %> + } + simState.GenState[types.ModuleName] = simState.Cdc.MustMarshalJSON(&<%= moduleName %>Genesis) +} + +// RegisterStoreDecoder registers a decoder. +func (am AppModule) RegisterStoreDecoder(_ simtypes.StoreDecoderRegistry) {} + +// WeightedOperations returns the all the gov module operations with their respective weights. +func (am AppModule) WeightedOperations(simState module.SimulationState) []simtypes.WeightedOperation { + operations := make([]simtypes.WeightedOperation, 0) + return operations +} + +// ProposalMsgs returns msgs used for governance proposals for simulations. 
+func (am AppModule) ProposalMsgs(simState module.SimulationState) []simtypes.WeightedProposalMsg { + return []simtypes.WeightedProposalMsg{ + } +} diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/types/codec.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/types/codec.go.plush new file mode 100644 index 0000000..9e2bc1a --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/types/codec.go.plush @@ -0,0 +1,15 @@ +package types + +import ( + <%= if (isIBC) { %>"github.com/cosmos/cosmos-sdk/codec"<% } %> + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/msgservice" +) + +func RegisterInterfaces(registrar codectypes.InterfaceRegistry) { + registrar.RegisterImplementations((*sdk.Msg)(nil), + &MsgUpdateParams{}, + ) + msgservice.RegisterMsgServiceDesc(registrar, &_Msg_serviceDesc) +} diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/types/errors.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/types/errors.go.plush new file mode 100644 index 0000000..2520809 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/types/errors.go.plush @@ -0,0 +1,14 @@ +package types + +// DONTCOVER + +import ( + "cosmossdk.io/errors" +) + +// x/<%= moduleName %> module sentinel errors +var ( + ErrInvalidSigner = errors.Register(ModuleName, 1100, "expected gov account as only signer for proposal message") + <%= if (isIBC) { %>ErrInvalidPacketTimeout = errors.Register(ModuleName, 1500, "invalid packet timeout") + ErrInvalidVersion = errors.Register(ModuleName, 1501, "invalid version")<% } %> +) diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/types/expected_keepers.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/types/expected_keepers.go.plush new file mode 100644 index 0000000..799a05d --- /dev/null +++ 
b/ignite/templates/module/create/files/base/x/{{moduleName}}/types/expected_keepers.go.plush @@ -0,0 +1,91 @@ +package types + +import ( + "context" + + <%= if (dependencies.Contains("Staking")) { %>"cosmossdk.io/core/address" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types"<% } %> + <%= if (dependencies.Contains("Authz")) { %>"github.com/cosmos/cosmos-sdk/x/authz"<% } %> + sdk "github.com/cosmos/cosmos-sdk/types" +) + +<%= for (dependency) in dependencies { %> + <%= if (dependency.Name == "Account") { %> + <% } else if (dependency.Name == "Auth") { %> + <% } else if (dependency.Name == "Bank") { %> + <% } else if (dependency.Name == "Staking") { %> + // StakingKeeper defines the expected interface for the Staking module. + type StakingKeeper interface { + ConsensusAddressCodec() address.Codec + ValidatorByConsAddr(context.Context, sdk.ConsAddress) (stakingtypes.ValidatorI, error) + // Methods imported from account should be defined here + } + + <% } else if (dependency.Name == "Slashing") { %> + // SlashingKeeper defines the expected interface for the Slashing module. + type SlashingKeeper interface { + IsTombstoned(context.Context, sdk.ConsAddress) bool + Jail(context.Context, sdk.ConsAddress) error + // Methods imported from account should be defined here + } + + <% } else if (dependency.Name == "Distribution") { %> + // DistributionKeeper defines the expected interface for the Distribution module. + type DistributionKeeper interface { + FundCommunityPool(context.Context, sdk.Coins, sdk.AccAddress) error + // Methods imported from account should be defined here + } + + <% } else if (dependency.Name == "Mint") { %> + // MintKeeper defines the expected interface for the Mint module. + type MintKeeper interface { + MintCoins(context.Context, sdk.Coins) error + // Methods imported from account should be defined here + } + + <% } else if (dependency.Name == "Authz") { %> + // AuthzKeeper defines the expected interface for the Authz module. 
+ type AuthzKeeper interface { + GetAuthorizations(_ context.Context, _, _ sdk.AccAddress) ([]authz.Authorization, error) + // Methods imported from account should be defined here + } + + <% } else if (dependency.Name == "Feegrant") { %> + // FeegrantKeeper defines the expected interface for the FeeGrant module. + type FeegrantKeeper interface { + UseGrantedFees(_ context.Context, _, _ sdk.AccAddress, _ sdk.Coins, _ []sdk.Msg) error + // Methods imported from account should be defined here + } + + <% } else if (dependency.Name == "Group") { %> + // GroupKeeper defines the expected interface for the Group module. + type GroupKeeper interface { + GetGroupSequence(context.Context) uint64 + // Methods imported from account should be defined here + } + + <% } else { %> + type <%= dependency.KeeperName() %> interface { + // TODO Add methods imported from <%= toLower(dependency.Name) %> should be defined here + } + <% } %> +<% } %> + +// AuthKeeper defines the expected interface for the Auth module. +type AuthKeeper interface { + AddressCodec() address.Codec + GetAccount(context.Context, sdk.AccAddress) sdk.AccountI // only used for simulation + // Methods imported from account should be defined here +} + +// BankKeeper defines the expected interface for the Bank module. +type BankKeeper interface { + SpendableCoins(context.Context, sdk.AccAddress) sdk.Coins + // Methods imported from bank should be defined here +} + +// ParamSubspace defines the expected Subspace interface for parameters. 
+type ParamSubspace interface { + Get(context.Context, []byte, interface{}) + Set(context.Context, []byte, interface{}) +} diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/types/genesis.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/types/genesis.go.plush new file mode 100644 index 0000000..2870b38 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/types/genesis.go.plush @@ -0,0 +1,14 @@ +package types + +// DefaultGenesis returns the default genesis state +func DefaultGenesis() *GenesisState { + return &GenesisState{ + Params: DefaultParams(), + } +} + +// Validate performs basic genesis state validation returning an error upon any +// failure. +func (gs GenesisState) Validate() error { + return gs.Params.Validate() +} diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/types/genesis_test.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/types/genesis_test.go.plush new file mode 100644 index 0000000..4afded5 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/types/genesis_test.go.plush @@ -0,0 +1,39 @@ +package types_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + "<%= modulePath %>/x/<%= moduleName %>/types" +) + +func TestGenesisState_Validate(t *testing.T) { + tests := []struct { + desc string + genState *types.GenesisState + valid bool + } { + { + desc: "default is valid", + genState: types.DefaultGenesis(), + valid: true, + }, + { + desc: "valid genesis state", + genState: &types.GenesisState{ + <%= if (isIBC) { %>PortId: types.PortID,<% } %> + }, + valid: true, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + err := tc.genState.Validate() + if tc.valid { + require.NoError(t, err) + } else { + require.Error(t, err) + } + }) + } +} \ No newline at end of file diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/types/keys.go.plush 
b/ignite/templates/module/create/files/base/x/{{moduleName}}/types/keys.go.plush new file mode 100644 index 0000000..3a3c069 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/types/keys.go.plush @@ -0,0 +1,33 @@ +package types + +import "cosmossdk.io/collections" + +const ( + // ModuleName defines the module name + ModuleName = "<%= moduleName %>" + + // StoreKey defines the primary module store key + StoreKey = ModuleName + + // GovModuleName duplicates the gov module's name to avoid a dependency with x/gov. + // It should be synced with the gov module's name if it is ever changed. + // See: https://github.com/cosmos/cosmos-sdk/blob/v0.52.0-beta.2/x/gov/types/keys.go#L9 + GovModuleName = "gov" +<%= if (isIBC) { %> + // Version defines the current version the IBC module supports + Version = "<%= moduleName %>-1" + + // PortID is the default port id that module binds to + PortID = "<%= moduleName %>" +<% } %> +) + +<%= if (isIBC) { %> +var ( + // PortKey defines the key to store the port ID in store + PortKey = collections.NewPrefix("<%= moduleName %>-port-") +) +<% } %> + +// ParamsKey is the prefix to retrieve all Params +var ParamsKey = collections.NewPrefix("p_<%= moduleName %>") \ No newline at end of file diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/types/params.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/types/params.go.plush new file mode 100644 index 0000000..6cf8725 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/types/params.go.plush @@ -0,0 +1,41 @@ +package types + +<%= for (param) in params { %> + // Default<%= param.Name.UpperCamel %> represents the <%= param.Name.UpperCamel %> default value. + // TODO: Determine the default value. 
+ var Default<%= param.Name.UpperCamel %> <%= param.DataType() %> = <%= if (param.DataType() == "string") { %>"<%= param.ProtoFieldName() %>"<% } else { %><%= param.ValueIndex() %><% } %> +<% } %> + +// NewParams creates a new Params instance. +func NewParams(<%= for (param) in params { %> + <%= param.Name.LowerCamel %> <%= param.DataType() %>,<% } %> +) Params { + return Params{<%= for (param) in params { %> + <%= param.Name.UpperCamel %>: <%= param.Name.LowerCamel %>,<% } %> + } +} + +// DefaultParams returns a default set of parameters. +func DefaultParams() Params { + return NewParams(<%= for (param) in params { %> + Default<%= param.Name.UpperCamel %>,<% } %> + ) +} + +// Validate validates the set of params. +func (p Params) Validate() error {<%= for (param) in params { %> + if err := validate<%= param.Name.UpperCamel %>(p.<%= param.Name.UpperCamel %>); err != nil { + return err + } + <% } %> + + return nil +} + +<%= for (param) in params { %> +// validate<%= param.Name.UpperCamel %> validates the <%= param.Name.UpperCamel %> parameter. 
+func validate<%= param.Name.UpperCamel %>(v <%= param.DataType() %>) error { + // TODO implement validation + return nil +} +<% } %> diff --git a/ignite/templates/module/create/files/base/x/{{moduleName}}/types/types.go.plush b/ignite/templates/module/create/files/base/x/{{moduleName}}/types/types.go.plush new file mode 100644 index 0000000..d44a6c0 --- /dev/null +++ b/ignite/templates/module/create/files/base/x/{{moduleName}}/types/types.go.plush @@ -0,0 +1 @@ +package types diff --git a/ignite/templates/module/create/files/base/{{protoDir}}/{{appName}}/{{moduleName}}/module/{{protoVer}}/module.proto.plush b/ignite/templates/module/create/files/base/{{protoDir}}/{{appName}}/{{moduleName}}/module/{{protoVer}}/module.proto.plush new file mode 100644 index 0000000..20fee85 --- /dev/null +++ b/ignite/templates/module/create/files/base/{{protoDir}}/{{appName}}/{{moduleName}}/module/{{protoVer}}/module.proto.plush @@ -0,0 +1,20 @@ +syntax = "proto3"; +package <%= protoModulePkgName %>; + +import "cosmos/app/v1alpha1/module.proto"; + +option go_package = "<%= modulePath %>/x/<%= moduleName %>/types"; + +// Module is the config object for the module. +message Module { + option (cosmos.app.v1alpha1.module) = { + go_import: "<%= modulePath %>/x/<%= moduleName %>" + }; + + // authority defines the custom module authority. + // If not set, defaults to the governance module. 
+ string authority = 1; + + <%= for (i, config) in configs { %> + <%= config.ProtoType(i+2) %>;<% } %> +} \ No newline at end of file diff --git a/ignite/templates/module/create/files/base/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/genesis.proto.plush b/ignite/templates/module/create/files/base/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/genesis.proto.plush new file mode 100644 index 0000000..366275b --- /dev/null +++ b/ignite/templates/module/create/files/base/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/genesis.proto.plush @@ -0,0 +1,17 @@ +syntax = "proto3"; +package <%= protoPkgName %>; + +import "amino/amino.proto"; +import "gogoproto/gogo.proto"; +import "<%= appName %>/<%= moduleName %>/<%= protoVer %>/params.proto"; + +option go_package = "<%= modulePath %>/x/<%= moduleName %>/types"; + +// GenesisState defines the <%= moduleName %> module's genesis state. +message GenesisState { + // params defines all the parameters of the module. + Params params = 1 [ + (gogoproto.nullable) = false, + (amino.dont_omitempty) = true + ]; +} diff --git a/ignite/templates/module/create/files/base/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/params.proto.plush b/ignite/templates/module/create/files/base/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/params.proto.plush new file mode 100644 index 0000000..9cc5ac9 --- /dev/null +++ b/ignite/templates/module/create/files/base/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/params.proto.plush @@ -0,0 +1,15 @@ +syntax = "proto3"; +package <%= protoPkgName %>; + +import "amino/amino.proto"; +import "gogoproto/gogo.proto"; + +option go_package = "<%= modulePath %>/x/<%= moduleName %>/types"; + +// Params defines the parameters for the module. 
+message Params { + option (amino.name) = "<%= appName %>/x/<%= moduleName %>/Params"; + option (gogoproto.equal) = true; + <%= for (i, param) in params { %> + <%= param.ProtoType(i+1) %>;<% } %> +} \ No newline at end of file diff --git a/ignite/templates/module/create/files/base/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/query.proto.plush b/ignite/templates/module/create/files/base/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/query.proto.plush new file mode 100644 index 0000000..8d0d6d4 --- /dev/null +++ b/ignite/templates/module/create/files/base/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/query.proto.plush @@ -0,0 +1,30 @@ +syntax = "proto3"; +package <%= protoPkgName %>; + +import "amino/amino.proto"; +import "gogoproto/gogo.proto"; +import "google/api/annotations.proto"; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "<%= appName %>/<%= moduleName %>/<%= protoVer %>/params.proto"; + +option go_package = "<%= modulePath %>/x/<%= moduleName %>/types"; + +// Query defines the gRPC querier service. +service Query { + // Parameters queries the parameters of the module. + rpc Params(QueryParamsRequest) returns (QueryParamsResponse) { + option (google.api.http).get = "<%= apiPath %>/params"; + } +} + +// QueryParamsRequest is request type for the Query/Params RPC method. +message QueryParamsRequest {} + +// QueryParamsResponse is response type for the Query/Params RPC method. +message QueryParamsResponse { + // params holds all the parameters of this module. 
+ Params params = 1 [ + (gogoproto.nullable) = false, + (amino.dont_omitempty) = true + ]; +} \ No newline at end of file diff --git a/ignite/templates/module/create/files/base/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/tx.proto.plush b/ignite/templates/module/create/files/base/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/tx.proto.plush new file mode 100644 index 0000000..b523d61 --- /dev/null +++ b/ignite/templates/module/create/files/base/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/tx.proto.plush @@ -0,0 +1,40 @@ +syntax = "proto3"; +package <%= protoPkgName %>; + +import "amino/amino.proto"; +import "cosmos/msg/v1/msg.proto"; +import "cosmos_proto/cosmos.proto"; +import "gogoproto/gogo.proto"; +import "<%= appName %>/<%= moduleName %>/<%= protoVer %>/params.proto"; + +option go_package = "<%= modulePath %>/x/<%= moduleName %>/types"; + +// Msg defines the Msg service. +service Msg { + option (cosmos.msg.v1.service) = true; + + // UpdateParams defines a (governance) operation for updating the module + // parameters. The authority defaults to the x/gov module account. + rpc UpdateParams(MsgUpdateParams) returns (MsgUpdateParamsResponse); +} + +// MsgUpdateParams is the Msg/UpdateParams request type. +message MsgUpdateParams { + option (cosmos.msg.v1.signer) = "authority"; + option (amino.name) = "<%= appName %>/x/<%= moduleName %>/MsgUpdateParams"; + + // authority is the address that controls the module (defaults to x/gov unless overwritten). + string authority = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // params defines the module parameters to update. + // + // NOTE: All parameters must be supplied. + Params params = 2 [ + (gogoproto.nullable) = false, + (amino.dont_omitempty) = true + ]; +} + +// MsgUpdateParamsResponse defines the response structure for executing a +// MsgUpdateParams message. 
+message MsgUpdateParamsResponse {} \ No newline at end of file diff --git a/ignite/templates/module/create/files/ibc/x/{{moduleName}}/client/cli/tx.go.plush b/ignite/templates/module/create/files/ibc/x/{{moduleName}}/client/cli/tx.go.plush new file mode 100644 index 0000000..d17ea31 --- /dev/null +++ b/ignite/templates/module/create/files/ibc/x/{{moduleName}}/client/cli/tx.go.plush @@ -0,0 +1,29 @@ +package cli + +import ( + "fmt" + "time" + + "github.com/spf13/cobra" + + "github.com/cosmos/cosmos-sdk/client" + // "github.com/cosmos/cosmos-sdk/client/flags" + "<%= modulePath %>/x/<%= moduleName %>/types" +) + +var DefaultRelativePacketTimeoutTimestamp = uint64((time.Duration(10) * time.Minute).Nanoseconds()) + +const listSeparator = "," + +// GetTxCmd returns the transaction commands for this module. +func GetTxCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: types.ModuleName, + Short: fmt.Sprintf("%s transactions subcommands", types.ModuleName), + DisableFlagParsing: true, + SuggestionsMinimumDistance: 2, + RunE: client.ValidateCmd, + } + + return cmd +} diff --git a/ignite/templates/module/create/files/ibc/x/{{moduleName}}/keeper/keeper_test.go.plush b/ignite/templates/module/create/files/ibc/x/{{moduleName}}/keeper/keeper_test.go.plush new file mode 100644 index 0000000..f52e72b --- /dev/null +++ b/ignite/templates/module/create/files/ibc/x/{{moduleName}}/keeper/keeper_test.go.plush @@ -0,0 +1,97 @@ +package keeper_test + +import ( + "context" + "testing" + + "cosmossdk.io/core/address" + "cosmossdk.io/log" + storetypes "cosmossdk.io/store/types" + govtypes "github.com/cosmos/cosmos-sdk/x/gov/types" + paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" + upgradetypes "cosmossdk.io/x/upgrade/types" + addresscodec "github.com/cosmos/cosmos-sdk/codec/address" + codectestutil "github.com/cosmos/cosmos-sdk/codec/testutil" + "github.com/cosmos/cosmos-sdk/runtime" + "github.com/cosmos/cosmos-sdk/testutil" + sdk "github.com/cosmos/cosmos-sdk/types" + 
moduletestutil "github.com/cosmos/cosmos-sdk/types/module/testutil" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + clienttypes "github.com/cosmos/ibc-go/v10/modules/core/02-client/types" + ibckeeper "github.com/cosmos/ibc-go/v10/modules/core/keeper" + ibctypes "github.com/cosmos/ibc-go/v10/modules/core/types" + + "<%= modulePath %>/x/<%= moduleName %>/keeper" + module "<%= modulePath %>/x/<%= moduleName %>/module" + "<%= modulePath %>/x/<%= moduleName %>/types" +) + +type fixture struct { + ctx context.Context + keeper keeper.Keeper + addressCodec address.Codec +} + +func initFixture(t *testing.T) *fixture { + t.Helper() + + encCfg := moduletestutil.MakeTestEncodingConfig(module.AppModule{}) + addressCodec := addresscodec.NewBech32Codec(sdk.GetConfig().GetBech32AccountAddrPrefix()) + storeKey := storetypes.NewKVStoreKey(types.StoreKey) + + storeService := runtime.NewKVStoreService(storeKey) + ctx := testutil.DefaultContextWithDB(t, storeKey, storetypes.NewTransientStoreKey("transient_test")).Ctx + + authority := authtypes.NewModuleAddress(govtypes.ModuleName) + mockUpgradeKeeper := newMockUpgradeKeeper() + + k := keeper.NewKeeper( + storeService, + encCfg.Codec, + addressCodec, + authority, + func() *ibckeeper.Keeper { + return ibckeeper.NewKeeper(encCfg.Codec, storeService, newMockParams(), mockUpgradeKeeper, authority.String()) + },<%= for (dependency) in dependencies { %> + nil,<% } %> + ) + + // Initialize params + if err := k.Params.Set(ctx, types.DefaultParams()); err != nil { + t.Fatalf("failed to set params: %v", err) + } + + return &fixture{ + ctx: ctx, + keeper: k, + addressCodec: addressCodec, + } +} + + +type mockUpgradeKeeper struct { + clienttypes.UpgradeKeeper + + initialized bool +} + +func (m mockUpgradeKeeper) GetUpgradePlan(ctx context.Context) (upgradetypes.Plan, error) { + return upgradetypes.Plan{}, nil +} + +func newMockUpgradeKeeper() *mockUpgradeKeeper { + return &mockUpgradeKeeper{initialized: true} +} + +type mockParams struct 
{ + ibctypes.ParamSubspace + + initialized bool +} + +func newMockParams() *mockParams { + return &mockParams{initialized: true} +} + +func (mockParams) GetParamSet(ctx sdk.Context, ps paramtypes.ParamSet) { +} diff --git a/ignite/templates/module/create/files/ibc/x/{{moduleName}}/module/module_ibc.go.plush b/ignite/templates/module/create/files/ibc/x/{{moduleName}}/module/module_ibc.go.plush new file mode 100644 index 0000000..9ebfc6a --- /dev/null +++ b/ignite/templates/module/create/files/ibc/x/{{moduleName}}/module/module_ibc.go.plush @@ -0,0 +1,209 @@ +package <%= moduleName %> + +import ( + "fmt" + + "cosmossdk.io/core/event" + errorsmod "cosmossdk.io/errors" + + "github.com/cosmos/cosmos-sdk/codec" + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + channeltypes "github.com/cosmos/ibc-go/v10/modules/core/04-channel/types" + porttypes "github.com/cosmos/ibc-go/v10/modules/core/05-port/types" + ibcexported "github.com/cosmos/ibc-go/v10/modules/core/exported" + "<%= modulePath %>/x/<%= moduleName %>/keeper" + "<%= modulePath %>/x/<%= moduleName %>/types" +) + +// IBCModule implements the ICS26 interface for interchain accounts host chains +type IBCModule struct { + cdc codec.Codec + keeper keeper.Keeper +} + +// NewIBCModule creates a new IBCModule given the associated keeper +func NewIBCModule(cdc codec.Codec, k keeper.Keeper) IBCModule { + return IBCModule{ + cdc: cdc, + keeper: k, + } +} + +// OnChanOpenInit implements the IBCModule interface +func (im IBCModule) OnChanOpenInit( + ctx sdk.Context, + order channeltypes.Order, + connectionHops []string, + portID string, + channelID string, + counterparty channeltypes.Counterparty, + version string, +) (string, error) { + if version != types.Version { + return "", errorsmod.Wrapf(types.ErrInvalidVersion, "got %s, expected %s", version, types.Version) + } + + return version, nil +} + +// OnChanOpenTry implements the IBCModule interface +func (im IBCModule) 
OnChanOpenTry( + ctx sdk.Context, + order channeltypes.Order, + connectionHops []string, + portID, + channelID string, + counterparty channeltypes.Counterparty, + counterpartyVersion string, +) (string, error) { + if counterpartyVersion != types.Version { + return "", errorsmod.Wrapf(types.ErrInvalidVersion, "invalid counterparty version: got: %s, expected %s", counterpartyVersion, types.Version) + } + + return counterpartyVersion, nil +} + +// OnChanOpenAck implements the IBCModule interface +func (im IBCModule) OnChanOpenAck( + ctx sdk.Context, + portID, + channelID, + counterpartyChannelID, + counterpartyVersion string, +) error { + if counterpartyVersion != types.Version { + return errorsmod.Wrapf(types.ErrInvalidVersion, "invalid counterparty version: %s, expected %s", counterpartyVersion, types.Version) + } + return nil +} + +// OnChanOpenConfirm implements the IBCModule interface +func (im IBCModule) OnChanOpenConfirm( + ctx sdk.Context, + portID, + channelID string, +) error { + return nil +} + +// OnChanCloseInit implements the IBCModule interface +func (im IBCModule) OnChanCloseInit( + ctx sdk.Context, + portID, + channelID string, +) error { + // Disallow user-initiated channel closing for channels + return errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "user cannot close channel") +} + +// OnChanCloseConfirm implements the IBCModule interface +func (im IBCModule) OnChanCloseConfirm( + ctx sdk.Context, + portID, + channelID string, +) error { + return nil +} + +// OnRecvPacket implements the IBCModule interface +func (im IBCModule) OnRecvPacket( + ctx sdk.Context, + channelVersion string, + modulePacket channeltypes.Packet, + relayer sdk.AccAddress, +) ibcexported.Acknowledgement { + var ack channeltypes.Acknowledgement + + var modulePacketData types.<%= title(moduleName) %>PacketData + if err := modulePacketData.Unmarshal(modulePacket.GetData()); err != nil { + return channeltypes.NewErrorAcknowledgement(errorsmod.Wrapf(sdkerrors.ErrUnknownRequest, "cannot 
unmarshal packet data: %s", err.Error())) + } + + // Dispatch packet + switch packet := modulePacketData.Packet.(type) { + default: + err := fmt.Errorf("unrecognized %s packet type: %T", types.ModuleName, packet) + return channeltypes.NewErrorAcknowledgement(err) + } + + // NOTE: acknowledgement will be written synchronously during IBC handler execution. + return ack +} + +// OnAcknowledgementPacket implements the IBCModule interface +func (im IBCModule) OnAcknowledgementPacket( + ctx sdk.Context, + channelVersion string, + modulePacket channeltypes.Packet, + acknowledgement []byte, + relayer sdk.AccAddress, +) error { + var ack channeltypes.Acknowledgement + if err := im.cdc.UnmarshalJSON(acknowledgement, &ack); err != nil { + return errorsmod.Wrapf(sdkerrors.ErrUnknownRequest, "cannot unmarshal packet acknowledgement: %v", err) + } + + var modulePacketData types.<%= title(moduleName) %>PacketData + if err := modulePacketData.Unmarshal(modulePacket.GetData()); err != nil { + return errorsmod.Wrapf(sdkerrors.ErrUnknownRequest, "cannot unmarshal packet data: %s", err.Error()) + } + + var eventType string + + // Dispatch packet + switch packet := modulePacketData.Packet.(type) { + default: + errMsg := fmt.Sprintf("unrecognized %s packet type: %T", types.ModuleName, packet) + return errorsmod.Wrap(sdkerrors.ErrUnknownRequest, errMsg) + } + + + ctx.EventManager().EmitEvent( + sdk.NewEvent( + eventType, + sdk.NewAttribute(types.AttributeKeyAck, fmt.Sprintf("%v", ack)), + ), + ) + + switch resp := ack.Response.(type) { + case *channeltypes.Acknowledgement_Result: + ctx.EventManager().EmitEvent( + sdk.NewEvent( + eventType, + sdk.NewAttribute(types.AttributeKeyAckSuccess, string(resp.Result)), + ), + ) + case *channeltypes.Acknowledgement_Error: + ctx.EventManager().EmitEvent( + sdk.NewEvent( + eventType, + sdk.NewAttribute(types.AttributeKeyAckError, resp.Error), + ), + ) + } + + return nil +} + +// OnTimeoutPacket implements the IBCModule interface +func (im IBCModule) 
OnTimeoutPacket( + ctx sdk.Context, + channelVersion string, + modulePacket channeltypes.Packet, + relayer sdk.AccAddress, +) error { + var modulePacketData types.<%= title(moduleName) %>PacketData + if err := modulePacketData.Unmarshal(modulePacket.GetData()); err != nil { + return errorsmod.Wrapf(sdkerrors.ErrUnknownRequest, "cannot unmarshal packet data: %s", err.Error()) + } + + // Dispatch packet + switch packet := modulePacketData.Packet.(type) { + default: + errMsg := fmt.Sprintf("unrecognized %s packet type: %T", types.ModuleName, packet) + return errorsmod.Wrap(sdkerrors.ErrUnknownRequest, errMsg) + } + + return nil +} diff --git a/ignite/templates/module/create/files/ibc/x/{{moduleName}}/types/events_ibc.go.plush b/ignite/templates/module/create/files/ibc/x/{{moduleName}}/types/events_ibc.go.plush new file mode 100644 index 0000000..b730773 --- /dev/null +++ b/ignite/templates/module/create/files/ibc/x/{{moduleName}}/types/events_ibc.go.plush @@ -0,0 +1,10 @@ +package types + +// IBC events +const ( + EventTypeTimeout = "timeout" + + AttributeKeyAckSuccess = "success" + AttributeKeyAck = "acknowledgement" + AttributeKeyAckError = "error" +) diff --git a/ignite/templates/module/create/files/ibc/x/{{moduleName}}/types/expected_ibc_keeper.go.plush b/ignite/templates/module/create/files/ibc/x/{{moduleName}}/types/expected_ibc_keeper.go.plush new file mode 100644 index 0000000..7f9497c --- /dev/null +++ b/ignite/templates/module/create/files/ibc/x/{{moduleName}}/types/expected_ibc_keeper.go.plush @@ -0,0 +1,23 @@ +package types + +import ( + "context" + + clienttypes "github.com/cosmos/ibc-go/v10/modules/core/02-client/types" + channeltypes "github.com/cosmos/ibc-go/v10/modules/core/04-channel/types" +) + +// ChannelKeeper defines the expected IBC channel keeper. 
+type ChannelKeeper interface { + GetChannel(ctx context.Context, portID, channelID string) (channeltypes.Channel, bool) + GetNextSequenceSend(ctx context.Context, portID, channelID string) (uint64, bool) + SendPacket( + ctx context.Context, + sourcePort string, + sourceChannel string, + timeoutHeight clienttypes.Height, + timeoutTimestamp uint64, + data []byte, + ) (uint64, error) + ChanCloseInit(ctx context.Context, portID, channelID string) error +} \ No newline at end of file diff --git a/ignite/templates/module/create/files/ibc/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/packet.proto.plush b/ignite/templates/module/create/files/ibc/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/packet.proto.plush new file mode 100644 index 0000000..7acedb6 --- /dev/null +++ b/ignite/templates/module/create/files/ibc/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/packet.proto.plush @@ -0,0 +1,15 @@ +syntax = "proto3"; +package <%= protoPkgName %>; + +option go_package = "<%= modulePath %>/x/<%= moduleName %>/types"; + +// <%= title(moduleName) %>PacketData defines the <%= title(moduleName) %> data packet. +message <%= title(moduleName) %>PacketData { + oneof packet { + NoData noData = 1; + } +} + +// NoData defines an empty data packet. 
+message NoData { +} diff --git a/ignite/templates/module/create/helpers_test.go b/ignite/templates/module/create/helpers_test.go new file mode 100644 index 0000000..cbf62fd --- /dev/null +++ b/ignite/templates/module/create/helpers_test.go @@ -0,0 +1,24 @@ +package modulecreate + +import ( + "os" + "path/filepath" + "runtime" + "testing" + + "github.com/stretchr/testify/require" +) + +func readFixture(t *testing.T, relativePath string) string { + t.Helper() + + _, currentFile, _, ok := runtime.Caller(0) + require.True(t, ok) + + path := filepath.Clean(filepath.Join(filepath.Dir(currentFile), relativePath)) + + content, err := os.ReadFile(path) + require.NoError(t, err) + + return string(content) +} diff --git a/ignite/templates/module/create/ibc.go b/ignite/templates/module/create/ibc.go new file mode 100644 index 0000000..56a53e6 --- /dev/null +++ b/ignite/templates/module/create/ibc.go @@ -0,0 +1,355 @@ +package modulecreate + +import ( + "bytes" + "fmt" + "go/ast" + "go/format" + "go/parser" + "go/token" + "io/fs" + "path/filepath" + + "github.com/gobuffalo/genny/v2" + "github.com/gobuffalo/plush/v4" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gomodulepath" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" + "github.com/ignite/cli/v29/ignite/pkg/xast" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/pkg/xstrings" + "github.com/ignite/cli/v29/ignite/templates/field/plushhelpers" + "github.com/ignite/cli/v29/ignite/templates/module" +) + +// NewIBC returns the generator to scaffold the implementation of the IBCModule interface inside a module. 
+func NewIBC(opts *CreateOptions) (*genny.Generator, error) { + subFs, err := fs.Sub(fsIBC, "files/ibc") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + + g := genny.New() + g.RunFn(genesisModify(opts)) + g.RunFn(genesisTypesModify(opts)) + g.RunFn(genesisProtoModify(opts)) + + if err := g.OnlyFS(subFs, nil, nil); err != nil { + return g, errors.Errorf("generator fs: %w", err) + } + + appModulePath := gomodulepath.ExtractAppPath(opts.ModulePath) + + ctx := plush.NewContext() + ctx.Set("moduleName", opts.ModuleName) + ctx.Set("modulePath", opts.ModulePath) + ctx.Set("appName", opts.AppName) + ctx.Set("protoVer", opts.ProtoVer) + ctx.Set("ibcOrdering", opts.IBCOrdering) + ctx.Set("dependencies", opts.Dependencies) + ctx.Set("protoPkgName", module.ProtoPackageName(appModulePath, opts.ModuleName, opts.ProtoVer)) + + plushhelpers.ExtendPlushContext(ctx) + g.Transformer(xgenny.Transformer(ctx)) + g.Transformer(genny.Replace("{{protoDir}}", opts.ProtoDir)) + g.Transformer(genny.Replace("{{appName}}", opts.AppName)) + g.Transformer(genny.Replace("{{moduleName}}", opts.ModuleName)) + g.Transformer(genny.Replace("{{protoVer}}", opts.ProtoVer)) + + return g, nil +} + +func genesisModify(opts *CreateOptions) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "keeper/genesis.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + // Genesis init + replacementModuleInit := `if err := k.Port.Set(ctx, genState.PortId); err != nil { + return err + }` + content, err := xast.ModifyFunction( + f.String(), + "InitGenesis", + xast.AppendFuncCode(replacementModuleInit), + ) + if err != nil { + return err + } + + // Genesis export + replacementModuleExport := `genesis.PortId, err = k.Port.Get(ctx) + if err != nil && !errors.Is(err, collections.ErrNotFound) { + return nil, err + }` + content, err = xast.ModifyFunction( + content, + "ExportGenesis", + 
xast.AppendFuncCode(replacementModuleExport), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func genesisTypesModify(opts *CreateOptions) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "types/genesis.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + // Import + content, err := xast.AppendImports( + f.String(), + xast.WithNamedImport("host", "github.com/cosmos/ibc-go/v10/modules/core/24-host"), + ) + if err != nil { + return err + } + + // Default genesis + content, err = xast.ModifyFunction( + content, + "DefaultGenesis", + xast.AppendFuncStruct("GenesisState", "PortId", "PortID"), + ) + if err != nil { + return err + } + + // Validate genesis + // PlaceholderIBCGenesisTypeValidate + replacementTypesValidate := `if err := host.PortIdentifierValidator(gs.PortId); err != nil { + return err +}` + content, err = xast.ModifyFunction( + content, + "Validate", + xast.AppendFuncCode(replacementTypesValidate), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +// Modifies genesis.proto to add a new field. +// +// What it depends on: +// - Existence of a message named 'GenesisState' in genesis.proto. +func genesisProtoModify(opts *CreateOptions) genny.RunFn { + return func(r *genny.Runner) error { + path := opts.ProtoFile("genesis.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + + // Grab GenesisState and add next (always 2, I gather) available field. + // TODO: typed.ProtoGenesisStateMessage exists but in subfolder, so we can't use it here, refactor? 
+ genesisState, err := protoutil.GetMessageByName(protoFile, "GenesisState") + if err != nil { + return errors.Errorf("couldn't find message 'GenesisState' in %s: %w", path, err) + } + field := protoutil.NewField("port_id", "string", protoutil.NextUniqueID(genesisState)) + protoutil.Append(genesisState, field) + + newFile := genny.NewFileS(path, protoutil.Print(protoFile)) + return r.File(newFile) + } +} + +func appIBCModify(opts *CreateOptions) genny.RunFn { + return func(r *genny.Runner) error { + path := module.PathIBCConfigGo + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + // Import + content, err := xast.AppendImports( + f.String(), + xast.WithNamedImport( + fmt.Sprintf("%[1]vmodule", opts.ModuleName), + fmt.Sprintf("%[1]v/x/%[2]v/module", opts.ModulePath, opts.ModuleName), + ), + xast.WithNamedImport( + fmt.Sprintf("%[1]vmoduletypes", opts.ModuleName), + fmt.Sprintf("%[1]v/x/%[2]v/types", opts.ModulePath, opts.ModuleName), + ), + ) + if err != nil { + return err + } + + content, err = addIBCModuleRoute(content, opts.ModuleName) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func addIBCModuleRoute(content, moduleName string) (string, error) { + fileSet := token.NewFileSet() + file, err := parser.ParseFile(fileSet, "", content, parser.ParseComments) + if err != nil { + return "", err + } + commentMap := ast.NewCommentMap(fileSet, file, file.Comments) + + registerIBCModules := findFunctionByName(file, "registerIBCModules") + if registerIBCModules == nil { + return "", errors.New(`function "registerIBCModules" not found`) + } + + moduleNameExprText := fmt.Sprintf("%smoduletypes.ModuleName", moduleName) + moduleConstructorExprText := fmt.Sprintf( + "%smodule.NewIBCModule(app.appCodec, app.%sKeeper)", + moduleName, + xstrings.Title(moduleName), + ) + + hasRoute, err := hasAddRoute(registerIBCModules, moduleNameExprText, fileSet) + if err != nil { + return "", err + } + if 
!hasRoute { + insertionIndex := -1 + for i, stmt := range registerIBCModules.Body.List { + assignStmt, ok := stmt.(*ast.AssignStmt) + if !ok || len(assignStmt.Lhs) != 1 { + continue + } + + lhs, ok := assignStmt.Lhs[0].(*ast.Ident) + if !ok || lhs.Name != "ibcv2Router" { + continue + } + + insertionIndex = i + break + } + if insertionIndex == -1 { + return "", errors.New(`assignment to "ibcv2Router" not found`) + } + + stmtCode := fmt.Sprintf( + "ibcRouter = ibcRouter.AddRoute(%s, %s)", + moduleNameExprText, + moduleConstructorExprText, + ) + statements, err := parseStatements(stmtCode) + if err != nil { + return "", err + } + if len(statements) != 1 { + return "", errors.New("unexpected number of statements while creating ibc route assignment") + } + + registerIBCModules.Body.List = append( + registerIBCModules.Body.List[:insertionIndex], + append([]ast.Stmt{statements[0]}, registerIBCModules.Body.List[insertionIndex:]...)..., + ) + } + + file.Comments = commentMap.Filter(file).Comments() + + var buf bytes.Buffer + if err := format.Node(&buf, fileSet, file); err != nil { + return "", err + } + + formatted, err := format.Source(buf.Bytes()) + if err != nil { + return "", err + } + + return string(formatted), nil +} + +func hasAddRoute(funcDecl *ast.FuncDecl, moduleNameExpr string, fileSet *token.FileSet) (bool, error) { + var ( + found bool + err error + ) + + ast.Inspect(funcDecl, func(n ast.Node) bool { + if found || err != nil { + return false + } + + callExpr, ok := n.(*ast.CallExpr) + if !ok { + return true + } + + selector, ok := callExpr.Fun.(*ast.SelectorExpr) + if !ok || selector.Sel.Name != "AddRoute" || len(callExpr.Args) == 0 { + return true + } + + argText, argErr := exprString(fileSet, callExpr.Args[0]) + if argErr != nil { + err = argErr + return false + } + + if normalizedExpr(argText) == normalizedExpr(moduleNameExpr) { + found = true + return false + } + + return true + }) + + return found, err +} + +func parseStatements(code string) ([]ast.Stmt, 
error) { + fileSet := token.NewFileSet() + file, err := parser.ParseFile(fileSet, "", fmt.Sprintf("package p\nfunc _(){\n%s\n}", code), 0) + if err != nil { + return nil, err + } + + funcDecl, ok := file.Decls[0].(*ast.FuncDecl) + if !ok || funcDecl.Body == nil { + return nil, errors.New("failed to parse statements") + } + + return funcDecl.Body.List, nil +} + +func findFunctionByName(file *ast.File, funcName string) *ast.FuncDecl { + for _, decl := range file.Decls { + funcDecl, ok := decl.(*ast.FuncDecl) + if !ok { + continue + } + if funcDecl.Name.Name == funcName { + return funcDecl + } + } + return nil +} diff --git a/ignite/templates/module/create/ibc_test.go b/ignite/templates/module/create/ibc_test.go new file mode 100644 index 0000000..844610f --- /dev/null +++ b/ignite/templates/module/create/ibc_test.go @@ -0,0 +1,22 @@ +package modulecreate + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestAddIBCModuleRoute(t *testing.T) { + content := readFixture(t, "../../app/files/app/ibc.go.plush") + + modified, err := addIBCModuleRoute(content, "blog") + require.NoError(t, err) + + routeCall := "ibcRouter=ibcRouter.AddRoute(blogmoduletypes.ModuleName,blogmodule.NewIBCModule(app.appCodec,app.BlogKeeper))" + require.Equal(t, 1, strings.Count(normalizedExpr(modified), routeCall)) + + modified, err = addIBCModuleRoute(modified, "blog") + require.NoError(t, err) + require.Equal(t, 1, strings.Count(normalizedExpr(modified), routeCall)) +} diff --git a/ignite/templates/module/create/options.go b/ignite/templates/module/create/options.go new file mode 100644 index 0000000..00149ea --- /dev/null +++ b/ignite/templates/module/create/options.go @@ -0,0 +1,97 @@ +package modulecreate + +import ( + "fmt" + "path/filepath" + + "github.com/iancoleman/strcase" + + "github.com/ignite/cli/v29/ignite/templates/field" +) + +// ConfigsOptions represents the options to scaffold a Cosmos SDK module configs. 
+type ConfigsOptions struct { + ModuleName string + AppName string + ProtoDir string + ProtoVer string + Configs field.Fields +} + +// ProtoFile returns the path to the proto folder. +func (opts *ConfigsOptions) ProtoFile(fname string) string { + return filepath.Join(opts.ProtoDir, opts.AppName, opts.ModuleName, opts.ProtoVer, fname) +} + +// ParamsOptions represents the options to scaffold a Cosmos SDK module parameters. +type ParamsOptions struct { + ModuleName string + AppName string + ProtoDir string + ProtoVer string + Params field.Fields +} + +// ProtoFile returns the path to the proto folder. +func (opts *ParamsOptions) ProtoFile(fname string) string { + return filepath.Join(opts.ProtoDir, opts.AppName, opts.ModuleName, opts.ProtoVer, fname) +} + +// CreateOptions represents the options to scaffold a Cosmos SDK module. +type CreateOptions struct { + ModuleName string + ModulePath string + AppName string + AppPath string + ProtoDir string + ProtoVer string + Params field.Fields + Configs field.Fields + + // True if the module should implement the IBC module interface + IsIBC bool + + // Channel ordering of the IBC module: ordered, unordered or none + IBCOrdering string + + // Dependencies of the module + Dependencies Dependencies +} + +// ProtoFile returns the path to the proto folder. +func (opts *CreateOptions) ProtoFile(fname string) string { + return filepath.Join(opts.ProtoDir, opts.AppName, opts.ModuleName, opts.ProtoVer, fname) +} + +// Dependency represents a module dependency of a module. +type Dependency struct { + Name string +} + +// Dependencies represents a list of module dependency. +type Dependencies []Dependency + +// NewDependency returns a new dependency. +func NewDependency(name string) Dependency { + return Dependency{Name: strcase.ToCamel(name)} +} + +// Contains returns true if contains dependency name. 
+func (d Dependencies) Contains(name string) bool { + for _, dep := range d { + if dep.Name == name { + return true + } + } + return false +} + +// Len returns the length of dependencies. +func (d Dependencies) Len() int { + return len(d) +} + +// KeeperName returns the keeper's name for the dependency module. +func (d Dependency) KeeperName() string { + return fmt.Sprint(d.Name, "Keeper") +} diff --git a/ignite/templates/module/create/params.go b/ignite/templates/module/create/params.go new file mode 100644 index 0000000..2074e8f --- /dev/null +++ b/ignite/templates/module/create/params.go @@ -0,0 +1,145 @@ +package modulecreate + +import ( + "fmt" + "path/filepath" + + "github.com/gobuffalo/genny/v2" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" + "github.com/ignite/cli/v29/ignite/pkg/xast" +) + +// NewModuleParam returns the generator to scaffold a new parameter inside a module. +func NewModuleParam(opts ParamsOptions) (*genny.Generator, error) { + g := genny.New() + g.RunFn(paramsProtoModify(opts)) + g.RunFn(paramsTypesModify(opts)) + return g, nil +} + +func paramsProtoModify(opts ParamsOptions) genny.RunFn { + return func(r *genny.Runner) error { + path := opts.ProtoFile("params.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + + params, err := protoutil.GetMessageByName(protoFile, "Params") + if err != nil { + return errors.Errorf("couldn't find message 'Params' in %s: %w", path, err) + } + for _, paramField := range opts.Params { + _, err := protoutil.GetFieldByName(params, paramField.ProtoFieldName()) + if err == nil { + return errors.Errorf("duplicate field %s in %s", paramField.ProtoFieldName(), params.Name) + } + + param := protoutil.NewField( + paramField.ProtoFieldName(), + paramField.DataType(), + protoutil.NextUniqueID(params), + ) + protoutil.Append(params, param) + } + + 
newFile := genny.NewFileS(path, protoutil.Print(protoFile)) + return r.File(newFile) + } +} + +func paramsTypesModify(opts ParamsOptions) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "types/params.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + var ( + content = f.String() + globalOpts = make([]xast.GlobalOptions, len(opts.Params)) + newParamsModifier = make([]xast.FunctionOptions, 0) + defaultParamsModifier = make([]xast.FunctionOptions, len(opts.Params)) + validateModifier = make([]xast.FunctionOptions, len(opts.Params)) + ) + for i, param := range opts.Params { + // param key and default value. + globalOpts[i] = xast.WithGlobal( + fmt.Sprintf("Default%s", param.Name.UpperCamel), + param.DataType(), + param.Value(), + ) + + // add parameter to the struct into the new method. + newParamsModifier = append( + newParamsModifier, + xast.AppendFuncParams(param.ProtoFieldName(), param.DataType(), -1), + xast.AppendFuncStruct( + "Params", + param.Name.UpperCamel, + param.ProtoFieldName(), + ), + ) + + // add default parameter. + defaultParamsModifier[i] = xast.AppendInsideFuncCall( + "NewParams", + fmt.Sprintf("Default%s", param.Name.UpperCamel), + -1, + ) + + // add param field to the validate method. + replacementValidate := fmt.Sprintf( + `if err := validate%[1]v(p.%[1]v); err != nil { return err }`, + param.Name.UpperCamel, + ) + validateModifier[i] = xast.AppendFuncCode(replacementValidate) + + // add param field to the validate method. + templateValidation := `// validate%[1]v validates the %[1]v parameter. +func validate%[1]v(v %[2]v) error { + // TODO implement validation + return nil +}` + validationFunc := fmt.Sprintf( + templateValidation, + param.Name.UpperCamel, + param.DataType(), + ) + content, err = xast.AppendFunction(content, validationFunc) + if err != nil { + return err + } + } + + content, err = xast.InsertGlobal(content, xast.GlobalTypeConst, globalOpts...) 
+ if err != nil { + return err + } + + content, err = xast.ModifyFunction(content, "NewParams", newParamsModifier...) + if err != nil { + return err + } + + content, err = xast.ModifyFunction(content, "DefaultParams", defaultParamsModifier...) + if err != nil { + return err + } + + content, err = xast.ModifyFunction(content, "Validate", validateModifier...) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} diff --git a/ignite/templates/module/create/templates.go b/ignite/templates/module/create/templates.go new file mode 100644 index 0000000..38a97c4 --- /dev/null +++ b/ignite/templates/module/create/templates.go @@ -0,0 +1,13 @@ +package modulecreate + +import ( + "embed" +) + +var ( + //go:embed files/base/* files/base/**/* + fsBase embed.FS + + //go:embed files/ibc/* files/ibc/**/* + fsIBC embed.FS +) diff --git a/ignite/templates/module/migration/files/x/{{moduleName}}/migrations/{{migrationVersion}}/migrate.go.plush b/ignite/templates/module/migration/files/x/{{moduleName}}/migrations/{{migrationVersion}}/migrate.go.plush new file mode 100644 index 0000000..88ed850 --- /dev/null +++ b/ignite/templates/module/migration/files/x/{{moduleName}}/migrations/{{migrationVersion}}/migrate.go.plush @@ -0,0 +1,8 @@ +package <%= migrationVersion %> + +import sdk "github.com/cosmos/cosmos-sdk/types" + +// <%= migrationFunc %> performs in-place store migrations from version <%= fromVersion %> to <%= toVersion %>. 
+func <%= migrationFunc %>(_ sdk.Context) error { + return nil +} diff --git a/ignite/templates/module/migration/migration.go b/ignite/templates/module/migration/migration.go new file mode 100644 index 0000000..1fffd9e --- /dev/null +++ b/ignite/templates/module/migration/migration.go @@ -0,0 +1,38 @@ +package modulemigration + +import ( + "io/fs" + + "github.com/gobuffalo/genny/v2" + "github.com/gobuffalo/plush/v4" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" +) + +// NewGenerator returns the generator to scaffold a new module migration. +func NewGenerator(opts *Options) (*genny.Generator, error) { + subFS, err := fs.Sub(files, "files") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + + g := genny.New() + if err := g.OnlyFS(subFS, nil, nil); err != nil { + return g, err + } + + ctx := plush.NewContext() + ctx.Set("fromVersion", opts.FromVersion) + ctx.Set("migrationFunc", opts.MigrationFunc()) + ctx.Set("migrationVersion", opts.MigrationVersion()) + ctx.Set("moduleName", opts.ModuleName) + ctx.Set("toVersion", opts.ToVersion) + + g.Transformer(xgenny.Transformer(ctx)) + g.Transformer(genny.Replace("{{migrationVersion}}", opts.MigrationVersion())) + g.Transformer(genny.Replace("{{moduleName}}", opts.ModuleName)) + g.RunFn(moduleModify(opts)) + + return g, nil +} diff --git a/ignite/templates/module/migration/module.go b/ignite/templates/module/migration/module.go new file mode 100644 index 0000000..7f46f8d --- /dev/null +++ b/ignite/templates/module/migration/module.go @@ -0,0 +1,344 @@ +package modulemigration + +import ( + "bytes" + "go/ast" + "go/format" + "go/parser" + "go/token" + "strconv" + + "github.com/gobuffalo/genny/v2" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xast" +) + +func moduleModify(opts *Options) genny.RunFn { + return func(r *genny.Runner) error { + f, err := r.Disk.Find(opts.ModuleFile()) + if err != nil { + 
return err + } + + content, err := updateModule(f.String(), opts) + if err != nil { + return err + } + + return r.File(genny.NewFileS(opts.ModuleFile(), content)) + } +} + +func updateModule(content string, opts *Options) (string, error) { + currentVersion, err := ConsensusVersion(content) + if err != nil { + return "", err + } + if currentVersion != opts.FromVersion { + return "", errors.Errorf("expected module consensus version %d, got %d", opts.FromVersion, currentVersion) + } + + content, err = xast.AppendImports( + content, + xast.WithNamedImport(opts.MigrationImportAlias(), opts.MigrationImportPath()), + ) + if err != nil { + return "", err + } + + content, err = setConsensusVersion(content, opts.ToVersion) + if err != nil { + return "", err + } + + return addMigrationRegistration(content, opts) +} + +// ConsensusVersion returns the current module consensus version from module.go content. +func ConsensusVersion(content string) (uint64, error) { + fileSet := token.NewFileSet() + file, err := parser.ParseFile(fileSet, "", content, parser.ParseComments) + if err != nil { + return 0, err + } + + expr, err := consensusVersionExpr(file) + if err != nil { + return 0, err + } + + return parseConsensusVersionExpr(file, expr) +} + +func addMigrationRegistration(content string, opts *Options) (string, error) { + info, err := registerServicesInfoFromContent(content) + if err != nil { + return "", err + } + + var functionOptions []xast.FunctionOptions + + if info.needsConfiguratorSetup { + functionOptions = append(functionOptions, xast.AppendFuncCode(configuratorSetupCode(info))) + } + + functionOptions = append(functionOptions, xast.AppendFuncCode(migrationRegistrationCode(info, opts))) + + return xast.ModifyFunction(content, "RegisterServices", functionOptions...) 
+} + +func configuratorSetupCode(info registerServicesInfo) string { + returnStmt := "return" + if info.returnsError { + returnStmt = "return nil" + } + + return info.cfgVar + ", ok := " + info.parameterName + ".(module.Configurator)\n" + + "if !ok {\n\t" + returnStmt + "\n}" +} + +func migrationRegistrationCode(info registerServicesInfo, opts *Options) string { + handleErr := "panic(err)" + if info.returnsError { + handleErr = "return err" + } + + return "if err := " + info.cfgVar + + ".RegisterMigration(types.ModuleName, " + + strconv.FormatUint(opts.FromVersion, 10) + ", " + + opts.MigrationImportAlias() + "." + opts.MigrationFunc() + + "); err != nil {\n\t" + handleErr + "\n}" +} + +func setConsensusVersion(content string, version uint64) (string, error) { + fileSet := token.NewFileSet() + file, err := parser.ParseFile(fileSet, "", content, parser.ParseComments) + if err != nil { + return "", err + } + + commentMap := ast.NewCommentMap(fileSet, file, file.Comments) + + expr, err := consensusVersionExpr(file) + if err != nil { + return "", err + } + + switch versionExpr := expr.(type) { + case *ast.BasicLit: + versionExpr.Value = strconv.FormatUint(version, 10) + case *ast.Ident: + valueSpec, valueIndex, err := findValueSpec(file, versionExpr.Name) + if err != nil { + return "", err + } + valueSpec.Values[valueIndex] = &ast.BasicLit{ + Kind: token.INT, + Value: strconv.FormatUint(version, 10), + } + default: + return "", errors.Errorf("unsupported consensus version expression %T", expr) + } + + file.Comments = commentMap.Filter(file).Comments() + + return formatFile(fileSet, file) +} + +type registerServicesInfo struct { + cfgVar string + needsConfiguratorSetup bool + parameterName string + returnsError bool +} + +func registerServicesInfoFromContent(content string) (registerServicesInfo, error) { + fileSet := token.NewFileSet() + file, err := parser.ParseFile(fileSet, "", content, parser.ParseComments) + if err != nil { + return registerServicesInfo{}, err + } 
+ + funcDecl := findFuncDecl(file, "RegisterServices") + if funcDecl == nil { + return registerServicesInfo{}, errors.New("function \"RegisterServices\" not found") + } + if funcDecl.Type.Params == nil || len(funcDecl.Type.Params.List) == 0 || len(funcDecl.Type.Params.List[0].Names) == 0 { + return registerServicesInfo{}, errors.New("RegisterServices must have a named parameter") + } + + param := funcDecl.Type.Params.List[0] + info := registerServicesInfo{ + parameterName: param.Names[0].Name, + returnsError: functionReturnsError(funcDecl), + } + + if isModuleConfiguratorType(param.Type) { + info.cfgVar = info.parameterName + return info, nil + } + + cfgVar := findConfiguratorVar(funcDecl, info.parameterName) + if cfgVar != "" { + info.cfgVar = cfgVar + return info, nil + } + + info.cfgVar = "cfg" + info.needsConfiguratorSetup = true + + return info, nil +} + +func functionReturnsError(funcDecl *ast.FuncDecl) bool { + if funcDecl.Type.Results == nil || len(funcDecl.Type.Results.List) != 1 { + return false + } + + ident, ok := funcDecl.Type.Results.List[0].Type.(*ast.Ident) + return ok && ident.Name == "error" +} + +func findConfiguratorVar(funcDecl *ast.FuncDecl, parameterName string) string { + for _, stmt := range funcDecl.Body.List { + assignStmt, ok := stmt.(*ast.AssignStmt) + if !ok || len(assignStmt.Lhs) < 1 || len(assignStmt.Rhs) != 1 { + continue + } + + typeAssert, ok := assignStmt.Rhs[0].(*ast.TypeAssertExpr) + if !ok || !isModuleConfiguratorType(typeAssert.Type) { + continue + } + + ident, ok := typeAssert.X.(*ast.Ident) + if !ok || ident.Name != parameterName { + continue + } + + cfgVar, ok := assignStmt.Lhs[0].(*ast.Ident) + if !ok { + continue + } + + return cfgVar.Name + } + + return "" +} + +func isModuleConfiguratorType(expr ast.Expr) bool { + switch typedExpr := expr.(type) { + case *ast.Ident: + return typedExpr.Name == "Configurator" + case *ast.SelectorExpr: + return typedExpr.Sel.Name == "Configurator" + default: + return false + } +} + +func 
consensusVersionExpr(file *ast.File) (ast.Expr, error) { + funcDecl := findFuncDecl(file, "ConsensusVersion") + if funcDecl == nil { + return nil, errors.New("function \"ConsensusVersion\" not found") + } + if funcDecl.Body == nil || len(funcDecl.Body.List) == 0 { + return nil, errors.New("ConsensusVersion has an empty body") + } + + lastStmt, ok := funcDecl.Body.List[len(funcDecl.Body.List)-1].(*ast.ReturnStmt) + if !ok || len(lastStmt.Results) != 1 { + return nil, errors.New("ConsensusVersion must return exactly one value") + } + + return lastStmt.Results[0], nil +} + +func parseConsensusVersionExpr(file *ast.File, expr ast.Expr) (uint64, error) { + switch typedExpr := expr.(type) { + case *ast.BasicLit: + return parseConsensusVersionLiteral(typedExpr) + case *ast.Ident: + valueSpec, valueIndex, err := findValueSpec(file, typedExpr.Name) + if err != nil { + return 0, err + } + return parseConsensusVersionExpr(file, valueSpec.Values[valueIndex]) + default: + return 0, errors.Errorf("unsupported consensus version expression %T", expr) + } +} + +func parseConsensusVersionLiteral(lit *ast.BasicLit) (uint64, error) { + if lit.Kind != token.INT { + return 0, errors.Errorf("unsupported consensus version literal kind %v", lit.Kind) + } + + version, err := strconv.ParseUint(lit.Value, 10, 64) + if err != nil { + return 0, err + } + + return version, nil +} + +func findValueSpec(file *ast.File, name string) (*ast.ValueSpec, int, error) { + for _, decl := range file.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok || (genDecl.Tok != token.CONST && genDecl.Tok != token.VAR) { + continue + } + + for _, spec := range genDecl.Specs { + valueSpec, ok := spec.(*ast.ValueSpec) + if !ok { + continue + } + + for i, specName := range valueSpec.Names { + if specName.Name != name { + continue + } + if len(valueSpec.Values) == 0 { + return nil, 0, errors.Errorf("%s has no value", name) + } + + valueIndex := i + if valueIndex >= len(valueSpec.Values) { + valueIndex = 
len(valueSpec.Values) - 1 + } + + return valueSpec, valueIndex, nil + } + } + } + + return nil, 0, errors.Errorf("%s value not found", name) +} + +func findFuncDecl(file *ast.File, name string) *ast.FuncDecl { + for _, decl := range file.Decls { + funcDecl, ok := decl.(*ast.FuncDecl) + if ok && funcDecl.Name.Name == name { + return funcDecl + } + } + + return nil +} + +func formatFile(fileSet *token.FileSet, file *ast.File) (string, error) { + var buf bytes.Buffer + if err := format.Node(&buf, fileSet, file); err != nil { + return "", err + } + + formatted, err := format.Source(buf.Bytes()) + if err != nil { + return "", err + } + + return string(formatted), nil +} diff --git a/ignite/templates/module/migration/module_test.go b/ignite/templates/module/migration/module_test.go new file mode 100644 index 0000000..2e01ccf --- /dev/null +++ b/ignite/templates/module/migration/module_test.go @@ -0,0 +1,131 @@ +package modulemigration + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestUpdateModuleAddsInitialMigration(t *testing.T) { + opts := &Options{ + ModuleName: "blog", + ModulePath: "github.com/test/blog", + FromVersion: 1, + ToVersion: 2, + } + + got, err := updateModule(moduleWithServiceRegistrar(` +func (am AppModule) RegisterServices(registrar grpc.ServiceRegistrar) error { + types.RegisterMsgServer(registrar, keeper.NewMsgServerImpl(am.keeper)) + types.RegisterQueryServer(registrar, keeper.NewQueryServerImpl(am.keeper)) + + return nil +} + +func (AppModule) ConsensusVersion() uint64 { return 1 } +`), opts) + require.NoError(t, err) + + normalized := normalize(got) + require.Contains(t, normalized, `migrationv2"github.com/test/blog/x/blog/migrations/v2"`) + require.Contains(t, normalized, `cfg,ok:=registrar.(module.Configurator)`) + require.Contains(t, normalized, `if!ok{returnnil}`) + require.Contains(t, normalized, `cfg.RegisterMigration(types.ModuleName,1,migrationv2.Migrate)`) + require.Contains(t, normalized, 
`func(AppModule)ConsensusVersion()uint64{return2}`) +} + +func TestUpdateModuleAppendsMigrationToExistingConfiguratorBlock(t *testing.T) { + opts := &Options{ + ModuleName: "blog", + ModulePath: "github.com/test/blog", + FromVersion: 2, + ToVersion: 3, + } + + got, err := updateModule(moduleWithServiceRegistrar(` +func (am AppModule) RegisterServices(registrar grpc.ServiceRegistrar) error { + types.RegisterMsgServer(registrar, keeper.NewMsgServerImpl(am.keeper)) + types.RegisterQueryServer(registrar, keeper.NewQueryServerImpl(am.keeper)) + + cfg, ok := registrar.(module.Configurator) + if !ok { + return nil + } + + if err := cfg.RegisterMigration(types.ModuleName, 1, migrationv2.Migrate); err != nil { + return err + } + + return nil +} + +func (AppModule) ConsensusVersion() uint64 { return 2 } +`, `migrationv2 "github.com/test/blog/x/blog/migrations/v2"`), opts) + require.NoError(t, err) + + normalized := normalize(got) + require.Equal(t, 1, strings.Count(normalized, `registrar.(module.Configurator)`)) + require.Contains(t, normalized, `migrationv3"github.com/test/blog/x/blog/migrations/v3"`) + require.Contains(t, normalized, `cfg.RegisterMigration(types.ModuleName,1,migrationv2.Migrate)`) + require.Contains(t, normalized, `cfg.RegisterMigration(types.ModuleName,2,migrationv3.Migrate)`) + require.Contains(t, normalized, `func(AppModule)ConsensusVersion()uint64{return3}`) +} + +func TestUpdateModuleSupportsConfiguratorSignatureAndConstantVersion(t *testing.T) { + opts := &Options{ + ModuleName: "blog", + ModulePath: "github.com/test/blog", + FromVersion: 2, + ToVersion: 3, + } + + got, err := updateModule(moduleWithConfigurator(` +const ConsensusVersion = 2 + +func (am AppModule) RegisterServices(cfg module.Configurator) { + types.RegisterMsgServer(cfg.MsgServer(), keeper.NewMsgServerImpl(am.keeper)) + types.RegisterQueryServer(cfg.QueryServer(), keeper.NewQueryServerImpl(am.keeper)) +} + +func (AppModule) ConsensusVersion() uint64 { return ConsensusVersion } +`), 
opts) + require.NoError(t, err) + + normalized := normalize(got) + require.NotContains(t, normalized, `registrar.(module.Configurator)`) + require.Contains(t, normalized, `cfg.RegisterMigration(types.ModuleName,2,migrationv3.Migrate)`) + require.Contains(t, normalized, `iferr:=cfg.RegisterMigration(types.ModuleName,2,migrationv3.Migrate);err!=nil{panic(err)}`) + require.Contains(t, normalized, `constConsensusVersion=3`) + + version, err := ConsensusVersion(got) + require.NoError(t, err) + require.EqualValues(t, 3, version) +} + +func moduleWithServiceRegistrar(body string, extraImports ...string) string { + imports := []string{ + `"github.com/cosmos/cosmos-sdk/types/module"`, + `"google.golang.org/grpc"`, + `"github.com/test/blog/x/blog/keeper"`, + `"github.com/test/blog/x/blog/types"`, + } + imports = append(imports, extraImports...) + + return "package blog\n\nimport (\n\t" + strings.Join(imports, "\n\t") + "\n)\n\n" + body +} + +func moduleWithConfigurator(body string, extraImports ...string) string { + imports := []string{ + `"github.com/cosmos/cosmos-sdk/types/module"`, + `"github.com/test/blog/x/blog/keeper"`, + `"github.com/test/blog/x/blog/types"`, + } + imports = append(imports, extraImports...) + + return "package blog\n\nimport (\n\t" + strings.Join(imports, "\n\t") + "\n)\n\n" + body +} + +func normalize(content string) string { + return strings.Join(strings.Fields(content), "") +} diff --git a/ignite/templates/module/migration/options.go b/ignite/templates/module/migration/options.go new file mode 100644 index 0000000..b9ff30b --- /dev/null +++ b/ignite/templates/module/migration/options.go @@ -0,0 +1,49 @@ +package modulemigration + +import ( + "fmt" + "path/filepath" +) + +// Options represents the options to scaffold a module migration. +type Options struct { + ModuleName string + ModulePath string + FromVersion uint64 + ToVersion uint64 +} + +// ModuleFile returns the path to the module definition file. 
+func (opts Options) ModuleFile() string { + return filepath.Join("x", opts.ModuleName, "module", "module.go") +} + +// MigrationVersion returns the migration package name. +func (opts Options) MigrationVersion() string { + return fmt.Sprintf("v%d", opts.ToVersion) +} + +// MigrationDir returns the path to the migration folder. +func (opts Options) MigrationDir() string { + return filepath.Join("x", opts.ModuleName, "migrations", opts.MigrationVersion()) +} + +// MigrationFile returns the path to the migration source file. +func (opts Options) MigrationFile() string { + return filepath.Join(opts.MigrationDir(), "migrate.go") +} + +// MigrationFunc returns the migration handler function name. +func (opts Options) MigrationFunc() string { + return "Migrate" +} + +// MigrationImportAlias returns the import alias used by module.go. +func (opts Options) MigrationImportAlias() string { + return fmt.Sprintf("migrationv%d", opts.ToVersion) +} + +// MigrationImportPath returns the migration import path used by module.go. +func (opts Options) MigrationImportPath() string { + return fmt.Sprintf("%s/x/%s/migrations/%s", opts.ModulePath, opts.ModuleName, opts.MigrationVersion()) +} diff --git a/ignite/templates/module/migration/templates.go b/ignite/templates/module/migration/templates.go new file mode 100644 index 0000000..730f0fc --- /dev/null +++ b/ignite/templates/module/migration/templates.go @@ -0,0 +1,6 @@ +package modulemigration + +import "embed" + +//go:embed files/* files/**/* +var files embed.FS diff --git a/ignite/templates/module/module.go b/ignite/templates/module/module.go new file mode 100644 index 0000000..e066190 --- /dev/null +++ b/ignite/templates/module/module.go @@ -0,0 +1,39 @@ +package module + +import ( + "regexp" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/xstrings" +) + +// ProtoPackageName creates a protocol buffer package name for an app module. 
+func ProtoPackageName(appModulePath, moduleName, version string) string { + pathArray := strings.Split(appModulePath, "/") + path := []string{pathArray[len(pathArray)-1], moduleName, version} + + // Make sure that the first path element can be used as proto package name. + // This is required for app module names like "github.com/username/repo" where + // "username" might be not be compatible with proto buffer package names. + path[0] = xstrings.NoNumberPrefix(path[0]) + + return cleanProtoPackageName(strings.Join(path, ".")) +} + +// ProtoModulePackageName creates a protocol buffer module package name for an app module. +func ProtoModulePackageName(appModulePath, moduleName, version string) string { + pathArray := strings.Split(appModulePath, "/") + path := []string{pathArray[len(pathArray)-1], moduleName, "module", version} + + // Make sure that the first path element can be used as proto package name. + // This is required for app module names like "github.com/username/repo" where + // "username" might be not be compatible with proto buffer package names. 
+ path[0] = xstrings.NoNumberPrefix(path[0]) + + return cleanProtoPackageName(strings.Join(path, ".")) +} + +func cleanProtoPackageName(name string) string { + r := regexp.MustCompile("[^a-zA-Z0-9_.]+") + return strings.ToLower(r.ReplaceAllString(name, "")) +} diff --git a/ignite/templates/module/module_test.go b/ignite/templates/module/module_test.go new file mode 100644 index 0000000..7dc4953 --- /dev/null +++ b/ignite/templates/module/module_test.go @@ -0,0 +1,111 @@ +package module + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestProtoPackageName(t *testing.T) { + cases := []struct { + name string + app string + module string + version string + want string + }{ + { + name: "name", + app: "ignite", + module: "test", + version: "v1", + want: "ignite.test.v1", + }, + { + name: "name", + app: "ignite", + module: "test", + version: "v2", + want: "ignite.test.v2", + }, + { + name: "path", + app: "ignite/cli", + module: "test", + version: "v1", + want: "cli.test.v1", + }, + { + name: "path with dash", + app: "ignite/c-li", + module: "test", + version: "v1", + want: "cli.test.v1", + }, + { + name: "path with number prefix", + app: "0ignite/cli", + module: "test", + version: "v1", + want: "cli.test.v1", + }, + { + name: "app with number prefix", + app: "ignite/0cli", + module: "test", + version: "v1", + want: "_0cli.test.v1", + }, + { + name: "path with number prefix and dash", + app: "0ignite/cli", + module: "test", + version: "v1", + want: "cli.test.v1", + }, + { + name: "module with dash", + app: "ignite", + module: "test-mod", + version: "v1", + want: "ignite.testmod.v1", + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + require.Equal(t, tt.want, ProtoPackageName(tt.app, tt.module, tt.version)) + }) + } +} + +func TestProtoModulePackageName(t *testing.T) { + cases := []struct { + name string + app string + module string + version string + want string + }{ + { + name: "name", + app: "ignite", + module: 
"test", + version: "v1", + want: "ignite.test.module.v1", + }, + { + name: "name", + app: "ignite", + module: "test", + version: "v2", + want: "ignite.test.module.v2", + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + require.Equal(t, tt.want, ProtoModulePackageName(tt.app, tt.module, tt.version)) + }) + } +} diff --git a/ignite/templates/query/files/x/{{moduleName}}/keeper/query_{{queryName}}.go.plush b/ignite/templates/query/files/x/{{moduleName}}/keeper/query_{{queryName}}.go.plush new file mode 100644 index 0000000..8cf3d29 --- /dev/null +++ b/ignite/templates/query/files/x/{{moduleName}}/keeper/query_{{queryName}}.go.plush @@ -0,0 +1,19 @@ +package keeper + +import ( + "context" + + "<%= ModulePath %>/x/<%= ModuleName %>/types" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func (q queryServer) <%= QueryName.PascalCase %>(ctx context.Context, req *types.Query<%= QueryName.PascalCase %>Request) (*types.Query<%= QueryName.PascalCase %>Response, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + // TODO: Process the query + + return &types.Query<%= QueryName.PascalCase %>Response{}, nil +} diff --git a/ignite/templates/query/options.go b/ignite/templates/query/options.go new file mode 100644 index 0000000..9ae6a22 --- /dev/null +++ b/ignite/templates/query/options.go @@ -0,0 +1,27 @@ +package query + +import ( + "path/filepath" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/templates/field" +) + +// Options ... +type Options struct { + AppName string + ProtoDir string + ProtoVer string + ModuleName string + ModulePath string + QueryName multiformatname.Name + Description string + ResFields field.Fields + ReqFields field.Fields + Paginated bool +} + +// ProtoFile returns the path to the proto folder. 
+func (opts *Options) ProtoFile(fname string) string { + return filepath.Join(opts.ProtoDir, opts.AppName, opts.ModuleName, opts.ProtoVer, fname) +} diff --git a/ignite/templates/query/query.go b/ignite/templates/query/query.go new file mode 100644 index 0000000..b91d9bb --- /dev/null +++ b/ignite/templates/query/query.go @@ -0,0 +1,178 @@ +package query + +import ( + "embed" + "fmt" + "io/fs" + "path/filepath" + + "github.com/emicklei/proto" + "github.com/gobuffalo/genny/v2" + "github.com/gobuffalo/plush/v4" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gomodulepath" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/templates/field/plushhelpers" + "github.com/ignite/cli/v29/ignite/templates/typed" +) + +//go:embed files/* files/**/* +var files embed.FS + +func Box(box fs.FS, opts *Options, g *genny.Generator) error { + if err := g.OnlyFS(box, nil, nil); err != nil { + return err + } + ctx := plush.NewContext() + ctx.Set("ModuleName", opts.ModuleName) + ctx.Set("AppName", opts.AppName) + ctx.Set("ProtoVer", opts.ProtoVer) + ctx.Set("QueryName", opts.QueryName) + ctx.Set("Description", opts.Description) + ctx.Set("ModulePath", opts.ModulePath) + ctx.Set("ReqFields", opts.ReqFields) + ctx.Set("ResFields", opts.ResFields) + ctx.Set("Paginated", opts.Paginated) + + plushhelpers.ExtendPlushContext(ctx) + g.Transformer(xgenny.Transformer(ctx)) + g.Transformer(genny.Replace("{{protoDir}}", opts.ProtoDir)) + g.Transformer(genny.Replace("{{appName}}", opts.AppName)) + g.Transformer(genny.Replace("{{moduleName}}", opts.ModuleName)) + g.Transformer(genny.Replace("{{protoVer}}", opts.ProtoVer)) + g.Transformer(genny.Replace("{{queryName}}", opts.QueryName.Snake)) + return nil +} + +// NewGenerator returns the generator to scaffold a empty query in a module. 
+func NewGenerator(opts *Options) (*genny.Generator, error) { + subFs, err := fs.Sub(files, "files") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + + g := genny.New() + g.RunFn(protoQueryModify(opts)) + g.RunFn(cliQueryModify(opts)) + + return g, Box(subFs, opts, g) +} + +// Modifies query.proto to add the required RPCs and Messages. +// +// What it depends on: +// - Existence of a service with name "Query" since that is where the RPCs will be added. +func protoQueryModify(opts *Options) genny.RunFn { + return func(r *genny.Runner) error { + path := opts.ProtoFile("query.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + + // if the query has request fields, they are appended to the rpc query + var requestPath string + for _, field := range opts.ReqFields { + requestPath += "/" + requestPath = filepath.Join(requestPath, fmt.Sprintf("{%s}", field.ProtoFieldName())) + } + serviceQuery, err := protoutil.GetServiceByName(protoFile, "Query") + if err != nil { + return errors.Errorf("failed while looking up service 'Query' in %s: %w", path, err) + } + + typenamePascal, appModulePath := opts.QueryName.PascalCase, gomodulepath.ExtractAppPath(opts.ModulePath) + rpcSingle := protoutil.NewRPC( + typenamePascal, + fmt.Sprintf("Query%sRequest", typenamePascal), + fmt.Sprintf("Query%sResponse", typenamePascal), + protoutil.WithRPCOptions( + protoutil.NewOption( + "google.api.http", + fmt.Sprintf( + "/%s/%s/%s/%s%s", + appModulePath, opts.ModuleName, opts.ProtoVer, opts.QueryName.Snake, requestPath, + ), + protoutil.Custom(), + protoutil.SetField("get"), + ), + ), + ) + protoutil.AttachComment(rpcSingle, fmt.Sprintf("%[1]v Queries a list of %[1]v items.", typenamePascal)) + protoutil.Append(serviceQuery, rpcSingle) + + // Fields for request + paginationType, paginationName := "cosmos.base.query.v1beta1.Page", "pagination" + var reqFields 
[]*proto.NormalField + for i, field := range opts.ReqFields { + reqFields = append(reqFields, field.ToProtoField(i+1)) + } + if opts.Paginated { + reqFields = append(reqFields, protoutil.NewField(paginationName, paginationType+"Request", len(opts.ReqFields)+1)) + } + requestMessage := protoutil.NewMessage("Query"+typenamePascal+"Request", protoutil.WithFields(reqFields...)) + + // Fields for response + var resFields []*proto.NormalField + for i, field := range opts.ResFields { + resFields = append(resFields, field.ToProtoField(i+1)) + } + if opts.Paginated { + resFields = append(resFields, protoutil.NewField(paginationName, paginationType+"Response", len(opts.ResFields)+1)) + } + responseMessage := protoutil.NewMessage("Query"+typenamePascal+"Response", protoutil.WithFields(resFields...)) + protoutil.Append(protoFile, requestMessage, responseMessage) + + // Ensure custom types are imported + var protoImports []*proto.Import + for _, imp := range append(opts.ResFields.ProtoImports(), opts.ReqFields.ProtoImports()...) { + protoImports = append(protoImports, protoutil.NewImport(imp)) + } + for _, f := range append(opts.ResFields.Custom(), opts.ReqFields.Custom()...) 
{ + protoPath := fmt.Sprintf("%[1]v/%[2]v/%[3]v/%[4]v.proto", opts.AppName, opts.ModuleName, opts.ProtoVer, f) + protoImports = append(protoImports, protoutil.NewImport(protoPath)) + } + if err = protoutil.AddImports(protoFile, true, protoImports...); err != nil { + return errors.Errorf("failed to add imports to %s: %w", path, err) + } + + newFile := genny.NewFileS(path, protoutil.Print(protoFile)) + return r.File(newFile) + } +} + +func cliQueryModify(opts *Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "module/autocli.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + option := fmt.Sprintf( + `{ + RpcMethod: "%[1]v", + Use: "%[2]v", + Short: "%[3]v", + PositionalArgs: []*autocliv1.PositionalArgDescriptor{%[4]s}, + }`, + opts.QueryName.PascalCase, + fmt.Sprintf("%s %s", opts.QueryName.Kebab, opts.ReqFields.CLIUsage()), + opts.Description, + opts.ReqFields.ProtoFieldNameAutoCLI(), + ) + content, err := typed.AppendAutoCLIQueryOptions(f.String(), option) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} diff --git a/ignite/templates/typed/autocli.go b/ignite/templates/typed/autocli.go new file mode 100644 index 0000000..1a0f467 --- /dev/null +++ b/ignite/templates/typed/autocli.go @@ -0,0 +1,366 @@ +package typed + +import ( + "fmt" + "go/ast" + "go/format" + "go/parser" + "go/token" + "strconv" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +const ( + autoCLIServiceQuery = "Query" + autoCLIServiceTx = "Tx" +) + +// AppendAutoCLIQueryOptions appends options to the Query RpcCommandOptions in AutoCLIOptions. +func AppendAutoCLIQueryOptions(content string, options ...string) (string, error) { + return appendAutoCLIOptions(content, autoCLIServiceQuery, options...) +} + +// AppendAutoCLITxOptions appends options to the Tx RpcCommandOptions in AutoCLIOptions. 
+func AppendAutoCLITxOptions(content string, options ...string) (string, error) { + return appendAutoCLIOptions(content, autoCLIServiceTx, options...) +} + +func appendAutoCLIOptions(content, service string, options ...string) (string, error) { + fileSet := token.NewFileSet() + file, err := parser.ParseFile(fileSet, "", content, parser.ParseComments) + if err != nil { + return "", err + } + + autoCLIOptionsFunc := findFunctionByName(file, "AutoCLIOptions") + if autoCLIOptionsFunc == nil { + return "", errors.New(`function "AutoCLIOptions" not found`) + } + + moduleOptionsLit, err := findModuleOptionsLiteral(autoCLIOptionsFunc) + if err != nil { + return "", err + } + + serviceDescriptorField, found := findCompositeField(moduleOptionsLit, service) + if !found { + return "", errors.Errorf("field %q not found in ModuleOptions", service) + } + serviceDescriptorLit, found := resolveCompositeLiteral(serviceDescriptorField.Value) + if !found { + return "", errors.Errorf("field %q is not a composite literal in ModuleOptions", service) + } + + rpcCommandOptionsField, found := findCompositeField(serviceDescriptorLit, "RpcCommandOptions") + if !found { + return "", errors.Errorf(`field "RpcCommandOptions" not found in %q service descriptor`, service) + } + rpcCommandOptionsLit, found := resolveCompositeLiteral(rpcCommandOptionsField.Value) + if !found { + return "", errors.Errorf(`field "RpcCommandOptions" in %q service descriptor is not a composite literal`, service) + } + + existingRPCMethods := map[string]struct{}{} + for _, elt := range rpcCommandOptionsLit.Elts { + method, ok := rpcMethod(elt) + if !ok { + continue + } + existingRPCMethods[method] = struct{}{} + } + + optionsToInsert := make([]string, 0, len(options)) + for _, option := range options { + optionExpr, optionText, parseErr := parseRPCOption(option) + if parseErr != nil { + return "", parseErr + } + if optionExpr == nil { + continue + } + + method, ok := rpcMethod(optionExpr) + if ok { + if _, exists := 
existingRPCMethods[method]; exists { + continue + } + existingRPCMethods[method] = struct{}{} + } + + optionsToInsert = append(optionsToInsert, optionText) + } + + if len(optionsToInsert) == 0 { + return content, nil + } + + content, err = insertAutoCLIOptions(content, fileSet, rpcCommandOptionsLit, optionsToInsert) + if err != nil { + return "", err + } + + formatted, err := format.Source([]byte(content)) + if err != nil { + return "", err + } + + return string(formatted), nil +} + +func parseRPCOption(option string) (ast.Expr, string, error) { + option = normalizeOption(option) + if option == "" { + return nil, "", nil + } + + code := fmt.Sprintf("package p\nvar _ = []*autocliv1.RpcCommandOptions{\n%s,\n}\n", option) + file, err := parser.ParseFile(token.NewFileSet(), "", code, 0) + if err != nil { + return nil, "", errors.Errorf("failed to parse autocli option expression: %w", err) + } + + genDecl, ok := file.Decls[0].(*ast.GenDecl) + if !ok || len(genDecl.Specs) == 0 { + return nil, "", errors.New("failed to parse autocli option expression: generated declaration is invalid") + } + valueSpec, ok := genDecl.Specs[0].(*ast.ValueSpec) + if !ok || len(valueSpec.Values) == 0 { + return nil, "", errors.New("failed to parse autocli option expression: generated value spec is invalid") + } + optionsLit, ok := valueSpec.Values[0].(*ast.CompositeLit) + if !ok || len(optionsLit.Elts) == 0 { + return nil, "", errors.New("failed to parse autocli option expression: generated options literal is invalid") + } + + return optionsLit.Elts[0], option, nil +} + +func normalizeOption(option string) string { + option = strings.TrimSpace(option) + option = strings.TrimSuffix(option, ",") + return strings.TrimSpace(option) +} + +func insertAutoCLIOptions( + content string, + fileSet *token.FileSet, + optionsLiteral *ast.CompositeLit, + optionsToInsert []string, +) (string, error) { + file := fileSet.File(optionsLiteral.Rbrace) + if file == nil { + return "", errors.New(`failed to find 
token file for "RpcCommandOptions"`) + } + + insertOffset := file.Offset(optionsLiteral.Rbrace) + if insertOffset < 0 || insertOffset > len(content) { + return "", errors.New(`invalid insertion offset for "RpcCommandOptions"`) + } + + closingIndentOffset := insertOffset + for closingIndentOffset > 0 { + char := content[closingIndentOffset-1] + if char != '\t' && char != ' ' { + break + } + closingIndentOffset-- + } + + closingIndent := content[closingIndentOffset:insertOffset] + optionIndent := closingIndent + "\t" + + var insertion strings.Builder + for _, option := range optionsToInsert { + insertion.WriteString(indentOption(option, optionIndent)) + insertion.WriteString(",\n") + } + + return content[:closingIndentOffset] + insertion.String() + content[closingIndentOffset:], nil +} + +func indentOption(option, baseIndent string) string { + lines := strings.Split(option, "\n") + lines = trimEmptyLines(lines) + minIndent := minIndentation(lines) + + indented := make([]string, 0, len(lines)) + for _, line := range lines { + if strings.TrimSpace(line) == "" { + continue + } + trimmedLine := strings.TrimRight(line, " \t") + indented = append(indented, baseIndent+removeIndent(trimmedLine, minIndent)) + } + + return strings.Join(indented, "\n") +} + +func trimEmptyLines(lines []string) []string { + start := 0 + for start < len(lines) && strings.TrimSpace(lines[start]) == "" { + start++ + } + + end := len(lines) + for end > start && strings.TrimSpace(lines[end-1]) == "" { + end-- + } + + return lines[start:end] +} + +func minIndentation(lines []string) int { + minIndent := -1 + for _, line := range lines { + if strings.TrimSpace(line) == "" { + continue + } + + indent := 0 + for indent < len(line) { + if line[indent] != ' ' && line[indent] != '\t' { + break + } + indent++ + } + + if minIndent == -1 || indent < minIndent { + minIndent = indent + } + } + + if minIndent < 0 { + return 0 + } + + return minIndent +} + +func removeIndent(line string, indent int) string { + if 
indent <= 0 { + return line + } + + i := 0 + for i < len(line) && i < indent { + if line[i] != ' ' && line[i] != '\t' { + break + } + i++ + } + + return line[i:] +} + +func findModuleOptionsLiteral(autoCLIOptionsFunc *ast.FuncDecl) (*ast.CompositeLit, error) { + if autoCLIOptionsFunc.Body == nil { + return nil, errors.New(`function "AutoCLIOptions" has no body`) + } + + for _, stmt := range autoCLIOptionsFunc.Body.List { + returnStmt, ok := stmt.(*ast.ReturnStmt) + if !ok || len(returnStmt.Results) != 1 { + continue + } + + moduleOptionsLit, found := resolveCompositeLiteral(returnStmt.Results[0]) + if !found { + continue + } + + if !isModuleOptionsLiteral(moduleOptionsLit) { + continue + } + + return moduleOptionsLit, nil + } + + return nil, errors.New(`return statement with "autocliv1.ModuleOptions" literal not found in "AutoCLIOptions"`) +} + +func isModuleOptionsLiteral(moduleOptionsLit *ast.CompositeLit) bool { + selector, ok := moduleOptionsLit.Type.(*ast.SelectorExpr) + if !ok { + return false + } + + pkgIdent, ok := selector.X.(*ast.Ident) + if !ok { + return false + } + + return pkgIdent.Name == "autocliv1" && selector.Sel.Name == "ModuleOptions" +} + +func resolveCompositeLiteral(expr ast.Expr) (*ast.CompositeLit, bool) { + switch typedExpr := expr.(type) { + case *ast.CompositeLit: + return typedExpr, true + case *ast.UnaryExpr: + if typedExpr.Op == token.AND { + return resolveCompositeLiteral(typedExpr.X) + } + case *ast.ParenExpr: + return resolveCompositeLiteral(typedExpr.X) + } + + return nil, false +} + +func findCompositeField(compLit *ast.CompositeLit, fieldName string) (*ast.KeyValueExpr, bool) { + for _, elt := range compLit.Elts { + keyValue, ok := elt.(*ast.KeyValueExpr) + if !ok { + continue + } + + keyIdent, ok := keyValue.Key.(*ast.Ident) + if !ok || keyIdent.Name != fieldName { + continue + } + + return keyValue, true + } + + return nil, false +} + +func rpcMethod(expr ast.Expr) (string, bool) { + commandLit, found := 
resolveCompositeLiteral(expr) + if !found { + return "", false + } + + rpcMethodField, found := findCompositeField(commandLit, "RpcMethod") + if !found { + return "", false + } + + rpcMethodValue, ok := rpcMethodField.Value.(*ast.BasicLit) + if !ok || rpcMethodValue.Kind != token.STRING { + return "", false + } + + method, err := strconv.Unquote(rpcMethodValue.Value) + if err != nil { + return "", false + } + + return method, true +} + +func findFunctionByName(file *ast.File, funcName string) *ast.FuncDecl { + for _, decl := range file.Decls { + funcDecl, ok := decl.(*ast.FuncDecl) + if !ok { + continue + } + if funcDecl.Name.Name == funcName { + return funcDecl + } + } + + return nil +} diff --git a/ignite/templates/typed/autocli_test.go b/ignite/templates/typed/autocli_test.go new file mode 100644 index 0000000..609ad74 --- /dev/null +++ b/ignite/templates/typed/autocli_test.go @@ -0,0 +1,164 @@ +package typed + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/require" +) + +const autoCLITestContent = `package foo + +import ( + autocliv1 "cosmossdk.io/api/cosmos/autocli/v1" +) + +func (am AppModule) AutoCLIOptions() *autocliv1.ModuleOptions { + return &autocliv1.ModuleOptions{ + Query: &autocliv1.ServiceCommandDescriptor{ + RpcCommandOptions: []*autocliv1.RpcCommandOptions{ + { + RpcMethod: "Params", + }, + }, + }, + Tx: &autocliv1.ServiceCommandDescriptor{ + RpcCommandOptions: []*autocliv1.RpcCommandOptions{ + { + RpcMethod: "UpdateParams", + }, + }, + }, + } +} +` + +func TestAppendAutoCLIQueryOptionsSkipsDuplicates(t *testing.T) { + option := `{ + RpcMethod: "ListBook", + Use: "list-book", + Short: "List all books", + }` + + content, err := AppendAutoCLIQueryOptions(autoCLITestContent, option, option) + require.NoError(t, err) + require.Equal(t, 1, strings.Count(content, `RpcMethod: "ListBook"`)) + require.Equal(t, 1, strings.Count(content, `RpcMethod: "Params"`)) +} + +func TestAppendAutoCLIQueryOptionsIsIdempotent(t *testing.T) { + options 
:= []string{ + `{ + RpcMethod: "ListBook", + Use: "list-book", + Short: "List all books", + }`, + `{ + RpcMethod: "GetBook", + Use: "get-book [id]", + Short: "Gets a book", + }`, + } + + content, err := AppendAutoCLIQueryOptions(autoCLITestContent, options...) + require.NoError(t, err) + + content, err = AppendAutoCLIQueryOptions(content, options...) + require.NoError(t, err) + + require.Equal(t, 1, strings.Count(content, `RpcMethod: "ListBook"`)) + require.Equal(t, 1, strings.Count(content, `RpcMethod: "GetBook"`)) + require.Equal(t, 1, strings.Count(content, `RpcMethod: "Params"`)) + require.Equal(t, 1, strings.Count(content, `RpcMethod: "UpdateParams"`)) + require.Greater(t, strings.Index(content, `RpcMethod: "ListBook"`), strings.Index(content, `RpcMethod: "Params"`)) + require.Greater(t, strings.Index(content, `RpcMethod: "GetBook"`), strings.Index(content, `RpcMethod: "ListBook"`)) +} + +func TestAppendAutoCLITxOptionsSkipsDuplicates(t *testing.T) { + option := `{ + RpcMethod: "CreateBook", + Use: "create-book [title]", + Short: "Create a new book", + }` + + content, err := AppendAutoCLITxOptions(autoCLITestContent, option, option) + require.NoError(t, err) + require.Equal(t, 1, strings.Count(content, `RpcMethod: "CreateBook"`)) + require.Equal(t, 1, strings.Count(content, `RpcMethod: "UpdateParams"`)) +} + +func TestAppendAutoCLITxOptionsSkipsExistingMethods(t *testing.T) { + options := []string{ + `{ + RpcMethod: "UpdateParams", + Use: "update-params", + Short: "should be skipped", + }`, + `{ + RpcMethod: "CreateBook", + Use: "create-book [title]", + Short: "Create a new book", + }`, + } + + content, err := AppendAutoCLITxOptions(autoCLITestContent, options...) 
+ require.NoError(t, err) + require.Equal(t, 1, strings.Count(content, `RpcMethod: "UpdateParams"`)) + require.Equal(t, 1, strings.Count(content, `RpcMethod: "CreateBook"`)) + require.Equal(t, 1, strings.Count(content, `RpcMethod: "Params"`)) +} + +func TestAppendAutoCLIQueryOptionsErrors(t *testing.T) { + t.Run("missing AutoCLIOptions", func(t *testing.T) { + _, err := AppendAutoCLIQueryOptions(`package foo`, `{RpcMethod:"ListBook"}`) + require.Error(t, err) + require.Contains(t, err.Error(), `function "AutoCLIOptions" not found`) + }) + + t.Run("missing Query field", func(t *testing.T) { + content := strings.Replace(autoCLITestContent, "Query:", "MissingQuery:", 1) + _, err := AppendAutoCLIQueryOptions(content, `{RpcMethod:"ListBook"}`) + require.Error(t, err) + require.Contains(t, err.Error(), `field "Query" not found in ModuleOptions`) + }) + + t.Run("missing RpcCommandOptions field", func(t *testing.T) { + content := strings.Replace(autoCLITestContent, "RpcCommandOptions:", "MissingRpcOptions:", 1) + _, err := AppendAutoCLIQueryOptions(content, `{RpcMethod:"ListBook"}`) + require.Error(t, err) + require.Contains(t, err.Error(), `field "RpcCommandOptions" not found in "Query" service descriptor`) + }) + + t.Run("invalid option expression", func(t *testing.T) { + _, err := AppendAutoCLIQueryOptions(autoCLITestContent, `invalid(`) + require.Error(t, err) + require.Contains(t, err.Error(), "failed to parse autocli option expression") + }) +} + +func TestAppendAutoCLIQueryOptionsFormatting(t *testing.T) { + options := []string{ + `{ + RpcMethod: "ListBook", + Use: "list-book", + Short: "List all books", + }`, + `{ + RpcMethod: "GetBook", + Use: "get-book [id]", + Short: "Gets a book", + Alias: []string{"show-book"}, + PositionalArgs: []*autocliv1.PositionalArgDescriptor{{ProtoField: "id"}}, + }`, + } + + content, err := AppendAutoCLIQueryOptions(autoCLITestContent, options...) 
+ require.NoError(t, err) + require.NotContains(t, content, "&autocliv1.RpcCommandOptions") + require.NotContains(t, content, "}, {") + require.Contains(t, content, "RpcCommandOptions: []*autocliv1.RpcCommandOptions{") + + normalized := strings.NewReplacer(" ", "", "\t", "", "\n", "").Replace(content) + require.Contains(t, normalized, "RpcMethod:\"ListBook\",") + require.Contains(t, normalized, "RpcMethod:\"GetBook\",") +} diff --git a/ignite/templates/typed/dry/dry.go b/ignite/templates/typed/dry/dry.go new file mode 100644 index 0000000..5b9ed60 --- /dev/null +++ b/ignite/templates/typed/dry/dry.go @@ -0,0 +1,24 @@ +package dry + +import ( + "embed" + "io/fs" + + "github.com/gobuffalo/genny/v2" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/templates/typed" +) + +//go:embed files/component/* files/component/**/* +var fsComponent embed.FS + +// NewGenerator returns the generator to scaffold a basic type in module. +func NewGenerator(opts *typed.Options) (*genny.Generator, error) { + subFs, err := fs.Sub(fsComponent, "files/component") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + g := genny.New() + return g, typed.Box(subFs, opts, g) +} diff --git a/ignite/templates/typed/dry/files/component/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/{{typeName}}.proto.plush b/ignite/templates/typed/dry/files/component/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/{{typeName}}.proto.plush new file mode 100644 index 0000000..2aa98a0 --- /dev/null +++ b/ignite/templates/typed/dry/files/component/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/{{typeName}}.proto.plush @@ -0,0 +1,12 @@ +syntax = "proto3"; +package <%= protoPkgName %>; + +option go_package = "<%= ModulePath %>/x/<%= ModuleName %>/types";<%= for (importName) in mergeCustomImports(Fields) { %> +import "<%= AppName %>/<%= ModuleName %>/<%= ProtoVer %>/<%= importName %>.proto"; <% } %><%= for (importName) in 
mergeProtoImports(Fields) { %> +import "<%= importName %>"; <% } %> + +// <%= TypeName.PascalCase %> defines the <%= TypeName.UpperCamel %> message. +message <%= TypeName.PascalCase %> { + <%= for (i, field) in Fields { %> + <%= field.ProtoType(i+1) %>; <% } %> +} diff --git a/ignite/templates/typed/genesis.go b/ignite/templates/typed/genesis.go new file mode 100644 index 0000000..c1f4d2b --- /dev/null +++ b/ignite/templates/typed/genesis.go @@ -0,0 +1,4 @@ +package typed + +// ProtoGenesisStateMessage is the name of the proto message that represents the genesis state. +const ProtoGenesisStateMessage = "GenesisState" diff --git a/ignite/templates/typed/list/files/component/x/{{moduleName}}/keeper/query_{{typeName}}.go.plush b/ignite/templates/typed/list/files/component/x/{{moduleName}}/keeper/query_{{typeName}}.go.plush new file mode 100644 index 0000000..05e0037 --- /dev/null +++ b/ignite/templates/typed/list/files/component/x/{{moduleName}}/keeper/query_{{typeName}}.go.plush @@ -0,0 +1,50 @@ +package keeper + +import ( + "context" + + "cosmossdk.io/collections" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/cosmos/cosmos-sdk/types/query" + "<%= ModulePath %>/x/<%= ModuleName %>/types" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func (q queryServer) List<%= TypeName.PascalCase %>(ctx context.Context, req *types.QueryAll<%= TypeName.PascalCase %>Request) (*types.QueryAll<%= TypeName.PascalCase %>Response, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + <%= TypeName.LowerCamel %>s, pageRes, err := query.CollectionPaginate( + ctx, + q.k.<%= TypeName.UpperCamel %>, + req.Pagination, + func(_ uint64, value types.<%= TypeName.PascalCase %>) (types.<%= TypeName.PascalCase %>, error){ + return value, nil + }, + ) + + if err != nil { + return nil, status.Error(codes.Internal, err.Error()) + } + + return &types.QueryAll<%= TypeName.PascalCase %>Response{<%= 
TypeName.UpperCamel %>: <%= TypeName.LowerCamel %>s, Pagination: pageRes}, nil +} + +func (q queryServer) Get<%= TypeName.PascalCase %>(ctx context.Context, req *types.QueryGet<%= TypeName.PascalCase %>Request) (*types.QueryGet<%= TypeName.PascalCase %>Response, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + <%= TypeName.LowerCamel %>, err := q.k.<%= TypeName.UpperCamel %>.Get(ctx, req.Id) + if err != nil { + if errors.Is(err, collections.ErrNotFound) { + return nil, sdkerrors.ErrKeyNotFound + } + + return nil, status.Error(codes.Internal, "internal error") + } + + return &types.QueryGet<%= TypeName.PascalCase %>Response{<%= TypeName.UpperCamel %>: <%= TypeName.LowerCamel %>}, nil +} diff --git a/ignite/templates/typed/list/files/component/x/{{moduleName}}/keeper/query_{{typeName}}_test.go.plush b/ignite/templates/typed/list/files/component/x/{{moduleName}}/keeper/query_{{typeName}}_test.go.plush new file mode 100644 index 0000000..6193483 --- /dev/null +++ b/ignite/templates/typed/list/files/component/x/{{moduleName}}/keeper/query_{{typeName}}_test.go.plush @@ -0,0 +1,119 @@ +package keeper_test + +import ( + "context" + "testing" + + <%= for (goImport) in mergeGoImports(Fields) { %> + <%= goImport.Alias %> "<%= goImport.Name %>"<% } %> + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/cosmos/cosmos-sdk/types/query" + "github.com/stretchr/testify/require" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "<%= ModulePath %>/x/<%= ModuleName %>/keeper" + "<%= ModulePath %>/x/<%= ModuleName %>/types" +) + +func createN<%= TypeName.PascalCase %>(keeper keeper.Keeper, ctx context.Context, n int) []types.<%= TypeName.PascalCase %> { + items := make([]types.<%= TypeName.PascalCase %>, n) + for i := range items { + iu := uint64(i) + items[i].Id = iu<%= for (field) in Fields { %> + items[i].<%= field.Name.UpperCamel %> = <%= field.ValueLoop() %><% } %> + _ = keeper.<%= 
TypeName.UpperCamel%>.Set(ctx, iu, items[i]) + _ = keeper.<%= TypeName.UpperCamel%>Seq.Set(ctx, iu) + } + return items +} + +func Test<%= TypeName.PascalCase %>QuerySingle(t *testing.T) { + f := initFixture(t) + qs := keeper.NewQueryServerImpl(f.keeper) + msgs := createN<%= TypeName.PascalCase %>(f.keeper, f.ctx, 2) + tests := []struct { + desc string + request *types.QueryGet<%= TypeName.PascalCase %>Request + response *types.QueryGet<%= TypeName.PascalCase %>Response + err error + }{ + { + desc: "First", + request: &types.QueryGet<%= TypeName.PascalCase %>Request{Id: msgs[0].Id}, + response: &types.QueryGet<%= TypeName.PascalCase %>Response{<%= TypeName.UpperCamel %>: msgs[0]}, + }, + { + desc: "Second", + request: &types.QueryGet<%= TypeName.PascalCase %>Request{Id: msgs[1].Id}, + response: &types.QueryGet<%= TypeName.PascalCase %>Response{<%= TypeName.UpperCamel %>: msgs[1]}, + }, + { + desc: "KeyNotFound", + request: &types.QueryGet<%= TypeName.PascalCase %>Request{Id: uint64(len(msgs))}, + err: sdkerrors.ErrKeyNotFound, + }, + { + desc: "InvalidRequest", + err: status.Error(codes.InvalidArgument, "invalid request"), + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + response, err := qs.Get<%= TypeName.PascalCase %>(f.ctx, tc.request) + if tc.err != nil { + require.ErrorIs(t, err, tc.err) + } else { + require.NoError(t, err) + require.EqualExportedValues(t, tc.response, response) + } + }) + } +} + +func Test<%= TypeName.PascalCase %>QueryPaginated(t *testing.T) { + f := initFixture(t) + qs := keeper.NewQueryServerImpl(f.keeper) + msgs := createN<%= TypeName.PascalCase %>(f.keeper, f.ctx, 5) + + request := func(next []byte, offset, limit uint64, total bool) *types.QueryAll<%= TypeName.PascalCase %>Request { + return &types.QueryAll<%= TypeName.PascalCase %>Request{ + Pagination: &query.PageRequest{ + Key: next, + Offset: offset, + Limit: limit, + CountTotal: total, + }, + } + } + t.Run("ByOffset", func(t *testing.T) { + step := 2 + 
for i := 0; i < len(msgs); i += step { + resp, err := qs.List<%= TypeName.PascalCase %>(f.ctx, request(nil, uint64(i), uint64(step), false)) + require.NoError(t, err) + require.LessOrEqual(t, len(resp.<%= TypeName.UpperCamel %>), step) + require.Subset(t, msgs, resp.<%= TypeName.UpperCamel %>) + } + }) + t.Run("ByKey", func(t *testing.T) { + step := 2 + var next []byte + for i := 0; i < len(msgs); i += step { + resp, err := qs.List<%= TypeName.PascalCase %>(f.ctx, request(next, 0, uint64(step), false)) + require.NoError(t, err) + require.LessOrEqual(t, len(resp.<%= TypeName.UpperCamel %>), step) + require.Subset(t, msgs, resp.<%= TypeName.UpperCamel %>) + next = resp.Pagination.NextKey + } + }) + t.Run("Total", func(t *testing.T) { + resp, err := qs.List<%= TypeName.PascalCase %>(f.ctx, request(nil, 0, 0, true)) + require.NoError(t, err) + require.Equal(t, len(msgs), int(resp.Pagination.Total)) + require.EqualExportedValues(t, msgs, resp.<%= TypeName.UpperCamel %>) + }) + t.Run("InvalidRequest", func(t *testing.T) { + _, err := qs.List<%= TypeName.PascalCase %>(f.ctx, nil) + require.ErrorIs(t, err, status.Error(codes.InvalidArgument, "invalid request")) + }) +} diff --git a/ignite/templates/typed/list/files/component/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/{{typeName}}.proto.plush b/ignite/templates/typed/list/files/component/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/{{typeName}}.proto.plush new file mode 100644 index 0000000..0070b65 --- /dev/null +++ b/ignite/templates/typed/list/files/component/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/{{typeName}}.proto.plush @@ -0,0 +1,13 @@ +syntax = "proto3"; +package <%= protoPkgName %>; + +option go_package = "<%= ModulePath %>/x/<%= ModuleName %>/types";<%= for (importName) in mergeCustomImports(Fields) { %> +import "<%= AppName %>/<%= ModuleName %>/<%= ProtoVer %>/<%= importName %>.proto"; <% } %><%= for (importName) in mergeProtoImports(Fields) { %> +import "<%= importName %>"; <% } %> 
+ +// <%= TypeName.PascalCase %> defines the <%= TypeName.PascalCase %> message. +message <%= TypeName.PascalCase %> { + uint64 id = 1;<%= for (i, field) in Fields { %> + <%= field.ProtoType(i+2) %>; <% } %> + <%= if (!NoMessage) { %>string <%= MsgSigner.Snake %> = <%= len(Fields)+2 %>;<% } %> +} diff --git a/ignite/templates/typed/list/files/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}.go.plush b/ignite/templates/typed/list/files/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}.go.plush new file mode 100644 index 0000000..aa0f90a --- /dev/null +++ b/ignite/templates/typed/list/files/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}.go.plush @@ -0,0 +1,100 @@ +package keeper + +import ( + "fmt" + "context" + + "<%= ModulePath %>/x/<%= ModuleName %>/types" + errorsmod "cosmossdk.io/errors" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + + +func (k msgServer) Create<%= TypeName.PascalCase %>(ctx context.Context, msg *types.MsgCreate<%= TypeName.PascalCase %>) (*types.MsgCreate<%= TypeName.PascalCase %>Response, error) { + if _, err := k.addressCodec.StringToBytes(msg.<%= MsgSigner.UpperCamel %>); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidAddress, fmt.Sprintf("invalid address: %s", err)) + } + + nextId, err := k.<%= TypeName.UpperCamel %>Seq.Next(ctx) + if err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "failed to get next id") + } + + var <%= TypeName.LowerCamel %> = types.<%= TypeName.PascalCase %>{ + Id: nextId, + <%= MsgSigner.UpperCamel %>: msg.<%= MsgSigner.UpperCamel %>,<%= for (field) in Fields { %> + <%= field.Name.UpperCamel %>: msg.<%= field.Name.UpperCamel %>,<% } %> + } + + if err = k.<%= TypeName.UpperCamel %>.Set( + ctx, + nextId, + <%= TypeName.LowerCamel %>, + ); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrLogic, "failed to set <%= TypeName.LowerCamel %>") + } + + return &types.MsgCreate<%= TypeName.PascalCase %>Response{ + Id: nextId, + }, nil +} + +func (k 
msgServer) Update<%= TypeName.PascalCase %>(ctx context.Context, msg *types.MsgUpdate<%= TypeName.PascalCase %>) (*types.MsgUpdate<%= TypeName.PascalCase %>Response, error) { + if _, err := k.addressCodec.StringToBytes(msg.<%= MsgSigner.UpperCamel %>); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidAddress, fmt.Sprintf("invalid address: %s", err)) + } + + var <%= TypeName.LowerCamel %> = types.<%= TypeName.PascalCase %>{ + <%= MsgSigner.UpperCamel %>: msg.<%= MsgSigner.UpperCamel %>, + Id: msg.Id,<%= for (field) in Fields { %> + <%= field.Name.UpperCamel %>: msg.<%= field.Name.UpperCamel %>,<% } %> + } + + // Checks that the element exists + val, err := k.<%= TypeName.UpperCamel %>.Get(ctx, msg.Id) + if err != nil { + if errors.Is(err, collections.ErrNotFound) { + return nil, errorsmod.Wrap(sdkerrors.ErrKeyNotFound, fmt.Sprintf("key %d doesn't exist", msg.Id)) + } + + return nil, errorsmod.Wrap(sdkerrors.ErrLogic, "failed to get <%= TypeName.LowerCamel %>") + } + + // Checks if the msg <%= MsgSigner.LowerCamel %> is the same as the current owner + if msg.<%= MsgSigner.UpperCamel %> != val.<%= MsgSigner.UpperCamel %> { + return nil, errorsmod.Wrap(sdkerrors.ErrUnauthorized, "incorrect owner") + } + + if err := k.<%= TypeName.UpperCamel %>.Set(ctx, msg.Id, <%= TypeName.LowerCamel %>); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrLogic, "failed to update <%= TypeName.LowerCamel %>") + } + + return &types.MsgUpdate<%= TypeName.PascalCase %>Response{}, nil +} + +func (k msgServer) Delete<%= TypeName.PascalCase %>(ctx context.Context, msg *types.MsgDelete<%= TypeName.PascalCase %>) (*types.MsgDelete<%= TypeName.PascalCase %>Response, error) { + if _, err := k.addressCodec.StringToBytes(msg.<%= MsgSigner.UpperCamel %>); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidAddress, fmt.Sprintf("invalid address: %s", err)) + } + + // Checks that the element exists + val, err := k.<%= TypeName.UpperCamel %>.Get(ctx, msg.Id) + if err != nil { 
+ if errors.Is(err, collections.ErrNotFound) { + return nil, errorsmod.Wrap(sdkerrors.ErrKeyNotFound, fmt.Sprintf("key %d doesn't exist", msg.Id)) + } + + return nil, errorsmod.Wrap(sdkerrors.ErrLogic, "failed to get <%= TypeName.LowerCamel %>") + } + + // Checks if the msg <%= MsgSigner.LowerCamel %> is the same as the current owner + if msg.<%= MsgSigner.UpperCamel %> != val.<%= MsgSigner.UpperCamel %> { + return nil, errorsmod.Wrap(sdkerrors.ErrUnauthorized, "incorrect owner") + } + + if err := k.<%= TypeName.UpperCamel %>.Remove(ctx, msg.Id); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrLogic, "failed to delete <%= TypeName.LowerCamel %>") + } + + return &types.MsgDelete<%= TypeName.PascalCase %>Response{}, nil +} diff --git a/ignite/templates/typed/list/files/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}_test.go.plush b/ignite/templates/typed/list/files/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}_test.go.plush new file mode 100644 index 0000000..ec04411 --- /dev/null +++ b/ignite/templates/typed/list/files/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}_test.go.plush @@ -0,0 +1,126 @@ +package keeper_test + +import ( + "testing" + + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/stretchr/testify/require" + + "<%= ModulePath %>/x/<%= ModuleName %>/keeper" + "<%= ModulePath %>/x/<%= ModuleName %>/types" +) + +func Test<%= TypeName.PascalCase %>MsgServerCreate(t *testing.T) { + f := initFixture(t) + srv := keeper.NewMsgServerImpl(f.keeper) + + <%= MsgSigner.LowerCamel %>, err := f.addressCodec.BytesToString([]byte("signerAddr__________________")) + require.NoError(t, err) + + for i := 0; i < 5; i++ { + resp, err := srv.Create<%= TypeName.PascalCase %>(f.ctx, &types.MsgCreate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>}) + require.NoError(t, err) + require.Equal(t, i, int(resp.Id)) + } +} + +func Test<%= 
TypeName.PascalCase %>MsgServerUpdate(t *testing.T) { + f := initFixture(t) + srv := keeper.NewMsgServerImpl(f.keeper) + + <%= MsgSigner.LowerCamel %>, err := f.addressCodec.BytesToString([]byte("signerAddr__________________")) + require.NoError(t, err) + + unauthorizedAddr, err := f.addressCodec.BytesToString([]byte("unauthorizedAddr___________")) + require.NoError(t, err) + + _, err = srv.Create<%= TypeName.PascalCase %>(f.ctx, &types.MsgCreate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>}) + require.NoError(t, err) + + tests := []struct { + desc string + request *types.MsgUpdate<%= TypeName.PascalCase %> + err error + }{ + { + desc: "invalid address", + request: &types.MsgUpdate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: "invalid"}, + err: sdkerrors.ErrInvalidAddress, + }, + { + desc: "unauthorized", + request: &types.MsgUpdate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: unauthorizedAddr}, + err: sdkerrors.ErrUnauthorized, + }, + { + desc: "key not found", + request: &types.MsgUpdate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>, Id: 10}, + err: sdkerrors.ErrKeyNotFound, + }, + { + desc: "completed", + request: &types.MsgUpdate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>}, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + _, err = srv.Update<%= TypeName.PascalCase %>(f.ctx, tc.request) + if tc.err != nil { + require.ErrorIs(t, err, tc.err) + } else { + require.NoError(t, err) + } + }) + } +} + +func Test<%= TypeName.PascalCase %>MsgServerDelete(t *testing.T) { + f := initFixture(t) + srv := keeper.NewMsgServerImpl(f.keeper) + + <%= MsgSigner.LowerCamel %>, err := f.addressCodec.BytesToString([]byte("signerAddr__________________")) + require.NoError(t, err) + + unauthorizedAddr, err := f.addressCodec.BytesToString([]byte("unauthorizedAddr___________")) + require.NoError(t, err) + + _, err = 
srv.Create<%= TypeName.PascalCase %>(f.ctx, &types.MsgCreate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>}) + require.NoError(t, err) + + tests := []struct { + desc string + request *types.MsgDelete<%= TypeName.PascalCase %> + err error + }{ + { + desc: "invalid address", + request: &types.MsgDelete<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: "invalid"}, + err: sdkerrors.ErrInvalidAddress, + }, + { + desc: "unauthorized", + request: &types.MsgDelete<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: unauthorizedAddr}, + err: sdkerrors.ErrUnauthorized, + }, + { + desc: "key not found", + request: &types.MsgDelete<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>, Id: 10}, + err: sdkerrors.ErrKeyNotFound, + }, + { + desc: "completed", + request: &types.MsgDelete<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>}, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + _, err = srv.Delete<%= TypeName.PascalCase %>(f.ctx, tc.request) + if tc.err != nil { + require.ErrorIs(t, err, tc.err) + } else { + require.NoError(t, err) + } + }) + } +} diff --git a/ignite/templates/typed/list/files/simapp/x/{{moduleName}}/simulation/{{typeName}}.go.plush b/ignite/templates/typed/list/files/simapp/x/{{moduleName}}/simulation/{{typeName}}.go.plush new file mode 100644 index 0000000..09cc787 --- /dev/null +++ b/ignite/templates/typed/list/files/simapp/x/{{moduleName}}/simulation/{{typeName}}.go.plush @@ -0,0 +1,163 @@ +package simulation + +import ( + "math/rand" + + "github.com/cosmos/cosmos-sdk/baseapp" + sdk "github.com/cosmos/cosmos-sdk/types" + moduletestutil "github.com/cosmos/cosmos-sdk/types/module/testutil" + simtypes "github.com/cosmos/cosmos-sdk/types/simulation" + "github.com/cosmos/cosmos-sdk/x/simulation" + + "<%= ModulePath %>/x/<%= ModuleName %>/keeper" + "<%= ModulePath %>/x/<%= ModuleName %>/types" +) + +func 
SimulateMsgCreate<%= TypeName.PascalCase %>( + ak types.AuthKeeper, + bk types.BankKeeper, + k keeper.Keeper, + txGen client.TxConfig, +) simtypes.Operation { + return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, + ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { + simAccount, _ := simtypes.RandomAcc(r, accs) + + msg := &types.MsgCreate<%= TypeName.PascalCase %>{ + <%= MsgSigner.UpperCamel %>: simAccount.Address.String(), + } + + txCtx := simulation.OperationInput{ + R: r, + App: app, + TxGen: txGen, + Cdc: nil, + Msg: msg, + Context: ctx, + SimAccount: simAccount, + ModuleName: types.ModuleName, + CoinsSpentInMsg: sdk.NewCoins(), + AccountKeeper: ak, + Bankkeeper: bk, + } + return simulation.GenAndDeliverTxWithRandFees(txCtx) + } +} + +func SimulateMsgUpdate<%= TypeName.PascalCase %>( + ak types.AuthKeeper, + bk types.BankKeeper, + k keeper.Keeper, + txGen client.TxConfig, +) simtypes.Operation { + return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, + ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { + var ( + simAccount = simtypes.Account{} + <%= TypeName.LowerCamel %> = types.<%= TypeName.PascalCase %>{} + msg = &types.MsgUpdate<%= TypeName.PascalCase %>{} + found = false + ) + + var all<%= TypeName.PascalCase %> []types.<%= TypeName.PascalCase %> + err := k.<%= TypeName.UpperCamel %>.Walk(ctx, nil, func(key uint64, value types.<%= TypeName.PascalCase %>) (stop bool, err error) { + all<%= TypeName.PascalCase %> = append(all<%= TypeName.PascalCase %>, value) + return false, nil + }) + if err != nil { + panic(err) + } + + for _, obj := range all<%= TypeName.PascalCase %> { + acc, err := ak.AddressCodec().StringToBytes(obj.<%= MsgSigner.UpperCamel %>) + if err != nil { + return simtypes.OperationMsg{}, nil, err + } + + simAccount, found = simtypes.FindAccount(accs, sdk.AccAddress(acc)) + if found { + <%= TypeName.LowerCamel %> = obj 
+ break + } + } + if !found { + return simtypes.NoOpMsg(types.ModuleName, sdk.MsgTypeURL(msg), "<%= TypeName.LowerCamel %> <%= MsgSigner.LowerCamel %> not found"), nil, nil + } + msg.<%= MsgSigner.UpperCamel %> = simAccount.Address.String() + msg.Id = <%= TypeName.LowerCamel %>.Id + + txCtx := simulation.OperationInput{ + R: r, + App: app, + TxGen: txGen, + Cdc: nil, + Msg: msg, + Context: ctx, + SimAccount: simAccount, + ModuleName: types.ModuleName, + CoinsSpentInMsg: sdk.NewCoins(), + AccountKeeper: ak, + Bankkeeper: bk, + } + return simulation.GenAndDeliverTxWithRandFees(txCtx) + } +} + +func SimulateMsgDelete<%= TypeName.PascalCase %>( + ak types.AuthKeeper, + bk types.BankKeeper, + k keeper.Keeper, + txGen client.TxConfig, +) simtypes.Operation { + return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, + ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { + var ( + simAccount = simtypes.Account{} + <%= TypeName.LowerCamel %> = types.<%= TypeName.PascalCase %>{} + msg = &types.MsgDelete<%= TypeName.PascalCase %>{} + found = false + ) + + var all<%= TypeName.PascalCase %> []types.<%= TypeName.PascalCase %> + err := k.<%= TypeName.UpperCamel %>.Walk(ctx, nil, func(key uint64, value types.<%= TypeName.PascalCase %>) (stop bool, err error) { + all<%= TypeName.PascalCase %> = append(all<%= TypeName.PascalCase %>, value) + return false, nil + }) + if err != nil { + panic(err) + } + + for _, obj := range all<%= TypeName.PascalCase %> { + acc, err := ak.AddressCodec().StringToBytes(obj.<%= MsgSigner.UpperCamel %>) + if err != nil { + return simtypes.OperationMsg{}, nil, err + } + + simAccount, found = simtypes.FindAccount(accs, sdk.AccAddress(acc)) + if found { + <%= TypeName.LowerCamel %> = obj + break + } + } + if !found { + return simtypes.NoOpMsg(types.ModuleName, sdk.MsgTypeURL(msg), "<%= TypeName.LowerCamel %> <%= MsgSigner.LowerCamel %> not found"), nil, nil + } + msg.<%= MsgSigner.UpperCamel %> = 
simAccount.Address.String() + msg.Id = <%= TypeName.LowerCamel %>.Id + + txCtx := simulation.OperationInput{ + R: r, + App: app, + TxGen: txGen, + Cdc: nil, + Msg: msg, + Context: ctx, + SimAccount: simAccount, + ModuleName: types.ModuleName, + CoinsSpentInMsg: sdk.NewCoins(), + AccountKeeper: ak, + Bankkeeper: bk, + } + return simulation.GenAndDeliverTxWithRandFees(txCtx) + } +} diff --git a/ignite/templates/typed/list/genesis.go b/ignite/templates/typed/list/genesis.go new file mode 100644 index 0000000..bfccc15 --- /dev/null +++ b/ignite/templates/typed/list/genesis.go @@ -0,0 +1,295 @@ +package list + +import ( + "fmt" + "path/filepath" + + "github.com/gobuffalo/genny/v2" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" + "github.com/ignite/cli/v29/ignite/pkg/xast" + "github.com/ignite/cli/v29/ignite/templates/typed" +) + +func genesisModify(opts *typed.Options, g *genny.Generator) { + g.RunFn(genesisProtoModify(opts)) + g.RunFn(genesisTypesModify(opts)) + g.RunFn(genesisModuleModify(opts)) + g.RunFn(genesisTestsModify(opts)) + g.RunFn(genesisTypesTestsModify(opts)) +} + +// Modifies the genesis.proto file to add a new field. +// +// What it depends on: +// - Existence of a message with name "GenesisState". Adds the field there. +func genesisProtoModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := opts.ProtoFile("genesis.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + // Add initial import for the new type + gogoImport := protoutil.NewImport(typed.GoGoProtoImport) + if err = protoutil.AddImports(protoFile, true, gogoImport, opts.ProtoTypeImport()); err != nil { + return errors.Errorf("failed while adding imports in %s: %w", path, err) + } + // Get next available sequence number from GenesisState. 
+ genesisState, err := protoutil.GetMessageByName(protoFile, typed.ProtoGenesisStateMessage) + if err != nil { + return errors.Errorf("failed while looking up message '%s' in %s: %w", typed.ProtoGenesisStateMessage, path, err) + } + seqNumber := protoutil.NextUniqueID(genesisState) + typenameSnake, typenamePascal := opts.TypeName.Snake, opts.TypeName.PascalCase + // Create option and List field. + gogoOption := protoutil.NewOption("gogoproto.nullable", "false", protoutil.Custom()) + typeList := protoutil.NewField( + typenameSnake+"_list", + typenamePascal, + seqNumber, + protoutil.Repeated(), + protoutil.WithFieldOptions(gogoOption), + ) + // Create count field. + countFIeld := protoutil.NewField(typenameSnake+"_count", "uint64", seqNumber+1) + protoutil.Append(genesisState, typeList, countFIeld) + + newFile := genny.NewFileS(path, protoutil.Print(protoFile)) + return r.File(newFile) + } +} + +func genesisTypesModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "types/genesis.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + content, err := xast.AppendImports(f.String(), xast.WithImport("fmt")) + if err != nil { + return err + } + + // add parameter to the struct into the new method. 
+ content, err = xast.ModifyFunction(content, "DefaultGenesis", xast.AppendFuncStruct( + "GenesisState", + fmt.Sprintf("%[1]vList", opts.TypeName.UpperCamel), + fmt.Sprintf("[]%[1]v{}", opts.TypeName.PascalCase), + )) + if err != nil { + return err + } + + templateTypesValidate := `// Check for duplicated ID in %[1]v +%[1]vIdMap := make(map[uint64]bool) +%[1]vCount := gs.Get%[2]vCount() +for _, elem := range gs.%[2]vList { + if _, ok := %[1]vIdMap[elem.Id]; ok { + return fmt.Errorf("duplicated id for %[1]v") + } + if elem.Id >= %[1]vCount { + return fmt.Errorf("%[1]v id should be lower or equal than the last id") + } + %[1]vIdMap[elem.Id] = true +}` + replacementTypesValidate := fmt.Sprintf( + templateTypesValidate, + opts.TypeName.LowerCamel, + opts.TypeName.UpperCamel, + ) + content, err = xast.ModifyFunction( + content, + "Validate", + xast.AppendFuncCode(replacementTypesValidate), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func genesisModuleModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "keeper/genesis.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + templateModuleInit := `// Set all the %[1]v +for _, elem := range genState.%[2]vList { + if err := k.%[2]v.Set(ctx, elem.Id, elem); err != nil { + return err + } +} + +// Set %[1]v count +if err := k.%[2]vSeq.Set(ctx, genState.%[2]vCount); err != nil { + return err +}` + replacementModuleInit := fmt.Sprintf( + templateModuleInit, + opts.TypeName.LowerCamel, + opts.TypeName.UpperCamel, + ) + content, err := xast.ModifyFunction( + f.String(), + "InitGenesis", + xast.AppendFuncCode(replacementModuleInit), + ) + if err != nil { + return err + } + + templateModuleExport := ` +err = k.%[1]v.Walk(ctx, nil, func(key uint64, elem types.%[2]v) (bool, error) { + genesis.%[1]vList = append(genesis.%[1]vList, elem) + return false, nil +}) +if err != 
nil { + return nil, err +} + +genesis.%[1]vCount, err = k.%[1]vSeq.Peek(ctx) +if err != nil { + return nil, err +}` + replacementModuleExport := fmt.Sprintf( + templateModuleExport, + opts.TypeName.UpperCamel, + opts.TypeName.PascalCase, + ) + content, err = xast.ModifyFunction( + content, + "ExportGenesis", + xast.AppendFuncCode(replacementModuleExport), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func genesisTestsModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "keeper/genesis_test.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + replacementAssert := fmt.Sprintf(`require.EqualExportedValues(t, genesisState.%[1]vList, got.%[1]vList) +require.Equal(t, genesisState.%[1]vCount, got.%[1]vCount)`, opts.TypeName.UpperCamel) + + // add parameter to the struct into the new method. + content, err := xast.ModifyFunction( + f.String(), + "TestGenesis", + xast.AppendFuncStruct( + "GenesisState", + fmt.Sprintf("%[1]vList", opts.TypeName.UpperCamel), + fmt.Sprintf("[]types.%[1]v{{ Id: 0 }, { Id: 1 }}", opts.TypeName.PascalCase), + ), + xast.AppendFuncStruct( + "GenesisState", + fmt.Sprintf("%[1]vCount", opts.TypeName.UpperCamel), + "2", + ), + xast.AppendFuncCode(replacementAssert), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func genesisTypesTestsModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "types/genesis_test.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + templateTestDuplicated := `{ + desc: "duplicated %[1]v", + genState: &types.GenesisState{ + %[2]vList: []types.%[3]v{ + { + Id: 0, + }, + { + Id: 0, + }, + }, + }, + valid: false, +}` + replacementTestDuplicated := fmt.Sprintf( + templateTestDuplicated, + 
opts.TypeName.LowerCamel, + opts.TypeName.UpperCamel, + opts.TypeName.PascalCase, + ) + + templateTestInvalidCount := `{ + desc: "invalid %[1]v count", + genState: &types.GenesisState{ + %[2]vList: []types.%[3]v{ + { + Id: 1, + }, + }, + %[2]vCount: 0, + }, + valid: false, +}` + replacementInvalidCount := fmt.Sprintf( + templateTestInvalidCount, + opts.TypeName.LowerCamel, + opts.TypeName.UpperCamel, + opts.TypeName.PascalCase, + ) + + // add parameter to the struct into the new method. + content, err := xast.ModifyFunction( + f.String(), + "TestGenesisState_Validate", + xast.AppendFuncStruct( + "GenesisState", + fmt.Sprintf("%[1]vList", opts.TypeName.UpperCamel), + fmt.Sprintf("[]types.%[1]v{{ Id: 0 }, { Id: 1 }}", opts.TypeName.PascalCase), + ), + xast.AppendFuncStruct( + "GenesisState", + fmt.Sprintf("%[1]vCount", opts.TypeName.UpperCamel), + "2", + ), + xast.AppendFuncTestCase(replacementTestDuplicated), + xast.AppendFuncTestCase(replacementInvalidCount), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} diff --git a/ignite/templates/typed/list/list.go b/ignite/templates/typed/list/list.go new file mode 100644 index 0000000..4851915 --- /dev/null +++ b/ignite/templates/typed/list/list.go @@ -0,0 +1,478 @@ +package list + +import ( + "embed" + "fmt" + "io/fs" + "path/filepath" + + "github.com/emicklei/proto" + "github.com/gobuffalo/genny/v2" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gomodulepath" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" + "github.com/ignite/cli/v29/ignite/pkg/xast" + "github.com/ignite/cli/v29/ignite/templates/typed" +) + +var ( + //go:embed files/component/* files/component/**/* + fsComponent embed.FS + + //go:embed files/messages/* files/messages/**/* + fsMessages embed.FS + + //go:embed files/simapp/* files/simapp/**/* + fsSimapp embed.FS +) + +// NewGenerator returns the generator to scaffold a new type in 
a module. +func NewGenerator(opts *typed.Options) (*genny.Generator, error) { + subMessages, err := fs.Sub(fsMessages, "files/messages") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + subComponent, err := fs.Sub(fsComponent, "files/component") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + subSimapp, err := fs.Sub(fsSimapp, "files/simapp") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + + g := genny.New() + g.RunFn(protoQueryModify(opts)) + g.RunFn(typesKeyModify(opts)) + g.RunFn(keeperModify(opts)) + g.RunFn(clientCliQueryModify(opts)) + + // Genesis modifications + genesisModify(opts, g) + + if !opts.NoMessage { + // Modifications for new messages + g.RunFn(protoTxModify(opts)) + g.RunFn(typesCodecModify(opts)) + g.RunFn(clientCliTxModify(opts)) + + if !opts.NoSimulation { + g.RunFn(moduleSimulationModify(opts)) + if err := typed.Box(subSimapp, opts, g); err != nil { + return nil, err + } + } + + // Messages template + if err := typed.Box(subMessages, opts, g); err != nil { + return nil, err + } + } + + return g, typed.Box(subComponent, opts, g) +} + +// protoTxModify modifies the tx.proto file to add the required RPCs and messages. +// +// What it expects: +// - A service named "Msg" to exist in the proto file, it appends the RPCs inside it. +func protoTxModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := opts.ProtoFile("tx.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + + // RPC service + serviceMsg, err := protoutil.GetServiceByName(protoFile, "Msg") + if err != nil { + return errors.Errorf("failed while looking up service 'Msg' in %s: %w", path, err) + } + // Create, update, delete rpcs. Better to append them altogether, single traversal. 
+ typenamePascal := opts.TypeName.PascalCase + protoutil.Append(serviceMsg, + protoutil.NewRPC( + fmt.Sprintf("Create%s", typenamePascal), + fmt.Sprintf("MsgCreate%s", typenamePascal), + fmt.Sprintf("MsgCreate%sResponse", typenamePascal), + ), + protoutil.NewRPC( + fmt.Sprintf("Update%s", typenamePascal), + fmt.Sprintf("MsgUpdate%s", typenamePascal), + fmt.Sprintf("MsgUpdate%sResponse", typenamePascal), + ), + protoutil.NewRPC( + fmt.Sprintf("Delete%s", typenamePascal), + fmt.Sprintf("MsgDelete%s", typenamePascal), + fmt.Sprintf("MsgDelete%sResponse", typenamePascal), + ), + ) + + // - Ensure custom types are imported + var protoImports []*proto.Import + for _, imp := range opts.Fields.ProtoImports() { + protoImports = append(protoImports, protoutil.NewImport(imp)) + } + for _, f := range opts.Fields.Custom() { + protoPath := fmt.Sprintf("%[1]v/%[2]v/%[3]v/%[4]v.proto", opts.AppName, opts.ModuleName, opts.ProtoVer, f) + protoImports = append(protoImports, protoutil.NewImport(protoPath)) + } + // we already know an import exists, pass false for fallback. 
+ if err = protoutil.AddImports(protoFile, true, protoImports...); err != nil { + return errors.Errorf("failed while adding imports in %s: %w", path, err) + } + // Messages + creator := protoutil.NewField(opts.MsgSigner.Snake, "string", 1) + creator.Options = append(creator.Options, protoutil.NewOption("cosmos_proto.scalar", "cosmos.AddressString", protoutil.Custom())) // set the scalar annotation + creatorOpt := protoutil.NewOption(typed.MsgSignerOption, opts.MsgSigner.Snake) + createFields := []*proto.NormalField{creator} + for i, field := range opts.Fields { + createFields = append(createFields, field.ToProtoField(i+2)) + } + udfields := []*proto.NormalField{creator, protoutil.NewField("id", "uint64", 2)} + updateFields := udfields + for i, field := range opts.Fields { + updateFields = append(updateFields, field.ToProtoField(i+3)) + } + + msgCreate := protoutil.NewMessage( + fmt.Sprintf("MsgCreate%s", typenamePascal), + protoutil.WithFields(createFields...), + protoutil.WithMessageOptions(creatorOpt), + ) + msgCreateResponse := protoutil.NewMessage( + fmt.Sprintf("MsgCreate%sResponse", typenamePascal), + protoutil.WithFields(protoutil.NewField("id", "uint64", 1)), + ) + msgUpdate := protoutil.NewMessage( + fmt.Sprintf("MsgUpdate%s", typenamePascal), + protoutil.WithFields(updateFields...), + protoutil.WithMessageOptions(creatorOpt), + ) + msgUpdateResponse := protoutil.NewMessage(fmt.Sprintf("MsgUpdate%sResponse", typenamePascal)) + msgDelete := protoutil.NewMessage( + fmt.Sprintf("MsgDelete%s", typenamePascal), + protoutil.WithFields(udfields...), + protoutil.WithMessageOptions(creatorOpt), + ) + msgDeleteResponse := protoutil.NewMessage(fmt.Sprintf("MsgDelete%sResponse", typenamePascal)) + protoutil.Append( + protoFile, + msgCreate, + msgCreateResponse, + msgUpdate, + msgUpdateResponse, + msgDelete, + msgDeleteResponse, + ) + + newFile := genny.NewFileS(path, protoutil.Print(protoFile)) + return r.File(newFile) + } +} + +// Modifies query.proto to add the 
required RPCs and Messages. +// +// What it depends on: +// - Existence of a service with name "Query". Adds the rpc's there. +func protoQueryModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := opts.ProtoFile("query.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + // Imports for the new type and gogoImport. + gogoImport := protoutil.NewImport(typed.GoGoProtoImport) + if err = protoutil.AddImports(protoFile, true, gogoImport, opts.ProtoTypeImport()); err != nil { + return errors.Errorf("failed while adding imports in %s: %w", path, err) + } + + // Add to Query: + serviceQuery, err := protoutil.GetServiceByName(protoFile, "Query") + if err != nil { + return errors.Errorf("failed while looking up service 'Query' in %s: %w", path, err) + } + appModulePath := gomodulepath.ExtractAppPath(opts.ModulePath) + typenamePascal, typenameSnake := opts.TypeName.PascalCase, opts.TypeName.Snake + rpcQueryGet := protoutil.NewRPC( + fmt.Sprintf("Get%s", typenamePascal), + fmt.Sprintf("QueryGet%sRequest", typenamePascal), + fmt.Sprintf("QueryGet%sResponse", typenamePascal), + protoutil.WithRPCOptions( + protoutil.NewOption( + "google.api.http", + fmt.Sprintf( + "/%s/%s/%s/%s/{id}", + appModulePath, opts.ModuleName, opts.ProtoVer, opts.TypeName.Snake, + ), + protoutil.Custom(), + protoutil.SetField("get"), + ), + ), + ) + protoutil.AttachComment(rpcQueryGet, fmt.Sprintf("Get%[1]v Queries a %[1]v by id.", typenamePascal)) + + rpcQueryAll := protoutil.NewRPC( + fmt.Sprintf("List%s", typenamePascal), + fmt.Sprintf("QueryAll%sRequest", typenamePascal), + fmt.Sprintf("QueryAll%sResponse", typenamePascal), + protoutil.WithRPCOptions( + protoutil.NewOption( + "google.api.http", + fmt.Sprintf( + "/%s/%s/%s/%s", + appModulePath, opts.ModuleName, opts.ProtoVer, opts.TypeName.Snake, + ), + protoutil.Custom(), + protoutil.SetField("get"), + ), + ), + ) 
+ protoutil.AttachComment(rpcQueryGet, fmt.Sprintf("List%[1]v Queries a list of %[1]v items.", typenamePascal)) + protoutil.Append(serviceQuery, rpcQueryGet, rpcQueryAll) + + // Add messages + paginationType, paginationName := "cosmos.base.query.v1beta1.Page", "pagination" + gogoOption := protoutil.NewOption("gogoproto.nullable", "false", protoutil.Custom()) + + queryGetRequest := protoutil.NewMessage( + fmt.Sprintf("QueryGet%sRequest", typenamePascal), + protoutil.WithFields(protoutil.NewField("id", "uint64", 1)), + ) + field := protoutil.NewField(typenameSnake, typenamePascal, 1, protoutil.WithFieldOptions(gogoOption)) + queryGetResponse := protoutil.NewMessage( + fmt.Sprintf("QueryGet%sResponse", typenamePascal), + protoutil.WithFields(field)) + + queryAllRequest := protoutil.NewMessage( + fmt.Sprintf("QueryAll%sRequest", typenamePascal), + protoutil.WithFields(protoutil.NewField(paginationName, paginationType+"Request", 1)), + ) + field = protoutil.NewField(typenameSnake, typenamePascal, 1, protoutil.Repeated(), protoutil.WithFieldOptions(gogoOption)) + queryAllResponse := protoutil.NewMessage( + fmt.Sprintf("QueryAll%sResponse", typenamePascal), + protoutil.WithFields(field, protoutil.NewField(paginationName, paginationType+"Response", 2)), + ) + protoutil.Append(protoFile, queryGetRequest, queryGetResponse, queryAllRequest, queryAllResponse) + + newFile := genny.NewFileS(path, protoutil.Print(protoFile)) + return r.File(newFile) + } +} + +// typesKeyModify modifies the keys.go file to add a new collection prefix. 
+func typesKeyModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "types/keys.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + content := f.String() + fmt.Sprintf(` +var ( + %[1]vKey= collections.NewPrefix("%[2]v/value/") + %[1]vCountKey= collections.NewPrefix("%[2]v/count/") +) +`, + opts.TypeName.PascalCase, + opts.TypeName.LowerCase, + ) + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +// keeperModify modifies the keeper to add a new collections item type. +func keeperModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "keeper/keeper.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + content, err := xast.ModifyStruct( + f.String(), + "Keeper", + xast.AppendStructValue( + fmt.Sprintf("%[1]vSeq", opts.TypeName.UpperCamel), + "collections.Sequence", + ), + xast.AppendStructValue( + opts.TypeName.UpperCamel, + fmt.Sprintf("collections.Map[uint64, types.%[1]v]", opts.TypeName.PascalCase), + ), + ) + if err != nil { + return err + } + + // add parameter to the struct into the new keeper method. 
+ content, err = xast.ModifyFunction( + content, + "NewKeeper", + xast.AppendFuncStruct( + "Keeper", + opts.TypeName.UpperCamel, + fmt.Sprintf(`collections.NewMap(sb, types.%[1]vKey, "%[2]v", collections.Uint64Key, codec.CollValue[types.%[1]v](cdc))`, + opts.TypeName.PascalCase, + opts.TypeName.LowerCamel, + ), + ), + xast.AppendFuncStruct( + "Keeper", + fmt.Sprintf("%[1]vSeq", opts.TypeName.UpperCamel), + fmt.Sprintf(`collections.NewSequence(sb, types.%[2]vCountKey, "%[3]vSequence")`, + opts.TypeName.UpperCamel, + opts.TypeName.PascalCase, + opts.TypeName.LowerCamel, + ), + ), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func typesCodecModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "types/codec.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + // Import + content, err := xast.AppendImports(f.String(), xast.WithNamedImport("sdk", "github.com/cosmos/cosmos-sdk/types")) + if err != nil { + return err + } + + // Interface + templateInterface := `registrar.RegisterImplementations((*sdk.Msg)(nil), + &MsgCreate%[1]v{}, + &MsgUpdate%[1]v{}, + &MsgDelete%[1]v{}, +)` + replacementInterface := fmt.Sprintf(templateInterface, opts.TypeName.PascalCase) + content, err = xast.ModifyFunction( + content, + "RegisterInterfaces", + xast.AppendFuncAtLine(replacementInterface, 0), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func clientCliTxModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "module/autocli.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + createOption := fmt.Sprintf( + `{ + RpcMethod: "Create%[1]v", + Use: "create-%[2]v %[4]s", + Short: "Create %[3]v", + PositionalArgs: []*autocliv1.PositionalArgDescriptor{%[5]s}, + }`, + 
opts.TypeName.PascalCase, + opts.TypeName.Kebab, + opts.TypeName.Original, + opts.Fields.CLIUsage(), + opts.Fields.ProtoFieldNameAutoCLI(), + ) + updateOption := fmt.Sprintf( + `{ + RpcMethod: "Update%[1]v", + Use: "update-%[2]v [id] %[4]s", + Short: "Update %[3]v", + PositionalArgs: []*autocliv1.PositionalArgDescriptor{{ProtoField: "id"}, %[5]s}, + }`, + opts.TypeName.PascalCase, + opts.TypeName.Kebab, + opts.TypeName.Original, + opts.Fields.CLIUsage(), + opts.Fields.ProtoFieldNameAutoCLI(), + ) + deleteOption := fmt.Sprintf( + `{ + RpcMethod: "Delete%[1]v", + Use: "delete-%[2]v [id]", + Short: "Delete %[3]v", + PositionalArgs: []*autocliv1.PositionalArgDescriptor{{ProtoField: "id"}}, + }`, + opts.TypeName.PascalCase, + opts.TypeName.Kebab, + opts.TypeName.Original, + ) + content, err := typed.AppendAutoCLITxOptions(f.String(), createOption, updateOption, deleteOption) + if err != nil { + return err + } + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func clientCliQueryModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "module/autocli.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + listOption := fmt.Sprintf( + `{ + RpcMethod: "List%[1]v", + Use: "list-%[2]v", + Short: "List all %[3]v", + }`, + opts.TypeName.PascalCase, + opts.TypeName.Kebab, + opts.TypeName.Original, + ) + getOption := fmt.Sprintf( + `{ + RpcMethod: "Get%[1]v", + Use: "get-%[2]v [id]", + Short: "Gets a %[3]v by id", + Alias: []string{"show-%[2]v"}, + PositionalArgs: []*autocliv1.PositionalArgDescriptor{{ProtoField: "id"}}, + }`, + opts.TypeName.PascalCase, + opts.TypeName.Kebab, + opts.TypeName.Original, + ) + content, err := typed.AppendAutoCLIQueryOptions(f.String(), listOption, getOption) + if err != nil { + return err + } + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} diff --git a/ignite/templates/typed/list/simulation.go 
b/ignite/templates/typed/list/simulation.go new file mode 100644 index 0000000..6f82607 --- /dev/null +++ b/ignite/templates/typed/list/simulation.go @@ -0,0 +1,63 @@ +package list + +import ( + "fmt" + "path/filepath" + + "github.com/gobuffalo/genny/v2" + + "github.com/ignite/cli/v29/ignite/pkg/xast" + "github.com/ignite/cli/v29/ignite/templates/typed" +) + +func moduleSimulationModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "module/simulation.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + // Create a list of two different indexes and fields to use as sample + msgField := fmt.Sprintf("%s: sample.AccAddress(),\n", opts.MsgSigner.UpperCamel) + + // simulation genesis state + content, err := xast.ModifyFunction( + f.String(), + "GenerateGenesisState", + xast.AppendFuncStruct( + "GenesisState", + fmt.Sprintf("%[1]vList", opts.TypeName.UpperCamel), + fmt.Sprintf( + "[]types.%[2]v{{ Id: 0, %[3]v }, { Id: 1, %[3]v }}", + opts.TypeName.UpperCamel, + opts.TypeName.PascalCase, + msgField, + ), + ), + xast.AppendFuncStruct( + "GenesisState", + fmt.Sprintf("%[1]vCount", opts.TypeName.UpperCamel), + "2", + ), + ) + if err != nil { + return err + } + + content, err = typed.ModuleSimulationMsgModify( + content, + opts.ModulePath, + opts.ModuleName, + opts.TypeName, + opts.MsgSigner, + "Create", "Update", "Delete", + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} diff --git a/ignite/templates/typed/map/files/component/x/{{moduleName}}/keeper/query_{{typeName}}.go.plush b/ignite/templates/typed/map/files/component/x/{{moduleName}}/keeper/query_{{typeName}}.go.plush new file mode 100644 index 0000000..82e655c --- /dev/null +++ b/ignite/templates/typed/map/files/component/x/{{moduleName}}/keeper/query_{{typeName}}.go.plush @@ -0,0 +1,48 @@ +package keeper + +import ( + "context" + + "cosmossdk.io/collections" + 
"github.com/cosmos/cosmos-sdk/types/query" + "<%= ModulePath %>/x/<%= ModuleName %>/types" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func (q queryServer) List<%= TypeName.PascalCase %>(ctx context.Context, req *types.QueryAll<%= TypeName.PascalCase %>Request) (*types.QueryAll<%= TypeName.PascalCase %>Response, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + <%= TypeName.LowerCamel %>s, pageRes, err := query.CollectionPaginate( + ctx, + q.k.<%= TypeName.UpperCamel %>, + req.Pagination, + func(_ <%= Index.DataType() %>, value types.<%= TypeName.PascalCase %>) (types.<%= TypeName.PascalCase %>, error){ + return value, nil + }, + ) + if err != nil { + return nil, status.Error(codes.Internal, err.Error()) + } + + return &types.QueryAll<%= TypeName.PascalCase %>Response{<%= TypeName.UpperCamel %>: <%= TypeName.LowerCamel %>s, Pagination: pageRes}, nil +} + +func (q queryServer) Get<%= TypeName.PascalCase %>(ctx context.Context, req *types.QueryGet<%= TypeName.PascalCase %>Request) (*types.QueryGet<%= TypeName.PascalCase %>Response, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + val, err := q.k.<%= TypeName.UpperCamel %>.Get(ctx, req.<%= Index.Name.UpperCamel %>) + if err != nil { + if errors.Is(err, collections.ErrNotFound) { + return nil, status.Error(codes.NotFound, "not found") + } + + return nil, status.Error(codes.Internal, "internal error") + } + + return &types.QueryGet<%= TypeName.PascalCase %>Response{<%= TypeName.UpperCamel %>: val}, nil +} \ No newline at end of file diff --git a/ignite/templates/typed/map/files/component/x/{{moduleName}}/types/key_{{typeName}}.go.plush b/ignite/templates/typed/map/files/component/x/{{moduleName}}/types/key_{{typeName}}.go.plush new file mode 100644 index 0000000..9a916b5 --- /dev/null +++ b/ignite/templates/typed/map/files/component/x/{{moduleName}}/types/key_{{typeName}}.go.plush @@ 
-0,0 +1,6 @@ +package types + +import "cosmossdk.io/collections" + +// <%= TypeName.PascalCase %>Key is the prefix to retrieve all <%= TypeName.PascalCase %> +var <%= TypeName.PascalCase %>Key = collections.NewPrefix("<%= TypeName.LowerCamel %>/value/") \ No newline at end of file diff --git a/ignite/templates/typed/map/files/component/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/{{typeName}}.proto.plush b/ignite/templates/typed/map/files/component/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/{{typeName}}.proto.plush new file mode 100644 index 0000000..5b6bbf4 --- /dev/null +++ b/ignite/templates/typed/map/files/component/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/{{typeName}}.proto.plush @@ -0,0 +1,14 @@ +syntax = "proto3"; +package <%= protoPkgName %>; + +option go_package = "<%= ModulePath %>/x/<%= ModuleName %>/types";<%= for (importName) in appendFieldsAndMergeCustomImports(Index, Fields) { %> +import "<%= AppName %>/<%= ModuleName %>/<%= ProtoVer %>/<%= importName %>.proto"; <% } %><%= for (importName) in mergeProtoImports(Fields) { %> +import "<%= importName %>"; <% } %> + +// <%= TypeName.PascalCase %> defines the <%= TypeName.PascalCase %> message. 
+message <%= TypeName.PascalCase %> { + <%= Index.ProtoType(1) %>; <%= for (i, field) in Fields { %> + <%= field.ProtoType(i+2) %>; <% } %> + <%= if (!NoMessage) { %>string <%= MsgSigner.Snake %> = <%= len(Fields)+2 %>;<% } %> +} + diff --git a/ignite/templates/typed/map/files/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}.go.plush b/ignite/templates/typed/map/files/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}.go.plush new file mode 100644 index 0000000..ae094ce --- /dev/null +++ b/ignite/templates/typed/map/files/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}.go.plush @@ -0,0 +1,100 @@ +package keeper + +import ( + "fmt" + "context" + + "<%= ModulePath %>/x/<%= ModuleName %>/types" + "cosmossdk.io/collections" + errorsmod "cosmossdk.io/errors" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + + +func (k msgServer) Create<%= TypeName.PascalCase %>(ctx context.Context, msg *types.MsgCreate<%= TypeName.PascalCase %>) (*types.MsgCreate<%= TypeName.PascalCase %>Response, error) { + if _, err := k.addressCodec.StringToBytes(msg.<%= MsgSigner.UpperCamel %>); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidAddress, fmt.Sprintf("invalid address: %s", err)) + } + + // Check if the value already exists + ok, err := k.<%= TypeName.UpperCamel %>.Has(ctx, msg.<%= Index.Name.UpperCamel %>) + if err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrLogic, err.Error()) + } else if ok { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "index already set") + } + + var <%= TypeName.LowerCamel %> = types.<%= TypeName.PascalCase %>{ + <%= MsgSigner.UpperCamel %>: msg.<%= MsgSigner.UpperCamel %>, + <%= Index.Name.UpperCamel %>: msg.<%= Index.Name.UpperCamel %>, + <%= for (field) in Fields { %><%= field.Name.UpperCamel %>: msg.<%= field.Name.UpperCamel %>, + <% } %> + } + + if err := k.<%= TypeName.UpperCamel %>.Set(ctx, <%= TypeName.LowerCamel %>.<%= Index.Name.UpperCamel %>, <%= TypeName.LowerCamel %>); err != nil { + 
return nil, errorsmod.Wrap(sdkerrors.ErrLogic, err.Error()) + } + + return &types.MsgCreate<%= TypeName.PascalCase %>Response{}, nil +} + +func (k msgServer) Update<%= TypeName.PascalCase %>(ctx context.Context, msg *types.MsgUpdate<%= TypeName.PascalCase %>) (*types.MsgUpdate<%= TypeName.PascalCase %>Response, error) { + if _, err := k.addressCodec.StringToBytes(msg.<%= MsgSigner.UpperCamel %>); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidAddress, fmt.Sprintf("invalid signer address: %s", err)) + } + + // Check if the value exists + val, err := k.<%= TypeName.UpperCamel %>.Get(ctx, msg.<%= Index.Name.UpperCamel %>) + if err != nil { + if errors.Is(err, collections.ErrNotFound) { + return nil, errorsmod.Wrap(sdkerrors.ErrKeyNotFound, "index not set") + } + + return nil, errorsmod.Wrap(sdkerrors.ErrLogic, err.Error()) + } + + // Checks if the msg <%= MsgSigner.LowerCamel %> is the same as the current owner + if msg.<%= MsgSigner.UpperCamel %> != val.<%= MsgSigner.UpperCamel %> { + return nil, errorsmod.Wrap(sdkerrors.ErrUnauthorized, "incorrect owner") + } + + var <%= TypeName.LowerCamel %> = types.<%= TypeName.PascalCase %>{ + <%= MsgSigner.UpperCamel %>: msg.<%= MsgSigner.UpperCamel %>, + <%= Index.Name.UpperCamel %>: msg.<%= Index.Name.UpperCamel %>, + <%= for (field) in Fields { %><%= field.Name.UpperCamel %>: msg.<%= field.Name.UpperCamel %>, + <% } %> + } + + if err := k.<%= TypeName.UpperCamel %>.Set(ctx, <%= TypeName.LowerCamel %>.<%= Index.Name.UpperCamel %>, <%= TypeName.LowerCamel %>); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrLogic, "failed to update <%= TypeName.LowerCamel %>") + } + + return &types.MsgUpdate<%= TypeName.PascalCase %>Response{}, nil +} + +func (k msgServer) Delete<%= TypeName.PascalCase %>(ctx context.Context, msg *types.MsgDelete<%= TypeName.PascalCase %>) (*types.MsgDelete<%= TypeName.PascalCase %>Response, error) { + if _, err := k.addressCodec.StringToBytes(msg.<%= MsgSigner.UpperCamel %>); err != nil 
{ + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidAddress, fmt.Sprintf("invalid signer address: %s", err)) + } + + // Check if the value exists + val, err := k.<%= TypeName.UpperCamel %>.Get(ctx, msg.<%= Index.Name.UpperCamel %>) + if err != nil { + if errors.Is(err, collections.ErrNotFound) { + return nil, errorsmod.Wrap(sdkerrors.ErrKeyNotFound, "index not set") + } + + return nil, errorsmod.Wrap(sdkerrors.ErrLogic, err.Error()) + } + + // Checks if the msg <%= MsgSigner.LowerCamel %> is the same as the current owner + if msg.<%= MsgSigner.UpperCamel %> != val.<%= MsgSigner.UpperCamel %> { + return nil, errorsmod.Wrap(sdkerrors.ErrUnauthorized, "incorrect owner") + } + + if err := k.<%= TypeName.UpperCamel %>.Remove(ctx, msg.<%= Index.Name.UpperCamel %>); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrLogic, "failed to remove <%= TypeName.LowerCamel %>") + } + + return &types.MsgDelete<%= TypeName.PascalCase %>Response{}, nil +} diff --git a/ignite/templates/typed/map/files/simapp/x/{{moduleName}}/simulation/{{typeName}}.go.plush b/ignite/templates/typed/map/files/simapp/x/{{moduleName}}/simulation/{{typeName}}.go.plush new file mode 100644 index 0000000..6ad3a16 --- /dev/null +++ b/ignite/templates/typed/map/files/simapp/x/{{moduleName}}/simulation/{{typeName}}.go.plush @@ -0,0 +1,171 @@ +package simulation + +import ( + "math/rand" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client" + sdk "github.com/cosmos/cosmos-sdk/types" + moduletestutil "github.com/cosmos/cosmos-sdk/types/module/testutil" + simtypes "github.com/cosmos/cosmos-sdk/types/simulation" + "github.com/cosmos/cosmos-sdk/x/simulation" + + "<%= ModulePath %>/x/<%= ModuleName %>/keeper" + "<%= ModulePath %>/x/<%= ModuleName %>/types" +) + +func SimulateMsgCreate<%= TypeName.PascalCase %>( + ak types.AuthKeeper, + bk types.BankKeeper, + k keeper.Keeper, + txGen client.TxConfig, +) simtypes.Operation { + return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID
string, + ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { + simAccount, _ := simtypes.RandomAcc(r, accs) + + i := r.Int() + msg := &types.MsgCreate<%= TypeName.PascalCase %>{ + <%= MsgSigner.UpperCamel %>: simAccount.Address.String(), + <%= Index.Name.UpperCamel %>: <%= Index.ValueLoop() %>, + } + + found, err := k.<%= TypeName.UpperCamel %>.Has(ctx, msg.<%= Index.Name.UpperCamel %>) + if err == nil && found { + return simtypes.NoOpMsg(types.ModuleName, sdk.MsgTypeURL(msg), "<%= TypeName.UpperCamel %> already exist"), nil, nil + } + + txCtx := simulation.OperationInput{ + R: r, + App: app, + TxGen: txGen, + Cdc: nil, + Msg: msg, + Context: ctx, + SimAccount: simAccount, + ModuleName: types.ModuleName, + CoinsSpentInMsg: sdk.NewCoins(), + AccountKeeper: ak, + Bankkeeper: bk, + } + return simulation.GenAndDeliverTxWithRandFees(txCtx) + } +} + +func SimulateMsgUpdate<%= TypeName.PascalCase %>( + ak types.AuthKeeper, + bk types.BankKeeper, + k keeper.Keeper, + txGen client.TxConfig, +) simtypes.Operation { + return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, + ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { + var ( + simAccount = simtypes.Account{} + <%= TypeName.LowerCamel %> = types.<%= TypeName.PascalCase %>{} + msg = &types.MsgUpdate<%= TypeName.PascalCase %>{} + found = false + ) + + var all<%= TypeName.PascalCase %> []types.<%= TypeName.PascalCase %> + err := k.<%= TypeName.UpperCamel %>.Walk(ctx, nil, func(key <%= Index.DataType() %>, value types.<%= TypeName.PascalCase %>) (stop bool, err error) { + all<%= TypeName.PascalCase %> = append(all<%= TypeName.PascalCase %>, value) + return false, nil + }) + if err != nil { + panic(err) + } + + for _, obj := range all<%= TypeName.PascalCase %> { + acc, err := ak.AddressCodec().StringToBytes(obj.<%= MsgSigner.UpperCamel %>) + if err != nil { + return simtypes.OperationMsg{}, nil, err + } + + simAccount, found = 
simtypes.FindAccount(accs, sdk.AccAddress(acc)) + if found { + <%= TypeName.LowerCamel %> = obj + break + } + } + if !found { + return simtypes.NoOpMsg(types.ModuleName, sdk.MsgTypeURL(msg), "<%= TypeName.LowerCamel %> <%= MsgSigner.LowerCamel %> not found"), nil, nil + } + msg.<%= MsgSigner.UpperCamel %> = simAccount.Address.String() + msg.<%= Index.Name.UpperCamel %> = <%= TypeName.LowerCamel %>.<%= Index.Name.UpperCamel %> + + txCtx := simulation.OperationInput{ + R: r, + App: app, + TxGen: txGen, + Cdc: nil, + Msg: msg, + Context: ctx, + SimAccount: simAccount, + ModuleName: types.ModuleName, + CoinsSpentInMsg: sdk.NewCoins(), + AccountKeeper: ak, + Bankkeeper: bk, + } + return simulation.GenAndDeliverTxWithRandFees(txCtx) + } +} + +func SimulateMsgDelete<%= TypeName.PascalCase %>( + ak types.AuthKeeper, + bk types.BankKeeper, + k keeper.Keeper, + txGen client.TxConfig, +) simtypes.Operation { + return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, + ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { + var ( + simAccount = simtypes.Account{} + <%= TypeName.LowerCamel %> = types.<%= TypeName.PascalCase %>{} + msg = &types.MsgDelete<%= TypeName.PascalCase %>{} + found = false + ) + + var all<%= TypeName.PascalCase %> []types.<%= TypeName.PascalCase %> + err := k.<%= TypeName.UpperCamel %>.Walk(ctx, nil, func(key <%= Index.DataType() %>, value types.<%= TypeName.PascalCase %>) (stop bool, err error) { + all<%= TypeName.PascalCase %> = append(all<%= TypeName.PascalCase %>, value) + return false, nil + }) + if err != nil { + panic(err) + } + + for _, obj := range all<%= TypeName.PascalCase %> { + acc, err := ak.AddressCodec().StringToBytes(obj.<%= MsgSigner.UpperCamel %>) + if err != nil { + return simtypes.OperationMsg{}, nil, err + } + + simAccount, found = simtypes.FindAccount(accs, sdk.AccAddress(acc)) + if found { + <%= TypeName.LowerCamel %> = obj + break + } + } + if !found { + return
simtypes.NoOpMsg(types.ModuleName, sdk.MsgTypeURL(msg), "<%= TypeName.LowerCamel %> <%= MsgSigner.LowerCamel %> not found"), nil, nil + } + msg.<%= MsgSigner.UpperCamel %> = simAccount.Address.String() + msg.<%= Index.Name.UpperCamel %> = <%= TypeName.LowerCamel %>.<%= Index.Name.UpperCamel %> + + txCtx := simulation.OperationInput{ + R: r, + App: app, + TxGen: txGen, + Cdc: nil, + Msg: msg, + Context: ctx, + SimAccount: simAccount, + ModuleName: types.ModuleName, + CoinsSpentInMsg: sdk.NewCoins(), + AccountKeeper: ak, + Bankkeeper: bk, + } + return simulation.GenAndDeliverTxWithRandFees(txCtx) + } +} diff --git a/ignite/templates/typed/map/files/tests/component/x/{{moduleName}}/keeper/query_{{typeName}}_test.go.plush b/ignite/templates/typed/map/files/tests/component/x/{{moduleName}}/keeper/query_{{typeName}}_test.go.plush new file mode 100644 index 0000000..58dae54 --- /dev/null +++ b/ignite/templates/typed/map/files/tests/component/x/{{moduleName}}/keeper/query_{{typeName}}_test.go.plush @@ -0,0 +1,122 @@ +package keeper_test + +import ( + "context" + "testing" + + <%= for (goImport) in mergeGoImports(Fields) { %> + <%= goImport.Alias %> "<%= goImport.Name %>"<% } %> + "github.com/cosmos/cosmos-sdk/types/query" + "github.com/stretchr/testify/require" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "<%= ModulePath %>/x/<%= ModuleName %>/keeper" + "<%= ModulePath %>/x/<%= ModuleName %>/types" +) + +func createN<%= TypeName.PascalCase %>(keeper keeper.Keeper, ctx context.Context, n int) []types.<%= TypeName.PascalCase %> { + items := make([]types.<%= TypeName.PascalCase %>, n) + for i := range items { + items[i].<%= Index.Name.UpperCamel %> = <%= Index.ValueLoop() %><%= for (field) in Fields { %> + items[i].<%= field.Name.UpperCamel %> = <%= field.ValueLoop() %><% } %> + _ = keeper.<%= TypeName.UpperCamel %>.Set(ctx, items[i].<%= Index.Name.UpperCamel %>, items[i]) + } + return items +} + +func Test<%= TypeName.PascalCase %>QuerySingle(t 
*testing.T) { + f := initFixture(t) + qs := keeper.NewQueryServerImpl(f.keeper) + msgs := createN<%= TypeName.PascalCase %>(f.keeper, f.ctx, 2) + tests := []struct { + desc string + request *types.QueryGet<%= TypeName.PascalCase %>Request + response *types.QueryGet<%= TypeName.PascalCase %>Response + err error + }{ + { + desc: "First", + request: &types.QueryGet<%= TypeName.PascalCase %>Request{ + <%= Index.Name.UpperCamel %>: msgs[0].<%= Index.Name.UpperCamel %>, + }, + response: &types.QueryGet<%= TypeName.PascalCase %>Response{<%= TypeName.UpperCamel %>: msgs[0]}, + }, + { + desc: "Second", + request: &types.QueryGet<%= TypeName.PascalCase %>Request{ + <%= Index.Name.UpperCamel %>: msgs[1].<%= Index.Name.UpperCamel %>, + }, + response: &types.QueryGet<%= TypeName.PascalCase %>Response{<%= TypeName.UpperCamel %>: msgs[1]}, + }, + { + desc: "KeyNotFound", + request: &types.QueryGet<%= TypeName.PascalCase %>Request{ + <%= Index.Name.UpperCamel %>: <%= Index.ValueInvalidIndex() %>, + }, + err: status.Error(codes.NotFound, "not found"), + }, + { + desc: "InvalidRequest", + err: status.Error(codes.InvalidArgument, "invalid request"), + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + response, err := qs.Get<%= TypeName.PascalCase %>(f.ctx, tc.request) + if tc.err != nil { + require.ErrorIs(t, err, tc.err) + } else { + require.NoError(t, err) + require.EqualExportedValues(t, tc.response, response) + } + }) + } +} + +func Test<%= TypeName.PascalCase %>QueryPaginated(t *testing.T) { + f := initFixture(t) + qs := keeper.NewQueryServerImpl(f.keeper) + msgs := createN<%= TypeName.PascalCase %>(f.keeper, f.ctx, 5) + + request := func(next []byte, offset, limit uint64, total bool) *types.QueryAll<%= TypeName.PascalCase %>Request { + return &types.QueryAll<%= TypeName.PascalCase %>Request{ + Pagination: &query.PageRequest{ + Key: next, + Offset: offset, + Limit: limit, + CountTotal: total, + }, + } + } + t.Run("ByOffset", func(t *testing.T) { + step 
:= 2 + for i := 0; i < len(msgs); i += step { + resp, err := qs.List<%= TypeName.PascalCase %>(f.ctx, request(nil, uint64(i), uint64(step), false)) + require.NoError(t, err) + require.LessOrEqual(t, len(resp.<%= TypeName.UpperCamel %>), step) + require.Subset(t, msgs, resp.<%= TypeName.UpperCamel %>) + } + }) + t.Run("ByKey", func(t *testing.T) { + step := 2 + var next []byte + for i := 0; i < len(msgs); i += step { + resp, err := qs.List<%= TypeName.PascalCase %>(f.ctx, request(next, 0, uint64(step), false)) + require.NoError(t, err) + require.LessOrEqual(t, len(resp.<%= TypeName.UpperCamel %>), step) + require.Subset(t, msgs, resp.<%= TypeName.UpperCamel %>) + next = resp.Pagination.NextKey + } + }) + t.Run("Total", func(t *testing.T) { + resp, err := qs.List<%= TypeName.PascalCase %>(f.ctx, request(nil, 0, 0, true)) + require.NoError(t, err) + require.Equal(t, len(msgs), int(resp.Pagination.Total)) + require.EqualExportedValues(t, msgs, resp.<%= TypeName.UpperCamel %>) + }) + t.Run("InvalidRequest", func(t *testing.T) { + _, err := qs.List<%= TypeName.PascalCase %>(f.ctx, nil) + require.ErrorIs(t, err, status.Error(codes.InvalidArgument, "invalid request")) + }) +} diff --git a/ignite/templates/typed/map/files/tests/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}_test.go.plush b/ignite/templates/typed/map/files/tests/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}_test.go.plush new file mode 100644 index 0000000..7c87718 --- /dev/null +++ b/ignite/templates/typed/map/files/tests/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}_test.go.plush @@ -0,0 +1,156 @@ +package keeper_test + +import ( + "testing" + + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/stretchr/testify/require" + + "<%= ModulePath %>/x/<%= ModuleName %>/keeper" + "<%= ModulePath %>/x/<%= ModuleName %>/types" +) + +func Test<%= TypeName.PascalCase %>MsgServerCreate(t *testing.T) { + f := initFixture(t) + srv := keeper.NewMsgServerImpl(f.keeper) + <%= 
MsgSigner.LowerCamel %>, err := f.addressCodec.BytesToString([]byte("signerAddr__________________")) + require.NoError(t, err) + + for i := 0; i < 5; i++ { + expected := &types.MsgCreate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>, + <%= Index.Name.UpperCamel %>: <%= Index.ValueLoop() %>, + } + _, err := srv.Create<%= TypeName.PascalCase %>(f.ctx, expected) + require.NoError(t, err) + rst, err := f.keeper.<%= TypeName.UpperCamel %>.Get(f.ctx, expected.<%= Index.Name.UpperCamel %>) + require.NoError(t, err) + require.Equal(t, expected.<%= MsgSigner.UpperCamel %>, rst.<%= MsgSigner.UpperCamel %>) + } +} + +func Test<%= TypeName.PascalCase %>MsgServerUpdate(t *testing.T) { + f := initFixture(t) + srv := keeper.NewMsgServerImpl(f.keeper) + + <%= MsgSigner.LowerCamel %>, err := f.addressCodec.BytesToString([]byte("signerAddr__________________")) + require.NoError(t, err) + + unauthorizedAddr, err := f.addressCodec.BytesToString([]byte("unauthorizedAddr___________")) + require.NoError(t, err) + + expected := &types.MsgCreate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>, + <%= Index.Name.UpperCamel %>: <%= Index.ValueIndex() %>, + } + _, err = srv.Create<%= TypeName.PascalCase %>(f.ctx, expected) + require.NoError(t, err) + + tests := []struct { + desc string + request *types.MsgUpdate<%= TypeName.PascalCase %> + err error + }{ + { + desc: "invalid address", + request: &types.MsgUpdate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: "invalid", + <%= Index.Name.UpperCamel %>: <%= Index.ValueIndex() %>, + }, + err: sdkerrors.ErrInvalidAddress, + }, + { + desc: "unauthorized", + request: &types.MsgUpdate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: unauthorizedAddr, + <%= Index.Name.UpperCamel %>: <%= Index.ValueIndex() %>, + }, + err: sdkerrors.ErrUnauthorized, + }, + { + desc: "key not found", + request: &types.MsgUpdate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= 
MsgSigner.LowerCamel %>, + <%= Index.Name.UpperCamel %>: <%= Index.ValueInvalidIndex() %>, + }, + err: sdkerrors.ErrKeyNotFound, + }, + { + desc: "completed", + request: &types.MsgUpdate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>, + <%= Index.Name.UpperCamel %>: <%= Index.ValueIndex() %>, + }, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + _, err = srv.Update<%= TypeName.PascalCase %>(f.ctx, tc.request) + if tc.err != nil { + require.ErrorIs(t, err, tc.err) + } else { + require.NoError(t, err) + rst, err := f.keeper.<%= TypeName.UpperCamel %>.Get(f.ctx, expected.<%= Index.Name.UpperCamel %>) + require.NoError(t, err) + require.Equal(t, expected.<%= MsgSigner.UpperCamel %>, rst.<%= MsgSigner.UpperCamel %>) + } + }) + } +} + +func Test<%= TypeName.PascalCase %>MsgServerDelete(t *testing.T) { + f := initFixture(t) + srv := keeper.NewMsgServerImpl(f.keeper) + + <%= MsgSigner.LowerCamel %>, err := f.addressCodec.BytesToString([]byte("signerAddr__________________")) + require.NoError(t, err) + + unauthorizedAddr, err := f.addressCodec.BytesToString([]byte("unauthorizedAddr___________")) + require.NoError(t, err) + + _, err = srv.Create<%= TypeName.PascalCase %>(f.ctx, &types.MsgCreate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>, + <%= Index.Name.UpperCamel %>: <%= Index.ValueIndex() %>, + }) + require.NoError(t, err) + + tests := []struct { + desc string + request *types.MsgDelete<%= TypeName.PascalCase %> + err error + }{ + { + desc: "invalid address", + request: &types.MsgDelete<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: "invalid", + <%= Index.Name.UpperCamel %>: <%= Index.ValueIndex() %>, + }, + err: sdkerrors.ErrInvalidAddress, + }, + { + desc: "unauthorized", + request: &types.MsgDelete<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: unauthorizedAddr, + <%= Index.Name.UpperCamel %>: <%= Index.ValueIndex() %>, + }, + err:
sdkerrors.ErrUnauthorized, + }, + { + desc: "key not found", + request: &types.MsgDelete<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>, + <%= Index.Name.UpperCamel %>: <%= Index.ValueInvalidIndex() %>, + }, + err: sdkerrors.ErrKeyNotFound, + }, + { + desc: "completed", + request: &types.MsgDelete<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>, + <%= Index.Name.UpperCamel %>: <%= Index.ValueIndex() %>, + }, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + _, err = srv.Delete<%= TypeName.PascalCase %>(f.ctx, tc.request) + if tc.err != nil { + require.ErrorIs(t, err, tc.err) + } else { + require.NoError(t, err) + found, err := f.keeper.<%= TypeName.UpperCamel %>.Has(f.ctx, tc.request.<%= Index.Name.UpperCamel %>) + require.NoError(t, err) + require.False(t, found) + } + }) + } +} diff --git a/ignite/templates/typed/map/map.go b/ignite/templates/typed/map/map.go new file mode 100644 index 0000000..2a1294f --- /dev/null +++ b/ignite/templates/typed/map/map.go @@ -0,0 +1,751 @@ +package maptype + +import ( + "embed" + "fmt" + "io/fs" + "path/filepath" + "strings" + + "github.com/emicklei/proto" + "github.com/gobuffalo/genny/v2" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gomodulepath" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" + "github.com/ignite/cli/v29/ignite/pkg/xast" + "github.com/ignite/cli/v29/ignite/templates/field/datatype" + "github.com/ignite/cli/v29/ignite/templates/typed" +) + +var ( + //go:embed files/messages/* files/messages/**/* + fsMessages embed.FS + + //go:embed files/tests/messages/* files/tests/messages/**/* + fsTestsMessages embed.FS + + //go:embed files/component/* files/component/**/* + fsComponent embed.FS + + //go:embed files/tests/component/* files/tests/component/**/* + fsTestsComponent embed.FS + + //go:embed files/simapp/* files/simapp/**/* + fsSimapp embed.FS +) + +// 
NewGenerator returns the generator to scaffold a new map type in a module. +func NewGenerator(opts *typed.Options) (*genny.Generator, error) { + // Tests are not generated for map with a custom index that contains only booleans + // because we can't generate reliable tests for this type + var generateTest bool + if opts.Index.DatatypeName != datatype.Bool { + generateTest = true + } + + subMessages, err := fs.Sub(fsMessages, "files/messages") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + subTestsMessages, err := fs.Sub(fsTestsMessages, "files/tests/messages") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + subComponent, err := fs.Sub(fsComponent, "files/component") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + subTestsComponent, err := fs.Sub(fsTestsComponent, "files/tests/component") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + subSimapp, err := fs.Sub(fsSimapp, "files/simapp") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + + g := genny.New() + g.RunFn(protoRPCModify(opts)) + g.RunFn(keeperModify(opts)) + g.RunFn(clientCliQueryModify(opts)) + g.RunFn(genesisProtoModify(opts)) + g.RunFn(genesisTypesModify(opts)) + g.RunFn(genesisModuleModify(opts)) + g.RunFn(genesisTestsModify(opts)) + g.RunFn(genesisTypesTestsModify(opts)) + + // Modifications for new messages + if !opts.NoMessage { + g.RunFn(protoTxModify(opts)) + g.RunFn(clientCliTxModify(opts)) + g.RunFn(typesCodecModify(opts)) + + if !opts.NoSimulation { + g.RunFn(moduleSimulationModify(opts)) + if err := typed.Box(subSimapp, opts, g); err != nil { + return nil, err + } + } + + if err := typed.Box(subMessages, opts, g); err != nil { + return nil, err + } + if generateTest { + if err := typed.Box(subTestsMessages, opts, g); err != nil { + return nil, err + } + } + } + + if generateTest { + if err := 
typed.Box(subTestsComponent, opts, g); err != nil { + return nil, err + } + } + return g, typed.Box(subComponent, opts, g) +} + +// keeperModify modifies the keeper to add a new collections map type. +func keeperModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "keeper/keeper.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + content, err := xast.ModifyStruct( + f.String(), + "Keeper", + xast.AppendStructValue( + opts.TypeName.UpperCamel, + fmt.Sprintf("collections.Map[%[1]v, types.%[2]v]", opts.Index.DataType(), opts.TypeName.PascalCase), + ), + ) + if err != nil { + return err + } + + // add parameter to the struct into the new keeper method. + content, err = xast.ModifyFunction( + content, + "NewKeeper", + xast.AppendFuncStruct( + "Keeper", + opts.TypeName.UpperCamel, + fmt.Sprintf(`collections.NewMap(sb, types.%[1]vKey, "%[2]v", %[3]v, codec.CollValue[types.%[1]v](cdc))`, + opts.TypeName.PascalCase, + opts.TypeName.LowerCamel, + opts.Index.CollectionsKeyValueType(), + ), + ), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +// Modifies query.proto to add the required RPCs and Messages. +// +// What it depends on: +// - Existence of a service with name "Query". Adds the rpc's there. 
+func protoRPCModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := opts.ProtoFile("query.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + // Add initial import for the new type + gogoImport := protoutil.NewImport(typed.GoGoProtoImport) + if err = protoutil.AddImports(protoFile, true, gogoImport, opts.ProtoTypeImport()); err != nil { + return errors.Errorf("failed while adding imports in %s: %w", path, err) + } + + protoIndex := fmt.Sprintf("{%s}", opts.Index.ProtoFieldName()) + appModulePath := gomodulepath.ExtractAppPath(opts.ModulePath) + serviceQuery, err := protoutil.GetServiceByName(protoFile, "Query") + if err != nil { + return errors.Errorf("failed while looking up service 'Query' in %s: %w", path, err) + } + typenamePascal, typenameSnake := opts.TypeName.PascalCase, opts.TypeName.Snake + rpcQueryGet := protoutil.NewRPC( + fmt.Sprintf("Get%s", typenamePascal), + fmt.Sprintf("QueryGet%sRequest", typenamePascal), + fmt.Sprintf("QueryGet%sResponse", typenamePascal), + protoutil.WithRPCOptions( + protoutil.NewOption( + "google.api.http", + fmt.Sprintf( + "/%s/%s/%s/%s/%s", + appModulePath, opts.ModuleName, opts.ProtoVer, typenameSnake, protoIndex, + ), + protoutil.Custom(), + protoutil.SetField("get"), + ), + ), + ) + protoutil.AttachComment(rpcQueryGet, fmt.Sprintf("Get%[1]v queries a %[1]v by index.", typenamePascal)) + + rpcQueryAll := protoutil.NewRPC( + fmt.Sprintf("List%s", typenamePascal), + fmt.Sprintf("QueryAll%sRequest", typenamePascal), + fmt.Sprintf("QueryAll%sResponse", typenamePascal), + protoutil.WithRPCOptions( + protoutil.NewOption( + "google.api.http", + fmt.Sprintf( + "/%s/%s/%s/%s", + appModulePath, opts.ModuleName, opts.ProtoVer, typenameSnake, + ), + protoutil.Custom(), + protoutil.SetField("get"), + ), + ), + ) + protoutil.AttachComment(rpcQueryAll, fmt.Sprintf("List%[1]v Queries a list of
%[1]v items.", typenamePascal)) + protoutil.Append(serviceQuery, rpcQueryGet, rpcQueryAll) + + // Ensure custom types are imported + var protoImports []*proto.Import + for _, imp := range opts.Fields.ProtoImports() { + protoImports = append(protoImports, protoutil.NewImport(imp)) + } + for _, f := range opts.Fields.Custom() { + protoPath := fmt.Sprintf("%[1]v/%[2]v/%[3]v/%[4]v.proto", opts.AppName, opts.ModuleName, opts.ProtoVer, f) + protoImports = append(protoImports, protoutil.NewImport(protoPath)) + } + // we already know an import exists, pass false for fallback. + if err = protoutil.AddImports(protoFile, false, protoImports...); err != nil { + // shouldn't really occur. + return errors.Errorf("failed to add imports to %s: %w", path, err) + } + + // Add the messages. + paginationType, paginationName := "cosmos.base.query.v1beta1.Page", "pagination" + queryGetRequest := protoutil.NewMessage( + fmt.Sprintf("QueryGet%sRequest", typenamePascal), + protoutil.WithFields(opts.Index.ToProtoField(1)), + ) + gogoOption := protoutil.NewOption("gogoproto.nullable", "false", protoutil.Custom()) + queryGetResponse := protoutil.NewMessage( + fmt.Sprintf("QueryGet%sResponse", typenamePascal), + protoutil.WithFields(protoutil.NewField(typenameSnake, typenamePascal, 1, protoutil.WithFieldOptions(gogoOption))), + ) + queryAllRequest := protoutil.NewMessage( + fmt.Sprintf("QueryAll%sRequest", typenamePascal), + protoutil.WithFields(protoutil.NewField(paginationName, paginationType+"Request", 1)), + ) + queryAllResponse := protoutil.NewMessage( + fmt.Sprintf("QueryAll%sResponse", typenamePascal), + protoutil.WithFields( + protoutil.NewField( + typenameSnake, + typenamePascal, + 1, + protoutil.Repeated(), + protoutil.WithFieldOptions(gogoOption), + ), + protoutil.NewField(paginationName, fmt.Sprintf("%sResponse", paginationType), 2), + ), + ) + protoutil.Append(protoFile, queryGetRequest, queryGetResponse, queryAllRequest, queryAllResponse) + + newFile := genny.NewFileS(path, 
protoutil.Print(protoFile)) + return r.File(newFile) + } +} + +func clientCliQueryModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "module/autocli.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + listOption := fmt.Sprintf( + `{ + RpcMethod: "List%[1]v", + Use: "list-%[2]v", + Short: "List all %[3]v", + }`, + opts.TypeName.PascalCase, + opts.TypeName.Kebab, + opts.TypeName.Original, + ) + getOption := fmt.Sprintf( + `{ + RpcMethod: "Get%[1]v", + Use: "get-%[2]v [id]", + Short: "Gets a %[3]v", + Alias: []string{"show-%[2]v"}, + PositionalArgs: []*autocliv1.PositionalArgDescriptor{{ProtoField: "%[4]s"}}, + }`, + opts.TypeName.PascalCase, + opts.TypeName.Kebab, + opts.TypeName.Original, + opts.Index.ProtoFieldName(), + ) + content, err := typed.AppendAutoCLIQueryOptions(f.String(), listOption, getOption) + if err != nil { + return err + } + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +// Modifies the genesis.proto file to add a new field. +// +// What it depends on: +// - Existence of a message with name "GenesisState". Adds the field there. +func genesisProtoModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := opts.ProtoFile("genesis.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + // Add initial import for the new type + gogoImport := protoutil.NewImport(typed.GoGoProtoImport) + if err = protoutil.AddImports(protoFile, true, gogoImport, opts.ProtoTypeImport()); err != nil { + return errors.Errorf("failed while adding imports in %s: %w", path, err) + } + // Get next available sequence number from GenesisState. 
+ genesisState, err := protoutil.GetMessageByName(protoFile, typed.ProtoGenesisStateMessage) + if err != nil { + return errors.Errorf("failed while looking up message '%s' in %s: %w", typed.ProtoGenesisStateMessage, path, err) + } + seqNumber := protoutil.NextUniqueID(genesisState) + + // Create new option and append to GenesisState message. + typenameSnake, typenamePascal := opts.TypeName.Snake, opts.TypeName.PascalCase + gogoOption := protoutil.NewOption("gogoproto.nullable", "false", protoutil.Custom()) + typeListField := protoutil.NewField( + typenameSnake+"_map", typenamePascal, seqNumber, protoutil.Repeated(), protoutil.WithFieldOptions(gogoOption), + ) + protoutil.Append(genesisState, typeListField) + + newFile := genny.NewFileS(path, protoutil.Print(protoFile)) + return r.File(newFile) + } +} + +func genesisTypesModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "types/genesis.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + content, err := xast.AppendImports(f.String(), xast.WithImport("fmt")) + if err != nil { + return err + } + + content, err = xast.ModifyFunction(content, "DefaultGenesis", xast.AppendFuncStruct( + "GenesisState", + fmt.Sprintf("%[1]vMap", opts.TypeName.UpperCamel), + fmt.Sprintf("[]%[1]v{}", opts.TypeName.PascalCase), + )) + if err != nil { + return err + } + + // lines of code to call the key function with the indexes of the element + keyCall := fmt.Sprintf(`fmt.Sprint(elem.%s)`, opts.Index.Name.UpperCamel) + templateTypesValidate := `// Check for duplicated index in %[1]v +%[1]vIndexMap := make(map[string]struct{}) + +for _, elem := range gs.%[2]vMap { + index := %[3]v + if _, ok := %[1]vIndexMap[index]; ok { + return fmt.Errorf("duplicated index for %[1]v") + } + %[1]vIndexMap[index] = struct{}{} +}` + replacementTypesValidate := fmt.Sprintf( + templateTypesValidate, + opts.TypeName.LowerCamel, + opts.TypeName.UpperCamel, + keyCall, + 
) + content, err = xast.ModifyFunction( + content, + "Validate", + xast.AppendFuncCode(replacementTypesValidate), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func genesisModuleModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "keeper/genesis.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + templateModuleInit := `// Set all the %[1]v +for _, elem := range genState.%[2]vMap { + if err := k.%[2]v.Set(ctx, elem.%[3]v, elem); err != nil { + return err + } +}` + replacementModuleInit := fmt.Sprintf( + templateModuleInit, + opts.TypeName.LowerCamel, + opts.TypeName.UpperCamel, + opts.Index.Name.UpperCamel, + ) + content, err := xast.ModifyFunction( + f.String(), + "InitGenesis", + xast.AppendFuncCode(replacementModuleInit), + ) + if err != nil { + return err + } + + templateModuleExport := `if err := k.%[1]v.Walk(ctx, nil, func(_ %[2]v, val types.%[3]v) (stop bool, err error) { + genesis.%[1]vMap = append(genesis.%[1]vMap, val) + return false, nil + }); err != nil { + return nil, err + }` + replacementModuleExport := fmt.Sprintf( + templateModuleExport, + opts.TypeName.UpperCamel, + opts.Index.DataType(), + opts.TypeName.PascalCase, + ) + content, err = xast.ModifyFunction( + content, + "ExportGenesis", + xast.AppendFuncCode(replacementModuleExport), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func genesisTestsModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "keeper/genesis_test.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + // Create a list of two different indexes to use as sample + sampleIndexes := make([]string, 2) + for i := 0; i < 2; i++ { + sampleIndexes[i] = opts.Index.GenesisArgs(i) + } + + // add parameter to 
the struct into the new method. + content, err := xast.ModifyFunction( + f.String(), + "TestGenesis", + xast.AppendFuncStruct( + "GenesisState", + fmt.Sprintf("%[1]vMap", opts.TypeName.UpperCamel), + fmt.Sprintf( + "[]types.%[1]v{{ %[2]v }, { %[3]v }}", + opts.TypeName.PascalCase, + sampleIndexes[0], + sampleIndexes[1], + ), + ), + xast.AppendFuncCode(fmt.Sprintf("require.EqualExportedValues(t, genesisState.%[1]vMap, got.%[1]vMap)", opts.TypeName.UpperCamel)), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func genesisTypesTestsModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "types/genesis_test.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + // Create a list of two different indexes to use as sample + sampleIndexes := make([]string, 2) + for i := 0; i < 2; i++ { + sampleIndexes[i] = opts.Index.GenesisArgs(i) + } + + templateDuplicated := `{ + desc: "duplicated %[1]v", + genState: &types.GenesisState{ + %[2]vMap: []types.%[3]v{ + { + %[4]v}, + { + %[4]v}, + }, + }, + valid: false, +}` + replacementDuplicated := fmt.Sprintf( + templateDuplicated, + opts.TypeName.LowerCamel, + opts.TypeName.UpperCamel, + opts.TypeName.PascalCase, + sampleIndexes[0], + ) + + // add parameter to the struct into the new method. + content, err := xast.ModifyFunction( + f.String(), + "TestGenesisState_Validate", + xast.AppendFuncStruct( + "GenesisState", + fmt.Sprintf("%[1]vMap", opts.TypeName.UpperCamel), + fmt.Sprintf( + "[]types.%[1]v{{ %[2]v }, { %[3]v }}", + opts.TypeName.PascalCase, + sampleIndexes[0], + sampleIndexes[1], + ), + ), + xast.AppendFuncTestCase(replacementDuplicated), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +// protoTxModify modifies the tx.proto file to add the required RPCs and messages. 
+// +// What it expects: +// - A service named "Msg" to exist in the proto file, it appends the RPCs inside it. +func protoTxModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := opts.ProtoFile("tx.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + + // RPC service + serviceMsg, err := protoutil.GetServiceByName(protoFile, "Msg") + if err != nil { + return errors.Errorf("failed while looking up service 'Msg' in %s: %w", path, err) + } + // better to append them altogether, single traversal. + typenamePascal := opts.TypeName.PascalCase + protoutil.Append(serviceMsg, + protoutil.NewRPC( + fmt.Sprintf("Create%s", typenamePascal), + fmt.Sprintf("MsgCreate%s", typenamePascal), + fmt.Sprintf("MsgCreate%sResponse", typenamePascal), + ), + protoutil.NewRPC( + fmt.Sprintf("Update%s", typenamePascal), + fmt.Sprintf("MsgUpdate%s", typenamePascal), + fmt.Sprintf("MsgUpdate%sResponse", typenamePascal), + ), + protoutil.NewRPC( + fmt.Sprintf("Delete%s", typenamePascal), + fmt.Sprintf("MsgDelete%s", typenamePascal), + fmt.Sprintf("MsgDelete%sResponse", typenamePascal), + ), + ) + + // Messages + index := opts.Index.ToProtoField(2) + var fields []*proto.NormalField + for i, f := range opts.Fields { + fields = append(fields, f.ToProtoField(i+3)) // +3 because of the index + } + + // Ensure custom types are imported + var protoImports []*proto.Import + for _, imp := range append(opts.Fields.ProtoImports(), opts.Index.ProtoImports()...) { + protoImports = append(protoImports, protoutil.NewImport(imp)) + } + for _, f := range opts.Fields.Custom() { + protoPath := fmt.Sprintf("%[1]v/%[2]v/%[3]v/%[4]v.proto", opts.AppName, opts.ModuleName, opts.ProtoVer, f) + protoImports = append(protoImports, protoutil.NewImport(protoPath)) + } + // we already know an import exists, pass false for fallback. 
+ if err = protoutil.AddImports(protoFile, false, protoImports...); err != nil { + return errors.Errorf("failed while adding imports in %s: %w", path, err) + } + + creator := protoutil.NewField(opts.MsgSigner.Snake, "string", 1) + creator.Options = append(creator.Options, protoutil.NewOption("cosmos_proto.scalar", "cosmos.AddressString", protoutil.Custom())) // set the scalar annotation + creatorOpt := protoutil.NewOption(typed.MsgSignerOption, opts.MsgSigner.Snake) + commonFields := []*proto.NormalField{creator} + commonFields = append(commonFields, index) + + msgCreate := protoutil.NewMessage( + "MsgCreate"+typenamePascal, + protoutil.WithFields(append(commonFields, fields...)...), + protoutil.WithMessageOptions(creatorOpt), + ) + msgCreateResponse := protoutil.NewMessage(fmt.Sprintf("MsgCreate%sResponse", typenamePascal)) + + msgUpdate := protoutil.NewMessage( + "MsgUpdate"+typenamePascal, + protoutil.WithFields(append(commonFields, fields...)...), + protoutil.WithMessageOptions(creatorOpt), + ) + msgUpdateResponse := protoutil.NewMessage(fmt.Sprintf("MsgUpdate%sResponse", typenamePascal)) + + msgDelete := protoutil.NewMessage( + "MsgDelete"+typenamePascal, + protoutil.WithFields(commonFields...), + protoutil.WithMessageOptions(creatorOpt), + ) + msgDeleteResponse := protoutil.NewMessage(fmt.Sprintf("MsgDelete%sResponse", typenamePascal)) + protoutil.Append(protoFile, + msgCreate, msgCreateResponse, msgUpdate, msgUpdateResponse, msgDelete, msgDeleteResponse, + ) + + newFile := genny.NewFileS(path, protoutil.Print(protoFile)) + return r.File(newFile) + } +} + +func clientCliTxModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "module/autocli.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + index := fmt.Sprintf(`{ProtoField: "%s"}, `, opts.Index.ProtoFieldName()) + indexStr := fmt.Sprintf("[%s] ", opts.Index.ProtoFieldName()) + positionalArgs := index + 
opts.Fields.ProtoFieldNameAutoCLI() + positionalArgsStr := indexStr + opts.Fields.CLIUsage() + + createOption := fmt.Sprintf( + `{ + RpcMethod: "Create%[1]v", + Use: "create-%[2]v %[5]s", + Short: "Create a new %[3]v", + PositionalArgs: []*autocliv1.PositionalArgDescriptor{%[4]s}, + }`, + opts.TypeName.PascalCase, + opts.TypeName.Kebab, + opts.TypeName.Original, + strings.TrimSpace(positionalArgs), + strings.TrimSpace(positionalArgsStr), + ) + updateOption := fmt.Sprintf( + `{ + RpcMethod: "Update%[1]v", + Use: "update-%[2]v %[5]s", + Short: "Update %[3]v", + PositionalArgs: []*autocliv1.PositionalArgDescriptor{%[4]s}, + }`, + opts.TypeName.PascalCase, + opts.TypeName.Kebab, + opts.TypeName.Original, + strings.TrimSpace(positionalArgs), + strings.TrimSpace(positionalArgsStr), + ) + deleteOption := fmt.Sprintf( + `{ + RpcMethod: "Delete%[1]v", + Use: "delete-%[2]v %[5]s", + Short: "Delete %[3]v", + PositionalArgs: []*autocliv1.PositionalArgDescriptor{%[4]s}, + }`, + opts.TypeName.PascalCase, + opts.TypeName.Kebab, + opts.TypeName.Original, + strings.TrimSpace(index), + strings.TrimSpace(indexStr), + ) + content, err := typed.AppendAutoCLITxOptions(f.String(), createOption, updateOption, deleteOption) + if err != nil { + return err + } + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func typesCodecModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "types/codec.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + // Import + content, err := xast.AppendImports(f.String(), xast.WithNamedImport("sdk", "github.com/cosmos/cosmos-sdk/types")) + if err != nil { + return err + } + + // Interface + templateInterface := `registrar.RegisterImplementations((*sdk.Msg)(nil), + &MsgCreate%[1]v{}, + &MsgUpdate%[1]v{}, + &MsgDelete%[1]v{}, +)` + replacementInterface := fmt.Sprintf( + templateInterface, + opts.TypeName.PascalCase, + ) + content, err = 
xast.ModifyFunction( + content, + "RegisterInterfaces", + xast.AppendFuncAtLine(replacementInterface, 0), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} diff --git a/ignite/templates/typed/map/simulation.go b/ignite/templates/typed/map/simulation.go new file mode 100644 index 0000000..04fa348 --- /dev/null +++ b/ignite/templates/typed/map/simulation.go @@ -0,0 +1,62 @@ +package maptype + +import ( + "fmt" + "path/filepath" + + "github.com/gobuffalo/genny/v2" + + "github.com/ignite/cli/v29/ignite/pkg/xast" + "github.com/ignite/cli/v29/ignite/templates/typed" +) + +func moduleSimulationModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "module/simulation.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + // Create a list of two different index/fields to use as sample + sampleIndexes := make([]string, 2) + for i := 0; i < 2; i++ { + sampleIndexes[i] = fmt.Sprintf("%s: sample.AccAddress(),\n", opts.MsgSigner.UpperCamel) + sampleIndexes[i] += opts.Index.GenesisArgs(i) + } + + // simulation genesis state + content, err := xast.ModifyFunction( + f.String(), + "GenerateGenesisState", + xast.AppendFuncStruct( + "GenesisState", + fmt.Sprintf("%[1]vMap", opts.TypeName.UpperCamel), + fmt.Sprintf( + "[]types.%[1]v{{ %[2]v }, { %[3]v }}", + opts.TypeName.PascalCase, + sampleIndexes[0], + sampleIndexes[1], + ), + ), + ) + if err != nil { + return err + } + + content, err = typed.ModuleSimulationMsgModify( + content, + opts.ModulePath, + opts.ModuleName, + opts.TypeName, + opts.MsgSigner, + "Create", "Update", "Delete", + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} diff --git a/ignite/templates/typed/options.go b/ignite/templates/typed/options.go new file mode 100644 index 0000000..0b7cad8 --- /dev/null +++ b/ignite/templates/typed/options.go @@ 
-0,0 +1,38 @@ +package typed + +import ( + "fmt" + "path/filepath" + + "github.com/emicklei/proto" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" + "github.com/ignite/cli/v29/ignite/templates/field" +) + +// Options ... +type Options struct { + AppName string + ProtoDir string + ProtoVer string + ModuleName string + ModulePath string + TypeName multiformatname.Name + MsgSigner multiformatname.Name + Fields field.Fields + Index field.Field + NoMessage bool + NoSimulation bool + IsIBC bool +} + +// ProtoFile returns the path to the proto folder within the generated app. +func (opts *Options) ProtoFile(fname string) string { + return filepath.Join(opts.ProtoDir, opts.AppName, opts.ModuleName, opts.ProtoVer, fname) +} + +// ProtoTypeImport Return the protobuf import statement for this type. +func (opts *Options) ProtoTypeImport() *proto.Import { + return protoutil.NewImport(fmt.Sprintf("%s/%s/%s/%s.proto", opts.AppName, opts.ModuleName, opts.ProtoVer, opts.TypeName.Snake)) +} diff --git a/ignite/templates/typed/proto.go b/ignite/templates/typed/proto.go new file mode 100644 index 0000000..b373f10 --- /dev/null +++ b/ignite/templates/typed/proto.go @@ -0,0 +1,8 @@ +package typed + +const ( + // GoGoProtoImport is the import path for the gogoproto package. + GoGoProtoImport = "gogoproto/gogo.proto" + // MsgSignerOption correspond to the proto annotation for defining a message signer. 
+ MsgSignerOption = "(cosmos.msg.v1.signer)" +) diff --git a/ignite/templates/typed/simulation.go b/ignite/templates/typed/simulation.go new file mode 100644 index 0000000..08ac57a --- /dev/null +++ b/ignite/templates/typed/simulation.go @@ -0,0 +1,79 @@ +package typed + +import ( + "fmt" + "strings" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/pkg/xast" +) + +func ModuleSimulationMsgModify( + content, + modulePath, + moduleName string, + typeName, msgSigner multiformatname.Name, + msgs ...string, +) (string, error) { + if len(msgs) == 0 { + msgs = append(msgs, "") + } + + // Import + content, err := xast.AppendImports( + content, + xast.WithNamedImport( + fmt.Sprintf("%[1]vsimulation", moduleName), + fmt.Sprintf("%[1]v/x/%[2]v/simulation", modulePath, moduleName), + ), + xast.WithImport("math/rand"), + ) + if err != nil { + return "", err + } + + for _, msg := range msgs { + // simulation operations + replacementOp := fmt.Sprintf(` + const ( + opWeightMsg%[1]v%[2]v = "op_weight_msg_%[3]v" + defaultWeightMsg%[1]v%[2]v int = 100 // TODO: Determine the simulation weight value for your use case + ) + + var weightMsg%[1]v%[2]v int + simState.AppParams.GetOrGenerate(opWeightMsg%[1]v%[2]v, &weightMsg%[1]v%[2]v, nil, + func(_ *rand.Rand) { + weightMsg%[1]v%[2]v = defaultWeightMsg%[1]v%[2]v + }, + ) + operations = append(operations, simulation.NewWeightedOperation( + weightMsg%[1]v%[2]v, + %[3]vsimulation.SimulateMsg%[1]v%[2]v(am.authKeeper, am.bankKeeper, am.keeper, simState.TxConfig), + )) + +`, msg, typeName.PascalCase, moduleName) + + content, err = xast.ModifyFunction(content, "WeightedOperations", xast.AppendFuncCode(replacementOp)) + if err != nil { + return "", err + } + + // add proposal simulation operations for msgs having an authority as signer. 
+ if strings.Contains(content, "ProposalMsgs") && strings.EqualFold(msgSigner.Original, "authority") { + replacementOpMsg := fmt.Sprintf(`simulation.NewWeightedProposalMsg( + opWeightMsg%[1]v%[2]v, + defaultWeightMsg%[1]v%[2]v, + func(r *rand.Rand, ctx sdk.Context, accs []simtypes.Account) sdk.Msg { + %[3]vsimulation.SimulateMsg%[1]v%[2]v(am.authKeeper, am.bankKeeper, am.keeper) + return nil + }, +),`, msg, typeName.PascalCase, moduleName) + content, err = xast.ModifyFunction(content, "ProposalMsgs", xast.AppendFuncCode(replacementOpMsg)) + if err != nil { + return "", err + } + } + } + + return content, nil +} diff --git a/ignite/templates/typed/singleton/files/component/x/{{moduleName}}/keeper/query_{{typeName}}.go.plush b/ignite/templates/typed/singleton/files/component/x/{{moduleName}}/keeper/query_{{typeName}}.go.plush new file mode 100644 index 0000000..6711d28 --- /dev/null +++ b/ignite/templates/typed/singleton/files/component/x/{{moduleName}}/keeper/query_{{typeName}}.go.plush @@ -0,0 +1,29 @@ +package keeper + +import ( + "context" + "errors" + + "cosmossdk.io/collections" + + "<%= ModulePath %>/x/<%= ModuleName %>/types" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func (q queryServer) Get<%= TypeName.PascalCase %>(ctx context.Context, req *types.QueryGet<%= TypeName.PascalCase %>Request) (*types.QueryGet<%= TypeName.PascalCase %>Response, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + val, err := q.k.<%= TypeName.UpperCamel %>.Get(ctx) + if err != nil { + if errors.Is(err, collections.ErrNotFound) { + return nil, status.Error(codes.NotFound, "not found") + } + + return nil, status.Error(codes.Internal, "internal error") + } + + return &types.QueryGet<%= TypeName.PascalCase %>Response{<%= TypeName.UpperCamel %>: val}, nil +} \ No newline at end of file diff --git a/ignite/templates/typed/singleton/files/component/x/{{moduleName}}/keeper/query_{{typeName}}_test.go.plush 
b/ignite/templates/typed/singleton/files/component/x/{{moduleName}}/keeper/query_{{typeName}}_test.go.plush new file mode 100644 index 0000000..1a0a375 --- /dev/null +++ b/ignite/templates/typed/singleton/files/component/x/{{moduleName}}/keeper/query_{{typeName}}_test.go.plush @@ -0,0 +1,49 @@ +package keeper_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "<%= ModulePath %>/x/<%= ModuleName %>/keeper" + "<%= ModulePath %>/x/<%= ModuleName %>/types" +) + +func Test<%= TypeName.UpperCamel %>Query(t *testing.T) { + f := initFixture(t) + qs := keeper.NewQueryServerImpl(f.keeper) + item := types.<%= TypeName.PascalCase %>{} + err := f.keeper.<%= TypeName.UpperCamel %>.Set(f.ctx, item) + require.NoError(t, err) + + tests := []struct { + desc string + request *types.QueryGet<%= TypeName.PascalCase %>Request + response *types.QueryGet<%= TypeName.PascalCase %>Response + err error + }{ + { + desc: "First", + request: &types.QueryGet<%= TypeName.PascalCase %>Request{}, + response: &types.QueryGet<%= TypeName.PascalCase %>Response{<%= TypeName.UpperCamel %>: item}, + }, + { + desc: "InvalidRequest", + err: status.Error(codes.InvalidArgument, "invalid request"), + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + response, err := qs.Get<%= TypeName.PascalCase %>(f.ctx, tc.request) + if tc.err != nil { + require.ErrorIs(t, err, tc.err) + } else { + require.NoError(t, err) + require.EqualExportedValues(t, tc.response, response) + } + }) + } +} + diff --git a/ignite/templates/typed/singleton/files/component/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/{{typeName}}.proto.plush b/ignite/templates/typed/singleton/files/component/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/{{typeName}}.proto.plush new file mode 100644 index 0000000..50e9aa2 --- /dev/null +++ 
b/ignite/templates/typed/singleton/files/component/{{protoDir}}/{{appName}}/{{moduleName}}/{{protoVer}}/{{typeName}}.proto.plush @@ -0,0 +1,12 @@ +syntax = "proto3"; +package <%= protoPkgName %>; + +option go_package = "<%= ModulePath %>/x/<%= ModuleName %>/types";<%= for (importName) in mergeCustomImports(Fields) { %> +import "<%= AppName %>/<%= ModuleName %>/<%= ProtoVer %>/<%= importName %>.proto"; <% } %><%= for (importName) in mergeProtoImports(Fields) { %> +import "<%= importName %>"; <% } %> + +// <%= TypeName.PascalCase %> defines the <%= TypeName.PascalCase %> message. +message <%= TypeName.PascalCase %> {<%= for (i, field) in Fields { %> + <%= field.ProtoType(i+1) %>; <% } %> + <%= if (!NoMessage) { %>string <%= MsgSigner.Snake %> = <%= len(Fields)+1 %>;<% } %> +} diff --git a/ignite/templates/typed/singleton/files/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}.go.plush b/ignite/templates/typed/singleton/files/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}.go.plush new file mode 100644 index 0000000..975b6da --- /dev/null +++ b/ignite/templates/typed/singleton/files/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}.go.plush @@ -0,0 +1,91 @@ +package keeper + +import ( + "fmt" + "context" + + "<%= ModulePath %>/x/<%= ModuleName %>/types" + errorsmod "cosmossdk.io/errors" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + + +func (k msgServer) Create<%= TypeName.PascalCase %>(ctx context.Context, msg *types.MsgCreate<%= TypeName.PascalCase %>) (*types.MsgCreate<%= TypeName.PascalCase %>Response, error) { + if _, err := k.addressCodec.StringToBytes(msg.<%= MsgSigner.UpperCamel %>); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidAddress, fmt.Sprintf("invalid address: %s", err)) + } + + // Check if the value already exists + found, err := k.<%= TypeName.UpperCamel %>.Has(ctx) + if err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrLogic, err.Error()) + } + if found { + return nil, 
errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "already set") + } + + var <%= TypeName.LowerCamel %> = types.<%= TypeName.PascalCase %>{ + <%= MsgSigner.UpperCamel %>: msg.<%= MsgSigner.UpperCamel %>,<%= for (field) in Fields { %> + <%= field.Name.UpperCamel %>: msg.<%= field.Name.UpperCamel %>,<% } %> + } + + if err := k.<%= TypeName.UpperCamel %>.Set( + ctx, + <%= TypeName.LowerCamel %>, + ); err != nil { + return nil, err + } + + return &types.MsgCreate<%= TypeName.PascalCase %>Response{}, nil +} + +func (k msgServer) Update<%= TypeName.PascalCase %>(ctx context.Context, msg *types.MsgUpdate<%= TypeName.PascalCase %>) (*types.MsgUpdate<%= TypeName.PascalCase %>Response, error) { + if _, err := k.addressCodec.StringToBytes(msg.<%= MsgSigner.PascalCase %>); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidAddress, fmt.Sprintf("invalid address: %s", err)) + } + + // Check if the value exists + valFound, err := k.<%= TypeName.UpperCamel %>.Get(ctx) + if err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrKeyNotFound, "not set") + } + + // Checks if the msg <%= MsgSigner.LowerCamel %> is the same as the current owner + if msg.<%= MsgSigner.UpperCamel %> != valFound.<%= MsgSigner.UpperCamel %> { + return nil, errorsmod.Wrap(sdkerrors.ErrUnauthorized, "incorrect owner") + } + + var <%= TypeName.LowerCamel %> = types.<%= TypeName.PascalCase %>{ + <%= MsgSigner.UpperCamel %>: msg.<%= MsgSigner.UpperCamel %>,<%= for (field) in Fields { %> + <%= field.Name.UpperCamel %>: msg.<%= field.Name.UpperCamel %>,<% } %> + } + + if err := k.<%= TypeName.UpperCamel %>.Set(ctx, <%= TypeName.LowerCamel %>); err != nil { + return nil, err + } + + return &types.MsgUpdate<%= TypeName.PascalCase %>Response{}, nil +} + +func (k msgServer) Delete<%= TypeName.PascalCase %>(ctx context.Context, msg *types.MsgDelete<%= TypeName.PascalCase %>) (*types.MsgDelete<%= TypeName.PascalCase %>Response, error) { + if _, err := k.addressCodec.StringToBytes(msg.<%= MsgSigner.UpperCamel %>); 
err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrInvalidAddress, fmt.Sprintf("invalid address: %s", err)) + } + + // Check if the value exists + val, err := k.<%= TypeName.UpperCamel %>.Get(ctx) + if err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrKeyNotFound, "not set") + } + + // Checks if the msg <%= MsgSigner.LowerCamel %> is the same as the current owner + if msg.<%= MsgSigner.UpperCamel %> != val.<%= MsgSigner.UpperCamel %> { + return nil, errorsmod.Wrap(sdkerrors.ErrUnauthorized, "incorrect owner") + } + + if err := k.<%= TypeName.UpperCamel %>.Remove(ctx); err != nil { + return nil, errorsmod.Wrap(sdkerrors.ErrLogic, err.Error()) + } + + return &types.MsgDelete<%= TypeName.PascalCase %>Response{}, nil +} diff --git a/ignite/templates/typed/singleton/files/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}_test.go.plush b/ignite/templates/typed/singleton/files/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}_test.go.plush new file mode 100644 index 0000000..3de981f --- /dev/null +++ b/ignite/templates/typed/singleton/files/messages/x/{{moduleName}}/keeper/msg_server_{{typeName}}_test.go.plush @@ -0,0 +1,122 @@ +package keeper_test + +import ( + "testing" + + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/stretchr/testify/require" + + "<%= ModulePath %>/x/<%= ModuleName %>/keeper" + "<%= ModulePath %>/x/<%= ModuleName %>/types" +) + +func Test<%= TypeName.PascalCase %>MsgServerCreate(t *testing.T) { + f := initFixture(t) + srv := keeper.NewMsgServerImpl(f.keeper) + + <%= MsgSigner.LowerCamel %>, err := f.addressCodec.BytesToString([]byte("signerAddr__________________")) + require.NoError(t, err) + expected := &types.MsgCreate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>} + _, err = srv.Create<%= TypeName.PascalCase %>(f.ctx, expected) + require.NoError(t, err) + rst, err := f.keeper.<%= TypeName.UpperCamel %>.Get(f.ctx) + require.Nil(t, err) + require.Equal(t, expected.<%= 
MsgSigner.UpperCamel %>, rst.<%= MsgSigner.UpperCamel %>) +} + +func Test<%= TypeName.PascalCase %>MsgServerUpdate(t *testing.T) { + f := initFixture(t) + srv := keeper.NewMsgServerImpl(f.keeper) + + <%= MsgSigner.LowerCamel %>, err := f.addressCodec.BytesToString([]byte("signerAddr__________________")) + require.NoError(t, err) + + unauthorizedAddr, err := f.addressCodec.BytesToString([]byte("unauthorizedAddr___________")) + require.NoError(t, err) + + expected := &types.MsgCreate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>} + _, err = srv.Create<%= TypeName.PascalCase %>(f.ctx, expected) + require.NoError(t, err) + + tests := []struct { + desc string + request *types.MsgUpdate<%= TypeName.PascalCase %> + err error + }{ + { + desc: "invalid address", + request: &types.MsgUpdate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: "invalid"}, + err: sdkerrors.ErrInvalidAddress, + }, + { + desc: "unauthorized", + request: &types.MsgUpdate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: unauthorizedAddr}, + err: sdkerrors.ErrUnauthorized, + }, + { + desc: "completed", + request: &types.MsgUpdate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>}, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + _, err = srv.Update<%= TypeName.PascalCase %>(f.ctx, tc.request) + if tc.err != nil { + require.ErrorIs(t, err, tc.err) + } else { + require.NoError(t, err) + rst, err := f.keeper.<%= TypeName.UpperCamel %>.Get(f.ctx) + require.Nil(t, err) + require.Equal(t, expected.<%= MsgSigner.UpperCamel %>, rst.<%= MsgSigner.UpperCamel %>) + } + }) + } +} + +func Test<%= TypeName.PascalCase %>MsgServerDelete(t *testing.T) { + f := initFixture(t) + srv := keeper.NewMsgServerImpl(f.keeper) + + <%= MsgSigner.LowerCamel %>, err := f.addressCodec.BytesToString([]byte("signerAddr__________________")) + require.NoError(t, err) + + unauthorizedAddr, err := 
f.addressCodec.BytesToString([]byte("unauthorizedAddr___________")) + require.NoError(t, err) + + _, err = srv.Create<%= TypeName.PascalCase %>(f.ctx, &types.MsgCreate<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>}) + require.NoError(t, err) + + tests := []struct { + desc string + request *types.MsgDelete<%= TypeName.PascalCase %> + err error + }{ + { + desc: "invalid address", + request: &types.MsgDelete<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: "invalid"}, + err: sdkerrors.ErrInvalidAddress, + }, + { + desc: "unauthorized", + request: &types.MsgDelete<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: unauthorizedAddr}, + err: sdkerrors.ErrUnauthorized, + }, + { + desc: "completed", + request: &types.MsgDelete<%= TypeName.PascalCase %>{<%= MsgSigner.UpperCamel %>: <%= MsgSigner.LowerCamel %>}, + }, + } + for _, tc := range tests { + t.Run(tc.desc, func(t *testing.T) { + _, err = srv.Delete<%= TypeName.PascalCase %>(f.ctx, tc.request) + if tc.err != nil { + require.ErrorIs(t, err, tc.err) + } else { + require.NoError(t, err) + found, err := f.keeper.<%= TypeName.UpperCamel %>.Has(f.ctx) + require.NoError(t, err) + require.False(t, found) + } + }) + } +} diff --git a/ignite/templates/typed/singleton/files/simapp/x/{{moduleName}}/simulation/{{typeName}}.go.plush b/ignite/templates/typed/singleton/files/simapp/x/{{moduleName}}/simulation/{{typeName}}.go.plush new file mode 100644 index 0000000..8d42f2e --- /dev/null +++ b/ignite/templates/typed/singleton/files/simapp/x/{{moduleName}}/simulation/{{typeName}}.go.plush @@ -0,0 +1,145 @@ +package simulation + +import ( + "math/rand" + + "cosmossdk.io/collections" + + "github.com/cosmos/cosmos-sdk/baseapp" + "<%= ModulePath %>/x/<%= ModuleName %>/keeper" + "<%= ModulePath %>/x/<%= ModuleName %>/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/client" + simtypes 
"github.com/cosmos/cosmos-sdk/types/simulation" + "github.com/cosmos/cosmos-sdk/x/simulation" +) + +func SimulateMsgCreate<%= TypeName.PascalCase %>( + ak types.AuthKeeper, + bk types.BankKeeper, + k keeper.Keeper, + txGen client.TxConfig, +) simtypes.Operation { + return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, + ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { + simAccount, _ := simtypes.RandomAcc(r, accs) + + msg := &types.MsgCreate<%= TypeName.PascalCase %>{ + <%= MsgSigner.UpperCamel %>: simAccount.Address.String(), + } + + found, err := k.<%= TypeName.UpperCamel %>.Has(ctx) + if err == nil && found { + return simtypes.NoOpMsg(types.ModuleName, sdk.MsgTypeURL(msg), "<%= TypeName.PascalCase %> already exist"), nil, nil + } + + txCtx := simulation.OperationInput{ + R: r, + App: app, + TxGen: txGen, + Cdc: nil, + Msg: msg, + Context: ctx, + SimAccount: simAccount, + ModuleName: types.ModuleName, + CoinsSpentInMsg: sdk.NewCoins(), + AccountKeeper: ak, + Bankkeeper: bk, + } + return simulation.GenAndDeliverTxWithRandFees(txCtx) + } +} + +func SimulateMsgUpdate<%= TypeName.PascalCase %>( + ak types.AuthKeeper, + bk types.BankKeeper, + k keeper.Keeper, + txGen client.TxConfig, +) simtypes.Operation { + return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, + ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { + var ( + simAccount = simtypes.Account{} + msg = &types.MsgUpdate<%= TypeName.PascalCase %>{} + ) + + <%= TypeName.LowerCamel %>, err := k.<%= TypeName.UpperCamel %>.Get(ctx) + if err != nil { + return simtypes.NoOpMsg(types.ModuleName, sdk.MsgTypeURL(msg), "<%= TypeName.LowerCamel %> store is empty"), nil, nil + } + + acc, err := ak.AddressCodec().StringToBytes(<%= TypeName.LowerCamel %>.<%= MsgSigner.UpperCamel %>) + if err != nil { + return simtypes.OperationMsg{}, nil, err + } + + var found bool + simAccount, found = 
simtypes.FindAccount(accs, sdk.AccAddress(acc)) + if !found { + return simtypes.NoOpMsg(types.ModuleName, sdk.MsgTypeURL(msg), "<%= TypeName.LowerCamel %> <%= MsgSigner.LowerCamel %> not found"), nil, nil + } + msg.<%= MsgSigner.UpperCamel %> = simAccount.Address.String() + + txCtx := simulation.OperationInput{ + R: r, + App: app, + TxGen: txGen, + Cdc: nil, + Msg: msg, + Context: ctx, + SimAccount: simAccount, + ModuleName: types.ModuleName, + CoinsSpentInMsg: sdk.NewCoins(), + AccountKeeper: ak, + Bankkeeper: bk, + } + return simulation.GenAndDeliverTxWithRandFees(txCtx) + } +} + +func SimulateMsgDelete<%= TypeName.PascalCase %>( + ak types.AuthKeeper, + bk types.BankKeeper, + k keeper.Keeper, + txGen client.TxConfig, +) simtypes.Operation { + return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, + ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { + var ( + simAccount = simtypes.Account{} + msg = &types.MsgDelete<%= TypeName.PascalCase %>{} + ) + + <%= TypeName.LowerCamel %>, err := k.<%= TypeName.UpperCamel %>.Get(ctx) + if err != nil { + return simtypes.NoOpMsg(types.ModuleName, sdk.MsgTypeURL(msg), "<%= TypeName.LowerCamel %> store is empty"), nil, nil + } + + acc, err := ak.AddressCodec().StringToBytes(<%= TypeName.LowerCamel %>.<%= MsgSigner.UpperCamel %>) + if err != nil { + return simtypes.OperationMsg{}, nil, err + } + + var found bool + simAccount, found = simtypes.FindAccount(accs, sdk.AccAddress(acc)) + if !found { + return simtypes.NoOpMsg(types.ModuleName, sdk.MsgTypeURL(msg), "<%= TypeName.LowerCamel %> <%= MsgSigner.LowerCamel %> not found"), nil, nil + } + msg.<%= MsgSigner.UpperCamel %> = simAccount.Address.String() + + txCtx := simulation.OperationInput{ + R: r, + App: app, + TxGen: txGen, + Cdc: nil, + Msg: msg, + Context: ctx, + SimAccount: simAccount, + ModuleName: types.ModuleName, + CoinsSpentInMsg: sdk.NewCoins(), + AccountKeeper: ak, + Bankkeeper: bk, + } + return 
simulation.GenAndDeliverTxWithRandFees(txCtx) + } +} diff --git a/ignite/templates/typed/singleton/simulation.go b/ignite/templates/typed/singleton/simulation.go new file mode 100644 index 0000000..b7a6eac --- /dev/null +++ b/ignite/templates/typed/singleton/simulation.go @@ -0,0 +1,34 @@ +package singleton + +import ( + "path/filepath" + + "github.com/gobuffalo/genny/v2" + + "github.com/ignite/cli/v29/ignite/templates/typed" +) + +func moduleSimulationModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "module/simulation.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + content, err := typed.ModuleSimulationMsgModify( + f.String(), + opts.ModulePath, + opts.ModuleName, + opts.TypeName, + opts.MsgSigner, + "Create", "Update", "Delete", + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} diff --git a/ignite/templates/typed/singleton/singleton.go b/ignite/templates/typed/singleton/singleton.go new file mode 100644 index 0000000..6b83c30 --- /dev/null +++ b/ignite/templates/typed/singleton/singleton.go @@ -0,0 +1,602 @@ +package singleton + +import ( + "crypto/rand" + "embed" + "fmt" + "io/fs" + "math/big" + "path/filepath" + + "github.com/emicklei/proto" + "github.com/gobuffalo/genny/v2" + + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gomodulepath" + "github.com/ignite/cli/v29/ignite/pkg/protoanalysis/protoutil" + "github.com/ignite/cli/v29/ignite/pkg/xast" + "github.com/ignite/cli/v29/ignite/templates/typed" +) + +var ( + //go:embed files/messages/* files/messages/**/* + fsMessages embed.FS + + //go:embed files/component/* files/component/**/* + fsComponent embed.FS + + //go:embed files/simapp/* files/simapp/**/* + fsSimapp embed.FS +) + +// NewGenerator returns the generator to scaffold a new singleton type in a module. 
+func NewGenerator(opts *typed.Options) (*genny.Generator, error) { + subMessages, err := fs.Sub(fsMessages, "files/messages") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + + subComponent, err := fs.Sub(fsComponent, "files/component") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + + subSimapp, err := fs.Sub(fsSimapp, "files/simapp") + if err != nil { + return nil, errors.Errorf("fail to generate sub: %w", err) + } + + g := genny.New() + g.RunFn(protoRPCModify(opts)) + g.RunFn(typesKeyModify(opts)) + g.RunFn(keeperModify(opts)) + g.RunFn(clientCliQueryModify(opts)) + g.RunFn(genesisProtoModify(opts)) + g.RunFn(genesisTypesModify(opts)) + g.RunFn(genesisModuleModify(opts)) + g.RunFn(genesisTestsModify(opts)) + g.RunFn(genesisTypesTestsModify(opts)) + + // Modifications for new messages + if !opts.NoMessage { + g.RunFn(protoTxModify(opts)) + g.RunFn(clientCliTxModify(opts)) + g.RunFn(typesCodecModify(opts)) + + if !opts.NoSimulation { + g.RunFn(moduleSimulationModify(opts)) + if err := typed.Box(subSimapp, opts, g); err != nil { + return nil, err + } + } + + if err := typed.Box(subMessages, opts, g); err != nil { + return nil, err + } + } + + return g, typed.Box(subComponent, opts, g) +} + +// typesKeyModify modifies the keys.go file to add a new collection prefix. +func typesKeyModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "types/keys.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + content := f.String() + fmt.Sprintf(` +var ( + %[1]vKey= collections.NewPrefix("%[2]v/value/") +) +`, + opts.TypeName.PascalCase, + opts.TypeName.LowerCamel, + ) + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +// keeperModify modifies the keeper to add a new collections item type. 
+func keeperModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "keeper/keeper.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + content, err := xast.ModifyStruct( + f.String(), + "Keeper", + xast.AppendStructValue( + opts.TypeName.UpperCamel, + fmt.Sprintf("collections.Item[types.%[1]v]", opts.TypeName.PascalCase), + ), + ) + if err != nil { + return err + } + + // add parameter to the struct into the new keeper method. + content, err = xast.ModifyFunction( + content, + "NewKeeper", + xast.AppendFuncStruct( + "Keeper", + opts.TypeName.UpperCamel, + fmt.Sprintf(`collections.NewItem(sb, types.%[1]vKey, "%[2]v", codec.CollValue[types.%[1]v](cdc))`, + opts.TypeName.PascalCase, + opts.TypeName.LowerCamel, + ), + ), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +// Modifies query.proto to add the required RPCs and Messages. +// +// What it depends on: +// - Existence of a service with name "Query". Adds the rpc's there. +func protoRPCModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := opts.ProtoFile("query.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + + // Import the type and gogoImport. + gogoImport := protoutil.NewImport(typed.GoGoProtoImport) + if err = protoutil.AddImports(protoFile, true, gogoImport, opts.ProtoTypeImport()); err != nil { + return errors.Errorf("failed while adding imports in %s: %w", path, err) + } + // Find service. 
+ serviceQuery, err := protoutil.GetServiceByName(protoFile, "Query") + if err != nil { + return errors.Errorf("failed while looking up service 'Query' in %s: %w", path, err) + } + appModulePath := gomodulepath.ExtractAppPath(opts.ModulePath) + typenamePascal, typenameSnake := opts.TypeName.PascalCase, opts.TypeName.Snake + rpcQueryGet := protoutil.NewRPC( + fmt.Sprintf("Get%s", typenamePascal), + fmt.Sprintf("QueryGet%sRequest", typenamePascal), + fmt.Sprintf("QueryGet%sResponse", typenamePascal), + protoutil.WithRPCOptions( + protoutil.NewOption( + "google.api.http", + fmt.Sprintf( + "/%s/%s/%s/%s", + appModulePath, opts.ModuleName, opts.ProtoVer, opts.TypeName.Snake, + ), + protoutil.Custom(), + protoutil.SetField("get"), + ), + ), + ) + protoutil.AttachComment(rpcQueryGet, fmt.Sprintf("Queries a %v by index.", typenamePascal)) + protoutil.Append(serviceQuery, rpcQueryGet) + + // Add the service messages + queryGetRequest := protoutil.NewMessage("QueryGet" + typenamePascal + "Request") + field := protoutil.NewField(typenameSnake, typenamePascal, 1, + protoutil.WithFieldOptions(protoutil.NewOption("gogoproto.nullable", "false", protoutil.Custom())), + ) + queryGetResponse := protoutil.NewMessage(fmt.Sprintf("QueryGet%sResponse", typenamePascal), protoutil.WithFields(field)) + protoutil.Append(protoFile, queryGetRequest, queryGetResponse) + + newFile := genny.NewFileS(path, protoutil.Print(protoFile)) + return r.File(newFile) + } +} + +func clientCliQueryModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "module/autocli.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + getOption := fmt.Sprintf( + `{ + RpcMethod: "Get%[1]v", + Use: "get-%[2]v", + Short: "Gets a %[3]v", + Alias: []string{"show-%[2]v"}, + }`, + opts.TypeName.PascalCase, + opts.TypeName.Kebab, + opts.TypeName.Original, + ) + content, err := typed.AppendAutoCLIQueryOptions(f.String(), getOption) + if err 
!= nil { + return err + } + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +// Modifies the genesis.proto file to add a new field. +// +// What it depends on: +// - Existence of a message with name "GenesisState". Adds the field there. +func genesisProtoModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := opts.ProtoFile("genesis.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + // Add initial import for the new type + if err = protoutil.AddImports(protoFile, true, opts.ProtoTypeImport()); err != nil { + return errors.Errorf("failed to add imports to %s: %w", path, err) + } + + // Add field to GenesisState message. + genesisState, err := protoutil.GetMessageByName(protoFile, typed.ProtoGenesisStateMessage) + if err != nil { + return errors.Errorf("failed while looking up message '%s' in %s: %w", typed.ProtoGenesisStateMessage, path, err) + } + seqNumber := protoutil.NextUniqueID(genesisState) + field := protoutil.NewField( + opts.TypeName.Snake, + opts.TypeName.PascalCase, + seqNumber, + ) + protoutil.Append(genesisState, field) + + newFile := genny.NewFileS(path, protoutil.Print(protoFile)) + return r.File(newFile) + } +} + +func genesisTypesModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "types/genesis.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + content, err := xast.ModifyFunction(f.String(), "DefaultGenesis", xast.AppendFuncStruct( + "GenesisState", + fmt.Sprintf("%[1]v", opts.TypeName.UpperCamel), + "nil", + )) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func genesisTestsModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, 
"keeper/genesis_test.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + // Create a fields + sampleFields := "" + for _, field := range opts.Fields { + n, err := rand.Int(rand.Reader, big.NewInt(100)) + if err != nil { + return err + } + sampleFields += field.GenesisArgs(int(n.Int64()) + 1) + } + // add parameter to the struct into the new method. + content, err := xast.ModifyFunction( + f.String(), + "TestGenesis", + xast.AppendFuncStruct( + "GenesisState", + opts.TypeName.UpperCamel, + fmt.Sprintf("&types.%[1]v{ %[2]v }", opts.TypeName.PascalCase, sampleFields), + ), + xast.AppendFuncCode( + fmt.Sprintf("require.EqualExportedValues(t, genesisState.%[1]v, got.%[1]v)", opts.TypeName.UpperCamel), + ), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func genesisTypesTestsModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "types/genesis_test.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + // Create a fields + sampleFields := "" + for _, field := range opts.Fields { + n, err := rand.Int(rand.Reader, big.NewInt(100)) + if err != nil { + return err + } + sampleFields += field.GenesisArgs(int(n.Int64()) + 1) + } + + // add parameter to the struct into the new method. 
+ content, err := xast.ModifyFunction( + f.String(), + "TestGenesisState_Validate", + xast.AppendFuncStruct( + "GenesisState", + opts.TypeName.UpperCamel, + fmt.Sprintf("&types.%[1]v{ %[2]v }", opts.TypeName.PascalCase, sampleFields), + ), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func genesisModuleModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "keeper/genesis.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + templateModuleInit := `// Set if defined +if genState.%[1]v != nil { + if err := k.%[1]v.Set(ctx, *genState.%[1]v); err != nil { + return err + } +}` + replacementModuleInit := fmt.Sprintf( + templateModuleInit, + opts.TypeName.UpperCamel, + ) + content, err := xast.ModifyFunction( + f.String(), + "InitGenesis", + xast.AppendFuncCode(replacementModuleInit), + ) + if err != nil { + return err + } + + templateModuleExport := `// Get all %[1]v +%[1]v, err := k.%[2]v.Get(ctx) +if err != nil && !errors.Is(err, collections.ErrNotFound) { + return nil, err +} +genesis.%[2]v = &%[1]v` + replacementModuleExport := fmt.Sprintf( + templateModuleExport, + opts.TypeName.LowerCamel, + opts.TypeName.UpperCamel, + ) + content, err = xast.ModifyFunction( + content, + "ExportGenesis", + xast.AppendFuncCode(replacementModuleExport), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +// protoTxModify modifies the tx.proto file to add the required RPCs and messages. +// +// What it expects: +// - A service named "Msg" to exist in the proto file, it appends the RPCs inside it. 
+func protoTxModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := opts.ProtoFile("tx.proto") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + protoFile, err := protoutil.ParseProtoFile(f) + if err != nil { + return err + } + + // Add the RPC service. + serviceMsg, err := protoutil.GetServiceByName(protoFile, "Msg") + if err != nil { + return errors.Errorf("failed while looking up a message 'Msg' in %s: %w", path, err) + } + // Append create, update, delete rpcs. Better to append them altogether, single traversal. + typenamePascal := opts.TypeName.PascalCase + protoutil.Append(serviceMsg, + protoutil.NewRPC( + fmt.Sprintf("Create%s", typenamePascal), + fmt.Sprintf("MsgCreate%s", typenamePascal), + fmt.Sprintf("MsgCreate%sResponse", typenamePascal), + ), + protoutil.NewRPC( + fmt.Sprintf("Update%s", typenamePascal), + fmt.Sprintf("MsgUpdate%s", typenamePascal), + fmt.Sprintf("MsgUpdate%sResponse", typenamePascal), + ), + protoutil.NewRPC( + fmt.Sprintf("Delete%s", typenamePascal), + fmt.Sprintf("MsgDelete%s", typenamePascal), + fmt.Sprintf("MsgDelete%sResponse", typenamePascal), + ), + ) + + // Ensure custom types are imported + var protoImports []*proto.Import + for _, imp := range opts.Fields.ProtoImports() { + protoImports = append(protoImports, protoutil.NewImport(imp)) + } + for _, f := range opts.Fields.Custom() { + protoPath := fmt.Sprintf("%[1]v/%[2]v/%[3]v/%[4]v.proto", opts.AppName, opts.ModuleName, opts.ProtoVer, f) + protoImports = append(protoImports, protoutil.NewImport(protoPath)) + } + // we already know an import exists, pass false for fallback. + if err = protoutil.AddImports(protoFile, false, protoImports...); err != nil { + // shouldn't really occur. 
+ return errors.Errorf("failed while adding imports to %s: %w", path, err) + } + + // Add the messages + creator := protoutil.NewField(opts.MsgSigner.Snake, "string", 1) + creator.Options = append(creator.Options, protoutil.NewOption("cosmos_proto.scalar", "cosmos.AddressString", protoutil.Custom())) // set the scalar annotation + creatorOpt := protoutil.NewOption(typed.MsgSignerOption, opts.MsgSigner.Snake) + fields := []*proto.NormalField{creator} + for i, field := range opts.Fields { + fields = append(fields, field.ToProtoField(i+3)) + } + msgCreate := protoutil.NewMessage( + "MsgCreate"+typenamePascal, + protoutil.WithFields(fields...), + protoutil.WithMessageOptions(creatorOpt), + ) + msgCreateResponse := protoutil.NewMessage(fmt.Sprintf("MsgCreate%sResponse", typenamePascal)) + msgUpdate := protoutil.NewMessage( + "MsgUpdate"+typenamePascal, + protoutil.WithFields(fields...), + protoutil.WithMessageOptions(creatorOpt), + ) + msgUpdateResponse := protoutil.NewMessage(fmt.Sprintf("MsgUpdate%sResponse", typenamePascal)) + msgDelete := protoutil.NewMessage( + "MsgDelete"+typenamePascal, + protoutil.WithFields(creator), + protoutil.WithMessageOptions(creatorOpt), + ) + msgDeleteResponse := protoutil.NewMessage(fmt.Sprintf("MsgDelete%sResponse", typenamePascal)) + protoutil.Append(protoFile, + msgCreate, msgCreateResponse, msgUpdate, msgUpdateResponse, msgDelete, msgDeleteResponse, + ) + + newFile := genny.NewFileS(path, protoutil.Print(protoFile)) + return r.File(newFile) + } +} + +func clientCliTxModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "module/autocli.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + createOption := fmt.Sprintf( + `{ + RpcMethod: "Create%[1]v", + Use: "create-%[2]v %[4]s", + Short: "Create %[3]v", + PositionalArgs: []*autocliv1.PositionalArgDescriptor{%[5]s}, + }`, + opts.TypeName.PascalCase, + opts.TypeName.Kebab, + 
opts.TypeName.Original, + opts.Fields.CLIUsage(), + opts.Fields.ProtoFieldNameAutoCLI(), + ) + updateOption := fmt.Sprintf( + `{ + RpcMethod: "Update%[1]v", + Use: "update-%[2]v %[4]s", + Short: "Update %[3]v", + PositionalArgs: []*autocliv1.PositionalArgDescriptor{%[5]s}, + }`, + opts.TypeName.PascalCase, + opts.TypeName.Kebab, + opts.TypeName.Original, + opts.Fields.CLIUsage(), + opts.Fields.ProtoFieldNameAutoCLI(), + ) + deleteOption := fmt.Sprintf( + `{ + RpcMethod: "Delete%[1]v", + Use: "delete-%[2]v", + Short: "Delete %[3]v", + }`, + opts.TypeName.PascalCase, + opts.TypeName.Kebab, + opts.TypeName.Original, + ) + content, err := typed.AppendAutoCLITxOptions(f.String(), createOption, updateOption, deleteOption) + if err != nil { + return err + } + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} + +func typesCodecModify(opts *typed.Options) genny.RunFn { + return func(r *genny.Runner) error { + path := filepath.Join("x", opts.ModuleName, "types/codec.go") + f, err := r.Disk.Find(path) + if err != nil { + return err + } + + // Import + content, err := xast.AppendImports(f.String(), xast.WithNamedImport("sdk", "github.com/cosmos/cosmos-sdk/types")) + if err != nil { + return err + } + + // Interface + templateInterface := `registrar.RegisterImplementations((*sdk.Msg)(nil), + &MsgCreate%[1]v{}, + &MsgUpdate%[1]v{}, + &MsgDelete%[1]v{}, +)` + replacementInterface := fmt.Sprintf( + templateInterface, + opts.TypeName.PascalCase, + ) + content, err = xast.ModifyFunction( + content, + "RegisterInterfaces", + xast.AppendFuncAtLine(replacementInterface, 0), + ) + if err != nil { + return err + } + + newFile := genny.NewFileS(path, content) + return r.File(newFile) + } +} diff --git a/ignite/templates/typed/typed.go b/ignite/templates/typed/typed.go new file mode 100644 index 0000000..bee8054 --- /dev/null +++ b/ignite/templates/typed/typed.go @@ -0,0 +1,53 @@ +package typed + +import ( + "io/fs" + + "github.com/gobuffalo/genny/v2" + 
"github.com/gobuffalo/plush/v4" + + "github.com/ignite/cli/v29/ignite/pkg/gomodulepath" + "github.com/ignite/cli/v29/ignite/pkg/xgenny" + "github.com/ignite/cli/v29/ignite/templates/field/plushhelpers" + "github.com/ignite/cli/v29/ignite/templates/module" +) + +func Box(box fs.FS, opts *Options, g *genny.Generator) error { + if err := g.OnlyFS(box, nil, nil); err != nil { + return err + } + + appModulePath := gomodulepath.ExtractAppPath(opts.ModulePath) + + ctx := plush.NewContext() + ctx.Set("ModuleName", opts.ModuleName) + ctx.Set("ProtoVer", opts.ProtoVer) + ctx.Set("IsIBC", opts.IsIBC) + ctx.Set("AppName", opts.AppName) + ctx.Set("TypeName", opts.TypeName) + ctx.Set("ModulePath", opts.ModulePath) + ctx.Set("MsgSigner", opts.MsgSigner) + ctx.Set("Fields", opts.Fields) + ctx.Set("Index", opts.Index) + ctx.Set("NoMessage", opts.NoMessage) + ctx.Set("protoPkgName", module.ProtoPackageName(appModulePath, opts.ModuleName, opts.ProtoVer)) + ctx.Set("strconv", func() bool { + strconv := false + for _, field := range opts.Fields { + if field.DatatypeName != "string" { + strconv = true + } + } + return strconv + }) + + plushhelpers.ExtendPlushContext(ctx) + g.Transformer(xgenny.Transformer(ctx)) + g.Transformer(genny.Replace("{{protoDir}}", opts.ProtoDir)) + g.Transformer(genny.Replace("{{appName}}", opts.AppName)) + g.Transformer(genny.Replace("{{moduleName}}", opts.ModuleName)) + g.Transformer(genny.Replace("{{protoVer}}", opts.ProtoVer)) + g.Transformer(genny.Replace("{{typeName}}", opts.TypeName.Snake)) + + return nil +} diff --git a/ignite/version/version.go b/ignite/version/version.go new file mode 100644 index 0000000..9331a44 --- /dev/null +++ b/ignite/version/version.go @@ -0,0 +1,256 @@ +package version + +import ( + "bytes" + "context" + "fmt" + "os" + "path" + "runtime" + "runtime/debug" + "strings" + "text/tabwriter" + + "github.com/blang/semver/v4" + "github.com/google/go-github/v48/github" + + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + 
"github.com/ignite/cli/v29/ignite/pkg/cmdrunner/exec" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/cosmosbuf" + "github.com/ignite/cli/v29/ignite/pkg/cosmosver" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/xexec" +) + +const ( + errOldCosmosSDKVersionStr = `Your chain has been scaffolded with an older version of Cosmos SDK: %s + +Please, follow the migration guide to upgrade your chain to the latest version at https://docs.ignite.com/migration` + + versionDev = "development" + versionNightly = "nightly" +) + +// Version is the semantic version of Ignite CLI. +var Version = versionDev + +type Info struct { + CLIVersion string + GoVersion string + SDKVersion string + BufVersion string + BuildDate string + SourceHash string + ConfigVersion string + OS string + Arch string + Uname string + CWD string + BuildFromSource bool +} + +// CheckNext checks whether there is a new version of Ignite CLI. +func CheckNext(ctx context.Context) (isAvailable bool, version string, err error) { + if Version == versionDev || Version == versionNightly { + return false, "", nil + } + + tagName, err := getLatestReleaseTag(ctx) + if err != nil { + return false, "", err + } + + currentVersion, err := semver.ParseTolerant(Version) + if err != nil { + return false, "", err + } + + latestVersion, err := semver.ParseTolerant(tagName) + if err != nil { + return false, "", err + } + + isAvailable = latestVersion.GT(currentVersion) + + return isAvailable, tagName, nil +} + +func getLatestReleaseTag(ctx context.Context) (string, error) { + latest, _, err := github. + NewClient(nil). + Repositories. + GetLatestRelease(ctx, "ignite", "cli") + if err != nil { + return "", err + } + + if latest.TagName == nil { + return "", nil + } + + return *latest.TagName, nil +} + +// fromSource check if the binary was build from source using the CLI version. 
+func fromSource() bool { + return Version == versionDev +} + +// resolveDevVersion creates a string for version printing if the version being used is "development". +// the version will be of the form "LATEST-dev" where LATEST is the latest tagged release. +func resolveDevVersion(ctx context.Context) string { + // do nothing if built with specific tag + if Version != versionDev && Version != versionNightly { + return Version + } + + tag, err := getLatestReleaseTag(ctx) + if err != nil { + return Version + } + + // if the module version is higher than the latest tag, use the module version + if info, ok := debug.ReadBuildInfo(); ok { + if version := path.Base(info.Main.Path); version > tag { + tag = fmt.Sprintf("%s.0.0", version) + } + } + + if Version == versionDev { + return tag + "-dev" + } + if Version == versionNightly { + return tag + "-nightly" + } + + return Version +} + +// Long generates a detailed version info. +func Long(ctx context.Context) (string, error) { + var ( + w = &tabwriter.Writer{} + b = &bytes.Buffer{} + ) + + info, err := GetInfo(ctx) + if err != nil { + return "", err + } + + write := func(k, v string) { + fmt.Fprintf(w, "%s:\t%s\n", k, v) + } + w.Init(b, 0, 8, 0, '\t', 0) + + write("Ignite CLI version", info.CLIVersion) + write("Ignite CLI build date", info.BuildDate) + write("Ignite CLI source hash", info.SourceHash) + write("Ignite CLI config version", info.ConfigVersion) + write("Cosmos SDK version", info.SDKVersion) + write("Buf.build version", info.BufVersion) + + write("Your OS", info.OS) + write("Your arch", info.Arch) + write("Your go version", info.GoVersion) + write("Your uname -a", info.Uname) + + if info.CWD != "" { + write("Your cwd", info.CWD) + } + + if err := w.Flush(); err != nil { + return "", err + } + + return b.String(), nil +} + +// GetInfo gets the CLI info. 
+func GetInfo(ctx context.Context) (Info, error) { + var ( + info Info + modified bool + + date = "undefined" + head = "undefined" + sdkVersion = "undefined" + ) + if buildInfo, ok := debug.ReadBuildInfo(); ok { + for _, dep := range buildInfo.Deps { + if cosmosver.CosmosSDKModulePathPattern.MatchString(dep.Path) { + sdkVersion = dep.Version + break + } + } + + for _, kv := range buildInfo.Settings { + switch kv.Key { + case "vcs.revision": + head = kv.Value + case "vcs.time": + date = kv.Value + case "vcs.modified": + modified = kv.Value == "true" + } + } + if modified { + // add * suffix to head to indicate the sources have been modified. + head += "*" + } + } + + goVersionBuf := &bytes.Buffer{} + if err := exec.Exec(ctx, []string{"go", "version"}, exec.StepOption(step.Stdout(goVersionBuf))); err != nil { + return info, err + } + + var ( + unameCmd = "uname" + uname = "" + ) + if xexec.IsCommandAvailable(unameCmd) { + unameBuf := &bytes.Buffer{} + unameBuf.Reset() + if err := exec.Exec(ctx, []string{unameCmd, "-a"}, exec.StepOption(step.Stdout(unameBuf))); err != nil { + return info, err + } + uname = strings.TrimSpace(unameBuf.String()) + } + + if cwd, err := os.Getwd(); err == nil { + info.CWD = cwd + } + + // buf version can only be determined within a go.mod because of go tool. + // no global version is used in ignite since v29. 
+ bufVersion := "undefined" + if _, err := os.Stat(path.Join(info.CWD, "go.mod")); !os.IsNotExist(err) { + bufVersion, err = cosmosbuf.Version(ctx) + if err != nil { + return info, err + } + } + + info.Uname = uname + info.CLIVersion = resolveDevVersion(ctx) + info.BuildDate = date + info.BufVersion = bufVersion + info.SourceHash = head + info.ConfigVersion = fmt.Sprintf("v%d", chainconfig.LatestVersion) + info.SDKVersion = sdkVersion + info.OS = runtime.GOOS + info.Arch = runtime.GOARCH + info.GoVersion = strings.TrimSpace(goVersionBuf.String()) + info.BuildFromSource = fromSource() + + return info, nil +} + +// AssertSupportedCosmosSDKVersion asserts that a Cosmos SDK version is supported by Ignite CLI. +// THE MUKAN PATCH: This check is intentionally disabled for the Mukan Network sovereign stack. +// Mukan SDK is a hard-fork of Cosmos SDK with its own versioning — all versions are valid. +func AssertSupportedCosmosSDKVersion(v cosmosver.Version) error { + return nil +} diff --git a/ignite/version/version_test.go b/ignite/version/version_test.go new file mode 100644 index 0000000..9b39000 --- /dev/null +++ b/ignite/version/version_test.go @@ -0,0 +1,61 @@ +package version_test + +import ( + "testing" + + "github.com/blang/semver/v4" + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosver" + "github.com/ignite/cli/v29/ignite/version" +) + +func TestAssertSupportedCosmosSDKVersion(t *testing.T) { + testCases := []struct { + name string + version cosmosver.Version + errMsg string + }{ + { + "invalid", + cosmosver.Version{Version: "invalid"}, + "Your chain has been scaffolded with an older version of Cosmos SDK: invalid", + }, + { + "too old", + cosmosver.Version{Version: "v0.45.0", Semantic: semver.MustParse("0.45.0")}, + "Your chain has been scaffolded with an older version of Cosmos SDK: v0.45.0", + }, + { + "v0.47.3", + cosmosver.Version{Version: "v0.47.3", Semantic: semver.MustParse("0.47.3")}, + "Your chain has been 
scaffolded with an older version of Cosmos SDK: v0.47.3", + }, + { + "v0.50", + cosmosver.Version{Version: "v0.50.1", Semantic: semver.MustParse("0.50.1")}, + "", + }, + { + "v0.50 fork", + cosmosver.Version{Version: "v0.50.1-rollkit-v0.11.6-no-fraud-proofs", Semantic: semver.MustParse("0.50.1-rollkit-v0.11.6-no-fraud-proofs")}, + "", + }, + { + "v0.53", + cosmosver.Version{Version: "v0.53.0", Semantic: semver.MustParse("0.53.0")}, + "", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := version.AssertSupportedCosmosSDKVersion(tc.version) + if tc.errMsg == "" { + require.NoError(t, err) + } else { + require.ErrorContains(t, err, tc.errMsg) + } + }) + } +} diff --git a/integration/account/cmd_account_test.go b/integration/account/cmd_account_test.go new file mode 100644 index 0000000..2ef2fe8 --- /dev/null +++ b/integration/account/cmd_account_test.go @@ -0,0 +1,112 @@ +package account_test + +import ( + "bytes" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/randstr" + envtest "github.com/ignite/cli/v29/integration" +) + +const testAccountMnemonic = "develop mansion drum glow husband trophy labor jelly fault run pause inside jazz foil page injury foam oppose fruit chunk segment morning series nation" + +func TestAccount(t *testing.T) { + var ( + env = envtest.New(t) + tmpDir = t.TempDir() + accountName = randstr.Runes(10) + ) + + env.Must(env.Exec("create account", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, "account", "create", accountName, "--keyring-dir", tmpDir), + )), + )) + + listOutputBuffer := &bytes.Buffer{} + env.Must(env.Exec("list accounts", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, "account", "list", "--keyring-dir", tmpDir), + )), + envtest.ExecStdout(listOutputBuffer), + )) + require.True(t, strings.Contains(listOutputBuffer.String(), accountName)) + + 
env.Must(env.Exec("delete account", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, "account", "delete", accountName, "--keyring-dir", tmpDir), + )), + )) + + listOutputAfterDeleteBuffer := &bytes.Buffer{} + env.Must(env.Exec("list accounts after delete", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, "account", "list", "--keyring-dir", tmpDir), + )), + envtest.ExecStdout(listOutputAfterDeleteBuffer), + )) + require.Equal(t, "Name \tAddress Public Key \t\n\n", listOutputAfterDeleteBuffer.String()) + + env.Must(env.Exec("import account with mnemonic", + step.NewSteps(step.New( + step.Exec( + envtest.IgniteApp, "account", "import", "testaccount42", + "--keyring-dir", tmpDir, + "--secret", testAccountMnemonic, + ), + )), + )) + + env.Must(env.Exec("import account with private key", + step.NewSteps(step.New( + step.Exec( + envtest.IgniteApp, "account", "import", "testaccount43", + "--non-interactive", + "--keyring-dir", tmpDir, + "--secret", "testdata/key", + "--passphrase", "passpass", + ), + )), + )) + + listOutputAfterImportBuffer := &bytes.Buffer{} + env.Must(env.Exec("list accounts after import", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, "account", "list", "--keyring-dir", tmpDir), + )), + envtest.ExecStdout(listOutputAfterImportBuffer), + )) + require.Equal(t, `Name Address Public Key +testaccount42 cosmos1ytnkpns7mfd6jjkvq9ztdvjdrt2xvmft2qxzqd PubKeySecp256k1{02FDF6D6F63B6B8E3CC71D03669BE0808F9990EE2A7FDBBF47E6BBEC4176E7763C} +testaccount43 cosmos18p4xchk2aqp39nsjwr69ql44upzsfnh8r9lzql PubKeySecp256k1{0228B8FC609973D91BFF7A9933424F31E15A54B97F8FDF7CE5A83B4DC20988068F} + +`, listOutputAfterImportBuffer.String()) + + showOutputBuffer := &bytes.Buffer{} + env.Must(env.Exec("show account", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, "account", "show", "testaccount42", "--keyring-dir", tmpDir), + )), + envtest.ExecStdout(showOutputBuffer), + )) + require.Equal(t, `Name Address Public Key +testaccount42 
cosmos1ytnkpns7mfd6jjkvq9ztdvjdrt2xvmft2qxzqd PubKeySecp256k1{02FDF6D6F63B6B8E3CC71D03669BE0808F9990EE2A7FDBBF47E6BBEC4176E7763C} + +`, showOutputBuffer.String()) + + showOutputWithDifferentPrefixBuffer := &bytes.Buffer{} + env.Must(env.Exec("show account with address prefix", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, "account", "show", "testaccount42", "--keyring-dir", tmpDir, "--address-prefix", "test"), + )), + envtest.ExecStdout(showOutputWithDifferentPrefixBuffer), + )) + require.Equal(t, `Name Address Public Key +testaccount42 test1ytnkpns7mfd6jjkvq9ztdvjdrt2xvmftxemuve PubKeySecp256k1{02FDF6D6F63B6B8E3CC71D03669BE0808F9990EE2A7FDBBF47E6BBEC4176E7763C} + +`, showOutputWithDifferentPrefixBuffer.String()) +} diff --git a/integration/account/testdata/key b/integration/account/testdata/key new file mode 100644 index 0000000..a671970 --- /dev/null +++ b/integration/account/testdata/key @@ -0,0 +1,9 @@ +-----BEGIN TENDERMINT PRIVATE KEY----- +kdf: bcrypt +salt: DF141717258C6DFA02F3531A7606CC06 +type: secp256k1 + +7iYel/9+f4pY772S1WstIgUxzTTK81sXHtzI0YnmvbrH4wcbM/yfe1VPJJ11L6WT +dxs0v9A4DjwD9dOY73+zQc6NypB07OlyOykJXcA= +=Dojr +-----END TENDERMINT PRIVATE KEY----- \ No newline at end of file diff --git a/integration/app.go b/integration/app.go new file mode 100644 index 0000000..a7325f3 --- /dev/null +++ b/integration/app.go @@ -0,0 +1,480 @@ +package envtest + +import ( + "context" + "fmt" + "os" + "path" + "path/filepath" + "strconv" + "strings" + "time" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" + + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + v1 "github.com/ignite/cli/v29/ignite/config/chain/v1" + "github.com/ignite/cli/v29/ignite/pkg/availableport" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/gocmd" + "github.com/ignite/cli/v29/ignite/pkg/goenv" + "github.com/ignite/cli/v29/ignite/pkg/xurl" + "github.com/ignite/cli/v29/ignite/templates/field" +) + +const 
ServeTimeout = time.Minute * 15 + +const ( + defaultConfigFileName = "config.yml" + defaultTestTimeout = 30 * time.Minute // Go's default is 10m +) + +type ( + // Hosts contains the "hostname:port" addresses for different service hosts. + Hosts struct { + RPC string + P2P string + Prof string + GRPC string + GRPCWeb string + API string + Faucet string + } + + App struct { + namespace string + name string + path string + configPath string + homePath string + testTimeout time.Duration + + env Env + + scaffolded []scaffold + } + + scaffold struct { + fields field.Fields + index field.Field + response field.Fields + params field.Fields + module string + name string + typeName string + } +) + +type AppOption func(*App) + +func AppConfigPath(path string) AppOption { + return func(o *App) { + o.configPath = path + } +} + +func AppHomePath(path string) AppOption { + return func(o *App) { + o.homePath = path + } +} + +func AppTestTimeout(d time.Duration) AppOption { + return func(o *App) { + o.testTimeout = d + } +} + +// ScaffoldApp scaffolds an app to a unique appPath and returns it. 
+func (e Env) ScaffoldApp(namespace string, flags ...string) App { + root := e.TmpDir() + + e.Exec("scaffold an app", + step.NewSteps(step.New( + step.Exec( + IgniteApp, + append([]string{ + "scaffold", + "chain", + namespace, + }, flags...)..., + ), + step.Workdir(root), + )), + ) + + var ( + appDirName = path.Base(namespace) + appSourcePath = filepath.Join(root, appDirName) + appHomePath = e.AppHome(appDirName) + ) + + e.t.Cleanup(func() { os.RemoveAll(appHomePath) }) + + return e.App(namespace, appSourcePath, AppHomePath(appHomePath)) +} + +func (e Env) App(namespace, appPath string, options ...AppOption) App { + app := App{ + env: e, + path: appPath, + testTimeout: defaultTestTimeout, + scaffolded: make([]scaffold, 0), + namespace: namespace, + name: path.Base(namespace), + } + + for _, apply := range options { + apply(&app) + } + + if app.configPath == "" { + app.configPath = filepath.Join(appPath, defaultConfigFileName) + } + + return app +} + +func (a *App) SourcePath() string { + return a.path +} + +func (a *App) SetHomePath(homePath string) { + a.homePath = homePath +} + +func (a *App) SetConfigPath(path string) { + a.configPath = path +} + +// Binary returns the binary name of the app. Can be executed directly w/o any +// path after app.Serve is called, since it should be in the $PATH. +func (a *App) Binary() string { + return path.Base(a.path) + "d" +} + +// Serve serves an application lives under path with options where msg describes the +// execution from the serving action. +// unless calling with Must(), Serve() will not exit test runtime on failure. 
+func (a *App) Serve(msg string, options ...ExecOption) (ok bool) { + serveCommand := []string{ + "chain", + "serve", + "-v", + "--quit-on-fail", + } + + if a.homePath != "" { + serveCommand = append(serveCommand, "--home", a.homePath) + } + if a.configPath != "" { + serveCommand = append(serveCommand, "--config", a.configPath) + } + a.env.t.Cleanup(func() { + // Serve install the app binary in GOBIN, let's clean that. + appBinary := path.Join(goenv.Bin(), a.Binary()) + os.Remove(appBinary) + }) + + return a.env.Exec(msg, + step.NewSteps(step.New( + step.Exec(IgniteApp, serveCommand...), + step.Workdir(a.path), + )), + options..., + ) +} + +// Simulate runs the simulation test for the app. +func (a *App) Simulate(numBlocks, blockSize int) { + a.env.Exec("running the simulation tests", + step.NewSteps(step.New( + step.Exec( + IgniteApp, // TODO + "chain", + "simulate", + "--numBlocks", + strconv.Itoa(numBlocks), + "--blockSize", + strconv.Itoa(blockSize), + ), + step.Workdir(a.path), + )), + ) +} + +// EnsureSteady ensures that app living at the path can compile and its tests are passing. +func (a *App) EnsureSteady() { + _, statErr := os.Stat(a.configPath) + + require.False(a.env.t, os.IsNotExist(statErr), "config.yml cannot be found") + + a.env.Exec("make sure app is steady", + step.NewSteps(step.New( + step.Exec(gocmd.Name(), "test", "-timeout", a.testTimeout.String(), "./..."), + step.Workdir(a.path), + )), + ) +} + +// EnableFaucet enables faucet by finding a random port for the app faucet and update config.yml +// with this port and provided coins options. 
+func (a *App) EnableFaucet(coins, coinsMax []string) (faucetAddr string) { + // find a random available port + port, err := availableport.Find(1) + require.NoError(a.env.t, err) + + a.EditConfig(func(c *chainconfig.Config) { + c.Faucet.Port = port[0] + c.Faucet.Coins = coins + c.Faucet.CoinsMax = coinsMax + }) + + addr, err := xurl.HTTP(fmt.Sprintf("0.0.0.0:%d", port[0])) + require.NoError(a.env.t, err) + + return addr +} + +// RandomizeServerPorts randomizes server ports for the app at path, updates +// its config.yml and returns new values. +func (a *App) RandomizeServerPorts() Hosts { + // generate random server ports + ports, err := availableport.Find(7) + require.NoError(a.env.t, err) + + genAddr := func(port uint) string { + return fmt.Sprintf("127.0.0.1:%d", port) + } + + hosts := Hosts{ + RPC: genAddr(ports[0]), + P2P: genAddr(ports[1]), + Prof: genAddr(ports[2]), + GRPC: genAddr(ports[3]), + GRPCWeb: genAddr(ports[4]), + API: genAddr(ports[5]), + Faucet: genAddr(ports[6]), + } + + a.EditConfig(func(c *chainconfig.Config) { + c.Faucet.Host = hosts.Faucet + + s := v1.Servers{} + s.GRPC.Address = hosts.GRPC + s.GRPCWeb.Address = hosts.GRPCWeb + s.API.Address = hosts.API + s.P2P.Address = hosts.P2P + s.RPC.Address = hosts.RPC + s.RPC.PProfAddress = hosts.Prof + + v := &c.Validators[0] + require.NoError(a.env.t, v.SetServers(s)) + }) + + return hosts +} + +// UseRandomHomeDir sets in the blockchain config files generated temporary directories for home directories. +// Returns the random home directory. 
+func (a *App) UseRandomHomeDir() (homeDirPath string) { + dir := a.env.TmpDir() + + a.EditConfig(func(c *chainconfig.Config) { + c.Validators[0].Home = dir + }) + + return dir +} + +func (a *App) Config() chainconfig.Config { + bz, err := os.ReadFile(a.configPath) + require.NoError(a.env.t, err) + + var conf chainconfig.Config + err = yaml.Unmarshal(bz, &conf) + require.NoError(a.env.t, err) + return conf +} + +func (a *App) EditConfig(apply func(*chainconfig.Config)) { + conf := a.Config() + apply(&conf) + + bz, err := yaml.Marshal(conf) + require.NoError(a.env.t, err) + err = os.WriteFile(a.configPath, bz, 0o600) + require.NoError(a.env.t, err) +} + +// GenerateTSClient runs the command to generate the Typescript client code. +func (a *App) GenerateTSClient() bool { + return a.env.Exec("generate typescript client", step.NewSteps( + step.New( + step.Exec(IgniteApp, "g", "ts-client", "--yes", "--clear-cache"), + step.Workdir(a.path), + ), + )) +} + +// MustServe serves the application and ensures success, failing the test if serving fails. +// It uses the provided context to allow cancellation. +func (a *App) MustServe(ctx context.Context) { + a.env.Must(a.Serve("should serve chain", ExecCtx(ctx))) +} + +// Scaffold scaffolds a new module or component in the app and optionally +// validates if it should fail. +// - msg: description of the scaffolding operation. +// - shouldFail: whether the scaffolding is expected to fail. +// - typeName: the type of the scaffold (e.g., "map", "message"). +// - args: additional arguments for the scaffold command. +func (a *App) Scaffold(msg string, shouldFail bool, typeName string, args ...string) { + a.generate(msg, "scaffold", shouldFail, append([]string{typeName}, args...)...) + + if !shouldFail { + a.addScaffoldCmd(typeName, args...) + } +} + +// Generate executes a code generation command in the app and optionally +// validates if it should fail. +// - msg: description of the generation operation. 
+// - shouldFail: whether the generation is expected to fail. +// - args: arguments for the generation command. +func (a *App) Generate(msg string, shouldFail bool, args ...string) { + a.generate(msg, "generate", shouldFail, args...) +} + +// generate is a helper method to execute a scaffolding or generation command with the specified options. +// - msg: description of the operation. +// - command: the command to execute (e.g., "scaffold", "generate"). +// - shouldFail: whether the command is expected to fail. +// - args: arguments for the command. +func (a *App) generate(msg, command string, shouldFail bool, args ...string) { + opts := make([]ExecOption, 0) + if shouldFail { + opts = append(opts, ExecShouldError()) + } + + args = append([]string{command}, args...) + a.env.Must(a.env.Exec(msg, + step.NewSteps(step.New( + step.Exec(IgniteApp, append(args, "--yes")...), + step.Workdir(a.SourcePath()), + )), + opts..., + )) +} + +// addScaffoldCmd processes the scaffold arguments and adds the scaffolded command metadata to the app. +// - typeName: the type of the scaffold (e.g., "map", "message"). +// - args: arguments for the scaffold command. 
+func (a *App) addScaffoldCmd(typeName string, args ...string) { + module := "" + index := "" + response := "" + params := "" + name := typeName + + // in the case of scaffolding commands that do no take arguments + // we can skip the argument parsing + if len(args) > 0 { + name = args[0] + args = args[1:] + } + + filteredArgs := make([]string, 0) + + // remove the flags from the args + for _, arg := range args { + if strings.HasPrefix(arg, "-") { + break + } + filteredArgs = append(filteredArgs, arg) + } + + // parse the arg flags + for i, arg := range args { + // skip tests if the type doesn't need a message + if arg == "--no-message" { + return + } + if i+1 >= len(args) { + break + } + switch arg { + case "--module": + module = args[i+1] + case "--index": + index = args[i+1] + case "--params": + params = args[i+1] + case "-r", "--response": + response = args[i+1] + } + } + + argsFields, err := field.ParseFields(filteredArgs, func(string) error { return nil }) + require.NoError(a.env.t, err) + + s := scaffold{ + fields: argsFields, + module: module, + typeName: typeName, + name: name, + } + + // Handle field specifics based on scaffold type + switch typeName { + case "map": + if index == "" { + index = "index:string" + } + indexFields, err := field.ParseFields(strings.Split(index, ","), func(string) error { return nil }) + require.NoError(a.env.t, err) + require.Len(a.env.t, indexFields, 1) + s.index = indexFields[0] + case "query", "message": + if response == "" { + break + } + responseFields, err := field.ParseFields(strings.Split(response, ","), func(string) error { return nil }) + require.NoError(a.env.t, err) + require.Greater(a.env.t, len(responseFields), 0) + s.response = responseFields + case "module": + s.module = name + if params == "" { + break + } + paramsFields, err := field.ParseFields(strings.Split(params, ","), func(string) error { return nil }) + require.NoError(a.env.t, err) + require.Greater(a.env.t, len(paramsFields), 0) + s.params = 
paramsFields + case "params": + s.params = argsFields + } + + a.scaffolded = append(a.scaffolded, s) +} + +// WaitChainUp waits the chain is up. +func (a *App) WaitChainUp(ctx context.Context, chainAPI string) { + // check the chains is up + env := a.env + stepsCheckChains := step.NewSteps( + step.New( + step.Exec( + a.Binary(), + "config", + "output", "json", + ), + step.PreExec(func() error { + return env.IsAppServed(ctx, chainAPI) + }), + ), + ) + env.Exec(fmt.Sprintf("waiting the chain (%s) is up", chainAPI), stepsCheckChains, ExecRetry()) +} diff --git a/integration/app/cmd_app_test.go b/integration/app/cmd_app_test.go new file mode 100644 index 0000000..b615af7 --- /dev/null +++ b/integration/app/cmd_app_test.go @@ -0,0 +1,234 @@ +//go:build !relayer + +package app_test + +import ( + "bytes" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/services/chain" + envtest "github.com/ignite/cli/v29/integration" +) + +// TestGenerateAnApp tests scaffolding a new chain. +func TestGenerateAnApp(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blog") + ) + + _, statErr := os.Stat(filepath.Join(app.SourcePath(), "x", "blog")) + require.False(t, os.IsNotExist(statErr), "the default module should be scaffolded") + + app.EnsureSteady() +} + +// TestGenerateAnAppMinimal tests scaffolding a new minimal chain. +func TestGenerateAnAppMinimal(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("blog", "--minimal") + ) + + _, statErr := os.Stat(filepath.Join(app.SourcePath(), "x", "blog")) + require.False(t, os.IsNotExist(statErr), "the default module should be scaffolded") + + app.EnsureSteady() +} + +// TestGenerateAnAppWithName tests scaffolding a new chain using a local name instead of a GitHub URI. 
+func TestGenerateAnAppWithName(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("blog") + ) + + _, statErr := os.Stat(filepath.Join(app.SourcePath(), "x", "blog")) + require.False(t, os.IsNotExist(statErr), "the default module should be scaffolded") + + app.EnsureSteady() +} + +// TestGenerateAnAppWithInvalidName tests scaffolding a new chain using an invalid name. +func TestGenerateAnAppWithInvalidName(t *testing.T) { + buf := new(bytes.Buffer) + + env := envtest.New(t) + env.Must(env.Exec("should prevent creating an app with an invalid name", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, "s", "chain", "blog2"), + step.Stdout(buf), + step.Stderr(buf), + )), + envtest.ExecShouldError(), + )) + + require.Contains(t, buf.String(), "Invalid app name blog2: cannot contain numbers") +} + +func TestGenerateAnAppWithNoDefaultModule(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blog", "--no-module") + ) + + _, statErr := os.Stat(filepath.Join(app.SourcePath(), "x", "blog")) + require.True(t, os.IsNotExist(statErr), "the default module should not be scaffolded") + + app.EnsureSteady() +} + +func TestGenerateAnAppWithNoDefaultModuleAndCreateAModule(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blog", "--no-module") + ) + + defer app.EnsureSteady() + + app.Scaffold( + "should scaffold a new module into a chain that never had modules before", + false, + "module", "first_module", + ) +} + +func TestGenerateAppWithEmptyModule(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blog") + ) + + app.Scaffold( + "create a module", + false, + "module", "example", "--require-registration", + ) + + app.Scaffold( + "should prevent creating an existing module", + true, + "module", "example", "--require-registration", + ) + + app.Scaffold( + "should prevent creating a module with an invalid name", + true, + "module", "example1", 
"--require-registration", + ) + + app.Scaffold( + "should prevent creating a module with a reserved name", + true, + "module", "tx", "--require-registration", + ) + + app.Scaffold( + "should prevent creating a module with a forbidden prefix", + true, + "module", "ibcfoo", "--require-registration", + ) + + app.Scaffold( + "should prevent creating a module prefixed with an existing module", + true, + "module", "examplefoo", "--require-registration", + ) + + app.Scaffold( + "create a module with dependencies", + false, + "module", + "with_dep", + "--dep", + "auth,bank,staking,slashing,example", + "--require-registration", + ) + + app.Scaffold( + "should prevent creating a module with invalid dependencies", + true, + "module", + "with_wrong_dep", + "--dep", + "dup,dup", + "--require-registration", + ) + + app.Scaffold( + "should prevent creating a module with a non registered dependency", + true, + "module", + "with_no_dep", + "--dep", + "inexistent", + "--require-registration", + ) + + app.EnsureSteady() +} + +func TestGenerateAnAppWithAddressPrefix(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blog", "--address-prefix=gm", "--coin-type=60") + ) + + _, statErr := os.Stat(filepath.Join(app.SourcePath(), "x", "blog")) + require.False(t, os.IsNotExist(statErr), "the default module should be scaffolded") + + c, err := chain.New(app.SourcePath()) + require.NoError(t, err, "failed to get new chain") + + bech32Prefix, err := c.Bech32Prefix() + require.NoError(t, err) + + require.Equal(t, bech32Prefix, "gm") + + coinType, err := c.CoinType() + require.NoError(t, err, "failed to get coin type") + require.Equal(t, coinType, uint32(60)) + + app.EnsureSteady() +} + +func TestGenerateAnAppWithDefaultDenom(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blog", "--default-denom=good") + ) + + _, statErr := os.Stat(filepath.Join(app.SourcePath(), "x", "blog")) + require.False(t, os.IsNotExist(statErr), 
"the default module should be scaffolded") + + c, err := chain.New(app.SourcePath()) + require.NoError(t, err, "failed to get new chain") + + cfg, err := c.Config() + require.NoError(t, err) + + require.Equal(t, cfg.DefaultDenom, "good") + + app.EnsureSteady() +} + +func TestScaffoldModuleWithUnderscoreAppName(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/space_chain") + ) + + app.Scaffold( + "create a module in app with underscore name", + false, + "module", + "f_o_o", + ) + + app.EnsureSteady() +} diff --git a/integration/app/cmd_proto_path_test.go b/integration/app/cmd_proto_path_test.go new file mode 100644 index 0000000..ca155f1 --- /dev/null +++ b/integration/app/cmd_proto_path_test.go @@ -0,0 +1,82 @@ +//go:build !relayer + +package app_test + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" + + "github.com/ignite/cli/v29/ignite/config/chain" + "github.com/ignite/cli/v29/ignite/config/chain/base" + v1 "github.com/ignite/cli/v29/ignite/config/chain/v1" + "github.com/ignite/cli/v29/ignite/pkg/xyaml" + envtest "github.com/ignite/cli/v29/integration" +) + +const newProtoPath = "myProto" + +var ( + bobName = "bob" + cfg = v1.Config{ + Config: base.Config{ + Version: 1, + Build: base.Build{ + Proto: base.Proto{ + Path: newProtoPath, + }, + }, + Accounts: []base.Account{ + { + Name: "alice", + Coins: []string{"100000000000token", "10000000000000000000stake"}, + Mnemonic: "slide moment original seven milk crawl help text kick fluid boring awkward doll wonder sure fragile plate grid hard next casual expire okay body", + }, + { + Name: bobName, + Coins: []string{"100000000000token", "10000000000000000000stake"}, + Mnemonic: "trap possible liquid elite embody host segment fantasy swim cable digital eager tiny broom burden diary earn hen grow engine pigeon fringe claim program", + }, + }, + Faucet: base.Faucet{ + Name: &bobName, + Coins: []string{"500token", 
"100000000stake"}, + Host: ":4501", + }, + Genesis: xyaml.Map{"chain_id": "mars-1"}, + }, + Validators: []v1.Validator{ + { + Name: "alice", + Bonded: "100000000stake", + }, + }, + } +) + +func TestChangeProtoPath(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/protopath", "--proto-dir", newProtoPath) + appPath = app.SourcePath() + cfgPath = filepath.Join(appPath, chain.ConfigFilenames[0]) + ) + + // set the custom config path. + file, err := os.Create(cfgPath) + require.NoError(t, err) + require.NoError(t, yaml.NewEncoder(file).Encode(cfg)) + require.NoError(t, file.Close()) + app.SetConfigPath(cfgPath) + + app.Scaffold( + "create a list with a custom proto path from config", + false, + "list", "listUser", "email", + ) + + app.EnsureSteady() +} diff --git a/integration/chain/cache_test.go b/integration/chain/cache_test.go new file mode 100644 index 0000000..88b1f33 --- /dev/null +++ b/integration/chain/cache_test.go @@ -0,0 +1,94 @@ +//go:build !relayer + +package chain_test + +import ( + "context" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + envtest "github.com/ignite/cli/v29/integration" +) + +func TestCliWithCaching(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/cacheblog") + vueGenerated = filepath.Join(app.SourcePath(), "vue/src/store/generated") + openapiGenerated = filepath.Join(app.SourcePath(), "docs/static/openapi.json") + typesDir = filepath.Join(app.SourcePath(), "x/cacheblog/types") + servers = app.RandomizeServerPorts() + ctx, cancel = context.WithTimeout(env.Ctx(), envtest.ServeTimeout) + isBackendAliveErr error + ) + + app.Scaffold( + "create a message", + false, + "message", "mymessage", "myfield1", "myfield2:bool", + ) + + app.Scaffold("create a query", false, "query", "myQuery", "mytypefield") + + env.Must(env.Exec("build", + step.NewSteps(step.New( + step.Exec( 
+ envtest.IgniteApp, + "c", + "build", + ), + step.Workdir(app.SourcePath()), + )), + )) + + app.EnsureSteady() + + deleteCachedFiles(t, vueGenerated, openapiGenerated, typesDir) + + env.Must(env.Exec("build", + step.NewSteps(step.New( + step.Exec( + envtest.IgniteApp, + "c", + "build", + ), + step.Workdir(app.SourcePath()), + )), + )) + + app.EnsureSteady() + + deleteCachedFiles(t, vueGenerated, openapiGenerated, typesDir) + + go func() { + defer cancel() + isBackendAliveErr = env.IsAppServed(ctx, servers.API) + }() + app.MustServe(ctx) + + require.NoError(t, isBackendAliveErr, "app cannot get online in time") +} + +func deleteCachedFiles(t *testing.T, vueGenerated, openapiGenerated, typesDir string) { + t.Helper() + require.NoError(t, os.RemoveAll(vueGenerated)) + require.NoError(t, os.Remove(openapiGenerated)) + + typesDirEntries, err := os.ReadDir(typesDir) + require.NoError(t, err) + + for _, v := range typesDirEntries { + if v.IsDir() { + continue + } + + if strings.Contains(v.Name(), ".pb") { + require.NoError(t, os.Remove(filepath.Join(typesDir, v.Name()))) + } + } +} diff --git a/integration/chain/cmd_modules_list_test.go b/integration/chain/cmd_modules_list_test.go new file mode 100644 index 0000000..74506dc --- /dev/null +++ b/integration/chain/cmd_modules_list_test.go @@ -0,0 +1,72 @@ +//go:build !relayer + +package chain_test + +import ( + "bytes" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + envtest "github.com/ignite/cli/v29/integration" +) + +func TestModulesList(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/mars") + ) + + var buffer bytes.Buffer + + env.Must(env.Exec("list modules", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, + "c", + "modules", + "list", + ), + step.Workdir(app.SourcePath()), + )), + envtest.ExecStdout(&buffer), + )) + + output := buffer.String() + + // check for module header + require.Contains(t, output, 
"Module") + require.Contains(t, output, "Version") + + // check for specific modules + require.Contains(t, output, "client/grpc/cmtservice") + require.Contains(t, output, "client/grpc/node") + require.Contains(t, output, "cosmossdk.io/x/circuit") + require.Contains(t, output, "cosmossdk.io/x/evidence") + require.Contains(t, output, "cosmossdk.io/x/feegrant/module") + require.Contains(t, output, "cosmossdk.io/x/nft/module") + require.Contains(t, output, "cosmossdk.io/x/upgrade") + require.Contains(t, output, "github.com/test/mars/x/mars") + require.Contains(t, output, "github.com/test/mars/x/mars/module") + require.Contains(t, output, "modules/apps/27-interchain-accounts") + require.Contains(t, output, "modules/apps/transfer") + require.Contains(t, output, "modules/core") + require.Contains(t, output, "x/auth") + require.Contains(t, output, "x/auth/tx") + require.Contains(t, output, "x/auth/tx/config") + require.Contains(t, output, "x/auth/vesting") + require.Contains(t, output, "x/authz") + require.Contains(t, output, "x/authz/module") + require.Contains(t, output, "x/bank") + require.Contains(t, output, "x/consensus") + require.Contains(t, output, "x/distribution") + require.Contains(t, output, "x/epochs") + require.Contains(t, output, "x/gov") + require.Contains(t, output, "x/group/module") + require.Contains(t, output, "x/mint") + require.Contains(t, output, "x/params") + require.Contains(t, output, "x/slashing") + require.Contains(t, output, "x/staking") + + app.EnsureSteady() +} diff --git a/integration/chain/cmd_serve_test.go b/integration/chain/cmd_serve_test.go new file mode 100644 index 0000000..34bab2f --- /dev/null +++ b/integration/chain/cmd_serve_test.go @@ -0,0 +1,104 @@ +//go:build !relayer + +package chain_test + +import ( + "context" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/xos" + envtest "github.com/ignite/cli/v29/integration" +) + +func TestServeWithCustomHome(t *testing.T) 
{ + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/sgbloga") + servers = app.RandomizeServerPorts() + ) + + var ( + ctx, cancel = context.WithTimeout(env.Ctx(), envtest.ServeTimeout) + isBackendAliveErr error + ) + go func() { + defer cancel() + isBackendAliveErr = env.IsAppServed(ctx, servers.API) + }() + app.MustServe(ctx) + + require.NoError(t, isBackendAliveErr, "app cannot get online in time") +} + +func TestServeWithConfigHome(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/sgblogb") + servers = app.RandomizeServerPorts() + ) + + var ( + ctx, cancel = context.WithTimeout(env.Ctx(), envtest.ServeTimeout) + isBackendAliveErr error + ) + go func() { + defer cancel() + isBackendAliveErr = env.IsAppServed(ctx, servers.API) + }() + app.MustServe(ctx) + + require.NoError(t, isBackendAliveErr, "app cannot get online in time") +} + +func TestServeWithCustomConfigFile(t *testing.T) { + tmpDir := t.TempDir() + + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/sgblogc") + ) + // Move config + newConfig := "new_config.yml" + newConfigPath := filepath.Join(tmpDir, newConfig) + err := xos.Rename(filepath.Join(app.SourcePath(), "config.yml"), newConfigPath) + require.NoError(t, err) + app.SetConfigPath(newConfigPath) + + servers := app.RandomizeServerPorts() + + var ( + ctx, cancel = context.WithTimeout(env.Ctx(), envtest.ServeTimeout) + isBackendAliveErr error + ) + go func() { + defer cancel() + isBackendAliveErr = env.IsAppServed(ctx, servers.API) + }() + app.MustServe(ctx) + + require.NoError(t, isBackendAliveErr, "app cannot get online in time") +} + +// TestServeWithName tests serving a new chain scaffolded using a local name instead of a GitHub URI. 
+func TestServeWithName(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("sgblogd") + servers = app.RandomizeServerPorts() + ) + + ctx, cancel := context.WithTimeout(env.Ctx(), envtest.ServeTimeout) + + var isBackendAliveErr error + + go func() { + defer cancel() + isBackendAliveErr = env.IsAppServed(ctx, servers.API) + }() + app.MustServe(ctx) + + require.NoError(t, isBackendAliveErr, "app cannot get online in time") +} diff --git a/integration/chain/config_test.go b/integration/chain/config_test.go new file mode 100644 index 0000000..4dc2922 --- /dev/null +++ b/integration/chain/config_test.go @@ -0,0 +1,68 @@ +//go:build !relayer + +package chain_test + +import ( + "context" + "fmt" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + chainconfig "github.com/ignite/cli/v29/ignite/config/chain" + "github.com/ignite/cli/v29/ignite/pkg/confile" + "github.com/ignite/cli/v29/ignite/pkg/randstr" + envtest "github.com/ignite/cli/v29/integration" +) + +func TestOverwriteSDKConfigsAndChainID(t *testing.T) { + var ( + env = envtest.New(t) + appname = randstr.Runes(10) + app = env.ScaffoldApp(fmt.Sprintf("github.com/test/%s", appname)) + servers = app.RandomizeServerPorts() + ctx, cancel = context.WithCancel(env.Ctx()) + isBackendAliveErr error + ) + + var cfg chainconfig.Config + cf := confile.New(confile.DefaultYAMLEncodingCreator, filepath.Join(app.SourcePath(), "config.yml")) + require.NoError(t, cf.Load(&cfg)) + + cfg.Genesis = map[string]interface{}{"chain_id": "cosmos"} + cfg.Validators[0].App["hello"] = "cosmos" + cfg.Validators[0].Config["log_format"] = "json" + + require.NoError(t, cf.Save(cfg)) + + go func() { + defer cancel() + isBackendAliveErr = env.IsAppServed(ctx, servers.API) + }() + + app.MustServe(ctx) + require.NoError(t, isBackendAliveErr, "app cannot get online in time") + + cases := []struct { + ec confile.EncodingCreator + relpath string + key string + want interface{} + }{ + 
{confile.DefaultJSONEncodingCreator, "config/genesis.json", "chain_id", "cosmos"}, + {confile.DefaultTOMLEncodingCreator, "config/app.toml", "hello", "cosmos"}, + {confile.DefaultTOMLEncodingCreator, "config/config.toml", "log_format", "json"}, + } + for _, tt := range cases { + t.Run("test "+tt.relpath, func(t *testing.T) { + var conf map[string]interface{} + + path := filepath.Join(env.AppHome(appname), tt.relpath) + c := confile.New(tt.ec, path) + + require.NoError(t, c.Load(&conf)) + require.Equalf(t, tt.want, conf[tt.key], "unexpected value for %s", tt.relpath) + }) + } +} diff --git a/integration/cosmosgen/cosmosgen_composables_test.go b/integration/cosmosgen/cosmosgen_composables_test.go new file mode 100644 index 0000000..aaaaef3 --- /dev/null +++ b/integration/cosmosgen/cosmosgen_composables_test.go @@ -0,0 +1,126 @@ +package cosmosgen_test + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + envtest "github.com/ignite/cli/v29/integration" +) + +func TestCosmosGenScaffoldComposables(t *testing.T) { + if envtest.IsCI { + t.Skip("Skipping TestCosmosGenScaffoldComposables test in CI environment") + } + + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blog") + ) + + const ( + withMsgModuleName = "withmsg" + withoutMsgModuleName = "withoutmsg" + ) + + app.Scaffold("add custom module with message", false, "module", withMsgModuleName) + + app.Scaffold( + "create a message", + false, + "message", + "mymessage", + "myfield1", + "myfield2:bool", + "--module", + withMsgModuleName, + ) + + app.Scaffold( + "add custom module without message", + false, + "module", + withoutMsgModuleName, + ) + + app.Scaffold( + "create a type", + false, + "type", + "mytype", + "mytypefield", + "--module", + withoutMsgModuleName, + ) + + app.Scaffold( + "create a query", + false, + "query", + "myQuery", + "mytypefield", + "--module", + withoutMsgModuleName, + ) + + 
composablesDirGenerated := filepath.Join(app.SourcePath(), "vue/src/composables") + require.NoError(t, os.RemoveAll(composablesDirGenerated)) + + app.Scaffold( + "scaffold vue", + false, + "vue", + ) + + app.Generate( + "generate composables", + false, + "composables", + "--clear-cache", + ) + + expectedQueryModules := []string{ + "useCosmosAuthV1Beta1", + "useCosmosAuthzV1Beta1", + "useCosmosBankV1Beta1", + "useCosmosBaseTendermintV1Beta1", + "useCosmosDistributionV1Beta1", + "useCosmosEvidenceV1Beta1", + "useCosmosFeegrantV1Beta1", + "useCosmosGovV1Beta1", + "useCosmosGovV1", + "useCosmosGroupV1", + "useCosmosMintV1Beta1", + "useCosmosNftV1Beta1", + "useCosmosParamsV1Beta1", + "useCosmosSlashingV1Beta1", + "useCosmosStakingV1Beta1", + "useCosmosTxV1Beta1", + "useCosmosUpgradeV1Beta1", + "useCosmosVestingV1Beta1", + // custom modules + "useBlogBlogV1", + "useBlogWithmsgV1", + "useBlogWithoutmsgV1", + } + + for _, mod := range expectedQueryModules { + _, err := os.Stat(filepath.Join(composablesDirGenerated, mod)) + if assert.False(t, os.IsNotExist(err), "missing composable %q in %s", mod, composablesDirGenerated) { + assert.NoError(t, err) + } + } + + if t.Failed() { + // list composables files + composablesFiles, err := os.ReadDir(composablesDirGenerated) + require.NoError(t, err) + t.Log("Composables files:", len(composablesFiles)) + for _, file := range composablesFiles { + t.Logf(" - %s", file.Name()) + } + } +} diff --git a/integration/cosmosgen/cosmosgen_test.go b/integration/cosmosgen/cosmosgen_test.go new file mode 100644 index 0000000..0cb551c --- /dev/null +++ b/integration/cosmosgen/cosmosgen_test.go @@ -0,0 +1,162 @@ +package cosmosgen_test + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + envtest "github.com/ignite/cli/v29/integration" +) + +func TestCosmosGenScaffold(t *testing.T) { + if envtest.IsCI { + 
t.Skip("Skipping CosmosGenScaffold test in CI environment") + } + + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blog") + ) + + const ( + withMsgModuleName = "withmsg" + withoutMsgModuleName = "withoutmsg" + ) + + env.Must(env.Exec("add custom module with message", + step.NewSteps(step.New( + step.Exec( + envtest.IgniteApp, + "s", + "module", + "--yes", + withMsgModuleName, + ), + step.Workdir(app.SourcePath()), + )), + )) + + env.Must(env.Exec("create a message", + step.NewSteps(step.New( + step.Exec( + envtest.IgniteApp, + "s", + "message", + "--yes", + "mymessage", + "myfield1", + "myfield2:bool", + "--module", + withMsgModuleName, + ), + step.Workdir(app.SourcePath()), + )), + )) + + env.Must(env.Exec("add custom module without message", + step.NewSteps(step.New( + step.Exec( + envtest.IgniteApp, + "s", + "module", + "--yes", + withoutMsgModuleName, + ), + step.Workdir(app.SourcePath()), + )), + )) + + env.Must(env.Exec("create a type", + step.NewSteps(step.New( + step.Exec( + envtest.IgniteApp, + "s", + "type", + "--yes", + "mytype", + "mytypefield", + "--module", + withoutMsgModuleName, + ), + step.Workdir(app.SourcePath()), + )), + )) + + env.Must(env.Exec("create a query", + step.NewSteps(step.New( + step.Exec( + envtest.IgniteApp, + "s", + "query", + "--yes", + "myQuery", + "mytypefield", + "--module", + withoutMsgModuleName, + ), + step.Workdir(app.SourcePath()), + )), + )) + + tsDirGenerated := filepath.Join(app.SourcePath(), "ts-client") + require.NoError(t, os.RemoveAll(tsDirGenerated)) + + env.Must(env.Exec("generate typescript", + step.NewSteps(step.New( + step.Exec( + envtest.IgniteApp, + "g", + "ts-client", + "--yes", + "--clear-cache", + ), + step.Workdir(app.SourcePath()), + )), + )) + + expectedModules := []string{ + "cosmos.auth.v1beta1", + "cosmos.authz.v1beta1", + "cosmos.bank.v1beta1", + "cosmos.base.tendermint.v1beta1", + "cosmos.distribution.v1beta1", + "cosmos.evidence.v1beta1", + "cosmos.feegrant.v1beta1", + 
"cosmos.gov.v1beta1", + "cosmos.gov.v1", + "cosmos.group.v1", + "cosmos.mint.v1beta1", + "cosmos.nft.v1beta1", + "cosmos.params.v1beta1", + "cosmos.slashing.v1beta1", + "cosmos.staking.v1beta1", + "cosmos.tx.v1beta1", + "cosmos.upgrade.v1beta1", + "cosmos.vesting.v1beta1", + // custom modules + "blog.blog.v1", + "blog.withmsg.v1", + "blog.withoutmsg.v1", + } + + for _, mod := range expectedModules { + _, err := os.Stat(filepath.Join(tsDirGenerated, mod)) + if assert.False(t, os.IsNotExist(err), "missing module %q in %s", mod, tsDirGenerated) { + assert.NoError(t, err) + } + } + + if t.Failed() { + // list ts-client files + tsFiles, err := os.ReadDir(tsDirGenerated) + require.NoError(t, err) + t.Log("TS files:", len(tsFiles)) + for _, file := range tsFiles { + t.Logf(" - %s", file.Name()) + } + } +} diff --git a/integration/doctor/doctor_test.go b/integration/doctor/doctor_test.go new file mode 100644 index 0000000..54edb59 --- /dev/null +++ b/integration/doctor/doctor_test.go @@ -0,0 +1,43 @@ +package doctor_test + +import ( + _ "embed" + "testing" + + "github.com/rogpeppe/go-internal/gotooltest" + "github.com/rogpeppe/go-internal/testscript" + + "github.com/ignite/cli/v29/ignite/config" + "github.com/ignite/cli/v29/ignite/pkg/env" + "github.com/ignite/cli/v29/ignite/pkg/xfilepath" + envtest "github.com/ignite/cli/v29/integration" +) + +const envDoNotTrack = "DO_NOT_TRACK" + +func TestDoctor(t *testing.T) { + // Ensure ignite binary is compiled + envtest.New(t) + // Prepare params + params := testscript.Params{ + Setup: func(testEnv *testscript.Env) error { + testEnv.Vars = append(testEnv.Vars, + envDoNotTrack+"=true", + // Pass ignite binary path + "IGNITE="+envtest.IgniteApp, + // Pass ignite config dir + // (testscript resets envs so even if envtest.New has properly set + // IGNT_CONFIG_DIR, we need to set it again) + env.ConfigDirEnvVar+"="+xfilepath.MustInvoke(config.DirPath), + ) + return nil + }, + Dir: "testdata", + } + // Add other setup for go environment 
+ if err := gotooltest.Setup(¶ms); err != nil { + t.Fatal(err) + } + // Run all scripts from testdata + testscript.Run(t, params) +} diff --git a/integration/doctor/testdata/config-missing.txt b/integration/doctor/testdata/config-missing.txt new file mode 100644 index 0000000..425ec57 --- /dev/null +++ b/integration/doctor/testdata/config-missing.txt @@ -0,0 +1,4 @@ +# Test fix config +# config is missing +! exec $IGNITE doctor +stdout 'Could not locate a config.yml in your chain' diff --git a/integration/doctor/testdata/config-need-migrate.txt b/integration/doctor/testdata/config-need-migrate.txt new file mode 100644 index 0000000..fb2fdca --- /dev/null +++ b/integration/doctor/testdata/config-need-migrate.txt @@ -0,0 +1,42 @@ +# Test fix config +# old config should be migrated +exec $IGNITE doctor +cmp config.yml.golden config.yml + +-- config.yml -- +accounts: + - name: alice + coins: ["100000000uatom", "100000000000000000000aevmos"] + mnemonic: "ozone unfold device pave lemon potato omit insect column wise cover hint narrow large provide kidney episode clay notable milk mention dizzy muffin crazy" + - name: bob + coins: ["5000000000000aevmos"] + address: "cosmos1adn9gxjmrc3hrsdx5zpc9sj2ra7kgqkmphf8yw" +validator: + name: alice + staked: "100000000000000000000aevmos" +-- go.mod -- +module github.com/ignite/cli + +go 1.20 +-- config.yml.golden -- +version: 1 +build: + proto: + path: proto +accounts: + - name: alice + coins: + - 100000000uatom + - 100000000000000000000aevmos + mnemonic: ozone unfold device pave lemon potato omit insect column wise cover hint narrow large provide kidney episode clay notable milk mention dizzy muffin crazy + - name: bob + coins: + - 5000000000000aevmos + address: cosmos1adn9gxjmrc3hrsdx5zpc9sj2ra7kgqkmphf8yw +faucet: + name: null + coins: [] + host: 0.0.0.0:4500 +validators: + - name: alice + bonded: 100000000000000000000aevmos diff --git a/integration/doctor/testdata/config-ok.txt b/integration/doctor/testdata/config-ok.txt new 
file mode 100644 index 0000000..5cacae6 --- /dev/null +++ b/integration/doctor/testdata/config-ok.txt @@ -0,0 +1,11 @@ +# Test fix config +# config is OK +exec $IGNITE doctor +stdout 'config file OK' + +-- config.yml -- +version: 1 +-- go.mod -- +module github.com/ignite/cli + +go 1.20 diff --git a/integration/env.go b/integration/env.go new file mode 100644 index 0000000..3eec97b --- /dev/null +++ b/integration/env.go @@ -0,0 +1,178 @@ +package envtest + +import ( + "context" + "flag" + "fmt" + "os" + "path" + "path/filepath" + "strconv" + "sync" + "testing" + "time" + + "github.com/cenkalti/backoff" + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosfaucet" + "github.com/ignite/cli/v29/ignite/pkg/env" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gocmd" + "github.com/ignite/cli/v29/ignite/pkg/httpstatuschecker" + "github.com/ignite/cli/v29/ignite/pkg/xurl" +) + +const ( + envDoNotTrack = "DO_NOT_TRACK" +) + +var ( + // IgniteApp hold the location of the ignite binary used in the integration + // tests. The binary is compiled the first time the env.New() function is + // invoked. + IgniteApp = path.Join(os.TempDir(), "ignite-tests", "ignite") + + IsCI, _ = strconv.ParseBool(os.Getenv("CI")) + compileBinaryOnce sync.Once +) + +// Env provides an isolated testing environment and what's needed to +// make it possible. +type Env struct { + t *testing.T + ctx context.Context +} + +// New creates a new testing environment. +func New(t *testing.T) Env { + t.Helper() + ctx, cancel := context.WithCancel(t.Context()) + e := Env{ + t: t, + ctx: ctx, + } + // To avoid conflicts with the default config folder located in $HOME, we + // set an other one thanks to env var. 
+ cfgDir := path.Join(t.TempDir(), ".ignite") + env.SetConfigDir(cfgDir) + enableDoNotTrackEnv(t) + + t.Cleanup(cancel) + compileBinaryOnce.Do(func() { + compileBinary(ctx) + }) + return e +} + +func compileBinary(ctx context.Context) { + wd, err := os.Getwd() + if err != nil { + panic(fmt.Sprintf("unable to get working dir: %v", err)) + } + pkgs, err := gocmd.List(ctx, wd, []string{"-m", "-f={{.Dir}}", "github.com/ignite/cli/v29"}) + if err != nil { + panic(fmt.Sprintf("unable to list ignite cli package: %v", err)) + } + if len(pkgs) != 1 { + panic(fmt.Sprintf("expected only one package, got %d", len(pkgs))) + } + appPath := pkgs[0] + var ( + output, binary = filepath.Split(IgniteApp) + path = path.Join(appPath, "ignite", "cmd", "ignite") + ) + err = gocmd.BuildPath(ctx, output, binary, path, nil) + if err != nil { + panic(fmt.Sprintf("error while building binary: %v", err)) + } +} + +func (e Env) T() *testing.T { + return e.t +} + +// SetCleanup registers a function to be called when the test (or subtest) and all its +// subtests complete. +func (e Env) SetCleanup(f func()) { + e.t.Cleanup(f) +} + +// Ctx returns parent context for the test suite to use for cancelations. +func (e Env) Ctx() context.Context { + return e.ctx +} + +// IsAppServed checks that app is served properly and servers are started to listening before ctx canceled. +func (e Env) IsAppServed(ctx context.Context, apiAddr string) error { + checkAlive := func() error { + addr, err := xurl.HTTP(apiAddr) + if err != nil { + return err + } + + ok, err := httpstatuschecker.Check(ctx, fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/node_info", addr)) + if err == nil && !ok { + err = errors.New("waiting for app") + } + if HasTestVerboseFlag() { + fmt.Printf("IsAppServed at %s: %v\n", addr, err) + } + return err + } + + return backoff.Retry(checkAlive, backoff.WithContext(backoff.NewConstantBackOff(time.Second), ctx)) +} + +// IsFaucetServed checks that faucet of the app is served properly. 
+func (e Env) IsFaucetServed(ctx context.Context, faucetClient cosmosfaucet.HTTPClient) error { + checkAlive := func() error { + _, err := faucetClient.FaucetInfo(ctx) + return err + } + + return backoff.Retry(checkAlive, backoff.WithContext(backoff.NewConstantBackOff(time.Second), ctx)) +} + +// TmpDir creates a new temporary directory. +func (e Env) TmpDir() (path string) { + return e.t.TempDir() +} + +// Home returns user's home dir. +func (e Env) Home() string { + home, err := os.UserHomeDir() + require.NoError(e.t, err) + return home +} + +// AppHome returns app's root home/data dir path. +func (e Env) AppHome(name string) string { + return filepath.Join(e.Home(), fmt.Sprintf(".%s", name)) +} + +// Must fails the immediately if not ok. +// t.Fail() needs to be called for the failing tests before running Must(). +func (e Env) Must(ok bool) { + if !ok { + e.t.FailNow() + } +} + +func (e Env) HasFailed() bool { + return e.t.Failed() +} + +func (e Env) RequireExpectations() { + e.Must(e.HasFailed()) +} + +// enableDoNotTrackEnv set true the DO_NOT_TRACK env var. +func enableDoNotTrackEnv(t *testing.T) { + t.Helper() + t.Setenv(envDoNotTrack, "true") +} + +func HasTestVerboseFlag() bool { + return flag.Lookup("test.v").Value.String() == "true" +} diff --git a/integration/exec.go b/integration/exec.go new file mode 100644 index 0000000..bcf85ee --- /dev/null +++ b/integration/exec.go @@ -0,0 +1,111 @@ +package envtest + +import ( + "bytes" + "context" + "fmt" + "io" + "os" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/errors" +) + +type execOptions struct { + ctx context.Context + shouldErr, shouldRetry bool + stdout, stderr io.Writer +} + +type ExecOption func(*execOptions) + +// ExecShouldError sets the expectations of a command's execution to end with a failure. 
+func ExecShouldError() ExecOption { + return func(o *execOptions) { + o.shouldErr = true + } +} + +// ExecCtx sets cancelation context for the execution. +func ExecCtx(ctx context.Context) ExecOption { + return func(o *execOptions) { + o.ctx = ctx + } +} + +// ExecStdout captures stdout of an execution. +func ExecStdout(w io.Writer) ExecOption { + return func(o *execOptions) { + o.stdout = w + } +} + +// ExecStderr captures stderr of an execution. +func ExecStderr(w io.Writer) ExecOption { + return func(o *execOptions) { + o.stderr = w + } +} + +// ExecRetry retries command until it is successful before context is canceled. +func ExecRetry() ExecOption { + return func(o *execOptions) { + o.shouldRetry = true + } +} + +// Exec executes a command step with options where msg describes the expectation from the test. +// unless calling with Must(), Exec() will not exit test runtime on failure. +func (e Env) Exec(msg string, steps step.Steps, options ...ExecOption) (ok bool) { + opts := &execOptions{ + ctx: e.ctx, + stdout: io.Discard, + stderr: io.Discard, + } + for _, o := range options { + o(opts) + } + var ( + stdout = &bytes.Buffer{} + stderr = &bytes.Buffer{} + ) + copts := []cmdrunner.Option{ + cmdrunner.DefaultStdout(io.MultiWriter(stdout, opts.stdout)), + cmdrunner.DefaultStderr(io.MultiWriter(stderr, opts.stderr)), + } + if HasTestVerboseFlag() { + fmt.Printf("Executing %d step(s) for %q\n", len(steps), msg) + copts = append(copts, cmdrunner.EnableDebug()) + } + if IsCI { + copts = append(copts, cmdrunner.EndSignal(os.Kill)) + } + err := cmdrunner. + New(copts...). + Run(opts.ctx, steps...) + if errors.Is(err, context.Canceled) { + err = nil + } + if err != nil { + fmt.Fprintln(os.Stderr, err) + if opts.shouldRetry && opts.ctx.Err() == nil { + time.Sleep(time.Second) + return e.Exec(msg, steps, options...) 
+ } + } + + if err != nil { + msg = fmt.Sprintf("%s\n\nLogs:\n\n%s\n\nError Logs:\n\n%s\n", + msg, + stdout.String(), + stderr.String()) + } + if opts.shouldErr { + return assert.Error(e.t, err, msg) + } + return assert.NoError(e.t, err, msg) +} diff --git a/integration/faucet/faucet_test.go b/integration/faucet/faucet_test.go new file mode 100644 index 0000000..acb14c7 --- /dev/null +++ b/integration/faucet/faucet_test.go @@ -0,0 +1,123 @@ +package faucet_test + +import ( + "context" + "fmt" + "net/http" + "strings" + "testing" + "time" + + "cosmossdk.io/math" + sdk "github.com/cosmos/cosmos-sdk/types" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/stretchr/testify/require" + "golang.org/x/sync/errgroup" + + "github.com/ignite/cli/v29/ignite/pkg/cosmosclient" + "github.com/ignite/cli/v29/ignite/pkg/cosmosfaucet" + "github.com/ignite/cli/v29/ignite/pkg/xurl" + envtest "github.com/ignite/cli/v29/integration" +) + +const ( + addr = "cosmos1zqr2gd7hwkyw55knad0l6ml6ngutd70878evqj" +) + +var ( + defaultCoins = []string{"10token", "1stake"} + maxCoins = []string{"102token", "100000000stake"} +) + +func TestRequestCoinsFromFaucet(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/faucetapp") + servers = app.RandomizeServerPorts() + faucetURL = app.EnableFaucet(defaultCoins, maxCoins) + ctx, cancel = context.WithTimeout(env.Ctx(), envtest.ServeTimeout) + faucetClient = cosmosfaucet.NewClient(faucetURL) + ) + isErrTransferRequest := func(err error, expectedCode int) { + var errTransfer cosmosfaucet.ErrTransferRequest + require.ErrorAs(t, err, &errTransfer) + require.EqualValues(t, expectedCode, errTransfer.StatusCode) + } + + // serve the app + go func() { + app.Serve("should serve app", envtest.ExecCtx(ctx)) + }() + + // wait servers to be online + defer cancel() + app.WaitChainUp(ctx, servers.API) + + err := env.IsFaucetServed(ctx, faucetClient) + require.NoError(t, err) + + // error "account doesn't have any 
balances" occurs if a sleep is not included + time.Sleep(time.Second * 1) + + nodeAddr, err := xurl.HTTP(servers.RPC) + require.NoError(t, err) + + cosmosClient, err := cosmosclient.New(ctx, cosmosclient.WithNodeAddress(nodeAddr)) + require.NoError(t, err) + + // the faucet sends the default faucet coins value when not specified + _, err = faucetClient.Transfer(ctx, cosmosfaucet.NewTransferRequest(addr, nil)) + require.NoError(t, err) + checkAccountBalance(ctx, t, cosmosClient, addr, defaultCoins) + + // the faucet can send a specified amount of coins + _, err = faucetClient.Transfer(ctx, cosmosfaucet.NewTransferRequest(addr, []string{"20token", "2stake"})) + require.NoError(t, err) + checkAccountBalance(ctx, t, cosmosClient, addr, []string{"30token", "3stake"}) + + // faucet request fails on malformed coins + _, err = faucetClient.Transfer(ctx, cosmosfaucet.NewTransferRequest(addr, []string{"no-token"})) + isErrTransferRequest(err, http.StatusBadRequest) + + // faucet request fails when requesting more than max coins + _, err = faucetClient.Transfer(ctx, cosmosfaucet.NewTransferRequest(addr, []string{"500token"})) + isErrTransferRequest(err, http.StatusInternalServerError) + + // faucet request fails when transfer should fail + _, err = faucetClient.Transfer(ctx, cosmosfaucet.NewTransferRequest(addr, []string{"500nonexistent"})) + isErrTransferRequest(err, http.StatusInternalServerError) + + // send several request in parallel and check max coins is not overflown + g, ctx := errgroup.WithContext(ctx) + for i := 0; i < 10; i++ { + g.Go(func() error { + c := faucetClient + index := i + 1 + coins := []string{ + sdk.NewCoin("token", math.NewInt(int64(index*2))).String(), + sdk.NewCoin("stake", math.NewInt(int64(index*3))).String(), + } + _, err := c.Transfer(ctx, cosmosfaucet.NewTransferRequest(addr, coins)) + return err + }) + } + require.NoError(t, g.Wait()) + checkAccountBalance(ctx, t, cosmosClient, addr, []string{"168stake", "140token"}) +} + +func 
checkAccountBalance(ctx context.Context, t *testing.T, c cosmosclient.Client, accAddr string, coins []string) { + t.Helper() + resp, err := banktypes.NewQueryClient(c.Context()).AllBalances(ctx, &banktypes.QueryAllBalancesRequest{ + Address: accAddr, + }) + require.NoError(t, err) + + require.Len(t, resp.Balances, len(coins)) + expectedCoins, err := sdk.ParseCoinsNormalized(strings.Join(coins, ",")) + require.NoError(t, err) + expectedCoins = expectedCoins.Sort() + gotCoins := resp.Balances.Sort() + require.True(t, gotCoins.Equal(expectedCoins), + fmt.Sprintf("%s should be equals to %s", gotCoins.String(), expectedCoins.String()), + ) +} diff --git a/integration/ibc/cmd_ibc_test.go b/integration/ibc/cmd_ibc_test.go new file mode 100644 index 0000000..4c7779f --- /dev/null +++ b/integration/ibc/cmd_ibc_test.go @@ -0,0 +1,183 @@ +//go:build !relayer + +package ibc_test + +import ( + "testing" + + envtest "github.com/ignite/cli/v29/integration" +) + +func TestCreateModuleWithIBC(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blogibc") + ) + + app.Scaffold( + "create an IBC module", + false, + "module", "foo", "--ibc", "--require-registration", + ) + + app.Scaffold( + "create an IBC module with custom path", + false, + "module", + "appPath", + "--ibc", + "--require-registration", + "--path", + "./blogibc", + ) + + app.Scaffold( + "create a type in an IBC module", + false, + "list", "user", "email", "--module", "foo", + ) + + app.Scaffold( + "create an IBC module with an ordered channel", + false, + "module", + "orderedfoo", + "--ibc", + "--ordering", + "ordered", + "--require-registration", + ) + + app.Scaffold( + "create an IBC module with an unordered channel", + false, + "module", + "unorderedfoo", + "--ibc", + "--ordering", + "unordered", + "--require-registration", + ) + + app.Scaffold( + "create a non IBC module", + false, + "module", "non_ibc", "--require-registration", + ) + + app.Scaffold( + "create an IBC module with 
dependencies", + false, + "module", + "with_dep", + "--ibc", + "--dep", + "auth,bank,staking,slashing", + "--require-registration", + ) + + app.EnsureSteady() +} + +func TestCreateIBCPacket(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blogibcb") + ) + + app.Scaffold( + "create an IBC module", + false, + "module", "foo", "--ibc", "--require-registration", + ) + + app.Scaffold( + "create a packet", + false, + "packet", + "bar", + "text", + "texts:strings", + "--module", + "foo", + "--ack", + "foo:string,bar:int,baz:bool", + ) + + app.Scaffold( + "should prevent creating a packet with no module specified", + true, + "packet", "bar", "text", + ) + + app.Scaffold( + "should prevent creating a packet in a non existent module", + true, + "packet", "bar", "text", "--module", "nomodule", + ) + + app.Scaffold( + "should prevent creating an existing packet", + true, + "packet", "bar", "post", "--module", "foo", + ) + + app.Scaffold( + "create a packet with custom type fields", + false, + "packet", + "ticket", + "numInt:int", + "numsInt:array.int", + "numsIntAlias:ints", + "numUint:uint", + "numsUint:array.uint", + "numsUintAlias:uints", + "textString:string", + "textStrings:array.string", + "textStringsAlias:strings", + "victory:bool", + "textCoin:coin", + "textCoins:array.coin", + "--module", + "foo", + ) + + app.Scaffold( + "create a custom field type", + false, + "type", "custom-type", "customField:uint", "textCoinsAlias:coins", "--module", "foo", + ) + + app.Scaffold( + "create a packet with a custom field type", + false, "packet", "foo-baz", "customField:CustomType", "--module", "foo", + ) + + app.Scaffold( + "should prevent creating a packet with invalid custom array field type", + true, "packet", "foo-baz-invalid-array", "customFields:array.UnknownType", "--module", "foo", + ) + + app.Scaffold( + "create a packet with no send message", + false, "packet", "nomessage", "foo", "--no-message", "--module", "foo", + ) + + 
app.Scaffold( + "create a packet with no field", + false, "packet", "empty", "--module", "foo", + ) + + app.Scaffold( + "create a non-IBC module", + false, "module", "bar", "--require-registration", + ) + + app.Scaffold( + "should prevent creating a packet in a non IBC module", + true, "packet", "foo", "text", "--module", "bar", + ) + + app.EnsureSteady() +} diff --git a/integration/list/cmd_list_test.go b/integration/list/cmd_list_test.go new file mode 100644 index 0000000..3d5c5b9 --- /dev/null +++ b/integration/list/cmd_list_test.go @@ -0,0 +1,178 @@ +//go:build !relayer + +package list_test + +import ( + "testing" + + envtest "github.com/ignite/cli/v29/integration" +) + +func TestGenerateAnAppWithListAndVerify(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blog") + servers = app.RandomizeServerPorts() + ) + + app.Scaffold( + "create a module", + false, + "module", "example", "--require-registration", + ) + + app.Scaffold( + "create a list", + false, + "list", "user", "email", + ) + + app.Scaffold( + "create a list with custom path and module", + false, + "list", + "AppPath", + "email", + "--path", + "blog", + "--module", + "example", + ) + + app.Scaffold( + "create a custom type fields", + false, + "list", + "employee", + "numInt:int", + "numsInt:array.int", + "numsIntAlias:ints", + "numUint:uint", + "numsUint:array.uint", + "numsUintAlias:uints", + "textString:string", + "textStrings:array.string", + "textStringsAlias:strings", + "textCoin:coin", + "textCoins:array.coin", + "--no-simulation", + ) + + app.Scaffold( + "create a list with bool", + false, + "list", + "document", + "signed:bool", + "textCoinsAlias:coins", + "--module", + "example", + ) + + app.Scaffold( + "create a list with decimal coin", + false, + "list", + "decimal", + "deccointype:dec.coin", + "deccoins:dec.coins", + "--module", + "example", + ) + + app.Scaffold( + "create a list with custom field type", + false, + "list", + "custom", + 
"document:Document", + "--module", + "example", + ) + + app.Scaffold( + "create a list with lowercase custom field type", + false, + "list", + "custom-lower", + "document:document", + "--module", + "example", + ) + + app.Scaffold( + "create a list with custom array field type", + false, + "list", + "custom-array", + "documents:array.Document", + "--module", + "example", + ) + + app.Scaffold( + "create a list with lowercase custom array field type", + false, + "list", + "custom-array-lower", + "documents:array.document", + "--module", + "example", + ) + + app.Scaffold( + "should prevent creating a list with invalid custom array field type", + true, + "list", + "invalid-custom-array", + "documents:array.UnknownType", + "--module", + "example", + ) + + app.Scaffold( + "should prevent creating a list with duplicated fields", + true, + "list", "company", "name", "name", + ) + + app.Scaffold( + "should prevent creating a list with unrecognized field type", + true, + "list", "invalidField", "level:itn", + ) + + app.Scaffold( + "should prevent creating an existing list", + true, + "list", "user", "email", + ) + + app.Scaffold( + "should prevent creating a list whose name is a reserved word", + true, + "list", "map", "size:int", + ) + + app.Scaffold( + "should prevent creating a list containing a field with a reserved word", + true, + "list", "document", "type:int", + ) + + app.Scaffold( + "create a list with no interaction message", + false, + "list", "nomessage", "email", "--no-message", + ) + + app.Scaffold( + "should prevent creating a list in a non existent module", + true, + "list", "user", "email", "--module", "idontexist", + ) + + app.EnsureSteady() + + app.RunChainAndSimulateTxs(servers) +} diff --git a/integration/map/cmd_map_test.go b/integration/map/cmd_map_test.go new file mode 100644 index 0000000..8609071 --- /dev/null +++ b/integration/map/cmd_map_test.go @@ -0,0 +1,184 @@ +//go:build !relayer + +package map_test + +import ( + "path/filepath" + "testing" + + 
envtest "github.com/ignite/cli/v29/integration" +) + +func TestCreateMap(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blog") + servers = app.RandomizeServerPorts() + ) + + app.Scaffold( + "create a map", + false, + "map", "user", "user-id", "email", + ) + + app.Scaffold( + "create a map with custom path", + false, + "map", "appPath", "email", "--path", filepath.Join(app.SourcePath(), "app"), + ) + + app.Scaffold( + "create a map with no message", + false, + "map", "nomessage", "email", "--no-message", + ) + + app.Scaffold( + "create a module", + false, + "module", "example", "--require-registration", + ) + + app.Scaffold( + "create a list", + false, + "list", + "user", + "email", + "--module", + "example", + "--no-simulation", + ) + + app.Scaffold( + "create a map with decimal coin", + false, + "map", + "decimal", + "deccointype:dec.coin", + "deccoins:dec.coins", + "--module", + "example", + ) + + app.Scaffold( + "should prevent creating a map with a typename that already exist", + true, + "map", "user", "email", "--module", "example", + ) + + app.Scaffold( + "create a map in a custom module", + false, + "map", "mapUser", "email", "--module", "example", + ) + + app.Scaffold( + "create a map with a custom field type", + false, + "map", "mapDetail", "user:MapUser", "--module", "example", + ) + + app.Scaffold( + "create a map with a custom array field type", + false, + "map", "mapDetailArray", "users:array.MapUser", "--module", "example", + ) + + app.Scaffold( + "should prevent creating a map with invalid custom array field type", + true, + "map", "mapInvalidDetailArray", "users:array.UnknownType", "--module", "example", + ) + + app.Scaffold( + "create a map with Coin and []Coin", + false, + "map", + "salary", + "numInt:int", + "numsInt:array.int", + "numsIntAlias:ints", + "numUint:uint", + "numsUint:array.uint", + "numsUintAlias:uints", + "textString:string", + "textStrings:array.string", + "textStringsAlias:strings", + 
"textCoin:coin", + "textCoinsAlias:coins", + "--module", + "example", + ) + + app.Scaffold( + "create a map with Coin and Coins", + false, + "map", + "budget", + "textCoin:coin", + "textCoins:array.coin", + "--module", + "example", + ) + + app.Scaffold( + "create a map with index", + false, + "map", + "map_with_index", + "email", + "emailIds:ints", + "--index", + "bar:int", + "--module", + "example", + ) + + app.Scaffold( + "create a map with invalid index (multi-index)", + true, + "map", + "map_with_invalid_index", + "email", + "--index", + "foo:strings,bar:int", + "--module", + "example", + ) + + app.Scaffold( + "create a map with invalid index (invalid type)", + true, + "map", + "map_with_invalid_index", + "email", + "--index", + "foo:unknown", + "--module", + "example", + ) + + app.Scaffold( + "create a message and a map with no-message flag to check conflicts", + false, + "message", "create-scavenge", "description", + ) + + app.Scaffold( + "create a message and a map with no-message flag to check conflicts", + false, + "map", "scavenge", "description", "--no-message", + ) + + app.Scaffold( + "should prevent creating a map with an index present in fields", + true, + "map", "map_with_invalid_index", "email", "--index", "email", + ) + + app.EnsureSteady() + + app.RunChainAndSimulateTxs(servers) +} diff --git a/integration/msgs.go b/integration/msgs.go new file mode 100644 index 0000000..b0ac352 --- /dev/null +++ b/integration/msgs.go @@ -0,0 +1,177 @@ +package envtest + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strings" + "time" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/gomodulepath" + "github.com/ignite/cli/v29/ignite/pkg/xurl" +) + +const defaultRequestTimeout = 90 * time.Second + +type TxResponse struct { + Code int `json:"code"` + Codespace string `json:"codespace"` 
+ RawLog string `json:"raw_log"` + TxHash string `json:"txhash"` + Height string `json:"height"` + Data string `json:"data"` + Info string `json:"info"` + GasWanted string `json:"gas_wanted"` + GasUsed string `json:"gas_used"` + Timestamp string `json:"timestamp"` +} + +func (a App) CLITx(chainRPC, module, method string, args ...string) TxResponse { + nodeAddr, err := xurl.TCP(chainRPC) + require.NoErrorf(a.env.T(), err, "cant read nodeAddr from host.RPC %v", chainRPC) + + args = append(args, + "--node", nodeAddr, + "--home", a.homePath, + "--from", "alice", + "--output", "json", + "--log_format", "json", + "--keyring-backend", "test", + "--yes", + ) + var ( + output = &bytes.Buffer{} + outErr = &bytes.Buffer{} + txResponse = TxResponse{} + ) + stepsTx := step.NewSteps( + step.New( + step.Stdout(output), + step.Stderr(outErr), + step.PreExec(func() error { + output.Reset() + outErr.Reset() + return nil + }), + step.Exec( + a.Binary(), + append([]string{"tx", module, method}, args...)..., + ), + step.PostExec(func(execErr error) error { + if execErr != nil { + return execErr + } + if outErr.Len() > 0 { + return errors.Errorf("error executing request: %s", outErr.String()) + } + output := output.Bytes() + if err := json.Unmarshal(output, &txResponse); err != nil { + return errors.Errorf("unmarshalling tx response error: %w, response: %s", err, string(output)) + } + return nil + }), + )) + + ctx, cancel := context.WithTimeout(a.env.Ctx(), defaultRequestTimeout) + defer cancel() + + if !a.env.Exec("sending chain request "+args[0], stepsTx, ExecRetry(), ExecCtx(ctx)) { + cancel() + a.env.t.FailNow() + } + + return txResponse +} + +func (a App) CLIQueryTx(chainRPC, txHash string) (txResponse TxResponse) { + output := a.query(chainRPC, "tx", txHash) + err := json.Unmarshal(output, &txResponse) + require.NoError(a.env.T(), err, "unmarshalling tx response: %s", string(output)) + return txResponse +} + +func (a App) CLIQuery(chainRPC, module, method string, args ...string) 
[]byte { + return a.query(chainRPC, module, method, args...) +} + +func (a App) query(chainRPC, module, method string, args ...string) []byte { + nodeAddr, err := xurl.TCP(chainRPC) + require.NoErrorf(a.env.T(), err, "cant read nodeAddr from host.RPC %v", chainRPC) + + var ( + output = &bytes.Buffer{} + outErr = &bytes.Buffer{} + ) + + cmd := append([]string{"query", module, method}, args...) + cmd = append(cmd, + "--node", nodeAddr, + "--home", a.homePath, + "--output", "json", + "--log_format", "json", + ) + steps := step.NewSteps( + step.New( + step.Stdout(output), + step.Stderr(outErr), + step.PreExec(func() error { + output.Reset() + outErr.Reset() + return nil + }), + step.Exec(a.Binary(), cmd...), + step.PostExec(func(execErr error) error { + if execErr != nil { + return execErr + } + if outErr.Len() > 0 { + return errors.Errorf("error executing request: %s", outErr.String()) + } + return nil + }), + )) + + if !a.env.Exec(fmt.Sprintf("fetching query data %s => %s", module, method), steps, ExecRetry()) { + a.env.t.FailNow() + } + + return output.Bytes() +} + +func (a App) APIQuery(ctx context.Context, chainAPI, namespace, module, method string, args ...string) []byte { + ctx, cancel := context.WithTimeout(ctx, defaultRequestTimeout) + defer cancel() + + chainAPI, err := xurl.HTTP(chainAPI) + require.NoErrorf(a.env.T(), err, "failed to convert chain API %s to HTTP", chainAPI) + + modulePath := gomodulepath.ExtractAppPath(namespace) + apiURL, err := url.JoinPath(chainAPI, modulePath, module, "v1", method, strings.Join(args, "/")) + require.NoErrorf(a.env.T(), err, "failed to create API URL") + + req, err := http.NewRequestWithContext(ctx, "GET", apiURL, nil) + require.NoErrorf(a.env.T(), err, "failed to create HTTP request") + + req.Header.Set("Accept", "application/json") + + resp, err := http.DefaultClient.Do(req) + require.NoErrorf(a.env.T(), err, "failed to execute HTTP request") + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + 
require.Failf(a.env.T(), "unexpected status code", "expected 200 OK, got %d", resp.StatusCode) + } + + body, err := io.ReadAll(resp.Body) + require.NoErrorf(a.env.T(), err, "failed to read response body") + + return body +} diff --git a/integration/other_components/cmd_chain_registry_test.go b/integration/other_components/cmd_chain_registry_test.go new file mode 100644 index 0000000..9875786 --- /dev/null +++ b/integration/other_components/cmd_chain_registry_test.go @@ -0,0 +1,39 @@ +//go:build !relayer + +package other_components_test + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + envtest "github.com/ignite/cli/v29/integration" +) + +func TestCreateChainRegistry(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/mars") + ) + + env.Must(env.Exec("create chain-registry files", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, + "s", + "chain-registry", + ), + step.Workdir(app.SourcePath()), + )), + )) + + _, statErr := os.Stat(filepath.Join(app.SourcePath(), "chain.json")) + require.False(t, os.IsNotExist(statErr), "chain.json cannot be found") + + _, statErr = os.Stat(filepath.Join(app.SourcePath(), "assetlist.json")) + require.False(t, os.IsNotExist(statErr), "assetlist.json cannot be found") + + app.EnsureSteady() +} diff --git a/integration/other_components/cmd_message_test.go b/integration/other_components/cmd_message_test.go new file mode 100644 index 0000000..b0e3cd0 --- /dev/null +++ b/integration/other_components/cmd_message_test.go @@ -0,0 +1,149 @@ +//go:build !relayer + +package other_components_test + +import ( + "testing" + + envtest "github.com/ignite/cli/v29/integration" +) + +func TestGenerateAnAppWithMessage(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blog") + ) + + app.Scaffold( + "create a message", + false, + "message", + "do-foo", + "text", + "vote:int", + 
"like:bool", + "from:address", + "-r", + "foo,bar:int,foobar:bool", + ) + + app.Scaffold( + "create a message with custom path", + false, + "message", + "app-path", + "text", + "vote:int", + "like:bool", + "-r", + "foo,bar:int,foobar:bool", + "--path", + "blog", + "--no-simulation", + ) + + app.Scaffold( + "should prevent creating an existing message", + true, + "message", "do-foo", "bar", + ) + + app.Scaffold( + "should prevent creating a message whose name only differs in capitalization", + true, + "message", "do-Foo", "bar", + ) + + app.Scaffold( + "create a message with a custom signer name", + false, + "message", "--yes", "do-bar", "bar", "--signer", "bar-doer", + ) + + app.Scaffold( + "create a custom field type", + false, + "type", + "custom-type", + "numInt:int", + "numsInt:array.int", + "numsIntAlias:ints", + "numUint:uint", + "numsUint:array.uint", + "numsUintAlias:uints", + "textString:string", + "textStrings:array.string", + "textStringsAlias:strings", + "textCoin:coin", + "textCoins:array.coin", + ) + + app.Scaffold( + "create a custom type with custom array field type", + false, + "type", + "custom-type-array", + "customFields:array.CustomType", + ) + + app.Scaffold( + "should prevent creating a custom type with invalid custom array field type", + true, + "type", + "custom-type-invalid-array", + "customFields:array.UnknownType", + ) + + app.Scaffold( + "create a message with the custom field type", + false, + "message", "foo-baz", "customField:CustomType", "textCoinsAlias:coins", + ) + + app.Scaffold( + "create a message with lowercase custom field type", + false, + "message", "foo-baz-lower", "customField:customType", + ) + + app.Scaffold( + "create a message with custom array field type", + false, + "message", "foo-baz-array", "customFields:array.CustomType", + ) + + app.Scaffold( + "create a message with lowercase custom array field type", + false, + "message", "foo-baz-array-lower", "customFields:array.customType", + ) + + app.Scaffold( + "should 
prevent creating a message with invalid custom array field type", + true, + "message", "foo-baz-invalid-array", "customFields:array.UnknownType", + ) + + app.Scaffold( + "create a module", + false, + "module", "foo", "--require-registration", + ) + + app.Scaffold( + "create a message in a module", + false, + "message", + "do-foo", + "text", + "userIds:array.uint", + "--module", + "foo", + "--desc", + "foo bar foobar", + "--response", + "foo,bar:int,foobar:bool", + ) + + app.EnsureSteady() +} diff --git a/integration/other_components/cmd_migration_test.go b/integration/other_components/cmd_migration_test.go new file mode 100644 index 0000000..bf852ca --- /dev/null +++ b/integration/other_components/cmd_migration_test.go @@ -0,0 +1,78 @@ +//go:build !relayer + +package other_components_test + +import ( + "os" + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + envtest "github.com/ignite/cli/v29/integration" +) + +func TestGenerateAnAppWithModuleMigrations(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blog") + ) + + app.Scaffold( + "create the first module migration", + false, + "migration", + "blog", + ) + + requireMigrationFile(t, app.SourcePath(), "blog", "v2") + + moduleFilePath := filepath.Join(app.SourcePath(), "x", "blog", "module", "module.go") + moduleContent, err := os.ReadFile(moduleFilePath) + require.NoError(t, err) + + normalized := normalizeWhitespace(string(moduleContent)) + require.Contains(t, normalized, `migrationv2"github.com/test/blog/x/blog/migrations/v2"`) + require.Contains(t, normalized, `cfg,ok:=registrar.(module.Configurator)`) + require.Contains(t, normalized, `if!ok{returnnil}`) + require.Contains(t, normalized, `cfg.RegisterMigration(types.ModuleName,1,migrationv2.Migrate)`) + require.Contains(t, normalized, `func(AppModule)ConsensusVersion()uint64{return2}`) + + app.Scaffold( + "create the second module migration", + false, + "migration", + "blog", + ) + + 
requireMigrationFile(t, app.SourcePath(), "blog", "v3") + + moduleContent, err = os.ReadFile(moduleFilePath) + require.NoError(t, err) + + normalized = normalizeWhitespace(string(moduleContent)) + require.Equal(t, 1, strings.Count(normalized, `registrar.(module.Configurator)`)) + require.Contains(t, normalized, `migrationv3"github.com/test/blog/x/blog/migrations/v3"`) + require.Contains(t, normalized, `cfg.RegisterMigration(types.ModuleName,1,migrationv2.Migrate)`) + require.Contains(t, normalized, `cfg.RegisterMigration(types.ModuleName,2,migrationv3.Migrate)`) + require.Contains(t, normalized, `func(AppModule)ConsensusVersion()uint64{return3}`) + + app.EnsureSteady() +} + +func requireMigrationFile(t *testing.T, appPath, moduleName, version string) { + t.Helper() + + migrationPath := filepath.Join(appPath, "x", moduleName, "migrations", version, "migrate.go") + content, err := os.ReadFile(migrationPath) + require.NoError(t, err) + + normalized := normalizeWhitespace(string(content)) + require.Contains(t, normalized, "package"+version) + require.Contains(t, normalized, `funcMigrate(_sdk.Context)error{returnnil}`) +} + +func normalizeWhitespace(content string) string { + return strings.Join(strings.Fields(content), "") +} diff --git a/integration/other_components/cmd_query_test.go b/integration/other_components/cmd_query_test.go new file mode 100644 index 0000000..438ee67 --- /dev/null +++ b/integration/other_components/cmd_query_test.go @@ -0,0 +1,149 @@ +//go:build !relayer + +package other_components_test + +import ( + "testing" + + envtest "github.com/ignite/cli/v29/integration" +) + +func TestGenerateAnAppWithQuery(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blog") + ) + + app.Scaffold( + "create a query", + false, + "query", + "foo", + "text", + "vote:int", + "like:bool", + "-r", + "foo,bar:int,foobar:bool", + ) + + app.Scaffold( + "create a query with custom path", + false, + "query", + "AppPath", + "text", + 
"vote:int", + "like:bool", + "-r", + "foo,bar:int,foobar:bool", + "--path", + "./blog", + ) + + app.Scaffold( + "create a paginated query", + false, + "query", + "bar", + "text", + "vote:int", + "like:bool", + "-r", + "foo,bar:int,foobar:bool", + "--paginated", + ) + + app.Scaffold( + "create a custom field type", + false, + "type", + "custom-type", + "numInt:int", + "numsInt:array.int", + "numsIntAlias:ints", + "numUint:uint", + "numsUint:array.uint", + "numsUintAlias:uints", + "textString:string", + "textStrings:array.string", + "textStringsAlias:strings", + "textCoin:coin", + "textCoins:array.coin", + ) + + app.Scaffold( + "create a query with the custom field type as a response", + false, + "query", "foobaz", "-r", "bar:CustomType", + ) + + app.Scaffold( + "create a query with lowercase custom field type as a response", + false, + "query", "foobaz-lower", "-r", "bar:customType", + ) + + app.Scaffold( + "create a query with the custom array field type as a response", + false, + "query", "foobaz-array", "-r", "bars:array.CustomType", + ) + + app.Scaffold( + "create a query with lowercase custom array field type as a response", + false, + "query", "foobaz-array-lower", "-r", "bars:array.customType", + ) + + app.Scaffold( + "should prevent creating a query with invalid custom array field type as a response", + true, + "query", "foobaz-invalid-array", "-r", "bars:array.UnknownType", + ) + + app.Scaffold( + "should prevent using custom type in request params", + true, + "query", "bur", "bar:CustomType", + ) + + app.Scaffold( + "should prevent using custom array type in request params", + true, + "query", "bur-array", "bar:array.CustomType", + ) + + app.Scaffold( + "create an empty query", + false, + "query", "foobar", + ) + + app.Scaffold( + "should prevent creating an existing query", + true, + "query", "foo", "bar", + ) + + app.Scaffold( + "create a module", + false, + "module", "foo", "--require-registration", + ) + + app.Scaffold( + "create a query in a module", 
+ false, + "query", + "foo", + "text", + "--module", + "foo", + "--desc", + "foo bar foobar", + "--response", + "foo,bar:int,foobar:bool", + ) + + app.EnsureSteady() +} diff --git a/integration/params/cmd_configs_test.go b/integration/params/cmd_configs_test.go new file mode 100644 index 0000000..78582cc --- /dev/null +++ b/integration/params/cmd_configs_test.go @@ -0,0 +1,72 @@ +//go:build !relayer + +package params_test + +import ( + "context" + "testing" + + envtest "github.com/ignite/cli/v29/integration" +) + +func TestCreateModuleConfigs(t *testing.T) { + var ( + name = "mars" + namespace = "github.com/test/" + name + + env = envtest.New(t) + app = env.ScaffoldApp(namespace) + servers = app.RandomizeServerPorts() + ) + + app.Scaffold( + "create a new module with configs", + false, + "module", + "foo", + "--module-configs", + "bla,baz:uint,bar:bool", + "--require-registration", + ) + + app.Scaffold( + "should prevent creating configs field that already exist", + true, + "configs", + "bla", + "buu:uint", + "--module", + "foo", + ) + + app.Scaffold( + "create a new module configs in the foo module", + false, + "configs", + "bol", + "buu:uint", + "plk:bool", + "--module", + "foo", + ) + + app.Scaffold( + "create a new module configs in the mars module", + false, + "configs", + "foo", + "bar:uint", + "baz:bool", + ) + + app.EnsureSteady() + + ctx, cancel := context.WithCancel(env.Ctx()) + defer cancel() + + go func() { + app.MustServe(ctx) + }() + + app.WaitChainUp(ctx, servers.API) +} diff --git a/integration/params/cmd_params_test.go b/integration/params/cmd_params_test.go new file mode 100644 index 0000000..b58f25e --- /dev/null +++ b/integration/params/cmd_params_test.go @@ -0,0 +1,72 @@ +//go:build !relayer + +package params_test + +import ( + "context" + "testing" + + envtest "github.com/ignite/cli/v29/integration" +) + +func TestCreateModuleParameters(t *testing.T) { + var ( + name = "mars" + namespace = "github.com/test/" + name + + env = envtest.New(t) + 
app = env.ScaffoldApp(namespace) + servers = app.RandomizeServerPorts() + ) + + app.Scaffold( + "create a new module with parameter", + false, + "module", + "foo", + "--params", + "bla,baz:uint,bar:bool", + "--require-registration", + ) + + app.Scaffold( + "should prevent creating parameter field that already exist", + true, + "params", + "bla", + "buu:uint", + "--module", + "foo", + ) + + app.Scaffold( + "create a new module parameters in the foo module", + false, + "params", + "bol", + "buu:uint", + "plk:bool", + "--module", + "foo", + ) + + app.Scaffold( + "create a new module parameters in the mars module", + false, + "params", + "foo", + "bar:uint", + "baz:bool", + ) + + app.EnsureSteady() + + ctx, cancel := context.WithCancel(env.Ctx()) + defer cancel() + + go func() { + app.MustServe(ctx) + }() + + app.WaitChainUp(ctx, servers.API) +} diff --git a/integration/plugin/plugin_test.go b/integration/plugin/plugin_test.go new file mode 100644 index 0000000..7a61777 --- /dev/null +++ b/integration/plugin/plugin_test.go @@ -0,0 +1,113 @@ +package plugin_test + +import ( + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + pluginsconfig "github.com/ignite/cli/v29/ignite/config/plugins" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/services/plugin" + envtest "github.com/ignite/cli/v29/integration" +) + +func TestAddRemovePlugin(t *testing.T) { + var ( + require = require.New(t) + assert = assert.New(t) + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blog") + + assertPlugins = func(expectedLocalPlugins, expectedGlobalPlugins []pluginsconfig.Plugin) { + localCfg, err := pluginsconfig.ParseDir(app.SourcePath()) + require.NoError(err) + assert.ElementsMatch(expectedLocalPlugins, localCfg.Apps, "unexpected local plugins") + + globalCfgPath, err := plugin.PluginsPath() + require.NoError(err) + globalCfg, err := pluginsconfig.ParseDir(globalCfgPath) + 
require.NoError(err) + assert.ElementsMatch(expectedGlobalPlugins, globalCfg.Apps, "unexpected global plugins") + } + ) + + // no plugins expected + assertPlugins(nil, nil) + + // Note: Originally plugin repo was "github.com/ignite/example-plugin" instead of a local one + pluginRepo, err := filepath.Abs("testdata/example-plugin") + require.NoError(err) + + env.Must(env.Exec("add plugin locally", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, "app", "install", pluginRepo, "k1=v1", "k2=v2"), + step.Workdir(app.SourcePath()), + )), + )) + + // one local plugin expected + assertPlugins( + []pluginsconfig.Plugin{ + { + Path: pluginRepo, + With: map[string]string{ + "k1": "v1", + "k2": "v2", + }, + }, + }, + nil, + ) + + env.Must(env.Exec("uninstall plugin locally", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, "app", "uninstall", pluginRepo), + step.Workdir(app.SourcePath()), + )), + )) + + // no plugins expected + assertPlugins(nil, nil) + + env.Must(env.Exec("install plugin globally", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, "app", "install", pluginRepo, "-g"), + step.Workdir(app.SourcePath()), + )), + )) + + // one global plugins expected + assertPlugins( + nil, + []pluginsconfig.Plugin{ + { + Path: pluginRepo, + }, + }, + ) + + env.Must(env.Exec("uninstall plugin globally", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, "app", "uninstall", pluginRepo, "-g"), + step.Workdir(app.SourcePath()), + )), + )) + + // no plugins expected + assertPlugins(nil, nil) +} + +// TODO install network plugin test + +func TestPluginScaffold(t *testing.T) { + env := envtest.New(t) + + env.Must(env.Exec("install a plugin", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, "app", "scaffold", "test"), + step.Workdir(env.TmpDir()), + )), + )) +} diff --git a/integration/plugin/testdata/example-plugin/go.mod b/integration/plugin/testdata/example-plugin/go.mod new file mode 100644 index 0000000..ca163d4 --- /dev/null +++ 
b/integration/plugin/testdata/example-plugin/go.mod @@ -0,0 +1,96 @@ +module example-plugin + +go 1.25.4 + +replace github.com/ignite/cli/v29 => ../../../../ + +require ( + github.com/hashicorp/go-plugin v1.6.3 + github.com/ignite/cli/v29 v29.0.0 +) + +require ( + dario.cat/mergo v1.0.1 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/ProtonMail/go-crypto v1.1.6 // indirect + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect + github.com/aymerick/douceur v0.2.0 // indirect + github.com/blang/semver/v4 v4.0.0 // indirect + github.com/charmbracelet/lipgloss v1.1.0 // indirect + github.com/charmbracelet/x/ansi v0.8.0 // indirect + github.com/cloudflare/circl v1.6.3 // indirect + github.com/cockroachdb/errors v1.12.0 // indirect + github.com/cockroachdb/logtags v0.0.0-20241215232642-bb51bb14a506 // indirect + github.com/cockroachdb/redact v1.1.6 // indirect + github.com/cosmos/btcutil v1.0.5 // indirect + github.com/cosmos/cosmos-sdk v0.53.6 // indirect + github.com/cyphar/filepath-securejoin v0.4.1 // indirect + github.com/emirpasic/gods v1.18.1 // indirect + github.com/fatih/color v1.18.0 // indirect + github.com/fatih/structs v1.1.0 // indirect + github.com/getsentry/sentry-go v0.35.0 // indirect + github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect + github.com/go-git/go-billy/v5 v5.6.2 // indirect + github.com/go-git/go-git/v5 v5.16.5 // indirect + github.com/gobuffalo/flect v0.3.0 // indirect + github.com/gobuffalo/genny/v2 v2.1.0 // indirect + github.com/gobuffalo/github_flavored_markdown v1.1.4 // indirect + github.com/gobuffalo/helpers v0.6.7 // indirect + github.com/gobuffalo/logger v1.0.7 // indirect + github.com/gobuffalo/packd v1.0.2 // indirect + github.com/gobuffalo/plush/v4 v4.1.22 // indirect + github.com/gobuffalo/tags/v3 v3.1.4 // indirect + github.com/gobuffalo/validate/v3 v3.3.3 // indirect + github.com/gobwas/glob v0.2.3 // indirect + github.com/goccy/go-yaml v1.15.23 // indirect + 
github.com/gofrs/uuid v4.4.0+incompatible // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/google/go-github/v48 v48.2.0 // indirect + github.com/google/go-querystring v1.1.0 // indirect + github.com/gorilla/css v1.0.0 // indirect + github.com/hashicorp/go-hclog v1.6.3 // indirect + github.com/hashicorp/yamux v0.1.2 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect + github.com/kevinburke/ssh_config v1.2.0 // indirect + github.com/kr/pretty v0.3.1 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/lucasb-eyer/go-colorful v1.2.0 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-runewidth v0.0.16 // indirect + github.com/microcosm-cc/bluemonday v1.0.23 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/muesli/reflow v0.3.0 // indirect + github.com/muesli/termenv v0.16.0 // indirect + github.com/oklog/run v1.1.0 // indirect + github.com/otiai10/copy v1.14.1 // indirect + github.com/otiai10/mint v1.6.3 // indirect + github.com/pjbgf/sha1cd v0.3.2 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/rogpeppe/go-internal v1.14.1 // indirect + github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect + github.com/skeema/knownhosts v1.3.1 // indirect + github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d // indirect + github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e // indirect + github.com/spf13/cobra v1.10.1 // indirect + github.com/spf13/pflag v1.0.10 // indirect + github.com/xanzy/ssh-agent v0.3.3 // indirect + go.etcd.io/bbolt v1.4.0 // 
indirect + golang.org/x/crypto v0.45.0 // indirect + golang.org/x/mod v0.29.0 // indirect + golang.org/x/net v0.47.0 // indirect + golang.org/x/sync v0.18.0 // indirect + golang.org/x/sys v0.38.0 // indirect + golang.org/x/term v0.37.0 // indirect + golang.org/x/text v0.31.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c // indirect + google.golang.org/grpc v1.75.0 // indirect + google.golang.org/protobuf v1.36.10 // indirect + gopkg.in/warnings.v0 v0.1.2 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/integration/plugin/testdata/example-plugin/main.go b/integration/plugin/testdata/example-plugin/main.go new file mode 100644 index 0000000..7eb9f8a --- /dev/null +++ b/integration/plugin/testdata/example-plugin/main.go @@ -0,0 +1,75 @@ +package main + +import ( + "context" + "fmt" + + hplugin "github.com/hashicorp/go-plugin" + + "github.com/ignite/cli/v29/ignite/services/plugin" +) + +type p struct{} + +func (p) Manifest(context.Context) (*plugin.Manifest, error) { + return &plugin.Manifest{ + Name: "example-plugin", + Commands: []*plugin.Command{ + { + Use: "example-plugin", + Short: "Explain what the command is doing...", + Long: "Long description goes here...", + Flags: plugin.Flags{ + {Name: "my-flag", Type: plugin.FlagTypeString, Usage: "my flag description"}, + }, + PlaceCommandUnder: "ignite", + }, + }, + Hooks: []*plugin.Hook{}, + }, nil +} + +func (p) Execute(ctx context.Context, cmd *plugin.ExecutedCommand, api plugin.ClientAPI) error { + fmt.Printf("Hello I'm the example-plugin plugin\n") + fmt.Printf("My executed command: %q\n", cmd.Path) + fmt.Printf("My args: %v\n", cmd.Args) + + flags, err := cmd.NewFlags() + if err != nil { + return err + } + + myFlag, _ := flags.GetString("my-flag") + fmt.Printf("My flags: my-flag=%q\n", myFlag) + fmt.Printf("My config parameters: %v\n", cmd.With) + + fmt.Println(api.GetChainInfo(ctx)) + fmt.Println(api.GetIgniteInfo(ctx)) + + return nil +} + +func (p) 
ExecuteHookPre(_ context.Context, h *plugin.ExecutedHook, _ plugin.ClientAPI) error { + fmt.Printf("Executing hook pre %q\n", h.Hook.GetName()) + return nil +} + +func (p) ExecuteHookPost(_ context.Context, h *plugin.ExecutedHook, _ plugin.ClientAPI) error { + fmt.Printf("Executing hook post %q\n", h.Hook.GetName()) + return nil +} + +func (p) ExecuteHookCleanUp(_ context.Context, h *plugin.ExecutedHook, _ plugin.ClientAPI) error { + fmt.Printf("Executing hook cleanup %q\n", h.Hook.GetName()) + return nil +} + +func main() { + hplugin.Serve(&hplugin.ServeConfig{ + HandshakeConfig: plugin.HandshakeConfig(), + Plugins: map[string]hplugin.Plugin{ + "example-plugin": plugin.NewGRPC(&p{}), + }, + GRPCServer: hplugin.DefaultGRPCServer, + }) +} diff --git a/integration/readme.md b/integration/readme.md new file mode 100644 index 0000000..9d0663c --- /dev/null +++ b/integration/readme.md @@ -0,0 +1,51 @@ +# Ignite CLI Integration Tests + +The Ignite CLI integration tests build a new application and run all Ignite CLI commands to check the Ignite CLI code integrity. The runners and helper methods are located in this current folder. The test commands are split into folders, for better concurrency, each folder is a parallel job into the CI workflow. To create a new one, we only need to create a new folder. This will be automatically detected and added into the PR CI checks, or we can only create new tests into an existing folder or file. + +Running synchronously all integration tests can be very slow. 
The command below can run everything: + +```shell +go test -v -timeout 120m ./integration +``` + +Or you can just run a specific test folder, like the `list` types test + +```shell +go test -v -timeout 120m ./integration/list +``` + +# Usage + +- Create a new env and scaffold an empty chain: + +```go +var ( + env = envtest.New(t) + path = env.Scaffold("github.com/test/blog") +) +``` + +- Now, you can use the env to run the ignite commands and check the success status: + +```go +env.Must(env.Exec("create a list with bool", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, "s", "list", "--yes", "document", "signed:bool"), + step.Workdir(path), + )), +)) +env.EnsureSteady() +``` + +- To check if the command returns an error, you can add the `envtest.ExecShouldError()` step: + +```go +env.Must(env.Exec("should prevent creating a list with duplicated fields", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, "s", "list", "--yes", "company", "name", "name"), + step.Workdir(path), + )), + envtest.ExecShouldError(), +)) +env.EnsureSteady() +``` diff --git a/integration/relayer/cmd_relayer_test.go b/integration/relayer/cmd_relayer_test.go new file mode 100644 index 0000000..0655145 --- /dev/null +++ b/integration/relayer/cmd_relayer_test.go @@ -0,0 +1,589 @@ +//go:build !relayer + +package relayer_test + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "os" + "path/filepath" + "strings" + "testing" + "time" + + "github.com/cosmos/cosmos-sdk/crypto/keyring" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" + + "github.com/ignite/cli/v29/ignite/config/chain" + "github.com/ignite/cli/v29/ignite/config/chain/base" + v1 "github.com/ignite/cli/v29/ignite/config/chain/v1" + "github.com/ignite/cli/v29/ignite/pkg/availableport" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/errors" + 
"github.com/ignite/cli/v29/ignite/pkg/goanalysis" + "github.com/ignite/cli/v29/ignite/pkg/randstr" + "github.com/ignite/cli/v29/ignite/pkg/xyaml" + envtest "github.com/ignite/cli/v29/integration" +) + +const ( + relayerMnemonic = "great immense still pill defense fetch pencil slow purchase symptom speed arm shoot fence have divorce cigar rapid hen vehicle pear evolve correct nerve" +) + +var ( + bobName = "bob" + refChainConfig = v1.Config{ + Config: base.Config{ + Version: 1, + Accounts: []base.Account{ + { + Name: "alice", + Coins: []string{"100000000000token", "10000000000000000000stake"}, + Mnemonic: "slide moment original seven milk crawl help text kick fluid boring awkward doll wonder sure fragile plate grid hard next casual expire okay body", + }, + { + Name: bobName, + Coins: []string{"100000000000token", "10000000000000000000stake"}, + Mnemonic: "trap possible liquid elite embody host segment fantasy swim cable digital eager tiny broom burden diary earn hen grow engine pigeon fringe claim program", + }, + { + Name: "relayer", + Coins: []string{"100000000000token", "1000000000000000000000stake"}, + Mnemonic: relayerMnemonic, + }, + }, + Faucet: base.Faucet{ + Name: &bobName, + Coins: []string{"500token", "100000000stake"}, + Host: ":4501", + }, + Genesis: xyaml.Map{"chain_id": randstr.Runes(12)}, + }, + Validators: []v1.Validator{ + { + Name: "alice", + Bonded: "100000000stake", + Client: xyaml.Map{"keyring-backend": keyring.BackendTest}, + App: xyaml.Map{ + "api": xyaml.Map{"address": ":1318"}, + "grpc": xyaml.Map{"address": ":9092"}, + "grpc-web": xyaml.Map{"address": ":9093"}, + }, + Config: xyaml.Map{ + "p2p": xyaml.Map{"laddr": ":26658"}, + "rpc": xyaml.Map{"laddr": ":26658", "pprof_laddr": ":6061"}, + }, + Home: filepath.Join(os.TempDir(), randstr.Runes(5)), + }, + }, + } + hostChainConfig = v1.Config{ + Config: base.Config{ + Version: 1, + Accounts: []base.Account{ + { + Name: "alice", + Coins: []string{"100000000000token", 
"10000000000000000000stake"}, + Mnemonic: "slide moment original seven milk crawl help text kick fluid boring awkward doll wonder sure fragile plate grid hard next casual expire okay body", + }, + { + Name: bobName, + Coins: []string{"100000000000token", "10000000000000000000stake"}, + Mnemonic: "trap possible liquid elite embody host segment fantasy swim cable digital eager tiny broom burden diary earn hen grow engine pigeon fringe claim program", + }, + { + Name: "relayer", + Coins: []string{"100000000000token", "1000000000000000000000stake"}, + Mnemonic: relayerMnemonic, + }, + }, + Faucet: base.Faucet{ + Name: &bobName, + Coins: []string{"500token", "100000000stake"}, + Host: ":4500", + }, + Genesis: xyaml.Map{"chain_id": randstr.Runes(12)}, + }, + Validators: []v1.Validator{ + { + Name: "alice", + Bonded: "100000000stake", + Client: xyaml.Map{"keyring-backend": keyring.BackendTest}, + App: xyaml.Map{ + "api": xyaml.Map{"address": ":1317"}, + "grpc": xyaml.Map{"address": ":9090"}, + "grpc-web": xyaml.Map{"address": ":9091"}, + }, + Config: xyaml.Map{ + "p2p": xyaml.Map{"laddr": ":26656"}, + "rpc": xyaml.Map{"laddr": ":26656", "pprof_laddr": ":6060"}, + }, + Home: filepath.Join(os.TempDir(), randstr.Runes(5)), + }, + }, + } + + nameOnRecvIbcPostPacket = "OnRecvIbcPostPacket" + funcOnRecvIbcPostPacket = `package keeper +func (k Keeper) OnRecvIbcPostPacket(ctx context.Context, packet channeltypes.Packet, data types.IbcPostPacketData) (packetAck types.IbcPostPacketAck, err error) { + packetAck.PostId, err = k.PostSeq.Next(ctx) + if err != nil { + return packetAck, err + } + return packetAck, k.Post.Set(ctx, packetAck.PostId, types.Post{Title: data.Title, Content: data.Content}) +}` + + nameOnAcknowledgementIbcPostPacket = "OnAcknowledgementIbcPostPacket" + funcOnAcknowledgementIbcPostPacket = `package keeper +func (k Keeper) OnAcknowledgementIbcPostPacket(ctx context.Context, packet channeltypes.Packet, data types.IbcPostPacketData, ack 
channeltypes.Acknowledgement) error { + switch dispatchedAck := ack.Response.(type) { + case *channeltypes.Acknowledgement_Error: + // We will not treat acknowledgment error in this tutorial + return nil + case *channeltypes.Acknowledgement_Result: + // Decode the packet acknowledgment + var packetAck types.IbcPostPacketAck + if err := k.cdc.UnmarshalJSON(dispatchedAck.Result, &packetAck); err != nil { + // The counter-party module doesn't implement the correct acknowledgment format + return errors.New("cannot unmarshal acknowledgment") + } + + seq, err := k.SentPostSeq.Next(ctx) + if err != nil { + return err + } + + return k.SentPost.Set(ctx, seq, + types.SentPost{ + PostId: packetAck.PostId, + Title: data.Title, + Chain: packet.DestinationPort + "-" + packet.DestinationChannel, + }, + ) + default: + return errors.New("the counter-party module does not implement the correct acknowledgment format") + } +}` + + nameOnTimeoutIbcPostPacket = "OnTimeoutIbcPostPacket" + funcOnTimeoutIbcPostPacket = `package keeper +func (k Keeper) OnTimeoutIbcPostPacket(ctx context.Context, packet channeltypes.Packet, data types.IbcPostPacketData) error { + seq, err := k.TimeoutPostSeq.Next(ctx) + if err != nil { + return err + } + + return k.TimeoutPost.Set(ctx, seq, + types.TimeoutPost{ + Title: data.Title, + Chain: packet.DestinationPort + "-" + packet.DestinationChannel, + }, + ) +}` +) + +type ( + QueryChannels struct { + Channels []struct { + ChannelID string `json:"channel_id"` + ConnectionHops []string `json:"connection_hops"` + Counterparty struct { + ChannelID string `json:"channel_id"` + PortID string `json:"port_id"` + } `json:"counterparty"` + Ordering string `json:"ordering"` + PortID string `json:"port_id"` + State string `json:"state"` + Version string `json:"version"` + } `json:"channels"` + } + + QueryBalances struct { + Balances sdk.Coins `json:"balances"` + } +) + +func runChain( + ctx context.Context, + t *testing.T, + app envtest.App, + cfg v1.Config, + tmpDir 
string, + ports []uint, +) (api, rpc, grpc, faucet string) { + t.Helper() + if len(ports) < 7 { + t.Fatalf("invalid number of ports %d", len(ports)) + } + + var ( + chainID = cfg.Genesis["chain_id"].(string) + chainPath = filepath.Join(tmpDir, chainID) + homePath = filepath.Join(chainPath, "home") + cfgPath = filepath.Join(chainPath, chain.ConfigFilenames[0]) + ) + require.NoError(t, os.MkdirAll(chainPath, os.ModePerm)) + + genAddr := func(port uint) string { + return fmt.Sprintf(":%d", port) + } + + cfg.Validators[0].Home = homePath + + cfg.Faucet.Host = genAddr(ports[0]) + cfg.Validators[0].App["api"] = xyaml.Map{"address": genAddr(ports[1])} + cfg.Validators[0].App["grpc"] = xyaml.Map{"address": genAddr(ports[2])} + cfg.Validators[0].App["grpc-web"] = xyaml.Map{"address": genAddr(ports[3])} + cfg.Validators[0].Config["p2p"] = xyaml.Map{"laddr": genAddr(ports[4])} + cfg.Validators[0].Config["rpc"] = xyaml.Map{ + "laddr": genAddr(ports[5]), + "pprof_laddr": genAddr(ports[6]), + } + + file, err := os.Create(cfgPath) + require.NoError(t, err) + require.NoError(t, yaml.NewEncoder(file).Encode(cfg)) + require.NoError(t, file.Close()) + + app.SetConfigPath(cfgPath) + app.SetHomePath(homePath) + go func() { + app.MustServe(ctx) + }() + + genHTTPAddr := func(port uint) string { + return fmt.Sprintf("http://127.0.0.1:%d", port) + } + return genHTTPAddr(ports[1]), genHTTPAddr(ports[5]), genHTTPAddr(ports[2]), genHTTPAddr(ports[0]) +} + +func TestBlogIBC(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/apps/blog", "--no-module") + tmpDir = t.TempDir() + ctx, cancel = context.WithCancel(env.Ctx()) + ) + t.Cleanup(func() { + cancel() + time.Sleep(5 * time.Second) + require.NoError(t, os.RemoveAll(tmpDir)) + }) + + app.Scaffold( + "create an IBC module", + false, + "module", + "blog", + "--ibc", + "--require-registration", + ) + + app.Scaffold( + "create a post type list in an IBC module", + false, + "list", + "post", + "title", + "content", + 
"--no-message", + "--module", + "blog", + ) + + app.Scaffold( + "create a sentPost type list in an IBC module", + false, + "list", + "sentPost", + "postID:uint", + "title", + "chain", + "--no-message", + "--module", + "blog", + ) + + app.Scaffold( + "create a timeoutPost type list in an IBC module", + false, + "list", + "timeoutPost", + "title", + "chain", + "--no-message", + "--module", + "blog", + ) + + app.Scaffold( + "create a ibcPost package in an IBC module", + false, + "packet", + "ibcPost", + "title", + "content", + "--ack", + "postID:uint", + "--module", + "blog", + ) + + blogKeeperPath := filepath.Join(app.SourcePath(), "x/blog/keeper") + require.NoError(t, goanalysis.ReplaceCode( + blogKeeperPath, + nameOnRecvIbcPostPacket, + funcOnRecvIbcPostPacket, + )) + require.NoError(t, goanalysis.ReplaceCode( + blogKeeperPath, + nameOnAcknowledgementIbcPostPacket, + funcOnAcknowledgementIbcPostPacket, + )) + require.NoError(t, goanalysis.ReplaceCode( + blogKeeperPath, + nameOnTimeoutIbcPostPacket, + funcOnTimeoutIbcPostPacket, + )) + + // serve both chains. + ports, err := availableport.Find( + 14, + availableport.WithMinPort(4000), + availableport.WithMaxPort(5000), + ) + require.NoError(t, err) + hostChainAPI, hostChainRPC, hostChainGRPC, hostChainFaucet := runChain(ctx, t, app, hostChainConfig, tmpDir, ports[:7]) + hostChainChainID := hostChainConfig.Genesis["chain_id"].(string) + hostChainHome := hostChainConfig.Validators[0].Home + refChainAPI, refChainRPC, refChainGRPC, refChainFaucet := runChain(ctx, t, app, refChainConfig, tmpDir, ports[7:]) + refChainChainID := refChainConfig.Genesis["chain_id"].(string) + refChainHome := refChainConfig.Validators[0].Home + + // check the chains is up + app.WaitChainUp(ctx, hostChainAPI) + app.WaitChainUp(ctx, refChainAPI) + + // ibc relayer. 
+ env.Must(env.Exec("install the hermes relayer app", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, + "app", + "install", + "-g", + // filepath.Join(goenv.GoPath(), "src/github.com/ignite/apps/hermes"), // Local path for test proposals + "github.com/ignite/apps/hermes@hermes/v0.2.8", + ), + )), + )) + + env.Must(env.Exec("configure the hermes relayer app", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, + "relayer", + "hermes", + "configure", + hostChainChainID, + hostChainRPC, + hostChainGRPC, + refChainChainID, + refChainRPC, + refChainGRPC, + "--chain-a-faucet", hostChainFaucet, + "--chain-b-faucet", refChainFaucet, + "--generate-wallets", + "--overwrite-config", + ), + step.Workdir(app.SourcePath()), + )), + )) + + go func() { + env.Must(env.Exec("run the hermes relayer", + step.NewSteps(step.New( + step.Exec(envtest.IgniteApp, + "relayer", + "hermes", + "start", + hostChainChainID, + refChainChainID, + ), + step.Workdir(app.SourcePath()), + )), + envtest.ExecCtx(ctx), + )) + }() + time.Sleep(3 * time.Second) + + var ( + queryOutput = &bytes.Buffer{} + queryResponse QueryChannels + ) + env.Must(env.Exec("verify if the channel was created", step.NewSteps( + step.New( + step.Stdout(queryOutput), + step.Stderr(queryOutput), + step.Exec( + app.Binary(), + "q", + "ibc", + "channel", + "channels", + "--node", hostChainRPC, + "--log_format", "json", + "--output", "json", + ), + step.PostExec(func(execErr error) error { + if execErr != nil { + return execErr + } + if err := json.Unmarshal(queryOutput.Bytes(), &queryResponse); err != nil { + return errors.Errorf("unmarshling tx response: %w", err) + } + if len(queryResponse.Channels) == 0 || + len(queryResponse.Channels[0].ConnectionHops) == 0 { + return errors.Errorf("channel not found") + } + if queryResponse.Channels[0].State != "STATE_OPEN" { + return errors.Errorf("channel is not open") + } + return nil + }), + ), + ))) + + var ( + sender = "alice" + receiverAddr = 
"cosmos1nrksk5swk6lnmlq670a8kwxmsjnu0ezqts39sa" + txOutput = &bytes.Buffer{} + txResponse struct { + Code int `json:"code"` + RawLog string `json:"raw_log"` + TxHash string `json:"txhash"` + } + ) + + stepsTx := step.NewSteps( + step.New( + step.Stdout(txOutput), + step.Stderr(txOutput), + step.PreExec(func() error { + txOutput.Reset() + return nil + }), + step.Exec( + app.Binary(), + "tx", + "ibc-transfer", + "transfer", + "transfer", + "channel-0", + receiverAddr, + "100000stake", + "--from", sender, + "--node", hostChainRPC, + "--home", hostChainHome, + "--chain-id", hostChainChainID, + "--output", "json", + "--log_format", "json", + "--keyring-backend", "test", + "--yes", + ), + step.PostExec(func(execErr error) error { + if execErr != nil { + return execErr + } + output := txOutput.Bytes() + if err := json.Unmarshal(txOutput.Bytes(), &txResponse); err != nil { + return errors.Errorf("unmarshalling tx response error: %w, response: %s", err, string(output)) + } + + time.Sleep(4 * time.Second) + + return cmdrunner.New().Run(ctx, step.New( + step.Exec( + app.Binary(), + "q", + "tx", + txResponse.TxHash, + "--node", hostChainRPC, + "--home", hostChainHome, + "--output", "json", + "--log_format", "json", + ), + step.Stdout(txOutput), + step.Stderr(txOutput), + step.PreExec(func() error { + txOutput.Reset() + return nil + }), + step.PostExec(func(execErr error) error { + if execErr != nil { + return execErr + } + output := txOutput.Bytes() + if err := json.Unmarshal(output, &txResponse); err != nil { + return errors.Errorf("unmarshalling tx response error: %w, response: %s", err, string(output)) + } + return nil + }), + )) + }), + ), + ) + if !env.Exec("send an IBC transfer", stepsTx, envtest.ExecRetry()) { + t.FailNow() + } + require.Equal(t, 0, txResponse.Code, + "tx failed code=%d log=%s", txResponse.Code, txResponse.RawLog) + + var ( + balanceOutput = &bytes.Buffer{} + balanceResponse QueryBalances + ) + steps := step.NewSteps( + step.New( + 
step.Stdout(balanceOutput), + step.Stderr(balanceOutput), + step.Exec( + app.Binary(), + "q", + "bank", + "balances", + receiverAddr, + "--node", refChainRPC, + "--home", refChainHome, + "--log_format", "json", + "--output", "json", + ), + step.PreExec(func() error { + balanceOutput.Reset() + return nil + }), + step.PostExec(func(execErr error) error { + if execErr != nil { + return execErr + } + + output := balanceOutput.Bytes() + if err := json.Unmarshal(output, &balanceResponse); err != nil { + return errors.Errorf("unmarshalling query response error: %w, response: %s", err, string(output)) + } + if balanceResponse.Balances.Empty() { + return errors.Errorf("empty balances") + } + if !strings.HasPrefix(balanceResponse.Balances[0].Denom, "ibc/") { + return errors.Errorf("invalid ibc balance: %v", balanceResponse.Balances[0]) + } + + return nil + }), + ), + ) + env.Must(env.Exec("check ibc balance", steps, envtest.ExecRetry())) +} diff --git a/integration/simulate.go b/integration/simulate.go new file mode 100644 index 0000000..fa2d3d4 --- /dev/null +++ b/integration/simulate.go @@ -0,0 +1,288 @@ +package envtest + +import ( + "context" + "strings" + + "github.com/buger/jsonparser" + "github.com/stretchr/testify/require" + + sdktypes "github.com/cosmos/cosmos-sdk/types" + + "github.com/ignite/cli/v29/ignite/pkg/multiformatname" + "github.com/ignite/cli/v29/ignite/templates/field" + "github.com/ignite/cli/v29/ignite/templates/field/datatype" +) + +// testValue determines the default test value for a given datatype. +func testValue(name datatype.Name) string { + // Repeated custom message fields in AutoCLI expect one JSON object per argument. + // Using "{}" keeps simulation compatible with both singular and repeated custom types. + if name == datatype.CustomSlice { + return "{}" + } + + dt, _ := datatype.IsSupportedType(name) + return dt.DefaultTestValue +} + +// txArgs generates transaction arguments as strings from a given set of fields. 
+func txArgs(fields field.Fields) []string { + args := make([]string, len(fields)) + for i, f := range fields { + args[i] = testValue(f.DatatypeName) + } + return args +} + +// assertJSONData verifies that the JSON data contains expected values for the given fields. +func (a *App) assertJSONData(data []byte, msgName string, fields field.Fields) { + for _, f := range fields { + dt := testValue(f.DatatypeName) + value, _, _, err := jsonparser.Get(data, msgName, f.Name.Snake) + require.NoError(a.env.T(), err) + if dt == "{}" { + continue + } + v := string(value) + switch { + case f.DatatypeName == datatype.Coin: + + c, err := sdktypes.ParseCoinNormalized(dt) + require.NoError(a.env.T(), err) + amount, err := jsonparser.GetString(value, "amount") + require.NoError(a.env.T(), err) + require.EqualValues(a.env.T(), amount, c.Amount.String()) + denom, err := jsonparser.GetString(value, "denom") + require.NoError(a.env.T(), err) + require.EqualValues(a.env.T(), denom, c.Denom) + + case f.DatatypeName == datatype.Coins || f.DatatypeName == datatype.CoinSliceAlias: + + c, err := sdktypes.ParseCoinsNormalized(dt) + require.NoError(a.env.T(), err) + cJSON, err := c.MarshalJSON() + require.NoError(a.env.T(), err) + dt = string(cJSON) + require.JSONEq(a.env.T(), dt, v) + + case f.DatatypeName == datatype.DecCoin || f.DatatypeName == datatype.DecCoins || f.DatatypeName == datatype.DecCoinSliceAlias: + + c, err := sdktypes.ParseCoinNormalized(dt) + require.NoError(a.env.T(), err) + // TODO find a better way to compare DecCoins as they have a different result pattern from CLI and Query + require.Contains(a.env.T(), v, c.Denom) + require.Contains(a.env.T(), v, c.Amount.String()) + + case f.IsSlice(): + + var slice []string + _, err = jsonparser.ArrayEach(value, func(value []byte, _ jsonparser.ValueType, _ int, _ error) { + slice = append(slice, string(value)) + }) + require.NoError(a.env.T(), err) + v = strings.Join(slice, ",") + require.EqualValues(a.env.T(), dt, v) + + default: + 
require.EqualValues(a.env.T(), dt, v) + } + } +} + +// assertJSONList verifies that a JSON array contains expected values for the given fields. +func (a *App) assertJSONList(data []byte, msgName string, fields field.Fields) { + value, _, _, err := jsonparser.Get(data, msgName) + require.NoError(a.env.t, err) + + a.assertJSONData(value, "[0]", fields) +} + +// createTx sends a transaction to create a resource and verifies the response from the chain. +func (a *App) createTx( + servers Hosts, + module string, + name multiformatname.Name, + args ...string, +) { + // Submit the transaction and verify it was accepted + txResponse := a.CLITx( + servers.RPC, + module, + "create-"+name.Kebab, + args..., + ) + require.Equal(a.env.T(), 0, txResponse.Code, + "tx failed code=%d log=%s", txResponse.Code, txResponse.RawLog) + + // Query the transaction using its hash + tx := a.CLIQueryTx( + servers.RPC, + txResponse.TxHash, + ) + require.Equal(a.env.T(), 0, tx.Code, + "tx failed code=%d log=%s", txResponse.Code, txResponse.RawLog) +} + +// RunChainAndSimulateTxs starts the blockchain network and runs transaction simulations. +func (a *App) RunChainAndSimulateTxs(servers Hosts) { + ctx, cancel := context.WithCancel(a.env.ctx) + defer cancel() + + // Start serving the blockchain in a separate goroutine + go func() { + a.MustServe(ctx) + }() + + // Wait until the chain is up and running + a.WaitChainUp(ctx, servers.API) + + // Run the transaction simulations + a.RunSimulationTxs(ctx, servers) +} + +// RunSimulationTxs runs different types of transactions for modules and queries the chain. 
+func (a *App) RunSimulationTxs(ctx context.Context, servers Hosts) { + for _, s := range a.scaffolded { + module := s.module + if module == "" { + module = a.name + } + name, err := multiformatname.NewName(s.name) + require.NoError(a.env.t, err) + + // Handle different types of scaffolds + switch s.typeName { + case "module": + // No transactions for "module" type + case "list": + a.SendListTxsAndQueryFirst(ctx, servers, module, name, s.fields) + case "map": + a.SendMapTxsAndQuery(ctx, servers, module, name, s.fields, s.index) + case "single": + a.SendSingleTxsAndQuery(ctx, servers, module, name, s.fields) + case "params": + case "message": + case "query": + case "configs": + case "type": + case "packet": + } + } +} + +// SendSingleTxsAndQuery submits a single transaction and queries the result from both CLI and API. +func (a *App) SendSingleTxsAndQuery( + ctx context.Context, + servers Hosts, + module string, + name multiformatname.Name, + fields field.Fields, +) { + // Generate transaction arguments and submit the transaction + args := txArgs(fields) + a.createTx(servers, module, name, args...) + + // Query the state via CLI + queryResponse := a.CLIQuery( + servers.RPC, + module, + "get-"+name.Kebab, + ) + a.assertJSONData(queryResponse, name.Snake, fields) + + // Query the state via API + apiResponse := a.APIQuery( + ctx, + servers.API, + a.namespace, + module, + name.Snake, + ) + a.assertJSONData(apiResponse, name.Snake, fields) + + // Ensure CLI and API responses match + require.JSONEq(a.env.t, string(queryResponse), string(apiResponse)) +} + +// SendListTxsAndQueryFirst sends a list transaction and queries the first element using both CLI and API. +func (a *App) SendListTxsAndQueryFirst( + ctx context.Context, + servers Hosts, + module string, + name multiformatname.Name, + fields field.Fields, +) { + a.SendTxsAndQuery(ctx, servers, module, name, fields, "0") +} + +// SendMapTxsAndQuery sends a map transaction and queries the element using both CLI and API. 
+func (a *App) SendMapTxsAndQuery( + ctx context.Context, + servers Hosts, + module string, + name multiformatname.Name, + fields field.Fields, + index field.Field, +) { + a.SendTxsAndQuery( + ctx, + servers, + module, + name, + append(field.Fields{index}, fields...), + testValue(index.DatatypeName), + ) +} + +// SendTxsAndQuery sends a transaction and queries the element using both CLI and API. +func (a *App) SendTxsAndQuery( + ctx context.Context, + servers Hosts, + module string, + name multiformatname.Name, + fields field.Fields, + index string, +) { + // Generate transaction arguments and submit the transaction + args := txArgs(fields) + a.createTx(servers, module, name, args...) + + // Query the chain for the first element via CLI + queryResponse := a.CLIQuery( + servers.RPC, + module, + "get-"+name.Kebab, + index, + ) + a.assertJSONData(queryResponse, name.Snake, fields) + + // Query the chain for the first element via API + apiResponse := a.APIQuery( + ctx, + servers.API, + a.namespace, + module, + name.Snake, + index, + ) + a.assertJSONData(apiResponse, name.Snake, fields) + + // Query the full list via CLI + queryListResponse := a.CLIQuery( + servers.RPC, + module, + "list-"+name.Kebab, + ) + a.assertJSONList(queryListResponse, name.Snake, fields) + + // Query the full list via API + apiListResponse := a.APIQuery( + ctx, + servers.API, + a.namespace, + module, + name.Snake, + ) + a.assertJSONList(apiListResponse, name.Snake, fields) +} diff --git a/integration/simulation/simapp_test.go b/integration/simulation/simapp_test.go new file mode 100644 index 0000000..48f90e4 --- /dev/null +++ b/integration/simulation/simapp_test.go @@ -0,0 +1,58 @@ +//go:build !relayer + +package simulation_test + +import ( + "testing" + + envtest "github.com/ignite/cli/v29/integration" +) + +func TestGenerateAnAppAndSimulate(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blog") + ) + + app.Scaffold( + "create a list", + false, + "list", 
"foo", "foobar", + ) + + app.Scaffold( + "create an singleton type", + false, + "single", "baz", "foobar", + ) + + app.Scaffold( + "create an singleton type", + false, + "list", "noSimapp", "foobar", "--no-simulation", + ) + + app.Scaffold( + "create a message", + false, + "message", "msgFoo", "foobar", + ) + + app.Scaffold( + "scaffold a new module", + false, + "module", "new_module", + ) + + app.Scaffold( + "create a map", + false, + "map", + "bar", + "foobar", + "--module", + "new_module", + ) + + app.Simulate(100, 50) +} diff --git a/integration/single/cmd_singleton_test.go b/integration/single/cmd_singleton_test.go new file mode 100644 index 0000000..d15744c --- /dev/null +++ b/integration/single/cmd_singleton_test.go @@ -0,0 +1,77 @@ +//go:build !relayer + +package single_test + +import ( + "testing" + + envtest "github.com/ignite/cli/v29/integration" +) + +func TestCreateSingleton(t *testing.T) { + var ( + env = envtest.New(t) + app = env.ScaffoldApp("github.com/test/blog") + servers = app.RandomizeServerPorts() + ) + + app.Scaffold( + "create an singleton type", + false, + "single", + "user", "email", + ) + + app.Scaffold( + "create an singleton type with custom path", + false, + "single", + "appPath", "email", "--path", app.SourcePath(), + ) + + app.Scaffold( + "create an singleton type with no message", + false, + "single", + "no-message", "email", "--no-message", + ) + + app.Scaffold( + "create a module", + false, + "module", + "example", "--require-registration", + ) + + app.Scaffold( + "create another type", + false, + "list", + "user", "email", "--module", "example", + ) + + app.Scaffold( + "create another type with a custom field type", + false, + "list", + "user-detail", "user:User", "--module", "example", + ) + + app.Scaffold( + "should prevent creating an singleton type with a typename that already exist", + true, + "single", + "user", "email", "--module", "example", + ) + + app.Scaffold( + "create an singleton type in a custom module", + false, + 
"single", + "singleuser", "email", "--module", "example", + ) + + app.EnsureSteady() + + app.RunChainAndSimulateTxs(servers) +} diff --git a/integration/testdata/tstestrunner/package-lock.json b/integration/testdata/tstestrunner/package-lock.json new file mode 100644 index 0000000..557a843 --- /dev/null +++ b/integration/testdata/tstestrunner/package-lock.json @@ -0,0 +1,2699 @@ +{ + "name": "tstestrunner", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "tstestrunner", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "@cosmjs/proto-signing": "^0.27.0", + "@cosmjs/stargate": "^0.27.0" + }, + "devDependencies": { + "@types/glob": "^7.2.0", + "@types/node": "^17.0.31", + "glob": "^8.0.1", + "isomorphic-unfetch": "^3.1.0", + "vitest": "^0.10.2" + } + }, + "node_modules/@confio/ics23": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@confio/ics23/-/ics23-0.6.8.tgz", + "integrity": "sha512-wB6uo+3A50m0sW/EWcU64xpV/8wShZ6bMTa7pF8eYsTrSkQA7oLUIJcs/wb8g4y2Oyq701BaGiO6n/ak5WXO1w==", + "dependencies": { + "@noble/hashes": "^1.0.0", + "protobufjs": "^6.8.8" + } + }, + "node_modules/@cosmjs/amino": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/amino/-/amino-0.27.0.tgz", + "integrity": "sha512-ybyzRkGrRija1bjGjGP7sAp2ulPA2/S2wMY2pehB7b6ZR8dpwveCjz/IqFWC5KBxz6KZf5MuaONOY+t1kkjsfw==", + "dependencies": { + "@cosmjs/crypto": "0.27.0", + "@cosmjs/encoding": "0.27.0", + "@cosmjs/math": "0.27.0", + "@cosmjs/utils": "0.27.0" + } + }, + "node_modules/@cosmjs/crypto": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/crypto/-/crypto-0.27.0.tgz", + "integrity": "sha512-JTPHINCYZ+mnsxrfv8ZBHsFWgB7EGooa5SD0lQFhkCVX/FC3sqxuFNv6TZU5bVVU71DUSqXTMXF5m9kAMzPUkw==", + "dependencies": { + "@cosmjs/encoding": "0.27.0", + "@cosmjs/math": "0.27.0", + "@cosmjs/utils": "0.27.0", + "bip39": "^3.0.2", + "bn.js": "^5.2.0", + "elliptic": "^6.5.3", + "js-sha3": 
"^0.8.0", + "libsodium-wrappers": "^0.7.6", + "ripemd160": "^2.0.2", + "sha.js": "^2.4.11" + } + }, + "node_modules/@cosmjs/encoding": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/encoding/-/encoding-0.27.0.tgz", + "integrity": "sha512-cCT8X/NUAGXOe14F/k2GE6N9btjrOqALBilUPIn5CL4OEGxvRTPD59nWSACu0iafCGz10Tw3LPcouuYPtZmkbg==", + "dependencies": { + "base64-js": "^1.3.0", + "bech32": "^1.1.4", + "readonly-date": "^1.0.0" + } + }, + "node_modules/@cosmjs/json-rpc": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/json-rpc/-/json-rpc-0.27.0.tgz", + "integrity": "sha512-Q6na5KPYDD90QhlPZTInquwBycDjvhZvWwpV1TppDd2Em8S1FfN3ePiV2YCf4XzXREU5YPFSHzh5MHK/WhQY3w==", + "dependencies": { + "@cosmjs/stream": "0.27.0", + "xstream": "^11.14.0" + } + }, + "node_modules/@cosmjs/math": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/math/-/math-0.27.0.tgz", + "integrity": "sha512-+WsrdXojqpUL6l2LKOWYgiAJIDD0faONNtnjb1kpS1btSzZe1Ns+RdygG6QZLLvZuxMfkEzE54ZXDKPD5MhVPA==", + "dependencies": { + "bn.js": "^5.2.0" + } + }, + "node_modules/@cosmjs/proto-signing": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/proto-signing/-/proto-signing-0.27.0.tgz", + "integrity": "sha512-ODqnmY/ElmcEYu6HbDmeGce4KacgzSVGQzvGodZidC1RR9EYociuweBPNwSHqBPolC6PQPI/QGc83m/mbih2xw==", + "dependencies": { + "@cosmjs/amino": "0.27.0", + "@cosmjs/crypto": "0.27.0", + "@cosmjs/math": "0.27.0", + "cosmjs-types": "^0.4.0", + "long": "^4.0.0", + "protobufjs": "~6.10.2" + } + }, + "node_modules/@cosmjs/socket": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/socket/-/socket-0.27.0.tgz", + "integrity": "sha512-lOd0s6gLyjdjcs8xnYuS2IXRqBLUrI76Bek5wsia+m5CyUvHjRbbd7+nZiznbtVjApBlIwHGkiklLg3/byxkAA==", + "dependencies": { + "@cosmjs/stream": "0.27.0", + "isomorphic-ws": "^4.0.1", + "ws": "^7", + "xstream": "^11.14.0" + } + }, + "node_modules/@cosmjs/stargate": { + "version": "0.27.0", + 
"resolved": "https://registry.npmjs.org/@cosmjs/stargate/-/stargate-0.27.0.tgz", + "integrity": "sha512-Fiqk8rIpB4emzC/P7/+ZPPJV9aG6KJhVuOF4D8c1j1Bv8fVs1XqC6NgsY6elTLXl38pgXt7REn6VYzAdZwrHXQ==", + "dependencies": { + "@confio/ics23": "^0.6.3", + "@cosmjs/amino": "0.27.0", + "@cosmjs/encoding": "0.27.0", + "@cosmjs/math": "0.27.0", + "@cosmjs/proto-signing": "0.27.0", + "@cosmjs/stream": "0.27.0", + "@cosmjs/tendermint-rpc": "0.27.0", + "@cosmjs/utils": "0.27.0", + "cosmjs-types": "^0.4.0", + "long": "^4.0.0", + "protobufjs": "~6.10.2", + "xstream": "^11.14.0" + } + }, + "node_modules/@cosmjs/stream": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/stream/-/stream-0.27.0.tgz", + "integrity": "sha512-D9mXHqS6y7xrThhUg5SCvMjiVQ8ph9f7gAuWlrXhqVJ5FqrP6OyTGRbVyGGM91d5Jj7N7oidQ+hOfc34vKFgeg==", + "dependencies": { + "xstream": "^11.14.0" + } + }, + "node_modules/@cosmjs/tendermint-rpc": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/tendermint-rpc/-/tendermint-rpc-0.27.0.tgz", + "integrity": "sha512-WFcJ2/UF76fBBVzPRiHJoC/GCKvgt0mb7+ewgpwKBeEcYwfj5qb1QreGBbHn/UZx9QSsF9jhI5k7SmNdglC3cA==", + "dependencies": { + "@cosmjs/crypto": "0.27.0", + "@cosmjs/encoding": "0.27.0", + "@cosmjs/json-rpc": "0.27.0", + "@cosmjs/math": "0.27.0", + "@cosmjs/socket": "0.27.0", + "@cosmjs/stream": "0.27.0", + "axios": "^0.21.2", + "readonly-date": "^1.0.0", + "xstream": "^11.14.0" + } + }, + "node_modules/@cosmjs/utils": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/utils/-/utils-0.27.0.tgz", + "integrity": "sha512-UC1eWY9isDQm6POy6GaTmYtbPVY5dkywdjW8Qzj+JNMhbhMM0KHuI4pHwjv5TPXSO/Ba2z10MTnD9nUlZtDwtA==" + }, + "node_modules/@noble/hashes": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.1.2.tgz", + "integrity": "sha512-KYRCASVTv6aeUi1tsF8/vpyR7zpfs3FUzy2Jqm+MU+LmUKhQ0y2FpfwqkCcxSg2ua4GALJd8k2R76WxwZGbQpA==", + "funding": [ + { + "type": "individual", + "url": 
"https://paulmillr.com/funding/" + } + ] + }, + "node_modules/@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==" + }, + "node_modules/@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==" + }, + "node_modules/@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==" + }, + "node_modules/@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==" + }, + "node_modules/@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "dependencies": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "node_modules/@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==" + }, + "node_modules/@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==" + }, + 
"node_modules/@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==" + }, + "node_modules/@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==" + }, + "node_modules/@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" + }, + "node_modules/@types/chai": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.1.tgz", + "integrity": "sha512-/zPMqDkzSZ8t3VtxOa4KPq7uzzW978M9Tvh+j7GHKuo6k6GTLxPJ4J5gE5cjfJ26pnXst0N5Hax8Sr0T2Mi9zQ==", + "dev": true + }, + "node_modules/@types/chai-subset": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/@types/chai-subset/-/chai-subset-1.3.3.tgz", + "integrity": "sha512-frBecisrNGz+F4T6bcc+NLeolfiojh5FxW2klu669+8BARtyQv2C/GkNW6FUodVe4BroGMP/wER/YDGc7rEllw==", + "dev": true, + "dependencies": { + "@types/chai": "*" + } + }, + "node_modules/@types/glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==", + "dev": true, + "dependencies": { + "@types/minimatch": "*", + "@types/node": "*" + } + }, + "node_modules/@types/long": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz", + "integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==" + }, + "node_modules/@types/minimatch": { + "version": "3.0.5", + 
"resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.5.tgz", + "integrity": "sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ==", + "dev": true + }, + "node_modules/@types/node": { + "version": "17.0.31", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.31.tgz", + "integrity": "sha512-AR0x5HbXGqkEx9CadRH3EBYx/VkiUgZIhP4wvPn/+5KIsgpNoyFaRlVe0Zlx9gRtg8fA06a9tskE2MSN7TcG4Q==" + }, + "node_modules/assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/axios": { + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.4.tgz", + "integrity": "sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==", + "dependencies": { + "follow-redirects": "^1.14.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/bech32": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/bech32/-/bech32-1.1.4.tgz", + "integrity": 
"sha512-s0IrSOzLlbvX7yp4WBfPITzpAU8sqQcpsmwXDiKwrG4r491vwCO/XpejasRNl0piBMe/DvP4Tz0mIS/X1DPJBQ==" + }, + "node_modules/bip39": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/bip39/-/bip39-3.0.4.tgz", + "integrity": "sha512-YZKQlb752TrUWqHWj7XAwCSjYEgGAk+/Aas3V7NyjQeZYsztO8JnQUaCWhcnL4T+jL8nvB8typ2jRPzTlgugNw==", + "dependencies": { + "@types/node": "11.11.6", + "create-hash": "^1.1.0", + "pbkdf2": "^3.0.9", + "randombytes": "^2.0.1" + } + }, + "node_modules/bip39/node_modules/@types/node": { + "version": "11.11.6", + "resolved": "https://registry.npmjs.org/@types/node/-/node-11.11.6.tgz", + "integrity": "sha512-Exw4yUWMBXM3X+8oqzJNRqZSwUAaS4+7NdvHqQuFi/d+synz++xmX3QIf+BFqneW8N31R8Ky+sikfZUXq07ggQ==" + }, + "node_modules/bn.js": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.1.tgz", + "integrity": "sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==" + }, + "node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/brorand": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz", + "integrity": "sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w==" + }, + "node_modules/chai": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.6.tgz", + "integrity": "sha512-bbcp3YfHCUzMOvKqsztczerVgBKSsEijCySNlHHbX3VG1nskvqjz5Rfso1gGwD6w6oOV3eI60pKuMOV5MV7p3Q==", + "dev": true, + "dependencies": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": "^3.0.1", + "get-func-name": "^2.0.0", + "loupe": "^2.3.1", + "pathval": "^1.1.1", + "type-detect": "^4.0.5" + }, + 
"engines": { + "node": ">=4" + } + }, + "node_modules/check-error": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/cipher-base": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", + "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", + "dependencies": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/cosmjs-types": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/cosmjs-types/-/cosmjs-types-0.4.1.tgz", + "integrity": "sha512-I7E/cHkIgoJzMNQdFF0YVqPlaTqrqKHrskuSTIqlEyxfB5Lf3WKCajSXVK2yHOfOFfSux/RxEdpMzw/eO4DIog==", + "dependencies": { + "long": "^4.0.0", + "protobufjs": "~6.11.2" + } + }, + "node_modules/cosmjs-types/node_modules/protobufjs": { + "version": "6.11.3", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.11.3.tgz", + "integrity": "sha512-xL96WDdCZYdU7Slin569tFX712BxsxslWwAfAhCYjQKGTq7dAU91Lomy6nLLhh/dyGhk/YH4TwTSRxTzhuHyZg==", + "hasInstallScript": true, + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.1", + "@types/node": ">=13.7.0", + "long": "^4.0.0" + }, + "bin": { + "pbjs": "bin/pbjs", + "pbts": "bin/pbts" + } + }, + "node_modules/create-hash": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", + "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", + 
"dependencies": { + "cipher-base": "^1.0.1", + "inherits": "^2.0.1", + "md5.js": "^1.3.4", + "ripemd160": "^2.0.1", + "sha.js": "^2.4.0" + } + }, + "node_modules/create-hmac": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", + "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==", + "dependencies": { + "cipher-base": "^1.0.3", + "create-hash": "^1.1.0", + "inherits": "^2.0.1", + "ripemd160": "^2.0.0", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + } + }, + "node_modules/deep-eql": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==", + "dev": true, + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=0.12" + } + }, + "node_modules/define-properties": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", + "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", + "dependencies": { + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/elliptic": { + "version": "6.5.4", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz", + "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==", + "dependencies": { + "bn.js": "^4.11.9", + "brorand": "^1.1.0", + "hash.js": "^1.0.0", + "hmac-drbg": "^1.0.1", + "inherits": "^2.0.4", + "minimalistic-assert": "^1.0.1", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + "node_modules/elliptic/node_modules/bn.js": { + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + 
"integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" + }, + "node_modules/esbuild": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.14.38.tgz", + "integrity": "sha512-12fzJ0fsm7gVZX1YQ1InkOE5f9Tl7cgf6JPYXRJtPIoE0zkWAbHdPHVPPaLi9tYAcEBqheGzqLn/3RdTOyBfcA==", + "dev": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "esbuild-android-64": "0.14.38", + "esbuild-android-arm64": "0.14.38", + "esbuild-darwin-64": "0.14.38", + "esbuild-darwin-arm64": "0.14.38", + "esbuild-freebsd-64": "0.14.38", + "esbuild-freebsd-arm64": "0.14.38", + "esbuild-linux-32": "0.14.38", + "esbuild-linux-64": "0.14.38", + "esbuild-linux-arm": "0.14.38", + "esbuild-linux-arm64": "0.14.38", + "esbuild-linux-mips64le": "0.14.38", + "esbuild-linux-ppc64le": "0.14.38", + "esbuild-linux-riscv64": "0.14.38", + "esbuild-linux-s390x": "0.14.38", + "esbuild-netbsd-64": "0.14.38", + "esbuild-openbsd-64": "0.14.38", + "esbuild-sunos-64": "0.14.38", + "esbuild-windows-32": "0.14.38", + "esbuild-windows-64": "0.14.38", + "esbuild-windows-arm64": "0.14.38" + } + }, + "node_modules/esbuild-android-64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-android-64/-/esbuild-android-64-0.14.38.tgz", + "integrity": "sha512-aRFxR3scRKkbmNuGAK+Gee3+yFxkTJO/cx83Dkyzo4CnQl/2zVSurtG6+G86EQIZ+w+VYngVyK7P3HyTBKu3nw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-android-arm64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.14.38.tgz", + "integrity": "sha512-L2NgQRWuHFI89IIZIlpAcINy9FvBk6xFVZ7xGdOwIm8VyhX1vNCEqUJO3DPSSy945Gzdg98cxtNt8Grv1CsyhA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], 
+ "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-darwin-64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.14.38.tgz", + "integrity": "sha512-5JJvgXkX87Pd1Og0u/NJuO7TSqAikAcQQ74gyJ87bqWRVeouky84ICoV4sN6VV53aTW+NE87qLdGY4QA2S7KNA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-darwin-arm64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.14.38.tgz", + "integrity": "sha512-eqF+OejMI3mC5Dlo9Kdq/Ilbki9sQBw3QlHW3wjLmsLh+quNfHmGMp3Ly1eWm981iGBMdbtSS9+LRvR2T8B3eQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-freebsd-64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.14.38.tgz", + "integrity": "sha512-epnPbhZUt93xV5cgeY36ZxPXDsQeO55DppzsIgWM8vgiG/Rz+qYDLmh5ts3e+Ln1wA9dQ+nZmVHw+RjaW3I5Ig==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-freebsd-arm64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.14.38.tgz", + "integrity": "sha512-/9icXUYJWherhk+y5fjPI5yNUdFPtXHQlwP7/K/zg8t8lQdHVj20SqU9/udQmeUo5pDFHMYzcEFfJqgOVeKNNQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-linux-32": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.14.38.tgz", + "integrity": "sha512-QfgfeNHRFvr2XeHFzP8kOZVnal3QvST3A0cgq32ZrHjSMFTdgXhMhmWdKzRXP/PKcfv3e2OW9tT9PpcjNvaq6g==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + 
"linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-linux-64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.14.38.tgz", + "integrity": "sha512-uuZHNmqcs+Bj1qiW9k/HZU3FtIHmYiuxZ/6Aa+/KHb/pFKr7R3aVqvxlAudYI9Fw3St0VCPfv7QBpUITSmBR1Q==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-linux-arm": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.14.38.tgz", + "integrity": "sha512-FiFvQe8J3VKTDXG01JbvoVRXQ0x6UZwyrU4IaLBZeq39Bsbatd94Fuc3F1RGqPF5RbIWW7RvkVQjn79ejzysnA==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-linux-arm64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.14.38.tgz", + "integrity": "sha512-HlMGZTEsBrXrivr64eZ/EO0NQM8H8DuSENRok9d+Jtvq8hOLzrxfsAT9U94K3KOGk2XgCmkaI2KD8hX7F97lvA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-linux-mips64le": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.14.38.tgz", + "integrity": "sha512-qd1dLf2v7QBiI5wwfil9j0HG/5YMFBAmMVmdeokbNAMbcg49p25t6IlJFXAeLzogv1AvgaXRXvgFNhScYEUXGQ==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-linux-ppc64le": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.14.38.tgz", + "integrity": "sha512-mnbEm7o69gTl60jSuK+nn+pRsRHGtDPfzhrqEUXyCl7CTOCLtWN2bhK8bgsdp6J/2NyS/wHBjs1x8aBWwP2X9Q==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": 
true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-linux-riscv64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.14.38.tgz", + "integrity": "sha512-+p6YKYbuV72uikChRk14FSyNJZ4WfYkffj6Af0/Tw63/6TJX6TnIKE+6D3xtEc7DeDth1fjUOEqm+ApKFXbbVQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-linux-s390x": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-linux-s390x/-/esbuild-linux-s390x-0.14.38.tgz", + "integrity": "sha512-0zUsiDkGJiMHxBQ7JDU8jbaanUY975CdOW1YDrurjrM0vWHfjv9tLQsW9GSyEb/heSK1L5gaweRjzfUVBFoybQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-netbsd-64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.14.38.tgz", + "integrity": "sha512-cljBAApVwkpnJZfnRVThpRBGzCi+a+V9Ofb1fVkKhtrPLDYlHLrSYGtmnoTVWDQdU516qYI8+wOgcGZ4XIZh0Q==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-openbsd-64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.14.38.tgz", + "integrity": "sha512-CDswYr2PWPGEPpLDUO50mL3WO/07EMjnZDNKpmaxUPsrW+kVM3LoAqr/CE8UbzugpEiflYqJsGPLirThRB18IQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-sunos-64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.14.38.tgz", + "integrity": "sha512-2mfIoYW58gKcC3bck0j7lD3RZkqYA7MmujFYmSn9l6TiIcAMpuEvqksO+ntBgbLep/eyjpgdplF7b+4T9VJGOA==", + "cpu": [ + "x64" + ], + "dev": true, + 
"optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-windows-32": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.14.38.tgz", + "integrity": "sha512-L2BmEeFZATAvU+FJzJiRLFUP+d9RHN+QXpgaOrs2klshoAm1AE6Us4X6fS9k33Uy5SzScn2TpcgecbqJza1Hjw==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-windows-64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.14.38.tgz", + "integrity": "sha512-Khy4wVmebnzue8aeSXLC+6clo/hRYeNIm0DyikoEqX+3w3rcvrhzpoix0S+MF9vzh6JFskkIGD7Zx47ODJNyCw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-windows-arm64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.14.38.tgz", + "integrity": "sha512-k3FGCNmHBkqdJXuJszdWciAH77PukEyDsdIryEHn9cKLQFxzhT39dSumeTuggaQcXY57UlmLGIkklWZo2qzHpw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz", + "integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + 
"node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "node_modules/get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/glob": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.0.1.tgz", + "integrity": "sha512-cF7FYZZ47YzmCu7dDy50xSRRfO3ErRfrXuLZcNIuyiJEco0XSrGtuilG19L5xp3NcwTx7Gn+X6Tv3fmsUPTbow==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/globalthis": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", + "integrity": 
"sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", + "dependencies": { + "define-properties": "^1.1.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", + "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", + "dependencies": { + "get-intrinsic": "^1.1.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hash-base": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.1.0.tgz", + "integrity": "sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==", + "dependencies": { + "inherits": "^2.0.4", + "readable-stream": "^3.6.0", + "safe-buffer": "^5.2.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/hash.js": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.7.tgz", + "integrity": "sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==", + "dependencies": { + 
"inherits": "^2.0.3", + "minimalistic-assert": "^1.0.1" + } + }, + "node_modules/hmac-drbg": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz", + "integrity": "sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg==", + "dependencies": { + "hash.js": "^1.0.3", + "minimalistic-assert": "^1.0.0", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/is-core-module": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.9.0.tgz", + "integrity": "sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A==", + "dev": true, + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/isomorphic-unfetch": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/isomorphic-unfetch/-/isomorphic-unfetch-3.1.0.tgz", + "integrity": "sha512-geDJjpoZ8N0kWexiwkX8F9NkTsXhetLPVbZFQ+JTW239QNOwvB0gniuR1Wc6f0AMTn7/mFGyXvHTifrCp/GH8Q==", + "dev": true, + "dependencies": { + "node-fetch": "^2.6.1", + "unfetch": "^4.2.0" + } + }, + "node_modules/isomorphic-ws": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz", + "integrity": "sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w==", + "peerDependencies": { + "ws": "*" + } + }, + 
"node_modules/js-sha3": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz", + "integrity": "sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==" + }, + "node_modules/libsodium": { + "version": "0.7.10", + "resolved": "https://registry.npmjs.org/libsodium/-/libsodium-0.7.10.tgz", + "integrity": "sha512-eY+z7hDrDKxkAK+QKZVNv92A5KYkxfvIshtBJkmg5TSiCnYqZP3i9OO9whE79Pwgm4jGaoHgkM4ao/b9Cyu4zQ==" + }, + "node_modules/libsodium-wrappers": { + "version": "0.7.10", + "resolved": "https://registry.npmjs.org/libsodium-wrappers/-/libsodium-wrappers-0.7.10.tgz", + "integrity": "sha512-pO3F1Q9NPLB/MWIhehim42b/Fwb30JNScCNh8TcQ/kIc+qGLQch8ag8wb0keK3EP5kbGakk1H8Wwo7v+36rNQg==", + "dependencies": { + "libsodium": "^0.7.0" + } + }, + "node_modules/local-pkg": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-0.4.1.tgz", + "integrity": "sha512-lL87ytIGP2FU5PWwNDo0w3WhIo2gopIAxPg9RxDYF7m4rr5ahuZxP22xnJHIvaLTe4Z9P6uKKY2UHiwyB4pcrw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/long": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", + "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==" + }, + "node_modules/loupe": { + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.4.tgz", + "integrity": "sha512-OvKfgCC2Ndby6aSTREl5aCCPTNIzlDfQZvZxNUrBrihDhL3xcrYegTblhmEiCrg2kKQz4XsFIaemE5BF4ybSaQ==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.0" + } + }, + "node_modules/md5.js": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", + "integrity": "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==", + "dependencies": { + "hash-base": "^3.0.0", + "inherits": 
"^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "node_modules/minimalistic-assert": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", + "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==" + }, + "node_modules/minimalistic-crypto-utils": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz", + "integrity": "sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg==" + }, + "node_modules/minimatch": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", + "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/nanoid": { + "version": "3.3.4", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz", + "integrity": "sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==", + "dev": true, + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/node-fetch": { + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "dev": true, + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": 
"sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/pbkdf2": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz", + "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==", + "dependencies": { + "create-hash": "^1.1.2", + "create-hmac": "^1.1.4", + "ripemd160": "^2.0.1", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + }, + "engines": { + "node": ">=0.12" + } + }, + "node_modules/picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "dev": true + }, + "node_modules/postcss": { + "version": "8.4.13", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.13.tgz", + 
"integrity": "sha512-jtL6eTBrza5MPzy8oJLFuUscHDXTV5KcLlqAWHl5q5WYRfnNRGSmOZmOZ1T6Gy7A99mOZfqungmZMpMmCVJ8ZA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + } + ], + "dependencies": { + "nanoid": "^3.3.3", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/protobufjs": { + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.3.tgz", + "integrity": "sha512-yvAslS0hNdBhlSKckI4R1l7wunVilX66uvrjzE4MimiAt7/qw1nLpMhZrn/ObuUTM/c3Xnfl01LYMdcSJe6dwg==", + "hasInstallScript": true, + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.1", + "@types/node": "^13.7.0", + "long": "^4.0.0" + }, + "bin": { + "pbjs": "bin/pbjs", + "pbts": "bin/pbts" + } + }, + "node_modules/protobufjs/node_modules/@types/node": { + "version": "13.13.52", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.52.tgz", + "integrity": "sha512-s3nugnZumCC//n4moGGe6tkNMyYEdaDBitVjwPxXmR5lnMG5dHePinH2EdxkG3Rh1ghFHHixAG4NJhpJW1rthQ==" + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": 
"sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/readonly-date": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/readonly-date/-/readonly-date-1.0.0.tgz", + "integrity": "sha512-tMKIV7hlk0h4mO3JTmmVuIlJVXjKk3Sep9Bf5OH0O+758ruuVkUy2J9SttDLm91IEX/WHlXPSpxMGjPj4beMIQ==" + }, + "node_modules/resolve": { + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "dev": true, + "dependencies": { + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ripemd160": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz", + "integrity": "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==", + "dependencies": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1" + } + }, + "node_modules/rollup": { + "version": "2.71.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.71.1.tgz", + "integrity": "sha512-lMZk3XfUBGjrrZQpvPSoXcZSfKcJ2Bgn+Z0L1MoW2V8Wh7BVM+LOBJTPo16yul2MwL59cXedzW1ruq3rCjSRgw==", + "dev": true, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=10.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": 
"github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/sha.js": { + "version": "2.4.11", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", + "dependencies": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + }, + "bin": { + "sha.js": "bin.js" + } + }, + "node_modules/source-map-js": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", + "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/symbol-observable": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-2.0.3.tgz", + "integrity": "sha512-sQV7phh2WCYAn81oAkakC5qjq2Ml0g8ozqz03wOGnx9dDlG1de6yrF+0RAzSJD8fPUow3PTSMf2SAbOGxb93BA==", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/tinypool": { + "version": "0.1.3", + "resolved": 
"https://registry.npmjs.org/tinypool/-/tinypool-0.1.3.tgz", + "integrity": "sha512-2IfcQh7CP46XGWGGbdyO4pjcKqsmVqFAPcXfPxcPXmOWt9cYkTP9HcDmGgsfijYoAEc4z9qcpM/BaBz46Y9/CQ==", + "dev": true, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tinyspy": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-0.3.2.tgz", + "integrity": "sha512-2+40EP4D3sFYy42UkgkFFB+kiX2Tg3URG/lVvAZFfLxgGpnWl5qQJuBw1gaLttq8UOS+2p3C0WrhJnQigLTT2Q==", + "dev": true, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=", + "dev": true + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/unfetch": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/unfetch/-/unfetch-4.2.0.tgz", + "integrity": "sha512-F9p7yYCn6cIW9El1zi0HI6vqpeIvBsr3dSuRO6Xuppb1u5rXpCPmMvLSyECLhybr9isec8Ohl0hPekMVrEinDA==", + "dev": true + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "node_modules/vite": { + "version": "2.9.7", + "resolved": "https://registry.npmjs.org/vite/-/vite-2.9.7.tgz", + "integrity": "sha512-5hH7aNQe8rJiTTqCtPNX/6mIKlGw+1wg8UXwAxDIIN8XaSR+Zx3GT2zSu7QKa1vIaBqfUODGh3vpwY8r0AW/jw==", + "dev": true, + "dependencies": { + "esbuild": "^0.14.27", + "postcss": "^8.4.13", + "resolve": "^1.22.0", + "rollup": "^2.59.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": ">=12.2.0" + }, + 
"optionalDependencies": { + "fsevents": "~2.3.2" + }, + "peerDependencies": { + "less": "*", + "sass": "*", + "stylus": "*" + }, + "peerDependenciesMeta": { + "less": { + "optional": true + }, + "sass": { + "optional": true + }, + "stylus": { + "optional": true + } + } + }, + "node_modules/vitest": { + "version": "0.10.2", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-0.10.2.tgz", + "integrity": "sha512-41D+nhswCco5vy1NXmpAjZX11Aj+HMnyhjWQD12piwHibf4bvdTGtni56UcFWcvONVoIForgDuLrKSohHJjwQA==", + "dev": true, + "dependencies": { + "@types/chai": "^4.3.1", + "@types/chai-subset": "^1.3.3", + "chai": "^4.3.6", + "local-pkg": "^0.4.1", + "tinypool": "^0.1.3", + "tinyspy": "^0.3.2", + "vite": "^2.9.5" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": ">=v14.16.0" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "@vitest/ui": "*", + "c8": "*", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@vitest/ui": { + "optional": true + }, + "c8": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=", + "dev": true + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", + "dev": true, + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "node_modules/ws": { + "version": "7.5.9", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz", + "integrity": 
"sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==", + "engines": { + "node": ">=8.3.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": "^5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/xstream": { + "version": "11.14.0", + "resolved": "https://registry.npmjs.org/xstream/-/xstream-11.14.0.tgz", + "integrity": "sha512-1bLb+kKKtKPbgTK6i/BaoAn03g47PpFstlbe1BA+y3pNS/LfvcaghS5BFf9+EE1J+KwSQsEpfJvFN5GqFtiNmw==", + "dependencies": { + "globalthis": "^1.0.1", + "symbol-observable": "^2.0.3" + } + } + }, + "dependencies": { + "@confio/ics23": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@confio/ics23/-/ics23-0.6.8.tgz", + "integrity": "sha512-wB6uo+3A50m0sW/EWcU64xpV/8wShZ6bMTa7pF8eYsTrSkQA7oLUIJcs/wb8g4y2Oyq701BaGiO6n/ak5WXO1w==", + "requires": { + "@noble/hashes": "^1.0.0", + "protobufjs": "^6.8.8" + } + }, + "@cosmjs/amino": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/amino/-/amino-0.27.0.tgz", + "integrity": "sha512-ybyzRkGrRija1bjGjGP7sAp2ulPA2/S2wMY2pehB7b6ZR8dpwveCjz/IqFWC5KBxz6KZf5MuaONOY+t1kkjsfw==", + "requires": { + "@cosmjs/crypto": "0.27.0", + "@cosmjs/encoding": "0.27.0", + "@cosmjs/math": "0.27.0", + "@cosmjs/utils": "0.27.0" + } + }, + "@cosmjs/crypto": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/crypto/-/crypto-0.27.0.tgz", + "integrity": "sha512-JTPHINCYZ+mnsxrfv8ZBHsFWgB7EGooa5SD0lQFhkCVX/FC3sqxuFNv6TZU5bVVU71DUSqXTMXF5m9kAMzPUkw==", + "requires": { + "@cosmjs/encoding": "0.27.0", + "@cosmjs/math": "0.27.0", + "@cosmjs/utils": "0.27.0", + "bip39": "^3.0.2", + "bn.js": "^5.2.0", + "elliptic": "^6.5.3", + "js-sha3": "^0.8.0", + "libsodium-wrappers": "^0.7.6", + "ripemd160": "^2.0.2", + "sha.js": "^2.4.11" + } + }, + "@cosmjs/encoding": { + "version": "0.27.0", + "resolved": 
"https://registry.npmjs.org/@cosmjs/encoding/-/encoding-0.27.0.tgz", + "integrity": "sha512-cCT8X/NUAGXOe14F/k2GE6N9btjrOqALBilUPIn5CL4OEGxvRTPD59nWSACu0iafCGz10Tw3LPcouuYPtZmkbg==", + "requires": { + "base64-js": "^1.3.0", + "bech32": "^1.1.4", + "readonly-date": "^1.0.0" + } + }, + "@cosmjs/json-rpc": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/json-rpc/-/json-rpc-0.27.0.tgz", + "integrity": "sha512-Q6na5KPYDD90QhlPZTInquwBycDjvhZvWwpV1TppDd2Em8S1FfN3ePiV2YCf4XzXREU5YPFSHzh5MHK/WhQY3w==", + "requires": { + "@cosmjs/stream": "0.27.0", + "xstream": "^11.14.0" + } + }, + "@cosmjs/math": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/math/-/math-0.27.0.tgz", + "integrity": "sha512-+WsrdXojqpUL6l2LKOWYgiAJIDD0faONNtnjb1kpS1btSzZe1Ns+RdygG6QZLLvZuxMfkEzE54ZXDKPD5MhVPA==", + "requires": { + "bn.js": "^5.2.0" + } + }, + "@cosmjs/proto-signing": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/proto-signing/-/proto-signing-0.27.0.tgz", + "integrity": "sha512-ODqnmY/ElmcEYu6HbDmeGce4KacgzSVGQzvGodZidC1RR9EYociuweBPNwSHqBPolC6PQPI/QGc83m/mbih2xw==", + "requires": { + "@cosmjs/amino": "0.27.0", + "@cosmjs/crypto": "0.27.0", + "@cosmjs/math": "0.27.0", + "cosmjs-types": "^0.4.0", + "long": "^4.0.0", + "protobufjs": "~6.10.2" + } + }, + "@cosmjs/socket": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/socket/-/socket-0.27.0.tgz", + "integrity": "sha512-lOd0s6gLyjdjcs8xnYuS2IXRqBLUrI76Bek5wsia+m5CyUvHjRbbd7+nZiznbtVjApBlIwHGkiklLg3/byxkAA==", + "requires": { + "@cosmjs/stream": "0.27.0", + "isomorphic-ws": "^4.0.1", + "ws": "^7", + "xstream": "^11.14.0" + } + }, + "@cosmjs/stargate": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/stargate/-/stargate-0.27.0.tgz", + "integrity": "sha512-Fiqk8rIpB4emzC/P7/+ZPPJV9aG6KJhVuOF4D8c1j1Bv8fVs1XqC6NgsY6elTLXl38pgXt7REn6VYzAdZwrHXQ==", + "requires": { + "@confio/ics23": "^0.6.3", + "@cosmjs/amino": 
"0.27.0", + "@cosmjs/encoding": "0.27.0", + "@cosmjs/math": "0.27.0", + "@cosmjs/proto-signing": "0.27.0", + "@cosmjs/stream": "0.27.0", + "@cosmjs/tendermint-rpc": "0.27.0", + "@cosmjs/utils": "0.27.0", + "cosmjs-types": "^0.4.0", + "long": "^4.0.0", + "protobufjs": "~6.10.2", + "xstream": "^11.14.0" + } + }, + "@cosmjs/stream": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/stream/-/stream-0.27.0.tgz", + "integrity": "sha512-D9mXHqS6y7xrThhUg5SCvMjiVQ8ph9f7gAuWlrXhqVJ5FqrP6OyTGRbVyGGM91d5Jj7N7oidQ+hOfc34vKFgeg==", + "requires": { + "xstream": "^11.14.0" + } + }, + "@cosmjs/tendermint-rpc": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/tendermint-rpc/-/tendermint-rpc-0.27.0.tgz", + "integrity": "sha512-WFcJ2/UF76fBBVzPRiHJoC/GCKvgt0mb7+ewgpwKBeEcYwfj5qb1QreGBbHn/UZx9QSsF9jhI5k7SmNdglC3cA==", + "requires": { + "@cosmjs/crypto": "0.27.0", + "@cosmjs/encoding": "0.27.0", + "@cosmjs/json-rpc": "0.27.0", + "@cosmjs/math": "0.27.0", + "@cosmjs/socket": "0.27.0", + "@cosmjs/stream": "0.27.0", + "axios": "^0.21.2", + "readonly-date": "^1.0.0", + "xstream": "^11.14.0" + } + }, + "@cosmjs/utils": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@cosmjs/utils/-/utils-0.27.0.tgz", + "integrity": "sha512-UC1eWY9isDQm6POy6GaTmYtbPVY5dkywdjW8Qzj+JNMhbhMM0KHuI4pHwjv5TPXSO/Ba2z10MTnD9nUlZtDwtA==" + }, + "@noble/hashes": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.1.2.tgz", + "integrity": "sha512-KYRCASVTv6aeUi1tsF8/vpyR7zpfs3FUzy2Jqm+MU+LmUKhQ0y2FpfwqkCcxSg2ua4GALJd8k2R76WxwZGbQpA==" + }, + "@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==" + }, + "@protobufjs/base64": { + "version": "1.1.2", + "resolved": 
"https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==" + }, + "@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==" + }, + "@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==" + }, + "@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "requires": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==" + }, + "@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==" + }, + "@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==" + }, + "@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": 
"sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==" + }, + "@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" + }, + "@types/chai": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.1.tgz", + "integrity": "sha512-/zPMqDkzSZ8t3VtxOa4KPq7uzzW978M9Tvh+j7GHKuo6k6GTLxPJ4J5gE5cjfJ26pnXst0N5Hax8Sr0T2Mi9zQ==", + "dev": true + }, + "@types/chai-subset": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/@types/chai-subset/-/chai-subset-1.3.3.tgz", + "integrity": "sha512-frBecisrNGz+F4T6bcc+NLeolfiojh5FxW2klu669+8BARtyQv2C/GkNW6FUodVe4BroGMP/wER/YDGc7rEllw==", + "dev": true, + "requires": { + "@types/chai": "*" + } + }, + "@types/glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==", + "dev": true, + "requires": { + "@types/minimatch": "*", + "@types/node": "*" + } + }, + "@types/long": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz", + "integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==" + }, + "@types/minimatch": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.5.tgz", + "integrity": "sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ==", + "dev": true + }, + "@types/node": { + "version": "17.0.31", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.31.tgz", + "integrity": "sha512-AR0x5HbXGqkEx9CadRH3EBYx/VkiUgZIhP4wvPn/+5KIsgpNoyFaRlVe0Zlx9gRtg8fA06a9tskE2MSN7TcG4Q==" + }, + "assertion-error": { + "version": "1.1.0", + 
"resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true + }, + "axios": { + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.4.tgz", + "integrity": "sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==", + "requires": { + "follow-redirects": "^1.14.0" + } + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" + }, + "bech32": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/bech32/-/bech32-1.1.4.tgz", + "integrity": "sha512-s0IrSOzLlbvX7yp4WBfPITzpAU8sqQcpsmwXDiKwrG4r491vwCO/XpejasRNl0piBMe/DvP4Tz0mIS/X1DPJBQ==" + }, + "bip39": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/bip39/-/bip39-3.0.4.tgz", + "integrity": "sha512-YZKQlb752TrUWqHWj7XAwCSjYEgGAk+/Aas3V7NyjQeZYsztO8JnQUaCWhcnL4T+jL8nvB8typ2jRPzTlgugNw==", + "requires": { + "@types/node": "11.11.6", + "create-hash": "^1.1.0", + "pbkdf2": "^3.0.9", + "randombytes": "^2.0.1" + }, + "dependencies": { + "@types/node": { + "version": "11.11.6", + "resolved": "https://registry.npmjs.org/@types/node/-/node-11.11.6.tgz", + "integrity": "sha512-Exw4yUWMBXM3X+8oqzJNRqZSwUAaS4+7NdvHqQuFi/d+synz++xmX3QIf+BFqneW8N31R8Ky+sikfZUXq07ggQ==" + } + } + }, + "bn.js": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.1.tgz", + "integrity": 
"sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==" + }, + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0" + } + }, + "brorand": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz", + "integrity": "sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w==" + }, + "chai": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.6.tgz", + "integrity": "sha512-bbcp3YfHCUzMOvKqsztczerVgBKSsEijCySNlHHbX3VG1nskvqjz5Rfso1gGwD6w6oOV3eI60pKuMOV5MV7p3Q==", + "dev": true, + "requires": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": "^3.0.1", + "get-func-name": "^2.0.0", + "loupe": "^2.3.1", + "pathval": "^1.1.1", + "type-detect": "^4.0.5" + } + }, + "check-error": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=", + "dev": true + }, + "cipher-base": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", + "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", + "requires": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "cosmjs-types": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/cosmjs-types/-/cosmjs-types-0.4.1.tgz", + "integrity": "sha512-I7E/cHkIgoJzMNQdFF0YVqPlaTqrqKHrskuSTIqlEyxfB5Lf3WKCajSXVK2yHOfOFfSux/RxEdpMzw/eO4DIog==", + "requires": { + "long": "^4.0.0", + "protobufjs": "~6.11.2" + }, + "dependencies": { + "protobufjs": { + "version": "6.11.3", + "resolved": 
"https://registry.npmjs.org/protobufjs/-/protobufjs-6.11.3.tgz", + "integrity": "sha512-xL96WDdCZYdU7Slin569tFX712BxsxslWwAfAhCYjQKGTq7dAU91Lomy6nLLhh/dyGhk/YH4TwTSRxTzhuHyZg==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.1", + "@types/node": ">=13.7.0", + "long": "^4.0.0" + } + } + } + }, + "create-hash": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", + "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", + "requires": { + "cipher-base": "^1.0.1", + "inherits": "^2.0.1", + "md5.js": "^1.3.4", + "ripemd160": "^2.0.1", + "sha.js": "^2.4.0" + } + }, + "create-hmac": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", + "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==", + "requires": { + "cipher-base": "^1.0.3", + "create-hash": "^1.1.0", + "inherits": "^2.0.1", + "ripemd160": "^2.0.0", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + } + }, + "deep-eql": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==", + "dev": true, + "requires": { + "type-detect": "^4.0.0" + } + }, + "define-properties": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", + "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", + "requires": { + 
"has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + } + }, + "elliptic": { + "version": "6.5.4", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz", + "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==", + "requires": { + "bn.js": "^4.11.9", + "brorand": "^1.1.0", + "hash.js": "^1.0.0", + "hmac-drbg": "^1.0.1", + "inherits": "^2.0.4", + "minimalistic-assert": "^1.0.1", + "minimalistic-crypto-utils": "^1.0.1" + }, + "dependencies": { + "bn.js": { + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" + } + } + }, + "esbuild": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.14.38.tgz", + "integrity": "sha512-12fzJ0fsm7gVZX1YQ1InkOE5f9Tl7cgf6JPYXRJtPIoE0zkWAbHdPHVPPaLi9tYAcEBqheGzqLn/3RdTOyBfcA==", + "dev": true, + "requires": { + "esbuild-android-64": "0.14.38", + "esbuild-android-arm64": "0.14.38", + "esbuild-darwin-64": "0.14.38", + "esbuild-darwin-arm64": "0.14.38", + "esbuild-freebsd-64": "0.14.38", + "esbuild-freebsd-arm64": "0.14.38", + "esbuild-linux-32": "0.14.38", + "esbuild-linux-64": "0.14.38", + "esbuild-linux-arm": "0.14.38", + "esbuild-linux-arm64": "0.14.38", + "esbuild-linux-mips64le": "0.14.38", + "esbuild-linux-ppc64le": "0.14.38", + "esbuild-linux-riscv64": "0.14.38", + "esbuild-linux-s390x": "0.14.38", + "esbuild-netbsd-64": "0.14.38", + "esbuild-openbsd-64": "0.14.38", + "esbuild-sunos-64": "0.14.38", + "esbuild-windows-32": "0.14.38", + "esbuild-windows-64": "0.14.38", + "esbuild-windows-arm64": "0.14.38" + } + }, + "esbuild-android-64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-android-64/-/esbuild-android-64-0.14.38.tgz", + "integrity": 
"sha512-aRFxR3scRKkbmNuGAK+Gee3+yFxkTJO/cx83Dkyzo4CnQl/2zVSurtG6+G86EQIZ+w+VYngVyK7P3HyTBKu3nw==", + "dev": true, + "optional": true + }, + "esbuild-android-arm64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.14.38.tgz", + "integrity": "sha512-L2NgQRWuHFI89IIZIlpAcINy9FvBk6xFVZ7xGdOwIm8VyhX1vNCEqUJO3DPSSy945Gzdg98cxtNt8Grv1CsyhA==", + "dev": true, + "optional": true + }, + "esbuild-darwin-64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.14.38.tgz", + "integrity": "sha512-5JJvgXkX87Pd1Og0u/NJuO7TSqAikAcQQ74gyJ87bqWRVeouky84ICoV4sN6VV53aTW+NE87qLdGY4QA2S7KNA==", + "dev": true, + "optional": true + }, + "esbuild-darwin-arm64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.14.38.tgz", + "integrity": "sha512-eqF+OejMI3mC5Dlo9Kdq/Ilbki9sQBw3QlHW3wjLmsLh+quNfHmGMp3Ly1eWm981iGBMdbtSS9+LRvR2T8B3eQ==", + "dev": true, + "optional": true + }, + "esbuild-freebsd-64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.14.38.tgz", + "integrity": "sha512-epnPbhZUt93xV5cgeY36ZxPXDsQeO55DppzsIgWM8vgiG/Rz+qYDLmh5ts3e+Ln1wA9dQ+nZmVHw+RjaW3I5Ig==", + "dev": true, + "optional": true + }, + "esbuild-freebsd-arm64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.14.38.tgz", + "integrity": "sha512-/9icXUYJWherhk+y5fjPI5yNUdFPtXHQlwP7/K/zg8t8lQdHVj20SqU9/udQmeUo5pDFHMYzcEFfJqgOVeKNNQ==", + "dev": true, + "optional": true + }, + "esbuild-linux-32": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.14.38.tgz", + "integrity": "sha512-QfgfeNHRFvr2XeHFzP8kOZVnal3QvST3A0cgq32ZrHjSMFTdgXhMhmWdKzRXP/PKcfv3e2OW9tT9PpcjNvaq6g==", + "dev": true, + "optional": true + }, + "esbuild-linux-64": { + "version": "0.14.38", + 
"resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.14.38.tgz", + "integrity": "sha512-uuZHNmqcs+Bj1qiW9k/HZU3FtIHmYiuxZ/6Aa+/KHb/pFKr7R3aVqvxlAudYI9Fw3St0VCPfv7QBpUITSmBR1Q==", + "dev": true, + "optional": true + }, + "esbuild-linux-arm": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.14.38.tgz", + "integrity": "sha512-FiFvQe8J3VKTDXG01JbvoVRXQ0x6UZwyrU4IaLBZeq39Bsbatd94Fuc3F1RGqPF5RbIWW7RvkVQjn79ejzysnA==", + "dev": true, + "optional": true + }, + "esbuild-linux-arm64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.14.38.tgz", + "integrity": "sha512-HlMGZTEsBrXrivr64eZ/EO0NQM8H8DuSENRok9d+Jtvq8hOLzrxfsAT9U94K3KOGk2XgCmkaI2KD8hX7F97lvA==", + "dev": true, + "optional": true + }, + "esbuild-linux-mips64le": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.14.38.tgz", + "integrity": "sha512-qd1dLf2v7QBiI5wwfil9j0HG/5YMFBAmMVmdeokbNAMbcg49p25t6IlJFXAeLzogv1AvgaXRXvgFNhScYEUXGQ==", + "dev": true, + "optional": true + }, + "esbuild-linux-ppc64le": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.14.38.tgz", + "integrity": "sha512-mnbEm7o69gTl60jSuK+nn+pRsRHGtDPfzhrqEUXyCl7CTOCLtWN2bhK8bgsdp6J/2NyS/wHBjs1x8aBWwP2X9Q==", + "dev": true, + "optional": true + }, + "esbuild-linux-riscv64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.14.38.tgz", + "integrity": "sha512-+p6YKYbuV72uikChRk14FSyNJZ4WfYkffj6Af0/Tw63/6TJX6TnIKE+6D3xtEc7DeDth1fjUOEqm+ApKFXbbVQ==", + "dev": true, + "optional": true + }, + "esbuild-linux-s390x": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-linux-s390x/-/esbuild-linux-s390x-0.14.38.tgz", + "integrity": 
"sha512-0zUsiDkGJiMHxBQ7JDU8jbaanUY975CdOW1YDrurjrM0vWHfjv9tLQsW9GSyEb/heSK1L5gaweRjzfUVBFoybQ==", + "dev": true, + "optional": true + }, + "esbuild-netbsd-64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.14.38.tgz", + "integrity": "sha512-cljBAApVwkpnJZfnRVThpRBGzCi+a+V9Ofb1fVkKhtrPLDYlHLrSYGtmnoTVWDQdU516qYI8+wOgcGZ4XIZh0Q==", + "dev": true, + "optional": true + }, + "esbuild-openbsd-64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.14.38.tgz", + "integrity": "sha512-CDswYr2PWPGEPpLDUO50mL3WO/07EMjnZDNKpmaxUPsrW+kVM3LoAqr/CE8UbzugpEiflYqJsGPLirThRB18IQ==", + "dev": true, + "optional": true + }, + "esbuild-sunos-64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.14.38.tgz", + "integrity": "sha512-2mfIoYW58gKcC3bck0j7lD3RZkqYA7MmujFYmSn9l6TiIcAMpuEvqksO+ntBgbLep/eyjpgdplF7b+4T9VJGOA==", + "dev": true, + "optional": true + }, + "esbuild-windows-32": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.14.38.tgz", + "integrity": "sha512-L2BmEeFZATAvU+FJzJiRLFUP+d9RHN+QXpgaOrs2klshoAm1AE6Us4X6fS9k33Uy5SzScn2TpcgecbqJza1Hjw==", + "dev": true, + "optional": true + }, + "esbuild-windows-64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.14.38.tgz", + "integrity": "sha512-Khy4wVmebnzue8aeSXLC+6clo/hRYeNIm0DyikoEqX+3w3rcvrhzpoix0S+MF9vzh6JFskkIGD7Zx47ODJNyCw==", + "dev": true, + "optional": true + }, + "esbuild-windows-arm64": { + "version": "0.14.38", + "resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.14.38.tgz", + "integrity": "sha512-k3FGCNmHBkqdJXuJszdWciAH77PukEyDsdIryEHn9cKLQFxzhT39dSumeTuggaQcXY57UlmLGIkklWZo2qzHpw==", + "dev": true, + "optional": true + }, + "follow-redirects": { + "version": "1.15.2", + "resolved": 
"https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz", + "integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==" + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=", + "dev": true + }, + "get-intrinsic": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + "requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + } + }, + "glob": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.0.1.tgz", + "integrity": "sha512-cF7FYZZ47YzmCu7dDy50xSRRfO3ErRfrXuLZcNIuyiJEco0XSrGtuilG19L5xp3NcwTx7Gn+X6Tv3fmsUPTbow==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "globalthis": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", + "integrity": 
"sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", + "requires": { + "define-properties": "^1.1.3" + } + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-property-descriptors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", + "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", + "requires": { + "get-intrinsic": "^1.1.1" + } + }, + "has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" + }, + "hash-base": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.1.0.tgz", + "integrity": "sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==", + "requires": { + "inherits": "^2.0.4", + "readable-stream": "^3.6.0", + "safe-buffer": "^5.2.0" + } + }, + "hash.js": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.7.tgz", + "integrity": "sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==", + "requires": { + "inherits": "^2.0.3", + "minimalistic-assert": "^1.0.1" + } + }, + "hmac-drbg": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz", + "integrity": "sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg==", + "requires": { + "hash.js": "^1.0.3", + "minimalistic-assert": "^1.0.0", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + "inflight": { + 
"version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "is-core-module": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.9.0.tgz", + "integrity": "sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "isomorphic-unfetch": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/isomorphic-unfetch/-/isomorphic-unfetch-3.1.0.tgz", + "integrity": "sha512-geDJjpoZ8N0kWexiwkX8F9NkTsXhetLPVbZFQ+JTW239QNOwvB0gniuR1Wc6f0AMTn7/mFGyXvHTifrCp/GH8Q==", + "dev": true, + "requires": { + "node-fetch": "^2.6.1", + "unfetch": "^4.2.0" + } + }, + "isomorphic-ws": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz", + "integrity": "sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w==", + "requires": {} + }, + "js-sha3": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz", + "integrity": "sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==" + }, + "libsodium": { + "version": "0.7.10", + "resolved": "https://registry.npmjs.org/libsodium/-/libsodium-0.7.10.tgz", + "integrity": "sha512-eY+z7hDrDKxkAK+QKZVNv92A5KYkxfvIshtBJkmg5TSiCnYqZP3i9OO9whE79Pwgm4jGaoHgkM4ao/b9Cyu4zQ==" + }, + "libsodium-wrappers": { + "version": "0.7.10", + "resolved": "https://registry.npmjs.org/libsodium-wrappers/-/libsodium-wrappers-0.7.10.tgz", + "integrity": 
"sha512-pO3F1Q9NPLB/MWIhehim42b/Fwb30JNScCNh8TcQ/kIc+qGLQch8ag8wb0keK3EP5kbGakk1H8Wwo7v+36rNQg==", + "requires": { + "libsodium": "^0.7.0" + } + }, + "local-pkg": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-0.4.1.tgz", + "integrity": "sha512-lL87ytIGP2FU5PWwNDo0w3WhIo2gopIAxPg9RxDYF7m4rr5ahuZxP22xnJHIvaLTe4Z9P6uKKY2UHiwyB4pcrw==", + "dev": true + }, + "long": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", + "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==" + }, + "loupe": { + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.4.tgz", + "integrity": "sha512-OvKfgCC2Ndby6aSTREl5aCCPTNIzlDfQZvZxNUrBrihDhL3xcrYegTblhmEiCrg2kKQz4XsFIaemE5BF4ybSaQ==", + "dev": true, + "requires": { + "get-func-name": "^2.0.0" + } + }, + "md5.js": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", + "integrity": "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==", + "requires": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "minimalistic-assert": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", + "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==" + }, + "minimalistic-crypto-utils": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz", + "integrity": "sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg==" + }, + "minimatch": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", + "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", + 
"dev": true, + "requires": { + "brace-expansion": "^2.0.1" + } + }, + "nanoid": { + "version": "3.3.4", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz", + "integrity": "sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==", + "dev": true + }, + "node-fetch": { + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "dev": true, + "requires": { + "whatwg-url": "^5.0.0" + } + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true + }, + "pbkdf2": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz", + "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==", + "requires": { + 
"create-hash": "^1.1.2", + "create-hmac": "^1.1.4", + "ripemd160": "^2.0.1", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + } + }, + "picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "dev": true + }, + "postcss": { + "version": "8.4.13", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.13.tgz", + "integrity": "sha512-jtL6eTBrza5MPzy8oJLFuUscHDXTV5KcLlqAWHl5q5WYRfnNRGSmOZmOZ1T6Gy7A99mOZfqungmZMpMmCVJ8ZA==", + "dev": true, + "requires": { + "nanoid": "^3.3.3", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + } + }, + "protobufjs": { + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.3.tgz", + "integrity": "sha512-yvAslS0hNdBhlSKckI4R1l7wunVilX66uvrjzE4MimiAt7/qw1nLpMhZrn/ObuUTM/c3Xnfl01LYMdcSJe6dwg==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.1", + "@types/node": "^13.7.0", + "long": "^4.0.0" + }, + "dependencies": { + "@types/node": { + "version": "13.13.52", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.52.tgz", + "integrity": "sha512-s3nugnZumCC//n4moGGe6tkNMyYEdaDBitVjwPxXmR5lnMG5dHePinH2EdxkG3Rh1ghFHHixAG4NJhpJW1rthQ==" + } + } + }, + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "requires": { + "safe-buffer": "^5.1.0" + } + }, + "readable-stream": { + "version": "3.6.0", + 
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "readonly-date": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/readonly-date/-/readonly-date-1.0.0.tgz", + "integrity": "sha512-tMKIV7hlk0h4mO3JTmmVuIlJVXjKk3Sep9Bf5OH0O+758ruuVkUy2J9SttDLm91IEX/WHlXPSpxMGjPj4beMIQ==" + }, + "resolve": { + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "dev": true, + "requires": { + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + } + }, + "ripemd160": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz", + "integrity": "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==", + "requires": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1" + } + }, + "rollup": { + "version": "2.71.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.71.1.tgz", + "integrity": "sha512-lMZk3XfUBGjrrZQpvPSoXcZSfKcJ2Bgn+Z0L1MoW2V8Wh7BVM+LOBJTPo16yul2MwL59cXedzW1ruq3rCjSRgw==", + "dev": true, + "requires": { + "fsevents": "~2.3.2" + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + }, + "sha.js": { + "version": "2.4.11", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", + "requires": { + "inherits": 
"^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "source-map-js": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", + "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "dev": true + }, + "string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "requires": { + "safe-buffer": "~5.2.0" + } + }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true + }, + "symbol-observable": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-2.0.3.tgz", + "integrity": "sha512-sQV7phh2WCYAn81oAkakC5qjq2Ml0g8ozqz03wOGnx9dDlG1de6yrF+0RAzSJD8fPUow3PTSMf2SAbOGxb93BA==" + }, + "tinypool": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-0.1.3.tgz", + "integrity": "sha512-2IfcQh7CP46XGWGGbdyO4pjcKqsmVqFAPcXfPxcPXmOWt9cYkTP9HcDmGgsfijYoAEc4z9qcpM/BaBz46Y9/CQ==", + "dev": true + }, + "tinyspy": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-0.3.2.tgz", + "integrity": "sha512-2+40EP4D3sFYy42UkgkFFB+kiX2Tg3URG/lVvAZFfLxgGpnWl5qQJuBw1gaLttq8UOS+2p3C0WrhJnQigLTT2Q==", + "dev": true + }, + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=", + "dev": true + }, + "type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": 
"sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true + }, + "unfetch": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/unfetch/-/unfetch-4.2.0.tgz", + "integrity": "sha512-F9p7yYCn6cIW9El1zi0HI6vqpeIvBsr3dSuRO6Xuppb1u5rXpCPmMvLSyECLhybr9isec8Ohl0hPekMVrEinDA==", + "dev": true + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "vite": { + "version": "2.9.7", + "resolved": "https://registry.npmjs.org/vite/-/vite-2.9.7.tgz", + "integrity": "sha512-5hH7aNQe8rJiTTqCtPNX/6mIKlGw+1wg8UXwAxDIIN8XaSR+Zx3GT2zSu7QKa1vIaBqfUODGh3vpwY8r0AW/jw==", + "dev": true, + "requires": { + "esbuild": "^0.14.27", + "fsevents": "~2.3.2", + "postcss": "^8.4.13", + "resolve": "^1.22.0", + "rollup": "^2.59.0" + } + }, + "vitest": { + "version": "0.10.2", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-0.10.2.tgz", + "integrity": "sha512-41D+nhswCco5vy1NXmpAjZX11Aj+HMnyhjWQD12piwHibf4bvdTGtni56UcFWcvONVoIForgDuLrKSohHJjwQA==", + "dev": true, + "requires": { + "@types/chai": "^4.3.1", + "@types/chai-subset": "^1.3.3", + "chai": "^4.3.6", + "local-pkg": "^0.4.1", + "tinypool": "^0.1.3", + "tinyspy": "^0.3.2", + "vite": "^2.9.5" + } + }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=", + "dev": true + }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", + "dev": true, + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + 
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "ws": { + "version": "7.5.9", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz", + "integrity": "sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==", + "requires": {} + }, + "xstream": { + "version": "11.14.0", + "resolved": "https://registry.npmjs.org/xstream/-/xstream-11.14.0.tgz", + "integrity": "sha512-1bLb+kKKtKPbgTK6i/BaoAn03g47PpFstlbe1BA+y3pNS/LfvcaghS5BFf9+EE1J+KwSQsEpfJvFN5GqFtiNmw==", + "requires": { + "globalthis": "^1.0.1", + "symbol-observable": "^2.0.3" + } + } + } +} diff --git a/integration/testdata/tstestrunner/package.json b/integration/testdata/tstestrunner/package.json new file mode 100644 index 0000000..bfe2d6d --- /dev/null +++ b/integration/testdata/tstestrunner/package.json @@ -0,0 +1,21 @@ +{ + "name": "tstestrunner", + "version": "1.0.0", + "private": true, + "description": "Test Runner for the TS Client", + "license": "ISC", + "scripts": { + "test": "vitest --run" + }, + "devDependencies": { + "@types/glob": "^7.2.0", + "@types/node": "^17.0.31", + "glob": "^8.0.1", + "isomorphic-unfetch": "^3.1.0", + "vitest": "^0.10.2" + }, + "dependencies": { + "@cosmjs/proto-signing": "^0.27.0", + "@cosmjs/stargate": "^0.27.0" + } +} diff --git a/integration/testdata/tstestrunner/testutil/setup.ts b/integration/testdata/tstestrunner/testutil/setup.ts new file mode 100644 index 0000000..fd18562 --- /dev/null +++ b/integration/testdata/tstestrunner/testutil/setup.ts @@ -0,0 +1,31 @@ +import { beforeAll, expect } from "vitest"; + +// Make sure that the tests have fetch API support +import "isomorphic-unfetch"; + +type Account = { + Name: string; + Address: string; + Mnemonic: string; + Coins: string[]; +}; + +type GlobalAccounts = { + [name: string]: Account; +}; + +beforeAll(() => { + // Initialize required globals + globalThis.txApi = process.env.TEST_TX_API || ""; + globalThis.queryApi = process.env.TEST_QUERY_API || ""; + + 
expect(globalThis.txApi, "TEST_TX_API is required").not.toEqual(""); + expect(globalThis.queryApi, "TEST_QUERY_API is required").not.toEqual(""); + + // Initialize the global accounts + globalThis.accounts = {}; + + JSON.parse(process.env.TEST_ACCOUNTS || "[]").forEach((account: Account) => { + globalThis.accounts[account.Name] = account; + }); +}); diff --git a/integration/testdata/tstestrunner/vitest.config.ts b/integration/testdata/tstestrunner/vitest.config.ts new file mode 100644 index 0000000..1c80fb9 --- /dev/null +++ b/integration/testdata/tstestrunner/vitest.config.ts @@ -0,0 +1,19 @@ +import { defineConfig } from "vitest/config"; + +// TODO: add .env file support for a better developer experience ? It would allow +// writing new tests against a running blockchain without the need of scaffolding +// a new one for each test run. + +export default defineConfig({ + test: { + include: ["**/*_test.ts"], + globals: true, + setupFiles: "testutil/setup.ts", + testTimeout: 600000, // milliseconds + }, + resolve: { + alias: { + client: process.env.TEST_TSCLIENT_DIR, + }, + }, +}); diff --git a/integration/tx/tx_test.go b/integration/tx/tx_test.go new file mode 100644 index 0000000..940d4c2 --- /dev/null +++ b/integration/tx/tx_test.go @@ -0,0 +1,248 @@ +//go:build !relayer + +package tx_test + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner" + "github.com/ignite/cli/v29/ignite/pkg/cmdrunner/step" + "github.com/ignite/cli/v29/ignite/pkg/errors" + "github.com/ignite/cli/v29/ignite/pkg/randstr" + "github.com/ignite/cli/v29/ignite/pkg/xurl" + envtest "github.com/ignite/cli/v29/integration" +) + +func TestSignTxWithDashedAppName(t *testing.T) { + var ( + env = envtest.New(t) + appname = "da-shed-a-p-p" + app = env.ScaffoldApp(appname) + servers = app.RandomizeServerPorts() + ctx, cancel = context.WithCancel(env.Ctx()) + ) + + nodeAddr, 
err := xurl.TCP(servers.RPC) + require.NoErrorf(t, err, "cant read nodeAddr from host.RPC %v", servers.RPC) + + app.Scaffold( + "scaffold a simple list", + false, + "list", + "item", + "str", + ) + + var ( + output = &bytes.Buffer{} + isTxBodyRetrieved bool + txResponse struct { + Code int + RawLog string `json:"raw_log"` + } + ) + // sign tx to add an item to the list. + steps := step.NewSteps( + step.New( + step.Stdout(output), + step.Exec( + app.Binary(), + "tx", + "dashedapp", + "create-item", + "helloworld", + "--chain-id", "dashedapp", + "--from", "alice", + "--node", nodeAddr, + "--output", "json", + "--log_format", "json", + "--yes", + ), + step.PostExec(func(execErr error) error { + if execErr != nil { + return execErr + } + err := json.Unmarshal(output.Bytes(), &txResponse) + if err != nil { + return errors.Errorf("unmarshling tx response: %w", err) + } + return nil + }), + ), + ) + + go func() { + defer cancel() + app.WaitChainUp(ctx, servers.API) + isTxBodyRetrieved = env.Exec("sign a tx", steps, envtest.ExecRetry()) + }() + + app.MustServe(ctx) + + if !isTxBodyRetrieved { + t.FailNow() + } + require.Equal(t, 0, txResponse.Code, + "tx failed code=%d log=%s", txResponse.Code, txResponse.RawLog) +} + +func TestGetTxViaGRPCGateway(t *testing.T) { + var ( + env = envtest.New(t) + appname = randstr.Runes(10) + app = env.ScaffoldApp(fmt.Sprintf("github.com/test/%s", appname)) + servers = app.RandomizeServerPorts() + ctx, cancel = context.WithCancel(env.Ctx()) + ) + + var ( + output = &bytes.Buffer{} + isTxBodyRetrieved bool + txBody = struct { + Tx struct { + Body struct { + Messages []struct { + Amount []struct { + Denom string `json:"denom"` + Amount string `json:"amount"` + } `json:"amount"` + } `json:"messages"` + } `json:"body"` + } `json:"tx"` + }{} + ) + + // 1- list accounts + // 2- send tokens from one to other. + // 3- verify tx by using gRPC Gateway API. 
+ steps := step.NewSteps( + step.New( + step.Exec( + app.Binary(), + "keys", + "list", + "--keyring-backend", "test", + "--output", "json", + "--log_format", "json", + ), + step.PostExec(func(execErr error) error { + if execErr != nil { + return execErr + } + + // collect addresses of alice and bob. + var ( + accounts []struct { + Name string `json:"name"` + Address string `json:"address"` + } + addresses []string + ) + if err := json.NewDecoder(output).Decode(&accounts); err != nil { + return err + } + for _, account := range accounts { + if account.Name == "alice" || account.Name == "bob" { + addresses = append(addresses, account.Address) + } + } + if len(addresses) != 2 { + return errors.New("expected alice and bob accounts to be created") + } + + nodeAddr, err := xurl.TCP(servers.RPC) + require.NoErrorf(t, err, "cant read nodeAddr from host.RPC %v", servers.RPC) + + // send some tokens from alice to bob and confirm the corresponding tx via gRPC gateway + // endpoint by asserting denom and amount. 
+ return cmdrunner.New().Run(ctx, step.New( + step.Exec( + app.Binary(), + "tx", + "bank", + "send", + addresses[0], + addresses[1], + "10token", + "--keyring-backend", "test", + "--chain-id", appname, + "--node", nodeAddr, + "--output", "json", + "--log_format", "json", + "--yes", + ), + step.PreExec(func() error { + output.Reset() + return nil + }), + step.PostExec(func(execErr error) error { + if execErr != nil { + return execErr + } + + tx := struct { + Hash string `json:"txHash"` + }{} + if err := json.NewDecoder(output).Decode(&tx); err != nil { + return err + } + + apiAddr, err := xurl.HTTP(servers.API) + if err != nil { + return err + } + + addr := fmt.Sprintf("%s/cosmos/tx/v1beta1/txs/%s", apiAddr, tx.Hash) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, addr, nil) + if err != nil { + return errors.Wrap(err, "call to get tx via gRPC gateway") + } + + time.Sleep(5 * time.Second) + resp, err := http.DefaultClient.Do(req) + if err != nil { + return err + } + defer resp.Body.Close() + + // Send error if the request failed + if resp.StatusCode != http.StatusOK { + return errors.New(resp.Status) + } + + if err := json.NewDecoder(resp.Body).Decode(&txBody); err != nil { + return err + } + return nil + }), + step.Stdout(output), + )) + }), + step.Stdout(output), + )) + + go func() { + defer cancel() + app.WaitChainUp(ctx, servers.API) + isTxBodyRetrieved = env.Exec("retrieve account addresses", steps, envtest.ExecRetry()) + }() + + app.MustServe(ctx) + + if !isTxBodyRetrieved { + t.FailNow() + } + + require.Len(t, txBody.Tx.Body.Messages, 1) + require.Len(t, txBody.Tx.Body.Messages[0].Amount, 1) + require.Equal(t, "token", txBody.Tx.Body.Messages[0].Amount[0].Denom) + require.Equal(t, "10", txBody.Tx.Body.Messages[0].Amount[0].Amount) +} diff --git a/packaging/.gitignore b/packaging/.gitignore new file mode 100644 index 0000000..9727ef5 --- /dev/null +++ b/packaging/.gitignore @@ -0,0 +1,3 @@ +build-dir +repo +*.snap \ No newline at end of file diff 
--git a/packaging/brew/ignite.rb b/packaging/brew/ignite.rb new file mode 100644 index 0000000..59f648b --- /dev/null +++ b/packaging/brew/ignite.rb @@ -0,0 +1,20 @@ +class Ignite < Formula + desc "Build, launch, and maintain any crypto application with Ignite CLI" + homepage "https://github.com/ignite/cli" + url "https://github.com/ignite/cli/archive/refs/tags/v28.2.0.tar.gz" + sha256 "556f953fd7f922354dea64e7b3dade5dd75b3f62ece93167e2ba126cac27602e" + license "Apache-2.0" + + depends_on "go" + depends_on "node" + + def install + system "go", "build", "-mod=readonly", *std_go_args(output: bin/"ignite"), "./ignite/cmd/ignite" + end + + test do + ENV["DO_NOT_TRACK"] = "1" + system bin/"ignite", "s", "chain", "mars" + assert_predicate testpath/"mars/go.mod", :exist? + end +end diff --git a/packaging/readme.md b/packaging/readme.md new file mode 100644 index 0000000..16a480b --- /dev/null +++ b/packaging/readme.md @@ -0,0 +1,17 @@ +# Packaging and Distributing Ignite + +Ignite CLI is distributed on package managers. This document describes how to package and distribute Ignite CLI. + +## HomeBrew + +Read the following resources to understand HomeBrew. + +- +- + +```bash +HOMEBREW_NO_INSTALL_FROM_API=1 brew install --interactive ignite +brew audit --new-formula ignite +``` + +The formula is published in the [homebrew-core](https://github.com/homebrew/homebrew-core) repository: diff --git a/proto/buf.gen.yaml b/proto/buf.gen.yaml new file mode 100644 index 0000000..cb8a631 --- /dev/null +++ b/proto/buf.gen.yaml @@ -0,0 +1,8 @@ +version: v2 +plugins: + - remote: buf.build/protocolbuffers/go + out: . + opt: paths=source_relative + - remote: buf.build/grpc/go + out: . + opt: paths=source_relative diff --git a/proto/buf.md b/proto/buf.md new file mode 100644 index 0000000..11cc5f5 --- /dev/null +++ b/proto/buf.md @@ -0,0 +1,3 @@ +# Protobufs + +This is the public protocol buffers API for [Ignite CLI](https://github.com/ignite/cli). 
diff --git a/proto/ignite/services/plugin/grpc/v1/client_api.proto b/proto/ignite/services/plugin/grpc/v1/client_api.proto new file mode 100644 index 0000000..a295d9d --- /dev/null +++ b/proto/ignite/services/plugin/grpc/v1/client_api.proto @@ -0,0 +1,26 @@ +syntax = "proto3"; + +package ignite.services.plugin.grpc.v1; + +option go_package = "github.com/ignite/cli/v29/ignite/services/plugin/grpc/v1"; + +message ChainInfo { + string chain_id = 1; + string app_path = 2; + string config_path = 3; + string rpc_address = 4; + string home = 5; +} + +message IgniteInfo { + string cli_version = 1; + string go_version = 2; + string sdk_version = 3; + string buf_version = 4; + string build_date = 5; + string source_hash = 6; + string config_version = 7; + string os = 8; + string arch = 9; + bool build_from_source = 10; +} diff --git a/proto/ignite/services/plugin/grpc/v1/interface.proto b/proto/ignite/services/plugin/grpc/v1/interface.proto new file mode 100644 index 0000000..dd03097 --- /dev/null +++ b/proto/ignite/services/plugin/grpc/v1/interface.proto @@ -0,0 +1,151 @@ +syntax = "proto3"; + +package ignite.services.plugin.grpc.v1; + +option go_package = "github.com/ignite/cli/v29/ignite/services/plugin/grpc/v1"; + +// ExecutedCommand represents a plugin command under execution. +message ExecutedCommand { + // Use is the one-line usage message. + string use = 1; + + // Path contains the command path, e.g. `ignite scaffold foo`. + string path = 2; + + // Args are the command arguments. + repeated string args = 3; + + // Full list of args taken from the command line. + repeated string os_args = 4; + + // With contains the plugin config parameters. + map with = 5; + + // Flags holds the list of command flags. + repeated Flag flags = 6; +} + +// ExecutedHook represents a plugin hook under execution. +message ExecutedHook { + // Hook is a copy of the original Hook defined in the Manifest. + Hook hook = 1; + + // ExecutedCommand gives access to the command attached by the hook. 
+ ExecutedCommand executed_command = 2; +} + +// Manifest represents the plugin behavior. +message Manifest { + // Plugin name. + string name = 1; + + // Enables sharing a single plugin server across all running instances of a plugin. + // Useful if a plugin adds or extends long running commands. + // + // Example: if a plugin defines a hook on `ignite chain serve`, a plugin server is + // instantiated when the command is run. Now if you want to interact with that instance + // from commands defined in that plugin, you need to enable shared host, or else the + // commands will just instantiate separate plugin servers. + // + // When enabled, all plugins of the same path loaded from the same configuration will + // attach its RPC client to an existing RPC server. + // + // If a plugin instance has no other running plugin servers, it will create one and it + // will be the host. + bool shared_host = 2; + + // Commands contains the commands that will be added to the list of ignite commands. + // Each command is independent, for nested commands use the inner Commands field. + repeated Command commands = 3; + + // Hooks contains the hooks that will be attached to the existing ignite commands. + repeated Hook hooks = 4; +} + +// Command represents a plugin command. +message Command { + // Use is the one-line usage message. + // + // Recommended syntax is as follows: + // [ ] identifies an optional argument. Arguments that are not enclosed in brackets are required. + // ... indicates that you can specify multiple values for the previous argument. + // | indicates mutually exclusive information. You can use the argument to the left of the separator or the + // argument to the right of the separator. You cannot use both arguments in a single use of the command. + // { } delimits a set of mutually exclusive arguments when one of the arguments is required. If the arguments are + // optional, they are enclosed in brackets ([ ]). + // + // Example: add [-F file | -D dir]... 
[-f format] profile + string use = 1; + + // Aliases is an array of aliases that can be used instead of the first word in Use. + // Note: Aliases have no effect on runnable commands. + repeated string aliases = 2; + + // Short is the short description shown in the 'help' output. + string short = 3; + + // Long is the long message shown in the 'help ' output. + string long = 4; + + // Hidden defines, if this command is hidden and should NOT show up in the list of available commands. + bool hidden = 5; + + // Flags holds the list of command flags. + repeated Flag flags = 6; + + // Indicates where the command should be placed. + // For instance `ignite scaffold` will place the command at the `scaffold` command. + // An empty value is interpreted as `ignite` (==root). + string place_command_under = 7; + + // List of sub commands. + repeated Command commands = 8; +} + +// Flag represents a command line flag. +message Flag { + // Type represents the flag type. + enum Type { + TYPE_FLAG_STRING_UNSPECIFIED = 0; + TYPE_FLAG_INT = 1; + TYPE_FLAG_UINT = 2; + TYPE_FLAG_INT64 = 3; + TYPE_FLAG_UINT64 = 4; + TYPE_FLAG_BOOL = 5; + TYPE_FLAG_STRING_SLICE = 6; + } + + // Name as it appears in the command line. + string name = 1; + + // One letter abbreviation of the flag. + string shorthand = 2; + + // Help message. + string usage = 3; + + // Default flag value. + string default_value = 4; + + // Flag type. + Type type = 5; + + // Flag value. + string value = 6; + + // Indicates whether or not the flag is propagated to child commands. + bool persistent = 7; +} + +// Hook represents a user defined action within a plugin. +message Hook { + // Identifies the hook for the client to invoke the correct hook. + // It must be unique. + string name = 1; + + // Indicates the command where to register the hooks. + string place_hook_on = 2; + + // Flags holds the list of command flags. 
+ repeated Flag flags = 3; +} diff --git a/proto/ignite/services/plugin/grpc/v1/service.proto b/proto/ignite/services/plugin/grpc/v1/service.proto new file mode 100644 index 0000000..71924c8 --- /dev/null +++ b/proto/ignite/services/plugin/grpc/v1/service.proto @@ -0,0 +1,92 @@ +syntax = "proto3"; + +package ignite.services.plugin.grpc.v1; + +import "ignite/services/plugin/grpc/v1/client_api.proto"; +import "ignite/services/plugin/grpc/v1/interface.proto"; + +option go_package = "github.com/ignite/cli/v29/ignite/services/plugin/grpc/v1"; + +// InterfaceService defines the interface that must be implemented by all plugins. +service InterfaceService { + // Manifest declares the plugin's Command(s) and Hook(s). + rpc Manifest(ManifestRequest) returns (ManifestResponse); + + // Execute will be invoked by ignite when a plugin Command is executed. + // It is global for all commands declared in Manifest, if you have declared + // multiple commands, use cmd.Path to distinguish them. + rpc Execute(ExecuteRequest) returns (ExecuteResponse); + + // ExecuteHookPre is invoked by ignite when a command specified by the Hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + rpc ExecuteHookPre(ExecuteHookPreRequest) returns (ExecuteHookPreResponse); + + // ExecuteHookPost is invoked by ignite when a command specified by the hook + // path is invoked. + // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + rpc ExecuteHookPost(ExecuteHookPostRequest) returns (ExecuteHookPostResponse); + + // ExecuteHookCleanUp is invoked by ignite when a command specified by the + // hook path is invoked. Unlike ExecuteHookPost, it is invoked regardless of + // execution status of the command and hooks. 
+ // It is global for all hooks declared in Manifest, if you have declared + // multiple hooks, use hook.Name to distinguish them. + rpc ExecuteHookCleanUp(ExecuteHookCleanUpRequest) returns (ExecuteHookCleanUpResponse); +} + +message ManifestRequest {} + +message ManifestResponse { + Manifest manifest = 1; +} + +message ExecuteRequest { + ExecutedCommand cmd = 1; + uint32 client_api = 2; +} + +message ExecuteResponse {} + +message ExecuteHookPreRequest { + ExecutedHook hook = 1; + uint32 client_api = 2; +} + +message ExecuteHookPreResponse {} + +message ExecuteHookPostRequest { + ExecutedHook hook = 1; + uint32 client_api = 2; +} + +message ExecuteHookPostResponse {} + +message ExecuteHookCleanUpRequest { + ExecutedHook hook = 1; + uint32 client_api = 2; +} + +message ExecuteHookCleanUpResponse {} + +// ClientAPIService defines the interface that allows plugins to get chain app analysis info. +service ClientAPIService { + // GetChainInfo returns basic chain info for the configured app + rpc GetChainInfo(GetChainInfoRequest) returns (GetChainInfoResponse); + // GetIgniteInfo returns basic ignite info + rpc GetIgniteInfo(GetIgniteInfoRequest) returns (GetIgniteInfoResponse); +} + +message GetChainInfoRequest {} + +message GetChainInfoResponse { + ChainInfo chain_info = 1; +} + +message GetIgniteInfoRequest {} + +message GetIgniteInfoResponse { + IgniteInfo ignite_info = 1; +} diff --git a/readme.md b/readme.md new file mode 100644 index 0000000..cd8bf75 --- /dev/null +++ b/readme.md @@ -0,0 +1,276 @@ +
+

Ignite

+
+ + + + +![Ignite CLI](./assets/ignite-cli.png) + +[Ignite CLI](https://ignite.com/cli) is the all-in-one platform to build, +launch, and maintain any crypto application on a sovereign and secured +blockchain. It is a developer-friendly interface to the [Cosmos +SDK](https://github.com/cosmos/cosmos-sdk), the world's most widely-used +blockchain application framework. Ignite CLI generates boilerplate code for you, +so you can focus on writing business logic. + +## Quick start + +Ignite CLI can be installed using popular package managers such as Homebrew and Snap, making it easy to stay up-to-date with the latest versions. These package manager installations are maintained regularly for both macOS and GNU/Linux. For those who prefer manual installation or need to set up a development environment, additional instructions are provided at the end of this section. + +### Installation + +Install Ignite using [Homebrew](https://formulae.brew.sh/formula/ignite) on macOS and GNU/Linux: + +```sh +brew install ignite +``` + +Or manually using the following command: + +```sh +curl https://get.ignite.com/cli! | bash +``` + +
+ Troubleshoot + +If Ignite doesn't automatically move to your `/usr/local/bin` directory, use the following command: + +```sh +sudo mv ignite /usr/local/bin +``` + +If you encounter an error, you may need to create the `/usr/local/bin` directory and set the necessary permissions: + +```sh +mkdir /usr/local/bin +sudo chown -R $(whoami) /usr/local/bin +``` + +
+ +To create and start a blockchain: + +```sh +ignite scaffold chain mars + +cd mars + +ignite chain serve +``` + +The instructions for installing Ignite CLI manually and configuring your system are provided below. + +
+ View development setup instructions + +#### Supported Operating Systems + +- GNU/Linux +- macOS + +#### Install Go + +1. Install the latest version of Go. +2. Download the release suitable for your system. +3. Follow the installation instructions. + +**Note:** We recommend not using `brew` to install Go. + +#### Add the Go bin Directory to Your PATH + +1. Edit your `~/.bashrc` file and add the following line: + + ```sh + export PATH=$PATH:$(go env GOPATH)/bin + ``` + +2. Apply the changes: + + ```sh + source ~/.bashrc + ``` + +#### Remove Existing Ignite CLI Installations + +1. Remove the Ignite CLI binary: + + ```sh + rm $(which ignite) + ``` + + You may need to run this with `sudo`. + +2. Repeat the step until all Ignite CLI installations are removed. + +#### Install Ignite CLI + +```sh +curl https://get.ignite.com/cli! | bash +``` + +#### Clone the Ignite CLI Repo + +1. Clone the repository: + + ```sh + git clone --depth=1 git@github.com:ignite/cli.git + ``` + +2. Change to the `cli` directory: + + ```sh + cd cli + ``` + +#### Run make install + +```sh +make install +``` + +#### Verify Your Ignite CLI Version + +```sh +ignite version +``` + +
+ +Alternatively, you can open Ignite CLI directly in your web browser via [GitHub Codespaces](https://github.com/features/codespaces): [Open in GitHub Codespaces](https://github.dev/ignite/cli). + +## Documentation + +To learn how to use Ignite CLI, check out the [Ignite CLI +docs](https://docs.ignite.com). To learn more about how to build blockchain apps +with Ignite CLI, see the [Ignite CLI Developer +Tutorials](https://tutorials.ignite.com). + +To install Ignite CLI locally on GNU, Linux, or macOS, see [Install Ignite +CLI](https://docs.ignite.com/welcome/install). + +To learn more about building a JavaScript frontend for your Cosmos SDK +blockchain, see [ignite/web](https://github.com/ignite/web). + +## Questions + +For questions and support, join the official [Ignite +Discord](https://discord.gg/ignite) server. The issue list in this repo is +exclusively for bug reports and feature requests. + +## Cosmos SDK compatibility + +Blockchains created with Ignite CLI use the [Cosmos +SDK](https://github.com/cosmos/cosmos-sdk) framework. To ensure the best +possible experience, use the version of Ignite CLI that corresponds to the +version of Cosmos SDK that your blockchain is built with. Unless noted +otherwise, a row refers to a minor version and all associated patch versions. + +| Ignite CLI | Cosmos SDK | IBC | Notes | +| ----------- | ----------------- | -------------------- | ------------------------------------------------------------- | +| v29.x.y | v0.50.x & v0.53.x | v10.2.0 | Originally in v0.53.x. Can work with v0.50.x. | +| v28.x.y | v0.50.x & v0.53.x | v8.0.0 | Originally in v0.50.x. Can work with v0.53.x. 
| +| v0.27.1 | v0.47.3 | v7.1.0 | - | +| v0.26.0 | v0.46.7 | v6.1.0 | - | +| v0.25.2 | v0.46.6 | v5.1.0 | Bump Tendermint version to v0.34.24 | +| v0.25.1 | v0.46.3 | v5.0.0 | Includes Dragonberry security fix | +| ~~v0.24.0~~ | ~~v0.46.0~~ | ~~v5.0.0~~ | This version is deprecated due to a security fix in `v0.25.0` | +| v0.23.0 | v0.45.5 | v3.0.1 | | +| v0.21.1 | v0.45.4 | v2.0.3 | Supports Cosmos SDK v0.46.0-alpha1 and above | +| v0.21.0 | v0.45.4 | v2.0.3 | | +| v0.20.0 | v0.45.3 | v2.0.3 | | +| v0.19 | v0.44 | v1.2.2 | | +| v0.18 | v0.44 | v1.2.2 | `ignite chain serve` works with v0.44.x chains | +| v0.17 | v0.42 | Same with Cosmos SDK | | + +To upgrade your blockchain to the newer version of Cosmos SDK, see the +[Migration guide](https://docs.ignite.com/migration). + +## Ignite Apps + +[Ignite Apps](https://ignite.com/marketplace) aims to extend the functionality of Ignite CLI, offering both official and community-contributed integrations. These integrations are designed to streamline development processes and offer valuable insights for blockchain app developers. + +### Discover Ignite Apps + +You can discover available Ignite Apps in the [Ignite Marketplace](https://ignite.com/marketplace). The marketplace features a variety of apps that can enhance your development experience, including tools for scaffolding, deploying, and more. + +### How to Install an Ignite App + +```bash +ignite app install -g github.com/ignite/apps/[app-name] +``` + +The `ignite app list` command allows to list the plugins and their status, and to +update a plugin if you need to get the latest version. + +### How to Create an App + +Scaffold your Ignite app with one simple command: + +```bash +ignite scaffold app path/to/your/app +``` + +Afterwards, install using: + +```bash +ignite app install -g path/to/your/app +``` + +For more information, refer to [Creating Ignite Apps](https://docs.ignite.com/apps/developing-apps). 
+ +Also check out the section of our [example Apps](https://github.com/ignite/apps/tree/main/examples). + +## Contributing to Ignite CLI + +We welcome contributions from everyone. The `main` branch contains the +development version of the code. You can create a branch from `main` and +create a pull request, or maintain your own fork and submit a cross-repository +pull request. + +Our Ignite CLI bounty program provides incentives for your participation and +pays rewards. Track new, in-progress, and completed bounties in the [GitHub Issues +board](https://github.com/ignite/cli/issues?q=is%3Aissue+is%3Aopen+label%3Abounty). + +**Important** Before you start implementing a new Ignite CLI feature, the first +step is to create an issue on GitHub that describes the proposed changes. + +If you're not sure where to start, check out [contributing.md](contributing.md) +for our guidelines and policies for how we develop Ignite CLI. Thank you to +everyone who has contributed to Ignite CLI! + +## Community + +Ignite CLI is a free and open source product maintained by +[Ignite](https://ignite.com). Here's where you can find us. Stay in touch. + +- [ignite.com website](https://ignite.com) +- [@ignite on Twitter](https://x.com/ignite) +- [ignite.com/blog](https://ignite.com/blog) +- [Ignite Discord](https://discord.com/invite/ignitecli) +- [Ignite YouTube](https://www.youtube.com/@ignitehq) +- [Ignite docs](https://docs.ignite.com) +- [Ignite jobs](https://ignite.com/careers) diff --git a/release_process.md b/release_process.md new file mode 100644 index 0000000..b4e3b0e --- /dev/null +++ b/release_process.md @@ -0,0 +1,90 @@ +# Release Process + +This document outlines the release process for Ignite, ensuring consistency, quality, and clear communication with users. Ignite follows [semantic versioning](https://semver.org/) to signal the stability and compatibility of each release. + +## Development Branch + +The `main` branch serves as the development branch for Ignite. 
All new features, bug fixes, and updates are merged into this branch. The `main` branch is typically updated regularly, depending on development activity.
+
+## Backporting Features & Bug Fixes
+
+Features and bug fixes ready for release are backported from `main` to the release branch. This process is automated using [Mergify](https://mergify.com/), a CI/CD tool. By adding the `backport release/x.y.z` label to a PR, Mergify will automatically backport the PR to the release branch when it is merged into `main`.
+
+## Changelog
+
+Before any release, the changelog must be up-to-date. It lists all changes made to Ignite since the last release and must be carefully reviewed to ensure accuracy.
+
+## Release Cadence: Alpha → Beta → RC → Full Release
+
+To accommodate Ignite’s dependency on the [Cosmos SDK](https://github.com/cosmos/cosmos-sdk) and the addition of new features, a structured release cadence is used that includes Alpha, Beta, and Release Candidate (RC) stages before the Full Release. This ensures features are thoroughly tested, dependencies are compatible, and disruptions are minimized.
+
+### Major & Minor Releases
+
+For **major** releases, the Alpha → Beta → RC progression is **required**. For **minor** releases, these stages are **optional** and applied at the maintainers’ discretion, depending on the scope and complexity of recent updates.
+
+These stages help ensure stability, especially given Ignite's integration with the Cosmos SDK and potential for breaking changes. This process allows developers to prepare for compatibility changes:
+
+- **Alpha Releases**: Early, incomplete versions with new features that may be unstable, shared internally or with select testers.
+- **Beta Releases**: Feature-complete versions shared with the community for feedback and testing. These releases are more stable than Alpha but may still contain bugs.
+- **RC (Release Candidate)**: A near-final version with all intended features and fixes. 
Multiple RCs may be issued for **MAJOR releases** (e.g., `v30.0.0-rc1`, `v30.0.0-rc2`, etc.) as issues are identified and resolved. The final RC becomes the Full Release if no critical issues remain. + +### Patch Releases + +**Patch releases** do not go through Alpha, Beta, and RC stages. They are fast-tracked for release after internal testing, as they address specific bug fixes or security vulnerabilities. Patch releases should remain backward-compatible and thoroughly tested for regressions. + +## Managing SDK Dependencies & Compatibility + +Given Ignite’s reliance on the Cosmos SDK, ensuring compatibility between Ignite releases and the SDK is crucial. When upgrading the SDK version, a transition period may be needed to allow users time to adapt. + +- **Backward Compatibility**: Ignite strives to maintain backward compatibility for `chain` and `app` commands between major releases. This allows users to upgrade Ignite without immediate refactoring. +- **Breaking Changes**: If breaking changes are introduced (such as SDK upgrades or major feature revisions), transition periods will be defined and communicated in release notes starting from Alpha versions. +- **Transition Periods**: When significant changes impact downstream applications, transition periods give users time to test and adapt their applications before the final release. + +## Release Branches + +Releases are tagged from dedicated release branches, named after the release version (e.g., `release/v28.x.y` or `release/v30.x.y`). These branches are created from `main` and contain all changes intended for the release. + +- **Alpha, Beta, and RC Branches**: Pre-release branches are named accordingly, such as `release/v28.x.y-alpha`, `release/v28.x.y-beta`, or `release/v28.x.y-rc1`, `release/v28.x.y-rc2`, etc. 
+ +## Release Preparation & Testing + +The preparation and testing phases vary depending on the type of release: + +### Major Releases + +- **Freeze `main`**: No new features are merged into `main` during final preparation. +- **Create the release branch**: A new branch (e.g., `release/v30.x.y`) is created from `main`. +- **Backport**: Ensure that all desired features and fixes are backported to the release branch. +- **Testing**: Run unit, integration, and manual tests. +- **Changelog**: Finalize the changelog. + +### Minor & Patch Releases + +- **Backport**: Ensure that all necessary changes are backported to the release branch. +- **Testing**: Conduct unit, integration, and manual tests. +- **Changelog**: Finalize the changelog. + +## Release Publication + +When testing is complete, the release is published to the [releases page](https://github.com/ignite/cli/releases) on GitHub. This includes tagging the release branch with the version number and publishing a release announcement with the changelog. + +```sh +git checkout release/v28.x.y +git tag v28.x.y -m "Release Ignite v28.x.y" +``` + +For Alpha, Beta, and RC releases, use the appropriate tags (e.g., `v28.x.y-alpha`, `v28.x.y-beta`, `v28.x.y-rc1`, etc.). + +## Post-Release Activities + +After a release, monitor feedback and bug reports. These will inform subsequent patch releases or feature additions. + +Following a **MAJOR** release, the `main` branch must be updated to the next **MAJOR** version. This includes updating the `go.mod` file and any other version number references in the codebase. + +## Maintenance Policy + +Only the latest released version of Ignite is actively maintained for new features and fixes. Older versions may continue to function but will not receive updates, ensuring stability and security for users. + +Users are encouraged to upgrade to the latest release to benefit from the newest features and fixes. 
+
+Ignite ensures compatibility for `chain` and `app` commands between **MAJOR** releases, but other commands may change and may require users to upgrade their codebase to match the Cosmos SDK version used by Ignite. diff --git a/scripts/gen-cli-docs b/scripts/gen-cli-docs new file mode 100755 index 0000000..ccc2cba --- /dev/null +++ b/scripts/gen-cli-docs @@ -0,0 +1,3 @@ +#!/bin/bash
+
+go run ignite/internal/tools/gen-cli-docs/*.go --out docs/docs/03-CLI-Commands/01-cli-commands.md diff --git a/scripts/gen-config-doc b/scripts/gen-config-doc new file mode 100755 index 0000000..ffd5859 --- /dev/null +++ b/scripts/gen-config-doc @@ -0,0 +1,8 @@ +#!/bin/bash
+
+cd ignite/internal/tools/gen-config-doc || exit
+
+go run .
+
+rsync -av docs ../../../../
+rm -r docs \ No newline at end of file diff --git a/scripts/gen-mig-diffs b/scripts/gen-mig-diffs new file mode 100755 index 0000000..a852182 --- /dev/null +++ b/scripts/gen-mig-diffs @@ -0,0 +1,4 @@ +#!/bin/bash
+
+# guard the cd (ShellCheck SC2164), consistent with gen-config-doc above
+cd ignite/internal/tools/gen-mig-diffs || exit
+go run . \ No newline at end of file diff --git a/scripts/go-mod-tidy-all.sh b/scripts/go-mod-tidy-all.sh new file mode 100755 index 0000000..2562cac --- /dev/null +++ b/scripts/go-mod-tidy-all.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash
+
+set -euo pipefail
+
+while IFS= read -r -d '' modfile; do
+  echo "Updating $modfile"
+  DIR=$(dirname "$modfile")
+  if [[ $DIR == *"testdata"* ]]; then
+    echo "Skipping testdata directory"
+    continue
+  fi
+  (cd "$DIR" && go mod tidy)
+done < <(find . -name go.mod -print0) diff --git a/scripts/test b/scripts/test new file mode 100755 index 0000000..b1564a7 --- /dev/null +++ b/scripts/test @@ -0,0 +1,4 @@ +#!/bin/bash
+set -e -x
+
+go test -race $(go list github.com/ignite/cli/v29/ignite/...) 
\ No newline at end of file diff --git a/scripts/test-coverage b/scripts/test-coverage new file mode 100755 index 0000000..d7d9e08 --- /dev/null +++ b/scripts/test-coverage @@ -0,0 +1,20 @@ +#!/bin/bash
+set -e -x
+
+go test -race -coverprofile=coverage.txt -covermode=atomic -coverpkg=./... $(go list github.com/ignite/cli/v29/ignite/...)
+
+# append "||true" to grep so if no match the return code stays 0
+excludelist="$(find ./ -type f -name '*.go' | xargs grep -l 'DONTCOVER' || true)"
+excludelist+=" $(find ./ -type f -name '*.pb.go')"
+excludelist+=" $(find ./ -type f -name '*.pb.gw.go')"
+excludelist+=" $(find ./actions -type d)"
+excludelist+=" $(find ./assets -type d)"
+excludelist+=" $(find ./docs -type d)"
+excludelist+=" $(find ./integration -type d)"
+excludelist+=" $(find ./scripts -type d)"
+for filename in ${excludelist}; do
+  filename=${filename#".//"}
+  echo "Excluding ${filename} from coverage report..."
+  filename=$(echo "$filename" | sed 's/\//\\\//g')
+  sed -i.bak "/""$filename""/d" coverage.txt
+done diff --git a/scripts/test-integration b/scripts/test-integration new file mode 100755 index 0000000..3d0d9fb --- /dev/null +++ b/scripts/test-integration @@ -0,0 +1,3 @@ +#!/bin/bash
+
+go test -v -timeout 30m github.com/ignite/cli/v29/integration/... diff --git a/scripts/test-unit b/scripts/test-unit new file mode 100755 index 0000000..49bcc59 --- /dev/null +++ b/scripts/test-unit @@ -0,0 +1,3 @@ +#!/bin/bash
+
+go test -v github.com/ignite/cli/v29/ignite/...