From a7b4a69353c39ca2b82e26a28e70e6709b5fd359 Mon Sep 17 00:00:00 2001 From: Samir Faci Date: Fri, 3 Nov 2023 13:24:41 -0400 Subject: [PATCH] Updating GoReleaser to update commit, builddate, and version correctly --- .github/workflows/code_scanners.yml | 6 +- .github/workflows/go.yml | 12 +- .github/workflows/hugo.yml | 2 +- .github/workflows/release.yml | 8 +- .goreleaser.yml | 84 +- .mockery.yaml | 13 + README.md | 8 +- Taskfile.yml | 43 +- cli/backup/alertnotifications.go | 154 + cli/backup/backup.go | 40 + cli/backup/connection_permissions.go | 163 + cli/backup/connections.go | 139 + cli/backup/dashboard.go | 202 + cli/backup/folder_permissions.go | 116 + cli/backup/folders.go | 152 + cli/backup/library.go | 172 + cli/backup/organizations.go | 115 + cli/backup/team.go | 168 + cli/backup/users.go | 152 + cli/commandeer.go | 45 + cli/support/init_cfg.go | 29 + cli/support/root.go | 99 + cli/support/simple.go | 68 + cli/test/conections_test.go | 50 + cli/test/devel_test.go | 50 + cli/test/support.go | 71 + cli/test/version_test.go | 52 + cli/tools/auth.go | 20 + cli/tools/auth_service_accounts.go | 229 + cli/tools/auth_tokens.go | 139 + cli/tools/context.go | 167 + cli/tools/devel.go | 64 + cli/tools/organizations.go | 236 + cli/tools/tools.go | 29 + cli/tools/users.go | 60 + cli/version.go | 30 + cmd/gdg/main.go | 29 + cmd/gen/main.go | 72 + config/assets.go | 18 + config/importer-example.yml | 32 +- config/templates-example.yml | 24 + config/testing.yml | 30 +- docker/Dockerfile | 55 +- internal/api/README.md | 8 +- internal/api/health.go | 2 +- internal/config/config.go | 172 +- internal/config/config_model.go | 58 +- internal/config/config_new_ctx.go | 100 +- internal/config/config_test.go | 54 +- internal/config/types.go | 82 +- internal/log/log.go | 33 +- internal/log/slog_handler.go | 61 + internal/service/alertnotifications.go | 39 +- internal/service/common.go | 19 +- internal/service/common_test.go | 32 +- internal/service/connection_permissions.go | 59 
+- internal/service/connections.go | 72 +- internal/service/contract.go | 9 +- internal/service/dashboards.go | 182 +- internal/service/filters/filters.go | 12 +- internal/service/folders.go | 73 +- internal/service/libraryelements.go | 64 +- internal/service/login.go | 111 +- .../service/mocks/AlertNotificationsApi.go | 120 +- internal/service/mocks/AuthenticationApi.go | 303 +- .../service/mocks/ConnectionPermissions.go | 124 +- internal/service/mocks/ConnectionsApi.go | 236 +- internal/service/mocks/DashboardsApi.go | 124 +- internal/service/mocks/FoldersApi.go | 208 +- internal/service/mocks/GrafanaService.go | 1728 ++- internal/service/mocks/LibraryElementsApi.go | 153 +- internal/service/mocks/OrganizationsApi.go | 347 +- internal/service/mocks/ServiceAccountApi.go | 192 +- internal/service/mocks/Storage.go | 123 +- internal/service/mocks/TeamsApi.go | 124 +- internal/service/mocks/TokenApi.go | 96 +- internal/service/mocks/UsersApi.go | 179 +- internal/service/organizations.go | 98 +- internal/service/server.go | 4 +- internal/service/serviceaccounts.go | 56 +- internal/service/storage_cloud.go | 11 +- internal/service/storage_local.go | 5 +- internal/service/teams.go | 71 +- internal/service/tokens.go | 22 +- internal/service/user.go | 74 +- internal/templating/templating.go | 132 + internal/templating/templating_test.go | 58 + internal/tools/generics_tooling.go | 14 +- internal/tools/prompt_helpers.go | 5 +- internal/types/models.go | 8 + internal/version/version.go | 2 +- test/cloud_integration_test.go | 27 +- test/common_test.go | 166 +- test/connections_integration_test.go | 45 +- test/dashboard_integration_test.go | 158 +- .../General/top-talkers-over-time.json | 2 +- test/data/secure/complex.json | 4 + test/data/secure/default.json | 4 + test/data/templates/template_example.go.tmpl | 12785 ++++++++++++++++ test/folder_integration_test.go | 12 +- test/libraryelements_integration_test.go | 18 +- test/organizations_integration_test.go | 6 +- 
test/team_integration_test.go | 14 +- test/users_integration_test.go | 2 +- 104 files changed, 21487 insertions(+), 1062 deletions(-) create mode 100644 .mockery.yaml create mode 100644 cli/backup/alertnotifications.go create mode 100644 cli/backup/backup.go create mode 100644 cli/backup/connection_permissions.go create mode 100644 cli/backup/connections.go create mode 100644 cli/backup/dashboard.go create mode 100644 cli/backup/folder_permissions.go create mode 100644 cli/backup/folders.go create mode 100644 cli/backup/library.go create mode 100644 cli/backup/organizations.go create mode 100644 cli/backup/team.go create mode 100644 cli/backup/users.go create mode 100644 cli/commandeer.go create mode 100644 cli/support/init_cfg.go create mode 100644 cli/support/root.go create mode 100644 cli/support/simple.go create mode 100644 cli/test/conections_test.go create mode 100644 cli/test/devel_test.go create mode 100644 cli/test/support.go create mode 100644 cli/test/version_test.go create mode 100644 cli/tools/auth.go create mode 100644 cli/tools/auth_service_accounts.go create mode 100644 cli/tools/auth_tokens.go create mode 100644 cli/tools/context.go create mode 100644 cli/tools/devel.go create mode 100644 cli/tools/organizations.go create mode 100644 cli/tools/tools.go create mode 100644 cli/tools/users.go create mode 100644 cli/version.go create mode 100644 cmd/gdg/main.go create mode 100644 cmd/gen/main.go create mode 100644 config/assets.go create mode 100644 config/templates-example.yml create mode 100644 internal/log/slog_handler.go create mode 100644 internal/templating/templating.go create mode 100644 internal/templating/templating_test.go create mode 100644 internal/types/models.go create mode 100644 test/data/secure/complex.json create mode 100644 test/data/secure/default.json create mode 100644 test/data/templates/template_example.go.tmpl diff --git a/.github/workflows/code_scanners.yml b/.github/workflows/code_scanners.yml index 6c2ad20a..fa5e2b15 100644 
--- a/.github/workflows/code_scanners.yml +++ b/.github/workflows/code_scanners.yml @@ -26,10 +26,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Source - uses: actions/checkout@v3 - - uses: actions/setup-go@v4 + uses: actions/checkout@v4 + - uses: actions/setup-go@v5 with: - go-version: "1.21.3" + go-version: "1.21.5" cache: false - name: Install Task run: sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index 6934eb76..9b915ebb 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -11,7 +11,7 @@ jobs: test: strategy: matrix: - go: [ {version: 1.21.3, token: 1}, {version: 1.21.3, token: 0}] + go: [ {version: 1.21.5, token: 1}, {version: 1.21.5, token: 0}] grafana: [ 10.1.4 ] env: @@ -20,14 +20,14 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Go - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: ${{ matrix.go.version }} - name: Verify go version run: go version - - uses: actions/cache@v3 + - uses: actions/cache@v4 with: path: | ~/go/pkg/mod @@ -40,11 +40,11 @@ jobs: run: | echo "token IS $TEST_TOKEN_CONFIG" - name: Calc coverage - if: "${{ matrix.go.version == '1.21.3' && matrix.grafana == '10.1.4' && matrix.go.token == '0' }}" + if: "${{ matrix.go.version == '1.21.5' && matrix.grafana == '10.1.4' && matrix.go.token == '0' }}" run: | go test -v -covermode=atomic -coverprofile=coverage.out ./... 
- name: Convert coverage.out to coverage.lcov - if: "${{ matrix.go.version == '1.21.3' && matrix.grafana == '10.1.4' && matrix.go.token == '0' }}" + if: "${{ matrix.go.version == '1.21.5' && matrix.grafana == '10.1.4' && matrix.go.token == '0' }}" uses: jandelgado/gcov2lcov-action@v1.0.9 - name: Test if: "${{ matrix.go.token == '1' }}" diff --git a/.github/workflows/hugo.yml b/.github/workflows/hugo.yml index 004d4f69..3f97b954 100644 --- a/.github/workflows/hugo.yml +++ b/.github/workflows/hugo.yml @@ -10,7 +10,7 @@ jobs: deployHugoPages: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: true fetch-depth: 0 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 729f66d6..223f7718 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -10,12 +10,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: true fetch-depth: 0 - name: Set up Go - uses: actions/setup-go@v3 + uses: actions/setup-go@v5 with: go-version: '1.21' - name: Log in to Docker Hub @@ -25,10 +25,10 @@ jobs: password: ${{ secrets.GITHUB_TOKEN }} registry: ghcr.io - name: Run GoReleaser - uses: goreleaser/goreleaser-action@v3 + uses: goreleaser/goreleaser-action@v5 with: distribution: goreleaser - version: latest + version: v1.24.0 args: release --clean env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.goreleaser.yml b/.goreleaser.yml index 0af0bc9c..1091faf3 100644 --- a/.goreleaser.yml +++ b/.goreleaser.yml @@ -4,32 +4,53 @@ before: hooks: # You may remove this if you don't use go modules. - go mod tidy - # you may remove this if you don't need go generate - - go generate ./... 
source: rlcp: true + builds: - - env: + - id: gdg + env: - CGO_ENABLED=0 binary: gdg - ldflags: -s -w -X github.com/esnet/gdg/version.GitCommit={{ .Commit }} -X github.com/esnet/gdg/version.BuildDate={{ .Date }} -X github.com/esnet/gdg/version.Version={{ .Tag }} + ldflags: -s -w -X github.com/esnet/gdg/internal/version.GitCommit={{ .Commit }} -X github.com/esnet/gdg/internal/version.BuildDate={{ .Date }} -X github.com/esnet/gdg/internal/version.Version={{ .Tag }} + main: ./cmd/gdg goos: - linux - windows - darwin + goarch: + - arm64 + - 386 + - amd64 + - id: gdg-generate + env: + - CGO_ENABLED=0 + binary: gdg-generate + ldflags: -s -w -X github.com/esnet/gdg/internal/version.GitCommit={{ .Commit }} -X github.com/esnet/gdg/internal/version.BuildDate={{ .Date }} -X github.com/esnet/gdg/internal/version.Version={{ .Tag }} + main: ./cmd/gen + goos: + - linux + - windows + - darwin + goarch: + - arm64 + - 386 + - amd64 nfpms: - - - id: gdg + - id: gdg package_name: gdg file_name_template: "{{ .ConventionalFileName }}" homepage: https://software.es.net/gdg/ maintainer: GDG ESNet description: |- - GDG is a tool used to manage dashboards, datasources, orgs and various entities of the Grafana application. + GDG is a tool used to manage dashboards, connections, organizations and various entities of the Grafana application. license: BSD License + builds: + - gdg + - gdg-generate formats: - apk - deb @@ -52,37 +73,44 @@ nfpms: - src: config/importer-example.yml dst: /etc/gdg/importer.yml type: config + # Simple config file + - src: config/templates-example.yml + dst: /etc/gdg/templates.yml + type: config + +# ids: [ gdg gdg-generate ] universal_binaries: - replace: true + ids: + - gdg + - gdg-generate release: prerelease: auto dockers: - - - id: gdg -# # You can have multiple Docker images. -# - # ID of the image, needed if you want to filter by it later on (e.g. on custom publishers). + - id: gdg + # # You can have multiple Docker images. 
+ # - # ID of the image, needed if you want to filter by it later on (e.g. on custom publishers). goos: linux goarch: amd64 -# + # image_templates: - "ghcr.io/esnet/gdg:latest" - "ghcr.io/esnet/gdg:{{ .RawVersion }}" - "ghcr.io/esnet/gdg:{{ .Major }}.{{ .Minor }}" skip_push: false - dockerfile: "docker/Dockerfile-gorelease" + dockerfile: "docker/Dockerfile" brews: - - - name: + - name: gdg homepage: https://software.es.net/gdg commit_msg_template: "Brew formula update for {{ .ProjectName }} version {{ .Tag }}" - description: Grafana Dash-n-Grab (GDG) -- Dashboard/DataSource Manager for grafana supporting backup/restore to local filesystem, s3, gcs, azure, and other S3 compatible storage engines. + description: Grafana Dash-n-Grab (GDG) -- Dashboard/DataSource Manager for grafana supporting backup/restore to local filesystem, s3, gcs, azure, and other S3 compatible storage engines. folder: Formula repository: owner: esnet @@ -90,19 +118,27 @@ brews: token: "{{ .Env.HOMEBREW_TOKEN }}" branch: main commit_author: - name: GDG ESNet + name: GDG ESNet email: gdg@es.net archives: - - name_template: >- - {{ .ProjectName }}_ - {{- title .Os }}_ - {{- if eq .Arch "amd64" }}x86_64 - {{- else if eq .Arch "386" }}i386 - {{- else }}{{ .Arch }}{{ end }} - files: + - name_template: >- + {{ .ProjectName }}_ + {{- title .Os }}_ + {{- if eq .Arch "amd64" }}x86_64 + {{- else if eq .Arch "386" }}i386 + {{- else }}{{ .Arch }}{{ end }} + allow_different_binary_count: true + format_overrides: + - goos: windows + format: zip + builds: + - gdg + - gdg-generate + files: - README* - config/importer-example.yml + - config/templates-example.yml checksum: name_template: "checksums.txt" diff --git a/.mockery.yaml b/.mockery.yaml new file mode 100644 index 00000000..b95a7950 --- /dev/null +++ b/.mockery.yaml @@ -0,0 +1,13 @@ +with-expecter: true +recursive: false +all: true +dir: "{{.InterfaceDir}}/mocks" +outpkg: "mocks" +filename: "{{.InterfaceName}}.go" +mockname: "{{.InterfaceName}}" 
+packages: + github.com/esnet/gdg/internal/service: + config: + all: true + dir: "{{.InterfaceDir}}/mocks" + outpkg: "mocks" diff --git a/README.md b/README.md index d7485456..f8542f4c 100644 --- a/README.md +++ b/README.md @@ -12,6 +12,10 @@ The following remote backup locations are supported: Please find the generated documentation [here](https://software.es.net/gdg/) and the code for updating the docs is available [here](https://github.com/esnet/gdg/blob/master/documentation/content/docs/usage_guide.md) +## Quickstart + +![Quickstart screen](website/static/quickstart.gif) + ## Release conventions. GDG mostly follows the semver conventions with some minor modifications. @@ -33,7 +37,3 @@ contexts. i.e. `gdg diff dashboards prod staging` is a major divergences from For more info, please see the release notes and documentation both available [here](https://software.es.net/gdg/) -## Quickstart - -![Quickstart screen](website/static/quickstart.gif) - diff --git a/Taskfile.yml b/Taskfile.yml index 62a20701..743f14b9 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -14,7 +14,7 @@ dotenv: ['.env'] tasks: default: cmds: - - task: build + - task: build_all install_tools: desc: "Install required Dev tools by GDG" cmds: @@ -22,10 +22,6 @@ tasks: - go install github.com/client9/misspell/cmd/misspell@latest - go install github.com/securego/gosec/v2/cmd/gosec@master - go install golang.org/x/vuln/cmd/govulncheck@latest - format: - desc: "Format code" - cmds: - - gofmt -w -s . 
security: desc: "Run security scan" cmds: @@ -51,37 +47,33 @@ tasks: mocks: desc: "Re-generate Mocks" cmds: - - mockery --dir=internal/service/ --output=internal/service/mocks --outpkg=mocks --all + - rm -fr internal/service/mocks + - mockery linux: desc: "Build linux binary" cmds: - - env GOOS='linux' GOARCH='amd64' go build -ldflags "{{ .LD_FLAGS }} " -o bin/{{ .BIN_NAME }}_linux + - env GOOS='linux' GOARCH='amd64' go build -ldflags "{{ .LD_FLAGS }}" -o bin/{{ .BIN_NAME }}_linux cmd/gdg/main.go + build_all: + desc: "Building All binaries" + cmds: + - task: build + - task: build_generate build: desc: "Buiding {{ .BIN_NAME }} {{ .VERSION }}" cmds: - echo "GOPATH=${GOPATH}" - - go build -ldflags "{{ .LD_FLAGS }}" -o bin/{{ .BIN_NAME }} + - go build -ldflags "{{ .LD_FLAGS }}" -o bin/{{ .BIN_NAME }} cmd/gdg/main.go + build_generate: + desc: "Building {{ .BIN_NAME }}-generate {{ .VERSION }}" + cmds: + - echo "GOPATH=${GOPATH}" + - go build -ldflags "{{ .LD_FLAGS }}" -o bin/{{ .BIN_NAME }}-generate cmd/gen/main.go install: desc: "installing {{ .BIN_NAME }} {{ .VERSION }}" cmds: - echo "GOPATH=${GOPATH}" - - go install -ldflags "{{ .LD_FLAGS}}" - - mv ${GOPATH}/bin/gdg ${GOPATH}/bin/{{ .BIN_NAME }} + - go install -ldflags "{{ .LD_FLAGS}}" cmd/gdg/main.go silent: false - get-deps: - desc: "Tidy Deps" - cmds: - - go mod tidy - pakcage: - desc: "building image {{ .BIN_NAME }} {{ .VERSION }} {{ .GIT_COMMIT }}" - cmds: - - docker build --build-arg VERSION={{ .VERSION }} --build-arg GIT_COMMIT={{ .GIT_COMMIT }} -t $(IMAGE_NAME):local . 
- tag: - desc: "Tagging: latest {{ .VERSION }} {{ .GIT_COMMIT }}" - cmds: - - docker tag $(IMAGE_NAME):local $(IMAGE_NAME):{{ .GIT_COMMIT }} - - docker tag $(IMAGE_NAME):local $(IMAGE_NAME):{{ .VERSION }} - - docker tag $(IMAGE_NAME):local $(IMAGE_NAME):latest push: desc: "Pushing docker image to registry: latest {{ .VERSION }} {{ .GIT_COMMIT }}" deps: [tag] @@ -103,7 +95,8 @@ tasks: deps: [clean] desc: "Release GDG" cmds: - - goreleaser release + - goreleaser release --skip=publish,validate + test: desc: "test check" cmds: diff --git a/cli/backup/alertnotifications.go b/cli/backup/alertnotifications.go new file mode 100644 index 00000000..ff4e1881 --- /dev/null +++ b/cli/backup/alertnotifications.go @@ -0,0 +1,154 @@ +package backup + +import ( + "context" + "fmt" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/config" + "github.com/esnet/gdg/internal/service" + "github.com/jedib0t/go-pretty/v6/table" + "github.com/spf13/cobra" + "log/slog" +) + +func newAlertNotificationsCommand() simplecobra.Commander { + description := "Manage alert notification channels" + return &support.SimpleCommand{ + NameP: "alertnotifications", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"an", "alertnotifications"} + }, + CommandsList: []simplecobra.Commander{ + newListAlertNotificationsCmd(), + newDownloadAlertNotificationsCmd(), + newUploadAlertNotificationsCmd(), + newClearAlertNotificationsCmd(), + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + }, + } + +} + +func newClearAlertNotificationsCmd() simplecobra.Commander { + description := "delete all alert notification channels from grafana" + return &support.SimpleCommand{ + NameP: "clear", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r 
*support.RootCommand) { + cmd.Aliases = []string{"c"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Warn("Alert Notifications will be deprecated as of Grafana 9.0, this API may no longer work soon") + rootCmd.TableObj.AppendHeader(table.Row{"type", "filename"}) + + slog.Info("Clearing all alert notification channels for context", + "context", config.Config().GetGDGConfig().GetContext()) + deleted := rootCmd.GrafanaSvc().DeleteAllAlertNotifications() + for _, item := range deleted { + rootCmd.TableObj.AppendRow(table.Row{"alertnotification", item}) + } + if len(deleted) == 0 { + slog.Info("No alert notification channels were found. 0 removed") + } else { + slog.Info("alert notification channels were deleted", "count", len(deleted)) + rootCmd.TableObj.Render() + } + return nil + }, + } +} + +func newUploadAlertNotificationsCmd() simplecobra.Commander { + description := "upload all alert notification channels to grafana" + return &support.SimpleCommand{ + NameP: "upload", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"u"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Warn("Alert Notifications will be deprecated as of Grafana 9.0, this API may no longer work soon") + rootCmd.TableObj.AppendHeader(table.Row{"name", "id", "UID"}) + + slog.Info("Exporting alert notification channels for context", + "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.GrafanaSvc().UploadAlertNotifications() + items := rootCmd.GrafanaSvc().ListAlertNotifications() + for _, item := range items { + rootCmd.TableObj.AppendRow(table.Row{item.Name, item.ID, item.UID}) + } + if len(items) > 0 { + rootCmd.TableObj.Render() + } else { + slog.Info("No alert notification channels found") + } + return nil + }, + } +} + +func 
newDownloadAlertNotificationsCmd() simplecobra.Commander { + description := "download all alert notification channels from grafana to local filesystem" + return &support.SimpleCommand{ + NameP: "download", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"d"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Warn("Alert Notifications will be deprecated as of Grafana 9.0, this API may no longer work soon") + rootCmd.TableObj.AppendHeader(table.Row{"type", "filename"}) + + slog.Info("Downloading alert notification channels for context", + "context", config.Config().GetGDGConfig().GetContext()) + + savedFiles := rootCmd.GrafanaSvc().DownloadAlertNotifications() + for _, file := range savedFiles { + rootCmd.TableObj.AppendRow(table.Row{"alertnotification", file}) + } + rootCmd.TableObj.Render() + return nil + }, + } +} + +func newListAlertNotificationsCmd() simplecobra.Commander { + description := "List all alert notification channels from grafana" + return &support.SimpleCommand{ + NameP: "list", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"l"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Warn("Alert Notifications will be deprecated as of Grafana 9.0, this API may no longer work soon") + + rootCmd.TableObj.AppendHeader(table.Row{"id", "name", "slug", "type", "default", "url"}) + alertnotifications := rootCmd.GrafanaSvc().ListAlertNotifications() + + slog.Info("Listing alert notifications channels for context", + "context", config.Config().GetGDGConfig().GetContext()) + + if len(alertnotifications) == 0 { + slog.Info("No alert notifications found") + } else { + for _, link := range alertnotifications { + url := 
fmt.Sprintf("%s/alerting/notification/%d/edit", config.Config().GetDefaultGrafanaConfig().URL, link.ID) + rootCmd.TableObj.AppendRow(table.Row{link.ID, link.Name, service.GetSlug(link.Name), link.Type, link.IsDefault, url}) + } + rootCmd.TableObj.Render() + } + + return nil + }, + } +} diff --git a/cli/backup/backup.go b/cli/backup/backup.go new file mode 100644 index 00000000..2a740cca --- /dev/null +++ b/cli/backup/backup.go @@ -0,0 +1,40 @@ +package backup + +import ( + "context" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/spf13/cobra" +) + +func NewBackupCommand() simplecobra.Commander { + description := "Manage entities that are backed up and updated via api" + return &support.SimpleCommand{ + NameP: "backup", + Short: description, + Long: `Manage entities that are backed up and updated via api. These utilities are mostly +limited to clear/delete, list, download and upload. Any other functionality will be found under the tools.`, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"b"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + }, + InitCFunc: func(cd *simplecobra.Commandeer, r *support.RootCommand) error { + support.InitConfiguration(cd.CobraCommand) + r.GrafanaSvc().InitOrganizations() + return nil + }, + CommandsList: []simplecobra.Commander{ + newDashboardCommand(), + newAlertNotificationsCommand(), + newConnectionsCommand(), + newFolderCommand(), + newLibraryElementsCommand(), + newOrganizationsCommand(), + newTeamsCommand(), + newUsersCommand(), + }, + } + +} diff --git a/cli/backup/connection_permissions.go b/cli/backup/connection_permissions.go new file mode 100644 index 00000000..54521491 --- /dev/null +++ b/cli/backup/connection_permissions.go @@ -0,0 +1,163 @@ +package backup + +import ( + "context" + "fmt" + "github.com/bep/simplecobra" + 
"github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/config" + "github.com/esnet/gdg/internal/service" + "github.com/esnet/gdg/internal/tools" + "github.com/jedib0t/go-pretty/v6/table" + "log/slog" + + "github.com/spf13/cobra" +) + +func newConnectionsPermissionCmd() simplecobra.Commander { + description := "Connections Permission" + return &support.SimpleCommand{ + NameP: "permission", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"l", "permissions"} + }, + CommandsList: []simplecobra.Commander{ + newConnectionsPermissionListCmd(), + newConnectionsPermissionDownloadCmd(), + newConnectionsPermissionUploadCmd(), + newConnectionsPermissionClearCmd(), + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + }, + } +} + +func newConnectionsPermissionListCmd() simplecobra.Commander { + description := "List Connection Permissions" + return &support.SimpleCommand{ + NameP: "list", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"l"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + connectionFilter, _ := cd.CobraCommand.Flags().GetString("connection") + filters := service.NewConnectionFilter(connectionFilter) + slog.Info("Listing Connection Permissions for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"id", "uid", "name", "slug", "type", "default", "url"}) + connections := rootCmd.GrafanaSvc().ListConnectionPermissions(filters) + _ = connections + + if len(connections) == 0 { + slog.Info("No connections found") + } else { + for link, perms := range connections { + url := fmt.Sprintf("%s/datasource/edit/%d", config.Config().GetDefaultGrafanaConfig().URL, 
link.ID) + rootCmd.TableObj.AppendRow(table.Row{link.ID, link.UID, link.Name, service.GetSlug(link.Name), link.Type, link.IsDefault, url}) + if perms != nil && perms.Enabled { + for _, perm := range perms.Permissions { + rootCmd.TableObj.AppendRow(table.Row{link.ID, link.UID, " PERMISSION-->", perm.PermissionName, perm.Team, perm.UserEmail}) + } + } + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} +func newConnectionsPermissionClearCmd() simplecobra.Commander { + description := "Clear Connection Permissions" + return &support.SimpleCommand{ + NameP: "clear", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"c"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Clear all connections permissions") + tools.GetUserConfirmation(fmt.Sprintf("WARNING: this will clear all permission from all connections on: '%s' "+ + "(Or all permission matching your --connection filter). 
Do you wish to continue (y/n) ", config.Config().GetGDGConfig().ContextName, + ), "", true) + rootCmd.TableObj.AppendHeader(table.Row{"cleared connection permissions"}) + connectionFilter, _ := cd.CobraCommand.Flags().GetString("connection") + filters := service.NewConnectionFilter(connectionFilter) + connections := rootCmd.GrafanaSvc().DeleteAllConnectionPermissions(filters) + + if len(connections) == 0 { + slog.Info("No connections found") + } else { + for _, connections := range connections { + rootCmd.TableObj.AppendRow(table.Row{connections}) + } + rootCmd.TableObj.Render() + } + + return nil + }, + } +} + +func newConnectionsPermissionDownloadCmd() simplecobra.Commander { + description := "Download Connection Permissions" + return &support.SimpleCommand{ + NameP: "download", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"d"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("import Connections for context", + "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"filename"}) + connectionFilter, _ := cd.CobraCommand.Flags().GetString("connection") + filters := service.NewConnectionFilter(connectionFilter) + connections := rootCmd.GrafanaSvc().DownloadConnectionPermissions(filters) + slog.Info("Downloading connections permissions") + + if len(connections) == 0 { + slog.Info("No connections found") + } else { + for _, connections := range connections { + rootCmd.TableObj.AppendRow(table.Row{connections}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} +func newConnectionsPermissionUploadCmd() simplecobra.Commander { + description := "Upload Connection Permissions" + return &support.SimpleCommand{ + NameP: "upload", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = 
[]string{"u"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Uploading connections permissions") + rootCmd.TableObj.AppendHeader(table.Row{"connection permission"}) + connectionFilter, _ := cd.CobraCommand.Flags().GetString("connection") + filters := service.NewConnectionFilter(connectionFilter) + connections := rootCmd.GrafanaSvc().UploadConnectionPermissions(filters) + + if len(connections) == 0 { + slog.Info("No connections found") + } else { + for _, connections := range connections { + rootCmd.TableObj.AppendRow(table.Row{connections}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} diff --git a/cli/backup/connections.go b/cli/backup/connections.go new file mode 100644 index 00000000..e21b51f5 --- /dev/null +++ b/cli/backup/connections.go @@ -0,0 +1,139 @@ +package backup + +import ( + "context" + "fmt" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/config" + "github.com/esnet/gdg/internal/service" + "github.com/jedib0t/go-pretty/v6/table" + "log/slog" + + "github.com/spf13/cobra" +) + +func newConnectionsCommand() simplecobra.Commander { + description := "Manage connections (formerly Data Sources)" + return &support.SimpleCommand{ + NameP: "connections", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"connection", "ds", "c", "datasource", "datasources"} + connections := cmd + connections.PersistentFlags().StringP("connection", "", "", "filter by connection slug") + }, + CommandsList: []simplecobra.Commander{ + newClearConnectionsCmd(), + newUploadConnectionsCmd(), + newDownloadConnectionsCmd(), + newListConnectionsCmd(), + newConnectionsPermissionCmd(), + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + }, + } +} + 
+func newClearConnectionsCmd() simplecobra.Commander { + description := "clear all connections for the given Organization" + return &support.SimpleCommand{ + NameP: "clear", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"c"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Delete connections") + dashboardFilter, _ := cd.CobraCommand.Flags().GetString("datasource") + filters := service.NewConnectionFilter(dashboardFilter) + savedFiles := rootCmd.GrafanaSvc().DeleteAllConnections(filters) + rootCmd.TableObj.AppendHeader(table.Row{"type", "filename"}) + for _, file := range savedFiles { + rootCmd.TableObj.AppendRow(table.Row{"datasource", file}) + } + rootCmd.TableObj.Render() + return nil + }, + } +} + +func newUploadConnectionsCmd() simplecobra.Commander { + description := "upload all connections to grafana for the given Organization" + return &support.SimpleCommand{ + NameP: "upload", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"u"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Uploading connections") + dashboardFilter, _ := cd.CobraCommand.Flags().GetString("connection") + filters := service.NewConnectionFilter(dashboardFilter) + exportedList := rootCmd.GrafanaSvc().UploadConnections(filters) + rootCmd.TableObj.AppendHeader(table.Row{"type", "filename"}) + for _, file := range exportedList { + rootCmd.TableObj.AppendRow(table.Row{"datasource", file}) + } + rootCmd.TableObj.Render() + return nil + }, + } +} + +func newDownloadConnectionsCmd() simplecobra.Commander { + description := "download all connections from grafana for the given Organization" + return &support.SimpleCommand{ + NameP: "download", + Short: 
description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"d"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Importing connections for context", + "context", config.Config().GetGDGConfig().GetContext()) + dashboardFilter, _ := cd.CobraCommand.Flags().GetString("connection") + filters := service.NewConnectionFilter(dashboardFilter) + savedFiles := rootCmd.GrafanaSvc().DownloadConnections(filters) + rootCmd.TableObj.AppendHeader(table.Row{"type", "filename"}) + for _, file := range savedFiles { + rootCmd.TableObj.AppendRow(table.Row{"datasource", file}) + } + rootCmd.TableObj.Render() + return nil + }, + } +} +func newListConnectionsCmd() simplecobra.Commander { + description := "List all connections for the given Organization" + return &support.SimpleCommand{ + NameP: "list", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"l"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + rootCmd.TableObj.AppendHeader(table.Row{"id", "uid", "name", "slug", "type", "default", "url"}) + dashboardFilter, _ := cd.CobraCommand.Flags().GetString("connection") + filters := service.NewConnectionFilter(dashboardFilter) + dsListing := rootCmd.GrafanaSvc().ListConnections(filters) + slog.Info("Listing connections for context", "context", config.Config().GetGDGConfig().GetContext()) + if len(dsListing) == 0 { + slog.Info("No connections found") + } else { + for _, link := range dsListing { + url := fmt.Sprintf("%s/datasource/edit/%d", config.Config().GetDefaultGrafanaConfig().URL, link.ID) + rootCmd.TableObj.AppendRow(table.Row{link.ID, link.UID, link.Name, service.GetSlug(link.Name), link.Type, link.IsDefault, url}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} diff 
--git a/cli/backup/dashboard.go b/cli/backup/dashboard.go new file mode 100644 index 00000000..3ba4019a --- /dev/null +++ b/cli/backup/dashboard.go @@ -0,0 +1,202 @@ +package backup + +import ( + "context" + "encoding/json" + "fmt" + "log/slog" + "net/url" + "strings" + + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/config" + "github.com/esnet/gdg/internal/service" + "github.com/esnet/gdg/internal/tools" + "github.com/jedib0t/go-pretty/v6/table" + "github.com/spf13/cobra" +) + +var skipConfirmAction bool + +func parseDashboardGlobalFlags(command *cobra.Command) []string { + folderFilter, _ := command.Flags().GetString("folder") + dashboardFilter, _ := command.Flags().GetString("dashboard") + tagsFilter, _ := command.Flags().GetStringArray("tags") + rawTags, err := json.Marshal(tagsFilter) + jsonTags := "" + if err == nil { + jsonTags = string(rawTags) + } + + return []string{folderFilter, dashboardFilter, jsonTags} +} + +func newDashboardCommand() simplecobra.Commander { + description := "Manage Grafana Dashboards" + return &support.SimpleCommand{ + NameP: "dashboards", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"dash", "dashboard"} + dashboard := cmd + dashboard.PersistentFlags().BoolVarP(&skipConfirmAction, "skip-confirmation", "", false, "when set to true, bypass confirmation prompts") + dashboard.PersistentFlags().StringP("dashboard", "d", "", "filter by dashboard slug") + dashboard.PersistentFlags().StringP("folder", "f", "", "Filter by Folder Name (Quotes in names not supported)") + dashboard.PersistentFlags().StringArrayP("tags", "t", []string{}, "Filter by list of comma delimited tags") + }, + CommandsList: []simplecobra.Commander{ + newListDashboardsCmd(), + newDownloadDashboardsCmd(), + newUploadDashboardsCmd(), + newClearDashboardsCmd(), + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd 
*support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + }, + } +} + +func newClearDashboardsCmd() simplecobra.Commander { + description := "delete all monitored dashboards from grafana" + return &support.SimpleCommand{ + NameP: "clear", + Short: description, + Long: description, + CommandsList: []simplecobra.Commander{}, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"c"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + filter := service.NewDashboardFilter(parseDashboardGlobalFlags(cd.CobraCommand)...) + deletedDashboards := rootCmd.GrafanaSvc().DeleteAllDashboards(filter) + rootCmd.TableObj.AppendHeader(table.Row{"type", "filename"}) + for _, file := range deletedDashboards { + rootCmd.TableObj.AppendRow(table.Row{"dashboard", file}) + } + if len(deletedDashboards) == 0 { + slog.Info("No dashboards were found. 0 dashboards were removed") + } else { + slog.Info("dashboards were deleted", "count", len(deletedDashboards)) + rootCmd.TableObj.Render() + } + return nil + }, + } +} + +func newUploadDashboardsCmd() simplecobra.Commander { + description := "upload all dashboards to grafana" + return &support.SimpleCommand{ + NameP: "upload", + Short: description, + Long: description, + CommandsList: []simplecobra.Commander{}, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"u", "up"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + filter := service.NewDashboardFilter(parseDashboardGlobalFlags(cd.CobraCommand)...) + + if !skipConfirmAction { + tools.GetUserConfirmation(fmt.Sprintf("WARNING: this will delete all dashboards from the monitored folders: '%s' "+ + "(or all folders if ignore_dashboard_filters is set to true) and upload your local copy. 
Do you wish to "+ + "continue (y/n) ", strings.Join(config.Config().GetDefaultGrafanaConfig().GetMonitoredFolders(), ", "), + ), "", true) + } + rootCmd.GrafanaSvc().UploadDashboards(filter) + + rootCmd.TableObj.AppendHeader(table.Row{"Title", "id", "folder", "UID"}) + boards := rootCmd.GrafanaSvc().ListDashboards(filter) + + slog.Info(fmt.Sprintf("%d dashboards have been uploaded", len(boards))) + for _, link := range boards { + rootCmd.TableObj.AppendRow(table.Row{link.Title, link.ID, link.FolderTitle, link.UID}) + } + if len(boards) > 0 { + rootCmd.TableObj.Render() + } else { + slog.Info("No dashboards found") + } + return nil + }, + } +} + +func newDownloadDashboardsCmd() simplecobra.Commander { + description := "download all dashboards from grafana" + return &support.SimpleCommand{ + NameP: "download", + Short: description, + Long: description, + CommandsList: []simplecobra.Commander{}, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"d"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + filter := service.NewDashboardFilter(parseDashboardGlobalFlags(cd.CobraCommand)...) 
+ savedFiles := rootCmd.GrafanaSvc().DownloadDashboards(filter) + slog.Info("Downloading dashboards for context", + "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"type", "filename"}) + for _, file := range savedFiles { + rootCmd.TableObj.AppendRow(table.Row{"dashboard", file}) + } + rootCmd.TableObj.Render() + return nil + }, + } +} + +func newListDashboardsCmd() simplecobra.Commander { + description := "List all dashboards from grafana" + return &support.SimpleCommand{ + NameP: "list", + Short: description, + Long: description, + CommandsList: []simplecobra.Commander{}, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"l"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + rootCmd.TableObj.AppendHeader(table.Row{"id", "Title", "Slug", "Folder", "UID", "Tags", "URL"}) + + filters := service.NewDashboardFilter(parseDashboardGlobalFlags(cd.CobraCommand)...) 
+ boards := rootCmd.GrafanaSvc().ListDashboards(filters) + + slog.Info("Listing dashboards for context", slog.String("context", config.Config().GetGDGConfig().GetContext()), slog.Any("count", len(boards))) + for _, link := range boards { + base, err := url.Parse(config.Config().GetDefaultGrafanaConfig().URL) + var baseHost string + if err != nil { + baseHost = "http://unknown/" + slog.Warn("unable to determine grafana base host for dashboard", slog.String("dashboard-uid", link.UID)) + } else { + base.Path = "" + baseHost = base.String() + } + urlValue := fmt.Sprintf("%s%s", baseHost, link.URL) + var tagVal string + if len(link.Tags) > 0 { + tagValByte, err := json.Marshal(link.Tags) + if err == nil { + tagVal = string(tagValByte) + } + } + + rootCmd.TableObj.AppendRow(table.Row{ + link.ID, link.Title, link.Slug, link.FolderTitle, + link.UID, tagVal, urlValue, + }) + + } + if len(boards) > 0 { + rootCmd.TableObj.Render() + } else { + slog.Info("No dashboards found") + } + return nil + }, + } +} diff --git a/cli/backup/folder_permissions.go b/cli/backup/folder_permissions.go new file mode 100644 index 00000000..a12e41ad --- /dev/null +++ b/cli/backup/folder_permissions.go @@ -0,0 +1,116 @@ +package backup + +import ( + "context" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/config" + "github.com/jedib0t/go-pretty/v6/table" + "github.com/spf13/cobra" + "log/slog" +) + +func newFolderPermissionCommand() simplecobra.Commander { + description := "Folder Permissions" + return &support.SimpleCommand{ + NameP: "permission", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"p", "permissions"} + }, + CommandsList: []simplecobra.Commander{ + newFolderPermissionListCmd(), + newFolderPermissionUploadCmd(), + newFolderPermissionDownloadCmd(), + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, 
args []string) error { + return cd.CobraCommand.Help() + }, + } +} + +func newFolderPermissionListCmd() simplecobra.Commander { + description := "list Folder Permissions" + return &support.SimpleCommand{ + NameP: "list", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"l"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + rowConfigAutoMerge := table.RowConfig{AutoMerge: true} + + slog.Info("Listing Folders for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"folder ID", "folderUid", "folder Name", "UserID", "Team Name", "Role", "Permission Name"}, rowConfigAutoMerge) + folders := rootCmd.GrafanaSvc().ListFolderPermissions(getFolderFilter()) + + if len(folders) == 0 { + slog.Info("No folders found") + return nil + } + for key, value := range folders { + rootCmd.TableObj.AppendRow(table.Row{key.ID, key.UID, key.Title}) + for _, entry := range value { + rootCmd.TableObj.AppendRow(table.Row{"", "", " PERMISSION--->", entry.UserLogin, entry.Team, entry.Role, entry.PermissionName}, rowConfigAutoMerge) + } + } + rootCmd.TableObj.Render() + return nil + }, + } +} +func newFolderPermissionDownloadCmd() simplecobra.Commander { + description := "download Folders Permissions" + return &support.SimpleCommand{ + NameP: "download", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"d"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Downloading Folder Permissions for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"filename"}) + folders := rootCmd.GrafanaSvc().DownloadFolderPermissions(getFolderFilter()) + slog.Info("Downloading folder 
permissions") + + if len(folders) == 0 { + slog.Info("No folders found") + return nil + } + for _, folder := range folders { + rootCmd.TableObj.AppendRow(table.Row{folder}) + } + rootCmd.TableObj.Render() + return nil + }, + } +} +func newFolderPermissionUploadCmd() simplecobra.Commander { + description := "upload Folders Permissions" + return &support.SimpleCommand{ + NameP: "upload", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"u"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Uploading folder permissions") + rootCmd.TableObj.AppendHeader(table.Row{"file name"}) + folders := rootCmd.GrafanaSvc().UploadFolderPermissions(getFolderFilter()) + + if len(folders) == 0 { + slog.Info("No folders found") + return nil + } + for _, folder := range folders { + rootCmd.TableObj.AppendRow(table.Row{folder}) + } + rootCmd.TableObj.Render() + return nil + }, + } +} diff --git a/cli/backup/folders.go b/cli/backup/folders.go new file mode 100644 index 00000000..e8880c64 --- /dev/null +++ b/cli/backup/folders.go @@ -0,0 +1,152 @@ +package backup + +import ( + "context" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/config" + "github.com/esnet/gdg/internal/service" + "github.com/esnet/gdg/internal/service/filters" + "github.com/jedib0t/go-pretty/v6/table" + "log/slog" + + "github.com/spf13/cobra" +) + +var useFolderFilters bool + +func getFolderFilter() filters.Filter { + if !useFolderFilters { + return nil + } + return service.NewFolderFilter() + +} + +func newFolderCommand() simplecobra.Commander { + description := "Manage folder entities" + return &support.SimpleCommand{ + NameP: "folders", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"fld", "folder"} + 
cmd.PersistentFlags().BoolVar(&useFolderFilters, "use-filters", false, "Default to false, but if passed then will only operate on the list of folders listed in the configuration file") + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + }, + CommandsList: []simplecobra.Commander{ + newFolderPermissionCommand(), + newFolderListCmd(), + newFolderClearCmd(), + newFolderDownloadCmd(), + newFolderUploadCmd(), + }, + } + +} + +func newFolderClearCmd() simplecobra.Commander { + description := "delete Folders from grafana" + return &support.SimpleCommand{ + NameP: "clear", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"c", "delete"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Deleting all Folders for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"title"}) + + folders := rootCmd.GrafanaSvc().DeleteAllFolders(getFolderFilter()) + if len(folders) == 0 { + slog.Info("No Folders found") + } else { + for _, folder := range folders { + rootCmd.TableObj.AppendRow(table.Row{folder}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} + +func newFolderListCmd() simplecobra.Commander { + description := "List Folders" + return &support.SimpleCommand{ + NameP: "list", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"u"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Listing Folders for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"id", "uid", "title"}) + folders := 
rootCmd.GrafanaSvc().ListFolder(getFolderFilter()) + + if len(folders) == 0 { + slog.Info("No folders found") + } else { + for _, folder := range folders { + rootCmd.TableObj.AppendRow(table.Row{folder.ID, folder.UID, folder.Title}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} +func newFolderDownloadCmd() simplecobra.Commander { + description := "Download Folders from grafana" + return &support.SimpleCommand{ + NameP: "download", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"d"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Listing Folders for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"file"}) + folders := rootCmd.GrafanaSvc().DownloadFolders(getFolderFilter()) + if len(folders) == 0 { + slog.Info("No folders found") + } else { + for _, folder := range folders { + rootCmd.TableObj.AppendRow(table.Row{folder}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} +func newFolderUploadCmd() simplecobra.Commander { + description := "upload Folders to grafana" + return &support.SimpleCommand{ + NameP: "upload", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"u"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Uploading Folders for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"file"}) + folders := rootCmd.GrafanaSvc().UploadFolders(getFolderFilter()) + if len(folders) == 0 { + slog.Info("No folders found") + } else { + for _, folder := range folders { + rootCmd.TableObj.AppendRow(table.Row{folder}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} diff --git 
a/cli/backup/library.go b/cli/backup/library.go new file mode 100644 index 00000000..903dd0ba --- /dev/null +++ b/cli/backup/library.go @@ -0,0 +1,172 @@ +package backup + +import ( + "context" + "encoding/json" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/config" + "github.com/esnet/gdg/internal/service/filters" + "github.com/jedib0t/go-pretty/v6/table" + "github.com/spf13/cobra" + "log" + "log/slog" +) + +func newLibraryElementsCommand() simplecobra.Commander { + description := "Manage Library Elements" + return &support.SimpleCommand{ + NameP: "libraryelements", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"lib", "library"} + }, + CommandsList: []simplecobra.Commander{ + newLibraryElementsListCmd(), + newLibraryElementsClearCmd(), + newLibraryElementsDownloadCmd(), + newLibraryElementsUploadCmd(), + newLibraryElementsListConnectionsCmd(), + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + }, + } +} + +func newLibraryElementsClearCmd() simplecobra.Commander { + description := "delete all Library elements from grafana" + return &support.SimpleCommand{ + NameP: "clear", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"c"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + //filter := getLibraryGlobalFlags(cli) + deletedLibrarys := rootCmd.GrafanaSvc().DeleteAllLibraryElements(nil) + rootCmd.TableObj.AppendHeader(table.Row{"type", "filename"}) + for _, file := range deletedLibrarys { + rootCmd.TableObj.AppendRow(table.Row{"library", file}) + } + if len(deletedLibrarys) == 0 { + slog.Info("No library were found. 
0 libraries removed") + + } else { + slog.Info("libraries were deleted", "count", len(deletedLibrarys)) + rootCmd.TableObj.Render() + } + return nil + }, + } +} +func newLibraryElementsListCmd() simplecobra.Commander { + description := "List all library Elements" + return &support.SimpleCommand{ + NameP: "list", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"l"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + rootCmd.TableObj.AppendHeader(table.Row{"id", "UID", "Folder", "Name", "Type"}) + + elements := rootCmd.GrafanaSvc().ListLibraryElements(nil) + + slog.Info("Listing library for context", "context", config.Config().GetGDGConfig().GetContext()) + for _, link := range elements { + rootCmd.TableObj.AppendRow(table.Row{link.ID, link.UID, link.Meta.FolderName, link.Name, link.Type}) + + } + if len(elements) > 0 { + rootCmd.TableObj.Render() + } else { + slog.Info("No library found") + } + + return nil + }, + } +} +func newLibraryElementsDownloadCmd() simplecobra.Commander { + description := "Download all library from grafana to local file system" + return &support.SimpleCommand{ + NameP: "download", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"d"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Downloading library for context", "context", config.Config().GetGDGConfig().GetContext()) + savedFiles := rootCmd.GrafanaSvc().DownloadLibraryElements(nil) + rootCmd.TableObj.AppendHeader(table.Row{"type", "filename"}) + for _, file := range savedFiles { + rootCmd.TableObj.AppendRow(table.Row{"library", file}) + } + rootCmd.TableObj.Render() + return nil + }, + } +} +func newLibraryElementsUploadCmd() simplecobra.Commander { + description := 
"upload all library to grafana" + return &support.SimpleCommand{ + NameP: "upload", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"u"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("exporting lib elements") + libraryFilter := filters.NewBaseFilter() + elements := rootCmd.GrafanaSvc().UploadLibraryElements(libraryFilter) + rootCmd.TableObj.AppendHeader(table.Row{"Name"}) + if len(elements) > 0 { + for _, link := range elements { + rootCmd.TableObj.AppendRow(table.Row{link}) + } + rootCmd.TableObj.Render() + } else { + slog.Info("No library found") + } + return nil + }, + } +} + +func newLibraryElementsListConnectionsCmd() simplecobra.Commander { + description := "List all library Connection given a valid library Connection UID" + return &support.SimpleCommand{ + NameP: "list-connections", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"c"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + if len(args) != 1 { + log.Fatal("Wrong number of arguments, requires library element UUID") + } + rootCmd.TableObj.AppendHeader(table.Row{"id", "UID", "Slug", "Title", "Folder"}) + + libElmentUid := args[0] + elements := rootCmd.GrafanaSvc().ListLibraryElementsConnections(nil, libElmentUid) + slog.Info("Listing library connections for context", "context", config.Config().GetGDGConfig().GetContext()) + for _, link := range elements { + dash := link.Dashboard.(map[string]interface{}) + rootCmd.TableObj.AppendRow(table.Row{dash["id"].(json.Number), dash["uid"].(string), link.Meta.Slug, dash["title"].(string), link.Meta.FolderTitle}) + } + if len(elements) > 0 { + rootCmd.TableObj.Render() + } else { + slog.Info("No library found") + } + return nil + }, + } 
+} diff --git a/cli/backup/organizations.go b/cli/backup/organizations.go new file mode 100644 index 00000000..581ecd33 --- /dev/null +++ b/cli/backup/organizations.go @@ -0,0 +1,115 @@ +package backup + +import ( + "context" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/config" + "github.com/jedib0t/go-pretty/v6/table" + "github.com/spf13/cobra" + "log/slog" + "sort" +) + +func newOrganizationsCommand() simplecobra.Commander { + description := "Manage Grafana Organizations." + return &support.SimpleCommand{ + NameP: "organizations", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"org", "orgs"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + }, + CommandsList: []simplecobra.Commander{ + newOrganizationsListCmd(), + newOrganizationsDownloadCmd(), + newOrganizationsUploadCmd(), + }, + } + +} + +func newOrganizationsListCmd() simplecobra.Commander { + description := "List Grafana Organizations." 
+ return &support.SimpleCommand{ + NameP: "list", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"l"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Listing organizations for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"id", "org"}) + listOrganizations := rootCmd.GrafanaSvc().ListOrganizations() + sort.Slice(listOrganizations, func(a, b int) bool { + return listOrganizations[a].ID < listOrganizations[b].ID + }) + if len(listOrganizations) == 0 { + slog.Info("No organizations found") + } else { + for _, org := range listOrganizations { + rootCmd.TableObj.AppendRow(table.Row{org.ID, org.Name}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } + +} +func newOrganizationsDownloadCmd() simplecobra.Commander { + description := "download Organizations" + return &support.SimpleCommand{ + NameP: "download", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"d"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Downloading organizations for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"file"}) + listOrganizations := rootCmd.GrafanaSvc().DownloadOrganizations() + if len(listOrganizations) == 0 { + slog.Info("No organizations found") + } else { + for _, org := range listOrganizations { + rootCmd.TableObj.AppendRow(table.Row{org}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } + +} +func newOrganizationsUploadCmd() simplecobra.Commander { + description := "upload Orgs to grafana" + return &support.SimpleCommand{ + NameP: "upload", + Short: description, + Long: description, + WithCFunc: func(cmd 
*cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"u"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Uploading Folders for context: '%s'", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"file"}) + folders := rootCmd.GrafanaSvc().UploadOrganizations() + if len(folders) == 0 { + slog.Info("No Orgs were uploaded") + } else { + for _, folder := range folders { + rootCmd.TableObj.AppendRow(table.Row{folder}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } + +} diff --git a/cli/backup/team.go b/cli/backup/team.go new file mode 100644 index 00000000..25c552b0 --- /dev/null +++ b/cli/backup/team.go @@ -0,0 +1,168 @@ +package backup + +import ( + "context" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/config" + api "github.com/esnet/gdg/internal/service" + "github.com/grafana/grafana-openapi-client-go/models" + "github.com/jedib0t/go-pretty/v6/table" + "github.com/spf13/cobra" + "log/slog" +) + +func parseTeamGlobalFlags(command *cobra.Command) []string { + teamName, _ := command.Flags().GetString("team") + return []string{teamName} +} + +func getTeamPermission(permissionType models.PermissionType) string { + permission := "Member" + if permissionType == models.PermissionType(api.AdminUserPermission) { + permission = "Admin" + } + return permission +} + +func newTeamsCommand() simplecobra.Commander { + description := "Manage teams" + return &support.SimpleCommand{ + NameP: "teams", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"team", "t"} + cmd.PersistentFlags().StringP("team", "t", "", "team ID") + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + }, + CommandsList: 
[]simplecobra.Commander{ + newTeamsListCmd(), + newTeamsDownloadCmd(), + newTeamsUploadCmd(), + newTeamsClearCmd(), + }, + } + +} + +func newTeamsListCmd() simplecobra.Commander { + description := "list teams from grafana" + return &support.SimpleCommand{ + NameP: "list", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"l"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Listing teams for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"id", "name", "email", "orgID", "memberCount", "memberID", "member Permission"}) + filter := api.NewTeamFilter(parseTeamGlobalFlags(cd.CobraCommand)...) + teams := rootCmd.GrafanaSvc().ListTeams(filter) + if len(teams) == 0 { + slog.Info("No teams found") + } else { + for team, members := range teams { + rootCmd.TableObj.AppendRow(table.Row{team.ID, team.Name, team.Email, team.OrgID, team.MemberCount}) + if team.MemberCount > 0 { + for _, member := range members { + rootCmd.TableObj.AppendRow(table.Row{"", "", "", "", "", member.Login, getTeamPermission(member.Permission)}) + } + } + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} +func newTeamsDownloadCmd() simplecobra.Commander { + description := "download teams from grafana" + return &support.SimpleCommand{ + NameP: "download", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"d"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Importing Teams for context", "context", config.Config().GetGDGConfig().GetContext()) + filter := api.NewTeamFilter(parseTeamGlobalFlags(cd.CobraCommand)...) 
+ savedFiles := rootCmd.GrafanaSvc().DownloadTeams(filter) + if len(savedFiles) == 0 { + slog.Info("No teams found") + } else { + rootCmd.TableObj.AppendHeader(table.Row{"id", "name", "email", "orgID", "memberCount", "member user ID", "Member Permission"}) + for team, members := range savedFiles { + rootCmd.TableObj.AppendRow(table.Row{team.ID, team.Name, team.Email, team.OrgID, team.MemberCount}) + for _, member := range members { + rootCmd.TableObj.AppendRow(table.Row{"", "", "", "", "", member.Login, getTeamPermission(member.Permission)}) + } + } + rootCmd.TableObj.Render() + } + return nil + }, + } + +} +func newTeamsUploadCmd() simplecobra.Commander { + description := "upload teams to grafana" + return &support.SimpleCommand{ + NameP: "upload", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"u"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Exporting Teams for context", "context", config.Config().GetGDGConfig().GetContext()) + filter := api.NewTeamFilter(parseTeamGlobalFlags(cd.CobraCommand)...) 
+ savedFiles := rootCmd.GrafanaSvc().UploadTeams(filter) + if len(savedFiles) == 0 { + slog.Info("No teams found") + } else { + rootCmd.TableObj.AppendHeader(table.Row{"id", "name", "email", "orgID", "created", "memberCount", "member Login", "member Permission"}) + for team, members := range savedFiles { + rootCmd.TableObj.AppendRow(table.Row{team.ID, team.Name, team.Email, team.OrgID, team.MemberCount}) + if team.MemberCount > 0 { + for _, member := range members { + rootCmd.TableObj.AppendRow(table.Row{"", "", "", "", "", member.Login, getTeamPermission(member.Permission)}) + } + } + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} +func newTeamsClearCmd() simplecobra.Commander { + description := "Delete All Team from grafana" + return &support.SimpleCommand{ + NameP: "clear", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"c"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Deleting teams for context", "context", config.Config().GetGDGConfig().GetContext()) + filter := api.NewTeamFilter(parseTeamGlobalFlags(cd.CobraCommand)...) 
+ rootCmd.TableObj.AppendHeader(table.Row{"type", "team ID", "team Name"}) + teams, err := rootCmd.GrafanaSvc().DeleteTeam(filter) + if err != nil { + slog.Error(err.Error()) + } else { + for _, team := range teams { + rootCmd.TableObj.AppendRow(table.Row{"team", team.ID, team.Name}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} diff --git a/cli/backup/users.go b/cli/backup/users.go new file mode 100644 index 00000000..6b451124 --- /dev/null +++ b/cli/backup/users.go @@ -0,0 +1,152 @@ +package backup + +import ( + "context" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/config" + "github.com/esnet/gdg/internal/service" + "github.com/jedib0t/go-pretty/v6/table" + "github.com/spf13/cobra" + "log/slog" + "strings" +) + +func newUsersCommand() simplecobra.Commander { + description := "Manage users" + return &support.SimpleCommand{ + NameP: "users", + Short: description, + Long: `Provides some utility to manage grafana users from the CLI. 
Please note, as the credentials cannot be imported, the export will generate a default password for any user not already present`, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"user", "u"} + cmd.PersistentFlags().StringP("authlabel", "", "", "filter by a given auth label") + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + }, + CommandsList: []simplecobra.Commander{ + newUsersListCmd(), + newUsersDownloadCmd(), + newUsersUploadCmd(), + newUsersClearCmd(), + }, + } + +} + +func newUsersListCmd() simplecobra.Commander { + description := "list users from grafana" + return &support.SimpleCommand{ + NameP: "list", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"l"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + authLabel, _ := cd.CobraCommand.Flags().GetString("authlabel") + slog.Info("Listing users for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"id", "login", "name", "email", "admin", "disabled", "default Password", "authLabels"}) + users := rootCmd.GrafanaSvc().ListUsers(service.NewUserFilter(authLabel)) + if len(users) == 0 { + slog.Info("No users found") + } else { + for _, user := range users { + var labels string + if len(user.AuthLabels) > 0 { + labels = strings.Join(user.AuthLabels, ", ") + + } + rootCmd.TableObj.AppendRow(table.Row{user.ID, user.Login, user.Name, user.Email, user.IsAdmin, user.IsDisabled, service.DefaultUserPassword(user.Login), labels}) + } + rootCmd.TableObj.Render() + } + + return nil + }, + } +} +func newUsersDownloadCmd() simplecobra.Commander { + description := "download users from grafana" + return &support.SimpleCommand{ + NameP: "download", + Short: description, + 
Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"d"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + authLabel, _ := cd.CobraCommand.Flags().GetString("authlabel") + savedFiles := rootCmd.GrafanaSvc().DownloadUsers(service.NewUserFilter(authLabel)) + slog.Info("Importing Users for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"type", "filename"}) + if len(savedFiles) == 0 { + slog.Info("No users found") + } else { + for _, file := range savedFiles { + rootCmd.TableObj.AppendRow(table.Row{"user", file}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} +func newUsersUploadCmd() simplecobra.Commander { + description := "upload users to grafana" + return &support.SimpleCommand{ + NameP: "upload", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"u"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + authLabel, _ := cd.CobraCommand.Flags().GetString("authlabel") + slog.Info("Uploading Users to context", "context", config.Config().GetGDGConfig().GetContext()) + savedFiles := rootCmd.GrafanaSvc().UploadUsers(service.NewUserFilter(authLabel)) + rootCmd.TableObj.AppendHeader(table.Row{"id", "login", "name", "email", "grafanaAdmin", "disabled", "default Password", "authLabels"}) + if len(savedFiles) == 0 { + slog.Info("No users found") + } else { + for _, user := range savedFiles { + var labels string + if len(user.AuthLabels) > 0 { + labels = strings.Join(user.AuthLabels, ", ") + + } + rootCmd.TableObj.AppendRow(table.Row{user.ID, user.Login, user.Name, user.Email, user.IsGrafanaAdmin, user.IsDisabled, service.DefaultUserPassword(user.Login), labels}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} 
+func newUsersClearCmd() simplecobra.Commander { + description := "delete all users" + return &support.SimpleCommand{ + NameP: "clear", + Short: description, + Long: description, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"c"} + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + authLabel, _ := cd.CobraCommand.Flags().GetString("authlabel") + savedFiles := rootCmd.GrafanaSvc().DeleteAllUsers(service.NewUserFilter(authLabel)) + slog.Info("Delete Users for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"type", "filename"}) + if len(savedFiles) == 0 { + slog.Info("No users found") + } else { + for _, file := range savedFiles { + rootCmd.TableObj.AppendRow(table.Row{"user", file}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} diff --git a/cli/commandeer.go b/cli/commandeer.go new file mode 100644 index 00000000..701a3878 --- /dev/null +++ b/cli/commandeer.go @@ -0,0 +1,45 @@ +package cli + +import ( + "context" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/backup" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/cli/tools" + assets "github.com/esnet/gdg/config" + "log/slog" +) + +// Execute executes a command. +func Execute(defaultCfg string, args []string, options ...support.RootOption) error { + var err error + support.DefaultConfig, err = assets.GetFile(defaultCfg) + if err != nil { + slog.Warn("unable to find or load default configuration", "err", err) + } + rootCmd := support.NewRootCmd(getNewRootCmd(), options...) 
+ x, err := simplecobra.New(rootCmd) + if err != nil { + return err + } + + cd, err := x.Execute(context.Background(), args) + + if err != nil || len(args) == 0 { + _ = cd.CobraCommand.Help() + return err + } + + return nil +} + +func getNewRootCmd() *support.RootCommand { + return &support.RootCommand{ + NameP: "gdg", + CommandEntries: []simplecobra.Commander{ + newVersionCmd(), + tools.NewToolsCommand(), + backup.NewBackupCommand(), + }, + } +} diff --git a/cli/support/init_cfg.go b/cli/support/init_cfg.go new file mode 100644 index 00000000..b99d0fc6 --- /dev/null +++ b/cli/support/init_cfg.go @@ -0,0 +1,29 @@ +package support + +import ( + "github.com/esnet/gdg/internal/config" + appconfig "github.com/esnet/gdg/internal/log" + "github.com/spf13/cobra" + "os" +) + +// InitConfiguration Loads configuration, and setups fail over case +func InitConfiguration(cmd *cobra.Command) { + configOverride, _ := cmd.Flags().GetString("config") + if DefaultConfig == "" { + raw, err := os.ReadFile("config/importer-example.yml") + if err == nil { + DefaultConfig = string(raw) + } else { + DefaultConfig = "" + } + } + + //Registers sub CommandsList + config.InitConfig(configOverride, DefaultConfig) + appconfig.InitializeAppLogger(os.Stdout, os.Stderr, config.Config().IsDebug()) + + //Validate current configuration + config.Config().GetDefaultGrafanaConfig().Validate() + +} diff --git a/cli/support/root.go b/cli/support/root.go new file mode 100644 index 00000000..ac1aa5cf --- /dev/null +++ b/cli/support/root.go @@ -0,0 +1,99 @@ +package support + +import ( + "context" + "errors" + "github.com/bep/simplecobra" + appconfig "github.com/esnet/gdg/internal/log" + "github.com/esnet/gdg/internal/service" + "github.com/jedib0t/go-pretty/v6/table" + "os" +) + +var ( + DefaultConfig string +) + +// RootCommand struct wraps the root command and supporting services needed +type RootCommand struct { + NameP string + isInit bool + + GrafanaSvc func() service.GrafanaService + + ctx 
context.Context + initThis *simplecobra.Commandeer + initRunner *simplecobra.Commandeer + failWithCobraCommand bool + failRun bool + + TableObj table.Writer + + CommandEntries []simplecobra.Commander +} + +// RootOption used to configure the Root Command struct +type RootOption func(command *RootCommand) + +// NewRootCmd Allows to construct a root command passing any number of arguments to set RootCommand Options +func NewRootCmd(root *RootCommand, options ...RootOption) *RootCommand { + if root == nil { + root = &RootCommand{} + } + for _, o := range options { + o(root) + } + return root +} + +// Commands returns a list of Cobra commands +func (c *RootCommand) Commands() []simplecobra.Commander { + return c.CommandEntries +} + +// PreRun executed prior to command invocation +func (c *RootCommand) PreRun(this, runner *simplecobra.Commandeer) error { + c.isInit = true + c.initThis = this + c.initRunner = runner + c.initConfiguration() + return nil +} + +// initConfiguration Loads configuration, and setups fail over case +func (c *RootCommand) initConfiguration() { + appconfig.InitializeAppLogger(os.Stdout, os.Stderr, false) + +} + +// Name returns the cli command name +func (c *RootCommand) Name() string { + return c.NameP +} + +// Run invokes the CLI command +func (c *RootCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error { + if c.failRun { + return errors.New("failRun") + } + c.ctx = ctx + return nil +} + +// Init invoked to Initialize the RootCommand object +func (c *RootCommand) Init(cd *simplecobra.Commandeer) error { + if c.failWithCobraCommand { + return errors.New("failWithCobraCommand") + } + cmd := cd.CobraCommand + + persistentFlags := cmd.PersistentFlags() + persistentFlags.StringP("config", "c", "", "Configuration Override") + if c.TableObj == nil { + c.TableObj = table.NewWriter() + c.TableObj.SetOutputMirror(os.Stdout) + c.TableObj.SetStyle(table.StyleLight) + } + + return nil +} diff --git a/cli/support/simple.go 
b/cli/support/simple.go new file mode 100644 index 00000000..28db95db --- /dev/null +++ b/cli/support/simple.go @@ -0,0 +1,68 @@ +package support + +import ( + "context" + "github.com/bep/simplecobra" + "github.com/spf13/cobra" +) + +// SimpleCommand wraps a simple command +type SimpleCommand struct { + use string + NameP string + Short string + Long string + RunFunc func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *RootCommand, args []string) error + WithCFunc func(cmd *cobra.Command, r *RootCommand) + InitCFunc func(cd *simplecobra.Commandeer, r *RootCommand) error + + CommandsList []simplecobra.Commander + + RootCmd *RootCommand +} + +// Commands is a list of subcommands +func (c *SimpleCommand) Commands() []simplecobra.Commander { + return c.CommandsList +} + +// SetName Function allows name to be set +func (c *SimpleCommand) SetName(name string) { + c.NameP = name +} + +// Name returns function Name +func (c *SimpleCommand) Name() string { + return c.NameP +} + +// Run executes cli command +func (c *SimpleCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error { + if c.RunFunc == nil { + return nil + } + return c.RunFunc(ctx, cd, c.RootCmd, args) +} + +// Init initializes the SimpleCommand +func (c *SimpleCommand) Init(cd *simplecobra.Commandeer) error { + c.RootCmd = cd.Root.Command.(*RootCommand) + cmd := cd.CobraCommand + cmd.Short = c.Short + cmd.Long = c.Long + if c.use != "" { + cmd.Use = c.use + } + if c.WithCFunc != nil { + c.WithCFunc(cmd, c.RootCmd) + } + return nil +} + +// PreRun executed prior to cli command execution +func (c *SimpleCommand) PreRun(cd, runner *simplecobra.Commandeer) error { + if c.InitCFunc != nil { + return c.InitCFunc(cd, c.RootCmd) + } + return nil +} diff --git a/cli/test/conections_test.go b/cli/test/conections_test.go new file mode 100644 index 00000000..3613d8df --- /dev/null +++ b/cli/test/conections_test.go @@ -0,0 +1,50 @@ +package test + +import ( + "github.com/esnet/gdg/cli" + 
"github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/service" + "github.com/esnet/gdg/internal/service/mocks" + "github.com/grafana/grafana-openapi-client-go/models" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "io" + "strings" + "testing" +) + +func TestConnectionCommand(t *testing.T) { + testSvc := new(mocks.GrafanaService) + getMockSvc := func() service.GrafanaService { + return testSvc + } + resp := []models.DataSourceListItemDTO{ + { + ID: 5, + Name: "Hello", + UID: "magicUid", + Type: "elasticsearch", + IsDefault: false, + }, + } + + testSvc.EXPECT().InitOrganizations().Return() + testSvc.EXPECT().ListConnections(mock.Anything).Return(resp) + + optionMockSvc := func() support.RootOption { + return func(response *support.RootCommand) { + response.GrafanaSvc = getMockSvc + } + } + r, w, cleanup := InterceptStdout() + + err := cli.Execute("testing.yml", []string{"backup", "connections", "list"}, optionMockSvc()) + assert.Nil(t, err) + defer cleanup() + w.Close() + + out, _ := io.ReadAll(r) + outStr := string(out) + assert.True(t, strings.Contains(outStr, "magicUid")) + assert.True(t, strings.Contains(outStr, "Hello")) +} diff --git a/cli/test/devel_test.go b/cli/test/devel_test.go new file mode 100644 index 00000000..434b5bd2 --- /dev/null +++ b/cli/test/devel_test.go @@ -0,0 +1,50 @@ +package test + +import ( + "github.com/esnet/gdg/cli" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/service/mocks" + "github.com/stretchr/testify/assert" + "strings" + "testing" +) + +func TestDevelSrvInfo(t *testing.T) { + var execMe = func(mock *mocks.GrafanaService, data []byte, optionMockSvc func() support.RootOption) error { + expected := make(map[string]interface{}) + expected["Database"] = "db" + expected["Commit"] = "commit" + expected["Version"] = "version" + + mock.EXPECT().GetServerInfo().Return(expected) + err := cli.Execute(string(data), []string{"tools", "devel", "srvinfo"}, optionMockSvc()) + 
return err + } + outStr, closeReader := setupAndExecuteMockingServices(t, execMe) + defer closeReader() + + assert.True(t, strings.Contains(outStr, "Version=")) + assert.True(t, strings.Contains(outStr, "Database=")) + assert.True(t, strings.Contains(outStr, "Commit=")) +} + +func TestDevelSrvCompletion(t *testing.T) { + fn := func(args []string) func(mock *mocks.GrafanaService, data []byte, optionMockSvc func() support.RootOption) error { + return func(mock *mocks.GrafanaService, data []byte, optionMockSvc func() support.RootOption) error { + err := cli.Execute(string(data), args, optionMockSvc()) + return err + } + } + + outStr, closeReader := setupAndExecuteMockingServices(t, fn([]string{"tools", "devel", "completion", "fish"})) + assert.True(t, strings.Contains(outStr, "fish")) + assert.True(t, strings.Contains(outStr, "__completion_prepare_completions")) + closeReader() + outStr, closeReader = setupAndExecuteMockingServices(t, fn([]string{"tools", "devel", "completion", "bash"})) + assert.True(t, strings.Contains(outStr, "bash")) + assert.True(t, strings.Contains(outStr, "flag_parsing_disabled")) + closeReader() + outStr, closeReader = setupAndExecuteMockingServices(t, fn([]string{"tools", "devel", "completion", "zsh"})) + assert.True(t, strings.Contains(outStr, "shellCompDirectiveKeepOrder")) + closeReader() +} diff --git a/cli/test/support.go b/cli/test/support.go new file mode 100644 index 00000000..f0e2bf53 --- /dev/null +++ b/cli/test/support.go @@ -0,0 +1,71 @@ +package test + +import ( + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/config" + applog "github.com/esnet/gdg/internal/log" + "github.com/esnet/gdg/internal/service" + "github.com/esnet/gdg/internal/service/mocks" + "log/slog" + + "github.com/stretchr/testify/assert" + "io" + "os" + "testing" +) + +// setupAndExecuteMockingServices will create a mock for various required entities allowing to test the CLI flag parsing +// process: function that sets up mocks and invokes the 
Execute command +func setupAndExecuteMockingServices(t *testing.T, process func(mock *mocks.GrafanaService, data []byte, optionMockSvc func() support.RootOption) error) (string, func()) { + testSvc := new(mocks.GrafanaService) + getMockSvc := func() service.GrafanaService { + return testSvc + } + + optionMockSvc := func() support.RootOption { + return func(response *support.RootCommand) { + response.GrafanaSvc = getMockSvc + } + } + + r, w, cleanup := InterceptStdout() + data, err := os.ReadFile("../../config/testing.yml") + assert.Nil(t, err) + + err = process(testSvc, data, optionMockSvc) + assert.Nil(t, err) + defer cleanup() + err = w.Close() + if err != nil { + slog.Warn("unable to close write stream") + } + clean := func() { + defer r.Close() + } + out, _ := io.ReadAll(r) + outStr := string(out) + return outStr, clean + +} + +// InterceptStdout is a test helper function that will redirect all stdout in and out to a different file stream. +// It returns the stdout, stderr, and a function to be invoked to close the streams. 
+func InterceptStdout() (*os.File, *os.File, func()) { + backupStd := os.Stdout + backupErr := os.Stderr + r, w, _ := os.Pipe() + //Restore streams + config.InitConfig("testing", "") + applog.InitializeAppLogger(w, w, false) + cleanup := func() { + os.Stdout = backupStd + os.Stderr = backupErr + applog.InitializeAppLogger(os.Stdout, os.Stderr, false) + + } + os.Stdout = w + os.Stderr = w + + return r, w, cleanup + +} diff --git a/cli/test/version_test.go b/cli/test/version_test.go new file mode 100644 index 00000000..4d0b5a92 --- /dev/null +++ b/cli/test/version_test.go @@ -0,0 +1,52 @@ +package test + +import ( + "fmt" + "github.com/esnet/gdg/cli" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/service" + "github.com/esnet/gdg/internal/service/mocks" + "github.com/esnet/gdg/internal/version" + "github.com/stretchr/testify/assert" + "os" + "strings" + "testing" +) + +func TestVersionCommand(t *testing.T) { + var execMe = func(mock *mocks.GrafanaService, data []byte, optionMockSvc func() support.RootOption) error { + err := cli.Execute(string(data), []string{"version"}, optionMockSvc()) + return err + } + outStr, closeReader := setupAndExecuteMockingServices(t, execMe) + defer closeReader() + + assert.True(t, strings.Contains(outStr, "Build Date:")) + assert.True(t, strings.Contains(outStr, "Git Commit:")) + assert.True(t, strings.Contains(outStr, "Version:")) + assert.True(t, strings.Contains(outStr, version.Version)) + assert.True(t, strings.Contains(outStr, "Date:")) + assert.True(t, strings.Contains(outStr, "Go Version:")) + assert.True(t, strings.Contains(outStr, "OS / Arch:")) +} + +func TestVersionErrCommand(t *testing.T) { + testSvc := new(mocks.GrafanaService) + getMockSvc := func() service.GrafanaService { + return testSvc + } + + optionMockSvc := func() support.RootOption { + return func(response *support.RootCommand) { + response.GrafanaSvc = getMockSvc + } + } + path, _ := os.Getwd() + fmt.Println(path) + data, err := 
os.ReadFile("../../config/testing.yml") + assert.Nil(t, err) + + err = cli.Execute(string(data), []string{"dumb", "dumb"}, optionMockSvc()) + assert.NotNil(t, err) + assert.Equal(t, err.Error(), `command error: unknown command "dumb" for "gdg"`) +} diff --git a/cli/tools/auth.go b/cli/tools/auth.go new file mode 100644 index 00000000..ae873ebe --- /dev/null +++ b/cli/tools/auth.go @@ -0,0 +1,20 @@ +package tools + +import ( + "context" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" +) + +func newAuthCmd() simplecobra.Commander { + description := "Manage auth via API" + return &support.SimpleCommand{ + NameP: "auth", + Short: description, + Long: description, + CommandsList: []simplecobra.Commander{newTokensCmd(), newServiceAccountCmd()}, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + }, + } +} diff --git a/cli/tools/auth_service_accounts.go b/cli/tools/auth_service_accounts.go new file mode 100644 index 00000000..aa9aa215 --- /dev/null +++ b/cli/tools/auth_service_accounts.go @@ -0,0 +1,229 @@ +package tools + +import ( + "context" + "errors" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/config" + "github.com/jedib0t/go-pretty/v6/table" + "log" + "log/slog" + + "github.com/spf13/cobra" + "slices" + "sort" + "strconv" +) + +func newServiceAccountCmd() simplecobra.Commander { + description := "Manage api service-account" + return &support.SimpleCommand{ + NameP: "service-accounts", + Short: description, + Long: description, + CommandsList: []simplecobra.Commander{ + newListServiceAccountCmd(), + newDeleteServiceAccountCmd(), + newDeleteServiceAccountTokensCmd(), + newServiceAccount(), + newServiceAccountTokenCmd(), + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + }, + WithCFunc: 
func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"service-account", "svcAcct", "svcAccts", "svc"} + }, + } +} + +func newListServiceAccountCmd() simplecobra.Commander { + description := "List Service Accounts" + return &support.SimpleCommand{ + NameP: "list", + Short: description, + Long: description, + CommandsList: []simplecobra.Commander{}, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + rootCmd.TableObj.AppendHeader(table.Row{"id", "service name", "role", "tokens", "token id", "token name", "expiration"}) + apiKeys := rootCmd.GrafanaSvc().ListServiceAccounts() + sort.SliceStable(apiKeys, func(i, j int) bool { + return apiKeys[i].ServiceAccount.ID < apiKeys[j].ServiceAccount.ID + }) + if len(apiKeys) == 0 { + slog.Info("No Service Accounts found") + } else { + for _, apiKey := range apiKeys { + + rootCmd.TableObj.AppendRow(table.Row{apiKey.ServiceAccount.ID, apiKey.ServiceAccount.Name, apiKey.ServiceAccount.Role, apiKey.ServiceAccount.Tokens}) + if apiKey.Tokens != nil { + sort.SliceStable(apiKey.Tokens, func(i, j int) bool { + return apiKey.Tokens[i].ID < apiKey.Tokens[j].ID + }) + for _, token := range apiKey.Tokens { + var formattedDate string = token.Expiration.String() + date, _ := token.Expiration.Value() + if date.(string) == "0001-01-01T00:00:00.000Z" { + formattedDate = "No Expiration" + } + rootCmd.TableObj.AppendRow(table.Row{"", "", "", "", token.ID, token.Name, formattedDate}) + } + } + } + rootCmd.TableObj.Render() + } + + return nil + }, + } +} + +func newDeleteServiceAccountTokensCmd() simplecobra.Commander { + description := "delete all tokens for Service Account from grafana" + return &support.SimpleCommand{ + NameP: "clearTokens", + Short: description, + Long: description, + CommandsList: []simplecobra.Commander{}, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + if len(args) 
< 1 { + return errors.New("requires a service account ID to be specified") + } + idStr := args[0] + id, err := strconv.ParseInt(idStr, 10, 64) + if err != nil { + log.Fatalf("unable to parse %s as a valid numeric value", idStr) + } + + slog.Info("Deleting Service Accounts Tokens for context", + "serviceAccountId", id, + "context", config.Config().GetGDGConfig().GetContext()) + savedFiles := rootCmd.GrafanaSvc().DeleteServiceAccountTokens(id) + rootCmd.TableObj.AppendHeader(table.Row{"serviceID", "type", "token_name"}) + if len(savedFiles) == 0 { + slog.Info("No Service Accounts tokens found") + } else { + for _, token := range savedFiles { + rootCmd.TableObj.AppendRow(table.Row{id, "service token", token}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} + +func newDeleteServiceAccountCmd() simplecobra.Commander { + description := "delete all Service Accounts from grafana" + return &support.SimpleCommand{ + NameP: "clear", + Short: description, + Long: description, + CommandsList: []simplecobra.Commander{}, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + savedFiles := rootCmd.GrafanaSvc().DeleteAllServiceAccounts() + slog.Info("Delete Service Accounts for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"type", "filename"}) + if len(savedFiles) == 0 { + slog.Info("No Service Accounts found") + } else { + for _, file := range savedFiles { + rootCmd.TableObj.AppendRow(table.Row{"user", file}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} + +func newServiceAccount() simplecobra.Commander { + description := "newService [ttl in seconds]" + return &support.SimpleCommand{ + NameP: "newService", + Short: description, + Long: description, + CommandsList: []simplecobra.Commander{}, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + if len(args) < 2 { + 
return errors.New("requires a key name and a role('admin','viewer','editor') [ttl optional] ") + } + name := args[0] + role := args[1] + ttl := "0" + if len(args) > 2 { + ttl = args[2] + } + var ( + expiration int64 + err error + ) + + expiration, err = strconv.ParseInt(ttl, 10, 64) + if err != nil { + expiration = 0 + } + + if !slices.Contains([]string{"admin", "editor", "viewer"}, role) { + log.Fatal("Invalid role specified") + } + serviceAcct, err := rootCmd.GrafanaSvc().CreateServiceAccount(name, role, expiration) + if err != nil { + log.Fatal("unable to create api key", "error", err) + } else { + + rootCmd.TableObj.AppendHeader(table.Row{"id", "name", "role"}) + rootCmd.TableObj.AppendRow(table.Row{serviceAcct.ID, serviceAcct.Name, serviceAcct.Role}) + rootCmd.TableObj.Render() + } + return nil + }, + } +} + +func newServiceAccountTokenCmd() simplecobra.Commander { + description := "newToken [ttl in seconds]" + return &support.SimpleCommand{ + NameP: "newToken", + Short: description, + Long: description, + CommandsList: []simplecobra.Commander{newTokensCmd()}, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + if len(args) < 2 { + return errors.New("requires a service-account ID and token name [ttl optional] ") + } + serviceIDRaw := args[0] + name := args[1] + ttl := "0" + if len(args) > 2 { + ttl = args[2] + } + var ( + expiration int64 + err error + ) + + serviceID, err := strconv.ParseInt(serviceIDRaw, 10, 64) + if err != nil { + log.Fatal("unable to parse serviceID, make sure it's a numeric value") + } + expiration, err = strconv.ParseInt(ttl, 10, 64) + if err != nil { + expiration = 0 + } + + key, err := rootCmd.GrafanaSvc().CreateServiceAccountToken(serviceID, name, expiration) + if err != nil { + log.Fatal("unable to create api key", "err", err) + } else { + + rootCmd.TableObj.AppendHeader(table.Row{"serviceID", "token_id", "name", "token"}) + 
rootCmd.TableObj.AppendRow(table.Row{serviceID, key.ID, key.Name, key.Key}) + rootCmd.TableObj.Render() + } + + return nil + }, + } +} diff --git a/cli/tools/auth_tokens.go b/cli/tools/auth_tokens.go new file mode 100644 index 00000000..218e30ce --- /dev/null +++ b/cli/tools/auth_tokens.go @@ -0,0 +1,139 @@ +package tools + +import ( + "context" + "errors" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/config" + "github.com/jedib0t/go-pretty/v6/table" + "github.com/spf13/cobra" + "log" + "log/slog" + "slices" + "sort" + "strconv" +) + +func newTokensCmd() simplecobra.Commander { + description := "Provides some utility to help the user manage their API token keys" + return &support.SimpleCommand{ + NameP: "tokens", + Short: description, + Long: description, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + }, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"token", "apikeys"} + }, + CommandsList: []simplecobra.Commander{ + newListTokensCmd(), + newDeleteTokenCmd(), + newNewTokenCmd(), + }, + } +} + +func newListTokensCmd() simplecobra.Commander { + description := "List API Keys" + return &support.SimpleCommand{ + NameP: "list", + Short: description, + Long: description, + CommandsList: []simplecobra.Commander{}, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + + rootCmd.TableObj.AppendHeader(table.Row{"id", "name", "role", "expiration"}) + apiKeys := rootCmd.GrafanaSvc().ListAPIKeys() + sort.SliceStable(apiKeys, func(i, j int) bool { + return apiKeys[i].ID < apiKeys[j].ID + }) + if len(apiKeys) == 0 { + slog.Info("No apiKeys found") + } else { + for _, apiKey := range apiKeys { + var formattedDate string = apiKey.Expiration.String() + date, _ := apiKey.Expiration.Value() + if date.(string) == 
"0001-01-01T00:00:00.000Z" { + formattedDate = "No Expiration" + } + + rootCmd.TableObj.AppendRow(table.Row{apiKey.ID, apiKey.Name, apiKey.Role, formattedDate}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} + +func newDeleteTokenCmd() simplecobra.Commander { + description := "delete all Tokens from grafana" + return &support.SimpleCommand{ + NameP: "clear", + Short: description, + Long: description, + CommandsList: []simplecobra.Commander{}, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + + savedFiles := rootCmd.GrafanaSvc().DeleteAllTokens() + slog.Info("Delete Tokens for context: ", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"type", "filename"}) + if len(savedFiles) == 0 { + slog.Info("No Tokens found") + } else { + for _, file := range savedFiles { + rootCmd.TableObj.AppendRow(table.Row{"user", file}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } +} + +func newNewTokenCmd() simplecobra.Commander { + description := "new [ttl in seconds]" + return &support.SimpleCommand{ + NameP: "new", + Short: description, + Long: description, + CommandsList: []simplecobra.Commander{}, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + if len(args) < 2 { + return errors.New("requires a key name and a role('admin','viewer','editor') [ttl optional] ") + } + name := args[0] + role := args[1] + ttl := "0" + if len(args) > 2 { + ttl = args[2] + } + var ( + expiration int64 + err error + ) + + expiration, err = strconv.ParseInt(ttl, 10, 64) + if err != nil { + expiration = 0 + } + + if !slices.Contains([]string{"admin", "editor", "viewer"}, role) { + log.Fatal("Invalid role specified") + } + key, err := rootCmd.GrafanaSvc().CreateAPIKey(name, role, expiration) + if err != nil { + log.Fatal("unable to create api key", "err", err) + } else { + + 
rootCmd.TableObj.AppendHeader(table.Row{"id", "name", "token"}) + rootCmd.TableObj.AppendRow(table.Row{key.ID, key.Name, key.Key}) + rootCmd.TableObj.Render() + } + + return nil + }, + } +} diff --git a/cli/tools/context.go b/cli/tools/context.go new file mode 100644 index 00000000..9db182f8 --- /dev/null +++ b/cli/tools/context.go @@ -0,0 +1,167 @@ +package tools + +import ( + "context" + "errors" + "fmt" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/config" + "github.com/jedib0t/go-pretty/v6/table" + "log/slog" + + "github.com/spf13/cobra" + "strings" +) + +func newContextCmd() simplecobra.Commander { + v := &support.SimpleCommand{ + NameP: "contexts", + CommandsList: []simplecobra.Commander{newContextClearCmd(), newListContextCmd(), + newContextCopy(), newShowContext(), newDeleteContext(), newContext(), newSetContext()}, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, r *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + }, + Short: "Manage Context configuration", + Long: "Manage Context configuration which allows multiple grafana configs to be used.", + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"ctx", "context"} + }, + } + return v +} + +func newContextClearCmd() simplecobra.Commander { + return &support.SimpleCommand{ + NameP: "clear", + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, r *support.RootCommand, args []string) error { + config.Config().ClearContexts() + slog.Info("Successfully deleted all configured contexts") + return nil + }, + Short: "Manage Context configuration", + Long: "Manage Context configuration which allows multiple grafana configs to be used.", + } +} + +func newListContextCmd() simplecobra.Commander { + return &support.SimpleCommand{ + NameP: "list", + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, r *support.RootCommand, args []string) error { + 
r.TableObj.AppendHeader(table.Row{"context", "active"}) + contexts := config.Config().GetGDGConfig().GetContexts() + activeContext := config.Config().GetGDGConfig().GetContext() + for key := range contexts { + active := false + if key == strings.ToLower(activeContext) { + key = fmt.Sprintf("*%s", activeContext) + active = true + } + _ = active + r.TableObj.AppendRow(table.Row{key, active}) + } + + r.TableObj.Render() + return nil + }, + Short: "List context", + Long: "List contexts.", + } +} + +func newContextCopy() simplecobra.Commander { + v := &support.SimpleCommand{ + NameP: "copy", + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, r *support.RootCommand, args []string) error { + src := args[0] + dest := args[1] + config.Config().CopyContext(src, dest) + return nil + }, + InitCFunc: func(cd *simplecobra.Commandeer, r *support.RootCommand) error { + cd.CobraCommand.Aliases = []string{"cp"} + cd.CobraCommand.Args = func(cmd *cobra.Command, args []string) error { + if len(args) < 2 { + return errors.New("requires a src and destination argument") + } + return nil + } + return nil + }, + Short: "copy context ", + Long: "copy contexts ", + } + + return v + +} + +func newShowContext() simplecobra.Commander { + return &support.SimpleCommand{ + NameP: "show", + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, r *support.RootCommand, args []string) error { + contextEntry := config.Config().GetGDGConfig().GetContext() + if len(args) > 0 && len(args[0]) > 0 { + contextEntry = args[0] + } + config.Config().PrintContext(contextEntry) + return nil + }, + Short: "show optional[context]", + Long: "show optional[context]", + } +} + +func newDeleteContext() simplecobra.Commander { + return &support.SimpleCommand{ + NameP: "delete", + Short: "delete context ", + Long: "delete context ", + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, r *support.RootCommand, args []string) error { + if len(args) < 1 { + return errors.New("requires a 
context argument") + } + contextEntry := args[0] + config.Config().DeleteContext(contextEntry) + slog.Info("Successfully deleted context", "context", ctx) + return nil + }, + InitCFunc: func(cd *simplecobra.Commandeer, r *support.RootCommand) error { + cd.CobraCommand.Aliases = []string{"del"} + return nil + }, + } + +} + +func newContext() simplecobra.Commander { + return &support.SimpleCommand{ + NameP: "new", + Short: "new ", + Long: "new ", + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + if len(args) < 1 { + return errors.New("requires a context NameP") + } + contextEntry := args[0] + config.Config().NewContext(contextEntry) + return nil + }, + } +} + +func newSetContext() simplecobra.Commander { + return &support.SimpleCommand{ + NameP: "set", + Short: "set ", + Long: "set ", + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + if len(args) < 1 { + return errors.New("requires a context argument") + } + contextEntry := args[0] + config.Config().ChangeContext(contextEntry) + return nil + }, + } +} diff --git a/cli/tools/devel.go b/cli/tools/devel.go new file mode 100644 index 00000000..b2749344 --- /dev/null +++ b/cli/tools/devel.go @@ -0,0 +1,64 @@ +package tools + +import ( + "context" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/spf13/cobra" + "log/slog" + "os" +) + +func newDevelCmd() simplecobra.Commander { + return &support.SimpleCommand{ + NameP: "devel", + Short: "Developer Tooling", + Long: "Developer Tooling", + CommandsList: []simplecobra.Commander{newServerInfoCmd(), newCompletion()}, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + }, + } +} + +func newServerInfoCmd() simplecobra.Commander { + return &support.SimpleCommand{ + NameP: "srvinfo", + Short: "server health info", + 
Long: "server health info", + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + result := rootCmd.GrafanaSvc().GetServerInfo() + for key, value := range result { + slog.Info("", key, value) + } + return nil + }, + } +} + +func newCompletion() simplecobra.Commander { + return &support.SimpleCommand{ + NameP: "completion [bash|zsh|fish|powershell]", + Short: "Generate completion script", + Long: "Generate completion script", + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + var err error + switch args[0] { + case "bash": + err = cd.CobraCommand.GenBashCompletion(os.Stdout) + case "zsh": + err = cd.CobraCommand.GenZshCompletion(os.Stdout) + case "fish": + err = cd.CobraCommand.GenFishCompletion(os.Stdout, true) + case "powershell": + err = cd.CobraCommand.GenPowerShellCompletion(os.Stdout) + } + return err + }, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.ValidArgs = []string{"bash", "zsh", "fish", "powershell"} + cmd.Args = cobra.MatchAll(cobra.ExactArgs(1), cobra.OnlyValidArgs) + + }, + } +} diff --git a/cli/tools/organizations.go b/cli/tools/organizations.go new file mode 100644 index 00000000..eeba60d1 --- /dev/null +++ b/cli/tools/organizations.go @@ -0,0 +1,236 @@ +package tools + +import ( + "context" + "errors" + "fmt" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/config" + "github.com/jedib0t/go-pretty/v6/table" + "github.com/spf13/cobra" + "log" + "log/slog" + "strconv" +) + +func newOrgCommand() simplecobra.Commander { + return &support.SimpleCommand{ + NameP: "organizations", + Short: "Manage organizations", + Long: "Manage organizations", + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + + }, + WithCFunc: func(cmd *cobra.Command, r 
*support.RootCommand) { + cmd.Aliases = []string{"org", "orgs"} + }, + CommandsList: []simplecobra.Commander{ + newSetOrgCmd(), + newGetUserOrgCmd(), + newGetTokenOrgCmd(), + //Users + newListUsers(), + newUpdateUserRoleCmd(), + newAddUserRoleCmd(), + newDeleteUserRoleCmd(), + }, + } + +} + +func newSetOrgCmd() simplecobra.Commander { + return &support.SimpleCommand{ + NameP: "set", + Short: "Set , 0 removes filter", + Long: "Set , 0 removes filter", + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + if len(args) < 1 { + return errors.New("requires an Org ID and name") + } + OrgId := args[0] + orgId, err := strconv.ParseInt(OrgId, 10, 64) + if err != nil { + log.Fatal("invalid Org ID, could not parse value to a numeric value") + } + err = rootCmd.GrafanaSvc().SetOrganization(orgId) + if err != nil { + log.Fatal("unable to set Org ID", "err", err) + } + slog.Info("Successfully set Org ID for context", "context", config.Config().GetGDGConfig().GetContext()) + return nil + + }, + } + +} + +func newGetUserOrgCmd() simplecobra.Commander { + description := "display org associated with user" + return &support.SimpleCommand{ + NameP: "userOrg", + Short: description, + Long: description, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Listing organizations for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"id", "name"}) + org := rootCmd.GrafanaSvc().GetUserOrganization() + if org == nil { + slog.Info("No organizations found") + } else { + rootCmd.TableObj.AppendRow(table.Row{org.ID, org.Name}) + rootCmd.TableObj.Render() + } + return nil + + }, + } + +} + +func newGetTokenOrgCmd() simplecobra.Commander { + description := "display org associated with token" + return &support.SimpleCommand{ + NameP: "tokenOrg", + Short: description, + Long: description, + RunFunc: 
func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + + slog.Info("Display token organization for context'", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"id", "name"}) + org := rootCmd.GrafanaSvc().GetTokenOrganization() + if org == nil { + slog.Info("No tokens were found") + } else { + rootCmd.TableObj.AppendRow(table.Row{org.ID, org.Name}) + rootCmd.TableObj.Render() + } + return nil + }, + } + +} + +func newListUsers() simplecobra.Commander { + description := "list an Organization users" + return &support.SimpleCommand{ + NameP: "listUsers", + Short: description, + Long: description, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + if len(args) < 1 { + return errors.New("requires an orgId to be specified") + } + orgId, err := strconv.ParseInt(args[0], 10, 64) + if err != nil { + log.Fatal("unable to parse orgId to numeric value") + } + slog.Info("Listing org users for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"id", "login", "orgId", "name", "email", "role"}) + users := rootCmd.GrafanaSvc().ListOrgUsers(orgId) + if len(users) == 0 { + slog.Info("No users found") + } else { + for _, user := range users { + rootCmd.TableObj.AppendRow(table.Row{user.UserID, user.Login, user.OrgID, user.Name, user.Email, user.Role}) + } + rootCmd.TableObj.Render() + } + return nil + }, + } + +} + +func newUpdateUserRoleCmd() simplecobra.Commander { + description := "updateUserRole " + return &support.SimpleCommand{ + NameP: "updateUserRole", + Short: description, + Long: description, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + if len(args) < 3 { + return fmt.Errorf("requires the following parameters to be specified: [ ]\nValid roles are: [admin, editor, viewer]") + } 
+ orgId, err := strconv.ParseInt(args[0], 10, 64) + if err != nil { + log.Fatal("unable to parse orgId to numeric value") + } + userId, err := strconv.ParseInt(args[1], 10, 64) + if err != nil { + log.Fatal("unable to parse userId to numeric value") + } + slog.Info("Listing org users for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"login", "orgId", "name", "email", "role"}) + err = rootCmd.GrafanaSvc().UpdateUserInOrg(args[2], userId, orgId) + if err != nil { + slog.Error("Unable to update Org user") + } else { + slog.Info("User has been updated") + } + return nil + }, + } +} + +func newAddUserRoleCmd() simplecobra.Commander { + description := "addUser " + return &support.SimpleCommand{ + NameP: "addUser", + Short: description, + Long: description, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + if len(args) < 3 { + return fmt.Errorf("requires the following parameters to be specified: [ ]\nValid roles are: [admin, editor, viewer]") + } + orgId, err := strconv.ParseInt(args[0], 10, 64) + if err != nil { + log.Fatal("unable to parse orgId to numeric value") + } + userId, err := strconv.ParseInt(args[1], 10, 64) + if err != nil { + log.Fatal("unable to parse userId to numeric value") + } + slog.Info("Add user to org for context", "context", config.Config().GetGDGConfig().GetContext()) + rootCmd.TableObj.AppendHeader(table.Row{"login", "orgId", "name", "email", "role"}) + err = rootCmd.GrafanaSvc().AddUserToOrg(args[2], userId, orgId) + if err != nil { + slog.Error("Unable to add user to Org") + } else { + slog.Info("User has been add to Org") + } + return nil + }, + } +} + +func newDeleteUserRoleCmd() simplecobra.Commander { + description := "deleteUser removes a user from the given Organization (This will NOT delete the actual user from Grafana)" + return &support.SimpleCommand{ + NameP: "deleteUser", + Short: description, + Long: 
description, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + if len(args) < 2 { + return fmt.Errorf("requires the following parameters to be specified: [ ]") + } + orgId, err := strconv.ParseInt(args[0], 10, 64) + if err != nil { + log.Fatal("unable to parse orgId to numeric value") + } + userId, err := strconv.ParseInt(args[1], 10, 64) + if err != nil { + log.Fatal("unable to parse userId to numeric value") + } + slog.Info("Update org for context", "context", config.Config().GetGDGConfig().GetContext()) + err = rootCmd.GrafanaSvc().DeleteUserFromOrg(userId, orgId) + if err != nil { + slog.Error("Unable to remove user from Org") + } else { + slog.Info("User has been removed from Org", "userId", args[0]) + } + return nil + }, + } +} diff --git a/cli/tools/tools.go b/cli/tools/tools.go new file mode 100644 index 00000000..112c688b --- /dev/null +++ b/cli/tools/tools.go @@ -0,0 +1,29 @@ +package tools + +import ( + "context" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/spf13/cobra" +) + +func NewToolsCommand() simplecobra.Commander { + description := "A collection of tools to manage a grafana instance" + return &support.SimpleCommand{ + NameP: "tools", + Short: description, + Long: description, + CommandsList: []simplecobra.Commander{newContextCmd(), newDevelCmd(), newUserCommand(), newAuthCmd(), newOrgCommand()}, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"t"} + }, + InitCFunc: func(cd *simplecobra.Commandeer, r *support.RootCommand) error { + support.InitConfiguration(cd.CobraCommand) + return nil + }, + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + }, + } + +} diff --git a/cli/tools/users.go b/cli/tools/users.go new file mode 100644 index 00000000..1a5246ee --- /dev/null +++ b/cli/tools/users.go @@ -0,0 +1,60 
@@ +package tools + +import ( + "context" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/config" + "github.com/spf13/cobra" + "log/slog" +) + +func newUserCommand() simplecobra.Commander { + return &support.SimpleCommand{ + NameP: "users", + Short: "Manage users", + Long: "Provides some utility to manage grafana users from the CLI. Please note, as the credentials cannot be imported, the export with generate a default password for any user not already present", + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + return cd.CobraCommand.Help() + + }, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"u", "user"} + }, + InitCFunc: nil, + CommandsList: []simplecobra.Commander{newPromoteUserCmd()}, + } + +} + +func newPromoteUserCmd() simplecobra.Commander { + return &support.SimpleCommand{ + NameP: "makeGrafanaAdmin", + Short: "Promote User to Grafana Admin", + Long: "Promote User to Grafana Admin", + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, rootCmd *support.RootCommand, args []string) error { + slog.Info("Promoting User to Grafana Admin for context: '%s'", "context", config.Config().GetGDGConfig().GetContext()) + userLogin, _ := cd.CobraCommand.Flags().GetString("user") + + msg, err := rootCmd.GrafanaSvc().PromoteUser(userLogin) + if err != nil { + slog.Error(err.Error()) + } else { + slog.Info(msg) + slog.Info("Please note user is a grafana admin, not necessarily an Org admin. 
You may need to promote yourself manually per org") + } + return nil + + }, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"godmode", "promote"} + cmd.Flags().StringP("user", "u", "", "user email") + err := cmd.MarkFlagRequired("user") + if err != nil { + slog.Debug("Failed to mark user flag as required") + } + }, + InitCFunc: nil, + CommandsList: nil, + } +} diff --git a/cli/version.go b/cli/version.go new file mode 100644 index 00000000..f059ccda --- /dev/null +++ b/cli/version.go @@ -0,0 +1,30 @@ +package cli + +import ( + "context" + "fmt" + "github.com/bep/simplecobra" + "github.com/esnet/gdg/cli/support" + "github.com/esnet/gdg/internal/version" + "github.com/spf13/cobra" + "log/slog" +) + +func newVersionCmd() simplecobra.Commander { + return &support.SimpleCommand{ + NameP: "version", + RunFunc: func(ctx context.Context, cd *simplecobra.Commandeer, r *support.RootCommand, args []string) error { + slog.Info(fmt.Sprintf("Build Date: %s", version.BuildDate)) + slog.Info(fmt.Sprintf("Git Commit: %s", version.GitCommit)) + slog.Info(fmt.Sprintf("Version: %s", version.Version)) + slog.Info(fmt.Sprintf("Go Version: %s", version.GoVersion)) + slog.Info(fmt.Sprintf("OS / Arch: %s", version.OsArch)) + return nil + }, + WithCFunc: func(cmd *cobra.Command, r *support.RootCommand) { + cmd.Aliases = []string{"v"} + }, + Short: "Print the version number of generated code example", + Long: "All software has versions. 
This is generated code example", + } +} diff --git a/cmd/gdg/main.go b/cmd/gdg/main.go new file mode 100644 index 00000000..1f50e5d5 --- /dev/null +++ b/cmd/gdg/main.go @@ -0,0 +1,29 @@ +package main + +import ( + "github.com/esnet/gdg/cli" + "github.com/esnet/gdg/cli/support" + "log" + "os" + + api "github.com/esnet/gdg/internal/service" +) + +var ( + getGrafanaSvc = func() api.GrafanaService { + return api.NewApiService() + } +) + +func main() { + setGrafanaSvc := func() support.RootOption { + return func(response *support.RootCommand) { + response.GrafanaSvc = getGrafanaSvc + } + } + + err := cli.Execute("importer-example.yml", os.Args[1:], setGrafanaSvc()) + if err != nil { + log.Fatalf("Error: %s", err) + } +} diff --git a/cmd/gen/main.go b/cmd/gen/main.go new file mode 100644 index 00000000..143d9fa8 --- /dev/null +++ b/cmd/gen/main.go @@ -0,0 +1,72 @@ +package main + +import ( + "fmt" + assets "github.com/esnet/gdg/config" + "github.com/esnet/gdg/internal/config" + appconfig "github.com/esnet/gdg/internal/log" + "github.com/esnet/gdg/internal/templating" + "github.com/jedib0t/go-pretty/v6/table" + flag "github.com/spf13/pflag" + "gopkg.in/yaml.v3" + "log" + "log/slog" + "os" +) + +func main() { + //Using pflag over corba for now, as this should be a simple enough CLI tool + var cfgName = flag.StringP("config", "c", "importer.yml", "GDG Configuration file override.") + var tmpCfgName = flag.StringP("ct", "", "templates.yml", "GDG Template configuration file override.") + var showTemplateCfg = flag.BoolP("show-config", "", false, "Will display the current template configuration") + var listTemplates = flag.BoolP("list-templates", "", false, "List all current templates") + var templateName = flag.StringP("template", "t", "", "Specify template name, optional. 
Default is to operate on all configured templates that are found.") + flag.Parse() + defaultConfiguration, err := assets.GetFile("importer-example.yml") + if err != nil { + slog.Warn("unable to load default configuration, no fallback") + } + + config.InitConfig(*cfgName, defaultConfiguration) + config.InitTemplateConfig(*tmpCfgName) + cfg := config.Config() + appconfig.InitializeAppLogger(os.Stdout, os.Stderr, cfg.IsDebug()) + + if *showTemplateCfg { + data, err := yaml.Marshal(cfg.GetTemplateConfig()) + if err != nil { + log.Fatalf("unable to load template configuration: %v", err) + } + slog.Info(fmt.Sprintf("Configuration\n%s", string(data))) + return + } + slog.Info("Context is set to: ", slog.String("context", cfg.GetGDGConfig().ContextName)) + template := templating.NewTemplate() + + if *listTemplates { + templates := template.ListTemplates() + for ndx, t := range templates { + slog.Info(fmt.Sprintf("%d: %s", ndx+1, t)) + } + + return + } + + payload, err := template.Generate(*templateName) + if err != nil { + log.Fatal("Failed to generate templates", slog.Any("err", err)) + } + + tableObj := table.NewWriter() + tableObj.SetOutputMirror(os.Stdout) + tableObj.SetStyle(table.StyleLight) + + tableObj.AppendHeader(table.Row{"Template Name", "Output"}) + for key, val := range payload { + for _, file := range val { + tableObj.AppendRow(table.Row{key, file}) + } + } + + tableObj.Render() +} diff --git a/config/assets.go b/config/assets.go new file mode 100644 index 00000000..1b2c7b9b --- /dev/null +++ b/config/assets.go @@ -0,0 +1,18 @@ +package config + +import ( + "embed" + "log/slog" +) + +//go:embed * +var Assets embed.FS + +func GetFile(name string) (string, error) { + data, err := Assets.ReadFile(name) + if err != nil { + slog.Info("unable to find load default configuration", "err", err) + return "", err + } + return string(data), nil +} diff --git a/config/importer-example.yml b/config/importer-example.yml index 2303235d..eda705b2 100644 --- 
a/config/importer-example.yml +++ b/config/importer-example.yml @@ -40,22 +40,16 @@ contexts: regex: "misc" - field: "url" value: ".*esproxy2*" - auth: - user: user - password: password + secure_data: "default.json" - rules: - field: "url" regex: ".*esproxy2*" - auth: - user: admin - password: secret + secure_data: "default.json" - rules: # Default - field: "name" regex: ".*" - auth: - user: user - password: password + secure_data: "default.json" url: http://grafana:3000 user_name: admin password: admin @@ -72,20 +66,16 @@ contexts: - rules: - field: "name" regex: "(?i)complex name" - auth: - user: test - password: secret + secure_data: "default.json" # Default - rules: - field: "name" regex: ".*" - auth: - user: user - password: password + secure_data: "default.json" url: https://grafana.com user_name: admin password: admin - organization: your-org + organization_id: 1 filter_override: ignore_dashboard_filters: false # When set to true all Watched filtered folders will be ignored and ALL folders will be acted on watched: @@ -98,18 +88,14 @@ contexts: - rules: - field: "name" regex: "(?i)complex name" - auth: - user: test - password: secret + secure_data: "default.json" - rules: # Default - field: "name" regex: ".*" - auth: - user: user - password: password + secure_data: "default.json" url: https://staging.grafana.com - organization: your-org + organization_id: 1 filter_override: ignore_dashboard_filters: false # When set to true all Watched filtered folders will be ignored and ALL folders will be acted on watched: diff --git a/config/templates-example.yml b/config/templates-example.yml new file mode 100644 index 00000000..309bc988 --- /dev/null +++ b/config/templates-example.yml @@ -0,0 +1,24 @@ +entities: + dashboards: + - template_name: template_example + output: + - folder: "General" + org_id: 2 + dashboard_name: "Testing Foobar" + template_data: + Title: Bob Loves Candy + enabledlight: true + lightsources: + - sun + - moon + - lightbulb + - office lights + - 
folder: "Testing" + org_id: 3 + dashboard_name: "" + template_data: + Title: Uncle McDonalds + enabledlight: true + lightsources: + - sun + - moon \ No newline at end of file diff --git a/config/testing.yml b/config/testing.yml index ff700f7f..c61d38d1 100644 --- a/config/testing.yml +++ b/config/testing.yml @@ -11,15 +11,11 @@ contexts: regex: "misc" - field: "url" regex: ".*esproxy2*" - auth: - user: admin - password: secret + secure_data: "default.json" - rules: - field: "name" regex: ".*" - auth: - user: user - password: password + secure_data: "default.json" url: http://localhost:3000 user_name: admin password: admin @@ -34,16 +30,12 @@ contexts: - rules: - field: "name" regex: "(?i)complex name" - auth: - password: secret - user: test + secure_data: "default.json" - rules: - match: - field: "name" regex: ".*" - auth: - password: password - user: user + secure_data: "default.json" url: https://grafana.com user_name: admin @@ -53,29 +45,23 @@ contexts: watched: - General qa: - output_path: qa + output_path: test/data token: connections: credential_rules: - rules: - field: "name" regex: "(?i)complex name" - auth: - user: test - password: secret + secure_data: "complex.json" - rules: - field: "name" regex: "ds_name" - auth: - user: abcd - password: secret + secure_data: "default.json" # Default - rules: - field: "name" regex: ".*" - auth: - user: user - password: password + secure_data: "default.json" url: https://staging.grafana.com organization: your-org ignore_filters: False # When set to true all Watched filtered folders will be ignored and ALL folders will be acted on diff --git a/docker/Dockerfile b/docker/Dockerfile index 0da58c3b..8699da04 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,46 +1,9 @@ -# Build Stage -FROM golang:1.21.3 AS build-stage - -LABEL app="build-gdg" -LABEL REPO="https://github.com/esnet/gdg" - -ENV PROJPATH=/go/src/github.com/esnet/gdg - -# Because of https://github.com/docker/docker/issues/14914 -ENV 
PATH=$PATH:$GOROOT/bin:$GOPATH/bin - -ADD . /go/src/github.com/esnet/gdg -WORKDIR /go/src/github.com/esnet/gdg - -RUN make build-alpine - -# Final Stage -FROM golang:1.21.3 - -ARG GIT_COMMIT -ARG VERSION -LABEL REPO="https://github.com/esnet/gdg" -LABEL GIT_COMMIT=$GIT_COMMIT -LABEL VERSION=$VERSION - -# Because of https://github.com/docker/docker/issues/14914 -ENV PATH=$PATH:/opt/gdg/bin - -WORKDIR /opt/gdg/bin - -COPY --from=build-stage /go/src/github.com/esnet/gdg/bin/gdg /opt/gdg/bin/ -RUN \ - apt-get update && \ - apt install -y dumb-init && \ - apt-get clean autoclean && \ - apt-get autoremove --yes && \ - rm -rf /var/lib/{apt,dpkg,cache,log}/ && \ - chmod +x /opt/gdg/bin/gdg - -# Create appuser -RUN useradd -m gdg -USER gdg - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - -CMD ["/opt/gdg/bin/gdg"] +FROM alpine:latest +RUN mkdir /app +COPY gdg /app/gdg +COPY gdg-generate /app/gdg-generate +VOLUME /app/config +VOLUME /app/exports + +WORKDIR /app +ENTRYPOINT ["/app/gdg"] diff --git a/internal/api/README.md b/internal/api/README.md index 2420338c..f5a487c8 100644 --- a/internal/api/README.md +++ b/internal/api/README.md @@ -1,6 +1,4 @@ -Everything in here is intended implemented due to limitations or bugs found. +Everything in here is implemented due to limitations or bugs found in the OpenAPI Grafana API. -Currently, the /api/health is not documented via swagger therefore the extended API provides that functionality. - -The /api/users is documented incorrectly and mapped to the wrong response type, so for the time being relying on a -custom implementation to derive the same functionality. \ No newline at end of file +Any implementation in this package is temporary workaround till the spec/API is fixed or providing custom +data models to better organize results for GDGs use case. 
diff --git a/internal/api/health.go b/internal/api/health.go index 5dadc16f..5e712efa 100644 --- a/internal/api/health.go +++ b/internal/api/health.go @@ -16,7 +16,7 @@ type HealthResponse struct { func (extended *ExtendedApi) Health() (*HealthResponse, error) { health := &HealthResponse{} err := extended.getRequestBuilder(). - Path("/api/health"). + Path("api/health"). ToJSON(health). Method(http.MethodGet).Fetch(context.Background()) return health, err diff --git a/internal/config/config.go b/internal/config/config.go index cf8a9b9f..b2d50acb 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -6,22 +6,29 @@ import ( "fmt" "github.com/esnet/gdg/internal/tools" "github.com/thoas/go-funk" + "log/slog" "os" "path/filepath" "strings" - log "github.com/sirupsen/logrus" - "github.com/spf13/viper" "gopkg.in/yaml.v3" + "log" ) +func (s *Configuration) GetViperConfig(name string) *viper.Viper { + if s.viperConfiguration == nil { + return nil + } + return s.viperConfiguration[name] +} + func (s *Configuration) ClearContexts() { newContext := make(map[string]*GrafanaConfig) newContext["example"] = &GrafanaConfig{ APIToken: "dummy", } - appCfg := s.GetAppConfig() + appCfg := s.GetGDGConfig() appCfg.Contexts = newContext appCfg.ContextName = "example" err := s.SaveToDisk(false) @@ -29,15 +36,15 @@ func (s *Configuration) ClearContexts() { log.Fatal("Failed to make save changes") } - log.Info("All contexts were cleared") + slog.Info("All contexts were cleared") } // GetDefaultGrafanaConfig returns the default aka. 
selected grafana config func (s *Configuration) GetDefaultGrafanaConfig() *GrafanaConfig { - name := s.GetAppConfig().GetContext() + name := s.GetGDGConfig().GetContext() - val, ok := s.GetAppConfig().GetContexts()[name] + val, ok := s.GetGDGConfig().GetContexts()[name] if ok { return val } else { @@ -50,7 +57,7 @@ func (s *Configuration) GetDefaultGrafanaConfig() *GrafanaConfig { // CopyContext Makes a copy of the specified context and write to disk func (s *Configuration) CopyContext(src, dest string) { //Validate context - contexts := s.GetAppConfig().GetContexts() + contexts := s.GetGDGConfig().GetContexts() if len(contexts) == 0 { log.Fatal("Cannot set context. No valid configuration found in importer.yml") } @@ -64,24 +71,24 @@ func (s *Configuration) CopyContext(src, dest string) { } contexts[dest] = newCopy - s.GetAppConfig().ContextName = dest + s.GetGDGConfig().ContextName = dest err = s.SaveToDisk(false) if err != nil { log.Fatal("Failed to make save changes") } - log.Infof("Copied %s context to %s please check your config to confirm", src, dest) + slog.Info("Copied context to destination, please check your config to confirm", "sourceContext", src, "destinationContext", dest) } func (s *Configuration) PrintContext(name string) { name = strings.ToLower(name) - grafana, ok := s.GetAppConfig().GetContexts()[name] + grafana, ok := s.GetGDGConfig().GetContexts()[name] if !ok { - log.Errorf("context %s was not found", name) + slog.Error("context was not found", "context", name) return } d, err := yaml.Marshal(grafana) if err != nil { - log.WithError(err).Fatal("failed to serialize context") + log.Fatal("failed to serialize context", "err", err) } fmt.Printf("---%s:\n%s\n\n", name, string(d)) @@ -90,16 +97,16 @@ func (s *Configuration) PrintContext(name string) { // DeleteContext remove a given context func (s *Configuration) DeleteContext(name string) { name = strings.ToLower(name) //ensure name is lower case - contexts := s.GetAppConfig().GetContexts() + 
contexts := s.GetGDGConfig().GetContexts() _, ok := contexts[name] if !ok { - log.Infof("Context not found, cannot delete context named '%s'", name) + slog.Info("Context not found, cannot delete context", "context", name) return } delete(contexts, name) if len(contexts) != 0 { for key := range contexts { - s.GetAppConfig().ContextName = key + s.GetGDGConfig().ContextName = key break } } @@ -108,33 +115,33 @@ func (s *Configuration) DeleteContext(name string) { if err != nil { log.Fatal("Failed to make save changes") } - log.Infof("Delete %s context and set new context to %s", name, s.GetAppConfig().ContextName) + slog.Info("Deleted context and set new context to", "deletedContext", name, "newActiveContext", s.GetGDGConfig().ContextName) } -// ChangeContext +// ChangeContext changes active context func (s *Configuration) ChangeContext(name string) { name = strings.ToLower(name) - _, ok := s.GetAppConfig().GetContexts()[name] + _, ok := s.GetGDGConfig().GetContexts()[name] if !ok { log.Fatalf("context %s was not found", name) } - s.GetAppConfig().ContextName = name + s.GetGDGConfig().ContextName = name err := s.SaveToDisk(false) if err != nil { log.Fatal("Failed to make save changes") } - log.Infof("Change context to: '%s'", name) + slog.Info("Changed context", "context", name) } // SaveToDisk Persists current configuration to disk func (s *Configuration) SaveToDisk(useViper bool) error { if useViper { - return s.ViperConfig().WriteConfig() + return s.GetViperConfig(ViperGdgConfig).WriteConfig() } - file := s.ViperConfig().ConfigFileUsed() - data, err := yaml.Marshal(s.AppConfig) + file := s.GetViperConfig(ViperGdgConfig).ConfigFileUsed() + data, err := yaml.Marshal(s.gdgConfig) if err == nil { err = os.WriteFile(file, data, 0600) } @@ -142,16 +149,16 @@ func (s *Configuration) SaveToDisk(useViper bool) error { return err } -func (app *AppConfig) GetContext() string { +func (app *GDGAppConfiguration) GetContext() string { return strings.ToLower(app.ContextName) } // 
Temporary function -func (app *AppConfig) GetContextMap() map[string]interface{} { +func (app *GDGAppConfiguration) GetContextMap() map[string]interface{} { response := make(map[string]interface{}) data, err := json.Marshal(app.Contexts) if err != nil { - log.Errorf("could not serialize contexts") + slog.Error("could not serialize contexts") return response } err = json.Unmarshal(data, &response) @@ -164,13 +171,13 @@ func (app *AppConfig) GetContextMap() map[string]interface{} { } var ( - configData *Configuration + configData = new(Configuration) configSearchPaths = []string{"config", ".", "../config", "../../config", "/etc/gdg"} ) // GetCloudConfiguration Returns storage type and configuration func (s *Configuration) GetCloudConfiguration(configName string) (string, map[string]string) { - appData := s.AppConfig.StorageEngine[configName] + appData := s.GetGDGConfig().StorageEngine[configName] storageType := "local" if len(appData) != 0 { storageType = appData["kind"] @@ -178,36 +185,50 @@ func (s *Configuration) GetCloudConfiguration(configName string) (string, map[st return storageType, appData } -// ViperConfig returns the loaded configuration via a viper reference -func (s *Configuration) ViperConfig() *viper.Viper { - return s.defaultConfig -} - -func (app *AppConfig) GetContexts() map[string]*GrafanaConfig { +func (app *GDGAppConfiguration) GetContexts() map[string]*GrafanaConfig { return app.Contexts } // GetContexts returns map of all contexts func (s *Configuration) GetContexts() map[string]*GrafanaConfig { - return s.GetAppConfig().GetContexts() + return s.GetGDGConfig().GetContexts() } // IsDebug returns true if debug mode is enabled func (s *Configuration) IsDebug() bool { - return s.defaultConfig.GetBool("global.debug") + if val := s.GetViperConfig(ViperGdgConfig); val != nil { + return val.GetBool("global.debug") + } + return false } // IgnoreSSL returns true if SSL errors should be ignored func (s *Configuration) IgnoreSSL() bool { - return 
s.defaultConfig.GetBool("global.ignore_ssl_errors") + return s.GetViperConfig(ViperGdgConfig).GetBool("global.ignore_ssl_errors") } func Config() *Configuration { return configData } -func (s *Configuration) GetAppConfig() *AppConfig { - return s.AppConfig +// GetGDGConfig return instance of gdg app configuration +func (s *Configuration) GetGDGConfig() *GDGAppConfiguration { + return s.gdgConfig +} + +// GetTemplateConfig return instance of gdg app configuration +func (s *Configuration) GetTemplateConfig() *TemplatingConfig { + return s.templatingConfig +} + +func (s *TemplatingConfig) GetTemplate(name string) (*TemplateDashboards, bool) { + for ndx, t := range s.Entities.Dashboards { + if t.TemplateName == name { + return &s.Entities.Dashboards[ndx], true + } + } + + return nil, false } // setMapValueEnvOverride recursively iterate over the keys and updates the map value accordingly @@ -215,13 +236,13 @@ func setMapValueEnvOverride(keys []string, mapValue map[string]interface{}, valu if len(keys) > 1 { rawInnerObject, ok := mapValue[keys[0]] if !ok { - log.Warn("No Inner map exists, cannot set Env Override") + slog.Warn("No Inner map exists, cannot set Env Override") return } innerMap, ok := rawInnerObject.(map[string]interface{}) if !ok { - log.Warn("cannot traverse full map path. Unable to set ENV override. Returning ") + slog.Warn("cannot traverse full map path. Unable to set ENV override. Returning ") return } setMapValueEnvOverride(keys[1:], innerMap, value) @@ -248,28 +269,55 @@ func applyEnvOverrides(contexts map[string]interface{}, mapName string, config * return contexts } -func InitConfig(override, defaultConfig string) { - configData = &Configuration{} - appName := "importer" +// buildConfigSearchPath common pattern used when loading configuration for both CLI tools. 
+func buildConfigSearchPath(configFile string, appName *string) []string { var configDirs []string - if override != "" { - overrideDir := filepath.Dir(override) - if overrideDir != "" { - configDirs = append([]string{overrideDir}, configSearchPaths...) + if configFile != "" { + configFileDir := filepath.Dir(configFile) + if configFileDir != "" { + configDirs = append([]string{configFileDir}, configSearchPaths...) } - appName = filepath.Base(override) - appName = strings.TrimSuffix(appName, filepath.Ext(appName)) + *appName = filepath.Base(configFile) + *appName = strings.TrimSuffix(*appName, filepath.Ext(*appName)) } else { configDirs = append(configDirs, configSearchPaths...) } + + return configDirs +} + +func InitTemplateConfig(override string) { + if configData == nil { + log.Fatal("GDG configuration was not able to be loaded, cannot continue") + } + appName := "templates" + configDirs := buildConfigSearchPath(override, &appName) + configData.templatingConfig = new(TemplatingConfig) + + v, err := readViperConfig[TemplatingConfig](appName, configDirs, configData.templatingConfig) + if err != nil { + log.Fatal("unable to read templating configuration") + } + if configData.viperConfiguration == nil { + configData.viperConfiguration = make(map[string]*viper.Viper) + } + configData.viperConfiguration[ViperTemplateConfig] = v +} + +func InitConfig(override, defaultConfig string) { + configData = &Configuration{} + appName := "importer" + configDirs := buildConfigSearchPath(override, &appName) var err error + var v *viper.Viper + configData.gdgConfig = new(GDGAppConfiguration) - configData.defaultConfig, configData.AppConfig, err = readViperConfig(appName, configDirs) + v, err = readViperConfig[GDGAppConfiguration](appName, configDirs, configData.gdgConfig) var configFileNotFoundError viper.ConfigFileNotFoundError ok := errors.As(err, &configFileNotFoundError) if err != nil && ok { - log.Info("No configuration file has been found, creating a default configuration") + 
slog.Info("No configuration file has been found, creating a default configuration") err = os.MkdirAll("config", os.ModePerm) if err != nil { log.Fatal("unable to create configuration folder: 'config'") @@ -280,7 +328,7 @@ func InitConfig(override, defaultConfig string) { } appName = "importer" - configData.defaultConfig, configData.AppConfig, err = readViperConfig(appName, configDirs) + v, err = readViperConfig[GDGAppConfiguration](appName, configDirs, configData.gdgConfig) if err != nil { log.Panic(err) } @@ -288,16 +336,20 @@ func InitConfig(override, defaultConfig string) { } else if err != nil { // config is found but is invalid log.Fatal("Invalid configuration detected, please fix your configuration and try again.") } + if configData.viperConfiguration == nil { + configData.viperConfiguration = make(map[string]*viper.Viper, 0) + } + configData.viperConfiguration[ViperGdgConfig] = v //unmarshall struct - contexts := configData.defaultConfig.GetStringMap("contexts") - contexts = applyEnvOverrides(contexts, "contexts", configData.defaultConfig) + contexts := configData.GetViperConfig(ViperGdgConfig).GetStringMap("contexts") + contexts = applyEnvOverrides(contexts, "contexts", v) contextMaps, err := yaml.Marshal(contexts) if err != nil { log.Fatal("Failed to decode context map, please check your configuration") } - err = yaml.Unmarshal(contextMaps, &configData.AppConfig.Contexts) + err = yaml.Unmarshal(contextMaps, &configData.gdgConfig.Contexts) if err != nil { log.Fatal("No valid configuration file has been found") } @@ -305,8 +357,8 @@ func InitConfig(override, defaultConfig string) { } // readViperConfig utilizes the viper library to load the config from the selected paths -func readViperConfig(appName string, configDirs []string) (*viper.Viper, *AppConfig, error) { - app := &AppConfig{} +func readViperConfig[T any](appName string, configDirs []string, object *T) (*viper.Viper, error) { + v := viper.New() v.SetEnvPrefix("GDG") replacer := 
strings.NewReplacer(".", "__") @@ -321,8 +373,8 @@ func readViperConfig(appName string, configDirs []string) (*viper.Viper, *AppCon err := v.ReadInConfig() if err == nil { //Marshall the data read into a app struct - err = v.Unmarshal(app) + err = v.Unmarshal(object) } - return v, app, err + return v, err } diff --git a/internal/config/config_model.go b/internal/config/config_model.go index 8a76f40f..ee82a9db 100644 --- a/internal/config/config_model.go +++ b/internal/config/config_model.go @@ -4,9 +4,10 @@ import ( "encoding/json" "errors" "fmt" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" - log "github.com/sirupsen/logrus" + "github.com/grafana/grafana-openapi-client-go/models" "github.com/tidwall/gjson" + "log" + "log/slog" "os" "path" "regexp" @@ -27,6 +28,8 @@ const ( OrganizationMetaResource = "org" TeamResource = "teams" UserResource = "users" + TemplatesResource = "templates" + SecureSecretsResource = "secure" ) var orgNamespacedResource = map[ResourceType]bool{ @@ -66,13 +69,13 @@ func (ds *ConnectionSettings) FiltersEnabled() bool { } // GetCredentials returns the credentials for the connection -func (ds *ConnectionSettings) GetCredentials(connectionEntity models.AddDataSourceCommand) (*GrafanaConnection, error) { +func (ds *ConnectionSettings) GetCredentials(connectionEntity models.AddDataSourceCommand, path string) (*GrafanaConnection, error) { data, err := json.Marshal(connectionEntity) if err != nil { - log.Warn("Unable to marshall Connection, unable to fetch credentials") + slog.Warn("Unable to marshall Connection, unable to fetch credentials") return nil, fmt.Errorf("unable to marshall Connection, unable to fetch credentials") } - //Get Auth based on New Matching Rules + //Get SecureData based on New Matching Rules parser := gjson.ParseBytes(data) for _, entry := range ds.MatchingRules { //Check Rules @@ -80,14 +83,14 @@ func (ds *ConnectionSettings) GetCredentials(connectionEntity models.AddDataSour for _, rule := range 
entry.Rules { fieldObject := parser.Get(rule.Field) if !fieldObject.Exists() { - log.Warnf("Unable to find a field titled: %s in datasource, skipping validation rule", rule.Field) + slog.Warn("Unable to find a matching field in datasource, skipping validation rule", "fieldName", rule.Field) valid = false continue } fieldValue := fieldObject.String() p, err := regexp.Compile(rule.Regex) if err != nil { - log.Warnf("Unable to compile regex: %s to match against field %s, skipping validation", rule.Regex, rule.Field) + slog.Warn("Unable to compile regex to match against field, skipping validation", "regex", rule.Regex, "fieldName", rule.Field) valid = false } if !p.Match([]byte(fieldValue)) { @@ -96,7 +99,7 @@ func (ds *ConnectionSettings) GetCredentials(connectionEntity models.AddDataSour } } if valid { - return entry.Auth, nil + return entry.GetAuth(path) } } @@ -108,7 +111,7 @@ func (ds *ConnectionSettings) GetCredentials(connectionEntity models.AddDataSour func (ds *ConnectionSettings) IsExcluded(item interface{}) bool { data, err := json.Marshal(item) if err != nil { - log.Warn("Unable to serialize object, cannot validate") + slog.Warn("Unable to serialize object, cannot validate") return true } @@ -124,7 +127,7 @@ func (ds *ConnectionSettings) IsExcluded(item interface{}) bool { fieldValue := fieldParse.String() p, err := regexp.Compile(field.Regex) if err != nil { - log.Warnf("Invalid regex for filter rule with field: %s", field.Field) + slog.Warn("Invalid regex for filter rule", "field", field.Field) return true } match := p.Match([]byte(fieldValue)) @@ -152,10 +155,10 @@ func (s *GrafanaConfig) GetFilterOverrides() *FilterOverrides { // GetDataSourceSettings returns the datasource settings for the connection func (s *GrafanaConfig) GetDataSourceSettings() *ConnectionSettings { - if s.DataSourceSettings == nil { - s.DataSourceSettings = &ConnectionSettings{} + if s.ConnectionSettings == nil { + s.ConnectionSettings = &ConnectionSettings{} } - return 
s.DataSourceSettings + return s.ConnectionSettings } // GetPath returns the path of the resource type @@ -163,31 +166,6 @@ func (s *GrafanaConfig) GetPath(r ResourceType) string { return r.GetPath(s.OutputPath) } -// GetDashboardOutput returns the path of the dashboards output -func (s *GrafanaConfig) GetDashboardOutput() string { - return path.Join(s.OutputPath, DashboardResource) -} - -func (s *GrafanaConfig) GetDataSourceOutput() string { - return path.Join(s.OutputPath, ConnectionResource) -} - -func (s *GrafanaConfig) GetAlertNotificationOutput() string { - return path.Join(s.OutputPath, AlertNotificationResource) -} - -func (s *GrafanaConfig) GetUserOutput() string { - return path.Join(s.OutputPath, UserResource) -} - -func (s *GrafanaConfig) GetFolderOutput() string { - return path.Join(s.OutputPath, FolderResource) -} - -func (s *GrafanaConfig) GetTeamOutput() string { - return path.Join(s.OutputPath, TeamResource) -} - // GetOrgMonitoredFolders return the OrganizationMonitoredFolders that override a given Org func (s *GrafanaConfig) GetOrgMonitoredFolders(orgId int64) []string { for _, item := range s.MonitoredFoldersOverride { @@ -233,8 +211,8 @@ func (s *GrafanaConfig) IsAdminEnabled() bool { } // GetCredentials return credentials for a given datasource or falls back on default value -func (s *GrafanaConfig) GetCredentials(dataSourceName models.AddDataSourceCommand) (*GrafanaConnection, error) { - source, err := s.GetDataSourceSettings().GetCredentials(dataSourceName) +func (s *GrafanaConfig) GetCredentials(dataSourceName models.AddDataSourceCommand, location string) (*GrafanaConnection, error) { + source, err := s.GetDataSourceSettings().GetCredentials(dataSourceName, location) if err == nil { return source, nil } diff --git a/internal/config/config_new_ctx.go b/internal/config/config_new_ctx.go index 4cb5a9c8..358556f6 100644 --- a/internal/config/config_new_ctx.go +++ b/internal/config/config_new_ctx.go @@ -1,16 +1,20 @@ package config import ( - 
"github.com/AlecAivazis/survey/v2" - log "github.com/sirupsen/logrus" + "encoding/json" + "log" + "log/slog" + "os" + "path/filepath" "strings" + + "github.com/AlecAivazis/survey/v2" ) func (s *Configuration) NewContext(name string) { - name = strings.ToLower(name) // forces lowercase contexts answers := GrafanaConfig{ - DataSourceSettings: &ConnectionSettings{ + ConnectionSettings: &ConnectionSettings{ MatchingRules: make([]RegexMatchesList, 0), }, } @@ -20,8 +24,8 @@ func (s *Configuration) NewContext(name string) { DSUser string DSPassword string }{} - //Setup question that drive behavior - var behaviorQuestions = []*survey.Question{ + // Setup question that drive behavior + behaviorQuestions := []*survey.Question{ { Name: "AuthType", Prompt: &survey.Select{ @@ -30,10 +34,6 @@ func (s *Configuration) NewContext(name string) { Default: "basicauth", }, }, - { - Name: "Folders", - Prompt: &survey.Input{Message: "List the folders you wish to manage (example: folder1,folder2)? (Blank for General)?"}, - }, { Name: "DSUser", Prompt: &survey.Input{Message: "Please enter your datasource default username"}, @@ -42,48 +42,34 @@ func (s *Configuration) NewContext(name string) { Name: "DSPassword", Prompt: &survey.Password{Message: "Please enter your datasource default password"}, }, + { + Name: "Folders", + Prompt: &survey.Input{Message: "List the folders you wish to manage (example: folder1,folder2)? 
(Blank for General)?"}, + }, } err := survey.Ask(behaviorQuestions, &promptAnswers) if err != nil { log.Fatal("Failed to get valid answers to generate a new context") } - //Set Watched Folders + // Set Watched Folders foldersList := strings.Split(promptAnswers.Folders, ",") if len(foldersList) > 0 && foldersList[0] != "" { answers.MonitoredFolders = foldersList } else { answers.MonitoredFolders = []string{"General"} } - //Set Default Datasource - if promptAnswers.DSUser != "" && promptAnswers.DSPassword != "" { - ds := GrafanaConnection{ - User: promptAnswers.DSUser, - Password: promptAnswers.DSPassword, - } - answers.DataSourceSettings.MatchingRules = []RegexMatchesList{ - { - Rules: []MatchingRule{ - { - Field: "name", - Regex: ".*", - }, - }, - Auth: &ds, - }, - } - } - - //Setup grafana required field based on responses - var questions = []*survey.Question{ + // Setup grafana required field based on responses + questions := []*survey.Question{ { Name: "URL", Prompt: &survey.Input{Message: "What is the Grafana URL include http(s)?"}, }, { - Name: "OutputPath", - Prompt: &survey.Input{Message: "Destination Folder?"}, + Name: "OutputPath", + Prompt: &survey.Input{Message: "Destination Folder?"}, + Validate: survey.Required, }, } @@ -98,12 +84,12 @@ func (s *Configuration) NewContext(name string) { if promptAnswers.AuthType == "both" || promptAnswers.AuthType == "basicauth" { questions = append(questions, &survey.Question{ Name: "UserName", - Prompt: &survey.Input{Message: "Please enter your admin UserName"}, + Prompt: &survey.Input{Message: "Please enter your grafana admin Username"}, Validate: survey.Required, }) questions = append(questions, &survey.Question{ Name: "Password", - Prompt: &survey.Password{Message: "Please enter your admin Password"}, + Prompt: &survey.Password{Message: "Please enter your grafana admin Password"}, Validate: survey.Required, }) @@ -114,14 +100,48 @@ func (s *Configuration) NewContext(name string) { log.Fatal(err.Error()) } - 
contextMap := s.GetAppConfig().GetContexts() + // Set Default Datasource + if promptAnswers.DSUser != "" && promptAnswers.DSPassword != "" { + ds := GrafanaConnection{ + "user": promptAnswers.DSUser, + "basicAuthPassword": promptAnswers.DSPassword, + } + + location := filepath.Join(answers.OutputPath, SecureSecretsResource) + err = os.MkdirAll(location, 0750) + if err != nil { + log.Fatalf("unable to create default secret location. location: %s, %v", location, err) + } + data, err := json.MarshalIndent(&ds, "", " ") + if err != nil { + log.Fatalf("unable to turn map into json representation. location: %s, %v", location, err) + } + secretFileLocation := filepath.Join(location, "default.json") + err = os.WriteFile(secretFileLocation, data, 0600) + if err != nil { + log.Fatalf("unable to write secret default file. location: %s, %v", secretFileLocation, err) + } + answers.ConnectionSettings.MatchingRules = []RegexMatchesList{ + { + Rules: []MatchingRule{ + { + Field: "name", + Regex: ".*", + }, + }, + SecureData: "default.json", + }, + } + + } + + contextMap := s.GetGDGConfig().GetContexts() contextMap[name] = &answers - s.GetAppConfig().ContextName = name + s.GetGDGConfig().ContextName = name err = s.SaveToDisk(false) if err != nil { log.Fatal("could not save configuration.") } - log.Infof("New configuration %s has been created", name) - + slog.Info("New configuration has been created", "newContext", name) } diff --git a/internal/config/config_test.go b/internal/config/config_test.go index 10b98270..8cfc89c2 100644 --- a/internal/config/config_test.go +++ b/internal/config/config_test.go @@ -3,9 +3,9 @@ package config_test import ( "fmt" "github.com/esnet/gdg/internal/config" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" - log "github.com/sirupsen/logrus" + "github.com/grafana/grafana-openapi-client-go/models" "golang.org/x/exp/slices" + "log/slog" "os" "strings" "testing" @@ -36,21 +36,24 @@ func DuplicateConfig(t *testing.T) string { } func 
TestSetup(t *testing.T) { - os.Setenv("GDG_CONTEXT_NAME", "qa") //clear all ENV values for _, key := range os.Environ() { if strings.Contains(key, "GDG_") { os.Unsetenv(key) } } + cwd, _ := os.Getwd() + if strings.Contains(cwd, "config") { + os.Chdir("../../") + } os.Setenv("GDG_CONTEXT_NAME", "qa") config.InitConfig("testing.yml", "") - conf := config.Config().ViperConfig() - log.Info(conf.ConfigFileUsed()) + conf := config.Config().GetViperConfig(config.ViperGdgConfig) + slog.Info(conf.ConfigFileUsed()) - confobj := config.Config().GetAppConfig() - log.Infof(confobj.ContextName) + confobj := config.Config().GetGDGConfig() + slog.Info(confobj.ContextName) assert.NotNil(t, conf) context := conf.GetString("context_name") assert.Equal(t, context, "qa") @@ -69,11 +72,11 @@ func TestWatchedFoldersConfig(t *testing.T) { os.Setenv("GDG_CONTEXT_NAME", "qa") config.InitConfig("testing.yml", "") - conf := config.Config().ViperConfig() - log.Info(conf.ConfigFileUsed()) + conf := config.Config().GetViperConfig(config.ViperGdgConfig) + slog.Info(conf.ConfigFileUsed()) - confobj := config.Config().GetAppConfig() - log.Infof(confobj.ContextName) + confobj := config.Config().GetGDGConfig() + slog.Info(confobj.ContextName) assert.NotNil(t, conf) context := conf.GetString("context_name") assert.Equal(t, context, "qa") @@ -101,7 +104,7 @@ func TestWatchedFoldersConfig(t *testing.T) { func TestSetupDifferentPath(t *testing.T) { cfgFile := DuplicateConfig(t) config.InitConfig(cfgFile, "") - conf := config.Config().ViperConfig() + conf := config.Config().GetViperConfig(config.ViperGdgConfig) assert.NotNil(t, conf) context := conf.GetString("context_name") assert.Equal(t, context, "production") @@ -114,7 +117,7 @@ func TestConfigEnv(t *testing.T) { os.Setenv("GDG_CONTEXT_NAME", "testing") os.Setenv("GDG_CONTEXTS__TESTING__URL", "www.google.com") config.InitConfig("testing.yml", "") - conf := config.Config().ViperConfig() + conf := config.Config().GetViperConfig(config.ViperGdgConfig) 
context := conf.GetString("context_name") assert.Equal(t, context, "testing") url := conf.GetString("contexts.testing.url") @@ -124,7 +127,7 @@ func TestConfigEnv(t *testing.T) { os.Setenv("GDG_CONTEXT_NAME", "production") os.Setenv("GDG_CONTEXTS__PRODUCTION__URL", "grafana.com") config.InitConfig("testing.yml", "") - conf = config.Config().ViperConfig() + conf = config.Config().GetViperConfig(config.ViperGdgConfig) url = conf.GetString("contexts.production.url") assert.Equal(t, url, "grafana.com") } @@ -137,18 +140,21 @@ func validateGrafanaQA(t *testing.T, grafana *config.GrafanaConfig) { folders := grafana.GetMonitoredFolders() assert.True(t, funk.Contains(folders, "Folder1")) assert.True(t, funk.Contains(folders, "Folder2")) - assert.Equal(t, "qa/connections", grafana.GetDataSourceOutput()) - assert.Equal(t, "qa/dashboards", grafana.GetDashboardOutput()) - dsSettings := grafana.DataSourceSettings + assert.Equal(t, "test/data/org_1/connections", grafana.GetPath(config.ConnectionResource)) + assert.Equal(t, "test/data/org_1/dashboards", grafana.GetPath(config.DashboardResource)) + dsSettings := grafana.ConnectionSettings request := models.AddDataSourceCommand{} - assert.Equal(t, len(grafana.DataSourceSettings.MatchingRules), 3) + assert.Equal(t, len(grafana.ConnectionSettings.MatchingRules), 3) //Last Entry is the default - defaultSettings := grafana.DataSourceSettings.MatchingRules[2].Auth - assert.Equal(t, "user", defaultSettings.User) - assert.Equal(t, "password", defaultSettings.Password) + secureLoc := grafana.GetPath(config.SecureSecretsResource) + defaultSettings, err := grafana.ConnectionSettings.MatchingRules[2].GetAuth(secureLoc) + assert.Nil(t, err) + assert.Equal(t, "user", defaultSettings.User()) + assert.Equal(t, "password", defaultSettings.Password()) request.Name = "Complex Name" - defaultSettings, _ = dsSettings.GetCredentials(request) - assert.Equal(t, "test", defaultSettings.User) - assert.Equal(t, "secret", defaultSettings.Password) + 
securePath := grafana.GetPath(config.SecureSecretsResource) + defaultSettings, _ = dsSettings.GetCredentials(request, securePath) + assert.Equal(t, "test", defaultSettings.User()) + assert.Equal(t, "secret", defaultSettings.Password()) } diff --git a/internal/config/types.go b/internal/config/types.go index 3853efd1..4af44256 100644 --- a/internal/config/types.go +++ b/internal/config/types.go @@ -1,12 +1,43 @@ package config import ( + "encoding/json" + "errors" "github.com/spf13/viper" + "log/slog" + "os" + "path/filepath" +) + +const ( + ViperGdgConfig = "gdg" + ViperTemplateConfig = "template" ) type Configuration struct { - defaultConfig *viper.Viper - AppConfig *AppConfig + viperConfiguration map[string]*viper.Viper + gdgConfig *GDGAppConfiguration + templatingConfig *TemplatingConfig +} + +type TemplatingConfig struct { + Entities TemplateEntities `mapstructure:"entities"` +} + +type TemplateEntities struct { + Dashboards []TemplateDashboards `mapstructure:"dashboards"` +} + +type TemplateDashboards struct { + TemplateName string `mapstructure:"template_name"` + DashboardEntities []TemplateDashboardEntity `mapstructure:"output"` +} + +type TemplateDashboardEntity struct { + Folder string `mapstructure:"folder"` + OrgId int64 `mapstructure:"org_id"` + DashboardName string `mapstructure:"dashboard_name"` + TemplateData map[string]interface{} `mapstructure:"template_data"` } // AppGlobals is the global configuration for the application @@ -15,8 +46,8 @@ type AppGlobals struct { IgnoreSSLErrors bool `mapstructure:"ignore_ssl_errors" yaml:"ignore_ssl_errors"` } -// AppConfig is the configuration for the application -type AppConfig struct { +// GDGAppConfiguration is the configuration for the application +type GDGAppConfiguration struct { ContextName string `mapstructure:"context_name" yaml:"context_name"` StorageEngine map[string]map[string]string `mapstructure:"storage_engine" yaml:"storage_engine"` Contexts map[string]*GrafanaConfig `mapstructure:"contexts" 
yaml:"contexts"` @@ -35,7 +66,7 @@ type GrafanaConfig struct { OrganizationId int64 `mapstructure:"organization_id" yaml:"organization_id"` MonitoredFoldersOverride []MonitoredOrgFolders `mapstructure:"watched_folders_override" yaml:"watched_folders_override"` MonitoredFolders []string `mapstructure:"watched" yaml:"watched"` - DataSourceSettings *ConnectionSettings `mapstructure:"connections" yaml:"connections"` + ConnectionSettings *ConnectionSettings `mapstructure:"connections" yaml:"connections"` //Datasources are deprecated, please use Connections LegacyConnectionSettings map[string]interface{} `mapstructure:"datasources" yaml:"datasources"` FilterOverrides *FilterOverrides `mapstructure:"filter_override" yaml:"filter_override"` @@ -77,8 +108,34 @@ type ConnectionSettings struct { // RegexMatchesList model wraps regex matches list for grafana type RegexMatchesList struct { - Rules []MatchingRule `mapstructure:"rules" yaml:"rules,omitempty"` - Auth *GrafanaConnection `mapstructure:"auth" yaml:"auth,omitempty"` + Rules []MatchingRule `mapstructure:"rules" yaml:"rules,omitempty"` + SecureData string `mapstructure:"secure_data" yaml:"secure_data,omitempty"` + LegacyAuth *GrafanaConnection `mapstructure:"auth" yaml:"auth,omitempty" json:"auth,omitempty"` +} + +func (r RegexMatchesList) GetAuth(path string) (*GrafanaConnection, error) { + if r.LegacyAuth != nil && len(*r.LegacyAuth) > 0 { + slog.Warn("the 'auth' key is deprecated, please update to use 'secure_data'") + } + if r.SecureData == "" { + return r.LegacyAuth, nil + } + secretLocation := filepath.Join(path, r.SecureData) + result := new(GrafanaConnection) + raw, err := os.ReadFile(secretLocation) + if err != nil { + msg := "unable to read secrets at location" + slog.Error(msg, slog.String("file", secretLocation)) + return nil, errors.New(msg) + } + err = json.Unmarshal(raw, result) + if err != nil { + msg := "unable to read JSON secrets" + slog.Error(msg, slog.Any("err", err), slog.String("file", 
secretLocation)) + return nil, errors.New(msg) + } + + return result, nil } // CredentialRule model wraps regex and auth for grafana @@ -107,7 +164,12 @@ type ConnectionFilters struct { } // GrafanaConnection Default connection credentials -type GrafanaConnection struct { - User string `yaml:"user"` - Password string `yaml:"password"` +type GrafanaConnection map[string]string + +func (g GrafanaConnection) User() string { + return g["user"] +} + +func (g GrafanaConnection) Password() string { + return g["basicAuthPassword"] } diff --git a/internal/log/log.go b/internal/log/log.go index 4fb16626..23c4a378 100644 --- a/internal/log/log.go +++ b/internal/log/log.go @@ -1,14 +1,35 @@ package log import ( - log "github.com/sirupsen/logrus" - "io" + "github.com/lmittmann/tint" + "github.com/mattn/go-isatty" + "log" + "log/slog" + "os" + "time" ) // InitializeAppLogger initialize logger, invoked from main -func InitializeAppLogger() { - log.SetOutput(io.Discard) - log.AddHook(&StdOutLoggingHook{&log.TextFormatter{ForceColors: true}}) - log.AddHook(&StdErrLoggingHook{&log.TextFormatter{ForceColors: true}}) +func InitializeAppLogger(stdout *os.File, stderr *os.File, debug bool) { + errStream := stderr + outStream := stdout + level := slog.LevelInfo + showSource := false + if debug { + level = slog.LevelDebug + showSource = true + } + + opts := &tint.Options{ + Level: level, + TimeFormat: time.DateTime, + AddSource: showSource, + NoColor: !isatty.IsTerminal(outStream.Fd())} + + //Splits the logging between stdout/stderr as appropriate + myHandler := NewContextHandler(slog.Default().Handler(), outStream, errStream, opts) + customSplitStreamLogger := slog.New(myHandler) + slog.SetDefault(customSplitStreamLogger) + log.SetOutput(os.Stderr) } diff --git a/internal/log/slog_handler.go b/internal/log/slog_handler.go new file mode 100644 index 00000000..07891a85 --- /dev/null +++ b/internal/log/slog_handler.go @@ -0,0 +1,61 @@ +package log + +import ( + "context" + 
"github.com/lmittmann/tint" + "log/slog" + + "os" +) + +type ContextHandler struct { + handler slog.Handler + errorHandler slog.Handler + outStream *os.File + errStream *os.File + options *tint.Options +} + +func NewContextHandler(h slog.Handler, out *os.File, err *os.File, opts *tint.Options) *ContextHandler { + ch := &ContextHandler{outStream: out, errStream: err, options: opts} + if lh, ok := h.(*ContextHandler); ok { + if lh.outStream == out && lh.errStream == err { + return lh + } + } + + errOpts := *opts + errOpts.Level = slog.LevelWarn + // create a error logger + // set global logger with custom options + errorHandler := tint.NewHandler(err, &errOpts) + ch.errorHandler = errorHandler + + outHandler := tint.NewHandler(out, opts) + ch.handler = outHandler + return ch +} + +func (h *ContextHandler) Enabled(ctx context.Context, level slog.Level) bool { + if level >= slog.LevelWarn { + return h.errorHandler.Enabled(ctx, level) + } + + return h.handler.Enabled(ctx, level) +} + +func (h *ContextHandler) Handle(ctx context.Context, r slog.Record) error { + + if r.Level >= slog.LevelWarn { + return h.errorHandler.Handle(ctx, r) + } + return h.handler.Handle(ctx, r) +} + +func (h *ContextHandler) WithAttrs(attrs []slog.Attr) slog.Handler { + return NewContextHandler(h.handler.WithAttrs(attrs), h.outStream, h.errStream, nil) +} + +func (h *ContextHandler) WithGroup(name string) slog.Handler { + return NewContextHandler(h.handler.WithGroup(name), h.outStream, h.errStream, nil) +} diff --git a/internal/service/alertnotifications.go b/internal/service/alertnotifications.go index 927c5933..b3f31b4f 100644 --- a/internal/service/alertnotifications.go +++ b/internal/service/alertnotifications.go @@ -3,12 +3,12 @@ package service import ( "encoding/json" "github.com/esnet/gdg/internal/config" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/legacy_alerts_notification_channels" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" + 
"github.com/grafana/grafana-openapi-client-go/models" + "log/slog" "strings" "github.com/gosimple/slug" - log "github.com/sirupsen/logrus" + "log" ) // AlertNotificationsApi Contract definition @@ -23,8 +23,7 @@ type AlertNotificationsApi interface { //ListAlertNotifications: list all currently configured notification channels func (s *DashNGoImpl) ListAlertNotifications() []*models.AlertNotification { - params := legacy_alerts_notification_channels.NewGetAlertNotificationChannelsParams() - channels, err := s.client.LegacyAlertsNotificationChannels.GetAlertNotificationChannels(params, s.getAuth()) + channels, err := s.GetClient().LegacyAlertsNotificationChannels.GetAlertNotificationChannels() if err != nil { log.Panic(err) } @@ -42,12 +41,12 @@ func (s *DashNGoImpl) DownloadAlertNotifications() []string { alertnotifications = s.ListAlertNotifications() for _, an := range alertnotifications { if anPacked, err = json.Marshal(an); err != nil { - log.Errorf("error marshalling %s to json with %s", an.Name, err) + slog.Error("error marshalling to json", "filename", an.Name, "err", err.Error()) continue } anPath := buildResourcePath(slug.Make(an.Name), config.AlertNotificationResource) if err = s.storage.WriteFile(anPath, anPacked); err != nil { - log.Errorf("error writing %s to file with %s", slug.Make(an.Name), err) + slog.Error("error writing to file", "filename", slug.Make(an.Name), "err", err.Error()) } else { dataFiles = append(dataFiles, anPath) } @@ -57,14 +56,12 @@ func (s *DashNGoImpl) DownloadAlertNotifications() []string { // Removes all current alert notification channels func (s *DashNGoImpl) DeleteAllAlertNotifications() []string { - var an []string = make([]string, 0) + var an = make([]string, 0) items := s.ListAlertNotifications() for _, item := range items { - params := legacy_alerts_notification_channels.NewDeleteAlertNotificationChannelParams() - params.NotificationChannelID = item.ID - _, err := 
s.client.LegacyAlertsNotificationChannels.DeleteAlertNotificationChannel(params, s.getAuth()) + _, err := s.GetClient().LegacyAlertsNotificationChannels.DeleteAlertNotificationChannel(item.ID) if err != nil { - log.Error("Failed to delete notification") + slog.Error("Failed to delete notification") continue } an = append(an, item.Name) @@ -86,7 +83,7 @@ func (s *DashNGoImpl) UploadAlertNotifications() []string { dirPath := config.Config().GetDefaultGrafanaConfig().GetPath(config.AlertNotificationResource) filesInDir, err = s.storage.FindAllFiles(dirPath, true) if err != nil { - log.WithError(err).Fatalf("Unable to find Alert data in Storage System %s", s.storage.Name()) + log.Fatalf("Unable to find Alert data in Storage System %s, err: %s", s.storage.Name(), err.Error()) } alertnotifications = s.ListAlertNotifications() @@ -94,31 +91,27 @@ func (s *DashNGoImpl) UploadAlertNotifications() []string { for _, file := range filesInDir { if strings.HasSuffix(file, ".json") { if raw, err = s.storage.ReadFile(file); err != nil { - log.Errorf("error reading file %s with %s", file, err) + slog.Error("error reading file", "file", file, "err", err) continue } var newAlertNotification models.CreateAlertNotificationCommand if err = json.Unmarshal(raw, &newAlertNotification); err != nil { - log.Errorf("error unmarshalling json with %s", err) + slog.Error("error unmarshalling json", "err", err) continue } for _, existing := range alertnotifications { if existing.Name == newAlertNotification.Name { - dp := legacy_alerts_notification_channels.NewDeleteAlertNotificationChannelByUIDParams() - dp.NotificationChannelUID = existing.UID - if _, err := s.client.LegacyAlertsNotificationChannels.DeleteAlertNotificationChannelByUID(dp, s.getAuth()); err != nil { - log.Errorf("error on deleting datasource %s with %s", newAlertNotification.Name, err) + if _, err := s.GetClient().LegacyAlertsNotificationChannels.DeleteAlertNotificationChannelByUID(existing.UID); err != nil { + slog.Error("error 
on deleting datasource", "datasource", newAlertNotification.Name, "err", err) } break } } - params := legacy_alerts_notification_channels.NewCreateAlertNotificationChannelParams() - params.Body = &newAlertNotification - if _, err = s.client.LegacyAlertsNotificationChannels.CreateAlertNotificationChannel(params, s.getAuth()); err != nil { - log.Errorf("error on importing datasource %s with %s", newAlertNotification.Name, err) + if _, err = s.GetClient().LegacyAlertsNotificationChannels.CreateAlertNotificationChannel(&newAlertNotification); err != nil { + slog.Error("error on importing datasource", "datasource", newAlertNotification.Name, "err", err) continue } exported = append(exported, file) diff --git a/internal/service/common.go b/internal/service/common.go index e505a14a..dc0071fd 100644 --- a/internal/service/common.go +++ b/internal/service/common.go @@ -4,8 +4,9 @@ import ( "errors" "fmt" "github.com/esnet/gdg/internal/config" + "github.com/esnet/gdg/internal/tools" "github.com/gosimple/slug" - log "github.com/sirupsen/logrus" + "log/slog" "os" "path/filepath" "strings" @@ -32,14 +33,6 @@ func updateSlug(board string) string { return "" } -// CreateDestinationPath Handle osMkdir Errors -func CreateDestinationPath(v string) { - err := os.MkdirAll(v, 0750) - if err != nil { - log.WithError(err).Panicf("unable to create path %s", v) - } -} - // getFolderFromResourcePath if a use encodes a path separator in path, we can't determine the folder name. This strips away // all the known components of a resource type leaving only the folder name. 
func getFolderFromResourcePath(storageEngine string, filePath string, resourceType config.ResourceType) (string, error) { @@ -56,13 +49,13 @@ func getFolderFromResourcePath(storageEngine string, filePath string, resourceTy ndx := strings.LastIndex(folderName, string(os.PathSeparator)) if ndx != -1 { folderName = folderName[0:ndx] - log.Debugf("Folder name is: %s", folderName) + slog.Debug("Folder name is", "folder", folderName) return folderName, nil } return "", errors.New("unable to parse resource to retrieve folder name") } -func buildResourceFolder(folderName string, resourceType config.ResourceType) string { +func BuildResourceFolder(folderName string, resourceType config.ResourceType) string { if resourceType == config.DashboardResource && folderName == "" { folderName = DefaultFolderName } @@ -72,13 +65,13 @@ func buildResourceFolder(folderName string, resourceType config.ResourceType) st folderName = strings.ReplaceAll(folderName, strSeperator, fmt.Sprintf("//%s", strSeperator)) } v := fmt.Sprintf("%s/%s", config.Config().GetDefaultGrafanaConfig().GetPath(resourceType), folderName) - CreateDestinationPath(v) + tools.CreateDestinationPath(v) return v } func buildResourcePath(folderName string, resourceType config.ResourceType) string { v := fmt.Sprintf("%s/%s.json", config.Config().GetDefaultGrafanaConfig().GetPath(resourceType), folderName) - CreateDestinationPath(filepath.Dir(v)) + tools.CreateDestinationPath(filepath.Dir(v)) return v } diff --git a/internal/service/common_test.go b/internal/service/common_test.go index 514df338..ecae0cb3 100644 --- a/internal/service/common_test.go +++ b/internal/service/common_test.go @@ -5,9 +5,27 @@ import ( "github.com/gosimple/slug" "github.com/stretchr/testify/assert" "os" + "strings" "testing" ) +func TestRelativePathLogin(t *testing.T) { + cwd, err := os.Getwd() + assert.Nil(t, err) + if strings.Contains(cwd, "service") { + os.Chdir("../..") + } + os.Setenv("GDG_CONTEXTS__TESTING__URL", 
"http://localhost:3000/grafana/") + config.InitConfig("config/testing.yml", "'") + defer os.Unsetenv("GDG_CONTEXTS__TESTING__URL") + + svc := NewApiService("dummy") + _, cfg := svc.(*DashNGoImpl).getNewClient() + assert.Equal(t, cfg.Host, "localhost:3000") + assert.Equal(t, cfg.BasePath, "/grafana/api") + +} + // Validates the paths for the various entity types using the common // code used to create folders and generate paths. func TestSlug(t *testing.T) { @@ -22,26 +40,26 @@ func TestUserPath(t *testing.T) { err := os.Setenv("GDG_CONTEXT_NAME", "qa") assert.Nil(t, err) config.InitConfig("testing.yml", "'") - path := buildResourceFolder("", config.UserResource) - assert.Equal(t, "qa/users/", path) + path := BuildResourceFolder("", config.UserResource) + assert.Equal(t, "test/data/users/", path) } func TestBuildDashboardPath(t *testing.T) { - result := buildResourceFolder("General", config.DashboardResource) - assert.Equal(t, "qa/org_1/dashboards/General", result) + result := BuildResourceFolder("General", config.DashboardResource) + assert.Equal(t, "test/data/org_1/dashboards/General", result) } func TestBuildFolderSourcePath(t *testing.T) { result := buildResourcePath(slug.Make("Some Folder"), config.FolderResource) - assert.Equal(t, "qa/org_1/folders/some-folder.json", result) + assert.Equal(t, "test/data/org_1/folders/some-folder.json", result) } func TestBuildDataSourcePath(t *testing.T) { result := buildResourcePath(slug.Make("My DS"), config.ConnectionResource) - assert.Equal(t, "qa/org_1/connections/my-ds.json", result) + assert.Equal(t, "test/data/org_1/connections/my-ds.json", result) } func TestBuildAlertNotificationPath(t *testing.T) { result := buildResourcePath("SomeNotification", config.AlertNotificationResource) - assert.Equal(t, "qa/org_1/alertnotifications/SomeNotification.json", result) + assert.Equal(t, "test/data/org_1/alertnotifications/SomeNotification.json", result) } diff --git a/internal/service/connection_permissions.go 
b/internal/service/connection_permissions.go index 84732ec7..129b44bb 100644 --- a/internal/service/connection_permissions.go +++ b/internal/service/connection_permissions.go @@ -3,19 +3,21 @@ package service import ( "encoding/json" "fmt" + "log" + "log/slog" + "path/filepath" + "strings" + "github.com/esnet/gdg/internal/config" "github.com/esnet/gdg/internal/service/filters" "github.com/esnet/gdg/internal/tools" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/datasource_permissions" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" "github.com/gosimple/slug" - log "github.com/sirupsen/logrus" - "path/filepath" - "strings" + "github.com/grafana/grafana-openapi-client-go/client/datasource_permissions" + "github.com/grafana/grafana-openapi-client-go/models" ) type ConnectionPermissions interface { - //Permissions Enterprise only + // Permissions Enterprise only ListConnectionPermissions(filter filters.Filter) map[*models.DataSourceListItemDTO]*models.DataSourcePermissionsDTO DownloadConnectionPermissions(filter filters.Filter) []string UploadConnectionPermissions(filter filters.Filter) []string @@ -32,7 +34,7 @@ func (s *DashNGoImpl) ListConnectionPermissions(filter filters.Filter) map[*mode for ndx, connection := range connections { permission, err := s.getConnectionPermission(connection.ID) if err != nil { - log.Errorf("unable to retrieve connection permissions for ID: %d", connection.ID) + slog.Error("unable to retrieve connection permissions for ID", "id", connection.ID) continue } result[&connections[ndx]] = permission.GetPayload() @@ -44,7 +46,7 @@ func (s *DashNGoImpl) ListConnectionPermissions(filter filters.Filter) map[*mode // DownloadConnectionPermissions download permissions to local file system func (s *DashNGoImpl) DownloadConnectionPermissions(filter filters.Filter) []string { - log.Infof("Downloading connection permissions") + slog.Info("Downloading connection permissions") var ( dsPacked []byte err error @@ -53,12 +55,12 
@@ func (s *DashNGoImpl) DownloadConnectionPermissions(filter filters.Filter) []str currentPermissions := s.ListConnectionPermissions(filter) for connection, permission := range currentPermissions { if dsPacked, err = json.MarshalIndent(permission, "", " "); err != nil { - log.Errorf("unable to marshall json %s for %s Permissions\n", err, connection.Name) + slog.Error("unable to marshall json ", "err", err.Error(), "connectionName", connection.Name) continue } dsPath := buildResourcePath(slug.Make(connection.Name), config.ConnectionPermissionResource) if err = s.storage.WriteFile(dsPath, dsPacked); err != nil { - log.Errorf("%s for %s\n", err.Error(), slug.Make(connection.Name)) + slog.Error("unable to write file. ", "filename", slug.Make(connection.Name), "error", err.Error()) } else { dataFiles = append(dataFiles, dsPath) } @@ -78,17 +80,17 @@ func (s *DashNGoImpl) UploadConnectionPermissions(filter filters.Filter) []strin filesInDir, err := s.storage.FindAllFiles(config.Config().GetDefaultGrafanaConfig().GetPath(config.ConnectionPermissionResource), false) if err != nil { - log.WithError(err).Fatal("Failed to read folders permission imports") + log.Fatalf("Failed to read folders permission imports: %s", err.Error()) } for _, file := range filesInDir { fileLocation := filepath.Join(config.Config().GetDefaultGrafanaConfig().GetPath(config.ConnectionPermissionResource), file) if !filter.ValidateAll(map[filters.FilterType]string{filters.Name: strings.ReplaceAll(file, ".json", "")}) { - log.Debugf("File does not match pattern, skipping %s", file) + slog.Debug("File does not match pattern, skipping file", "filename", file) continue } if strings.HasSuffix(file, ".json") { if rawFolder, err = s.storage.ReadFile(fileLocation); err != nil { - log.WithError(err).Errorf("failed to read file %s", fileLocation) + slog.Error("failed to read file", "filename", fileLocation, "err", err) continue } } @@ -96,24 +98,24 @@ func (s *DashNGoImpl) UploadConnectionPermissions(filter 
filters.Filter) []strin newEntries := new(models.DataSourcePermissionsDTO) err = json.Unmarshal(rawFolder, &newEntries) if err != nil { - log.Warnf("Failed to Decode payload for %s", fileLocation) + slog.Warn("Failed to Decode payload for file", "filename", fileLocation) continue } - //Get current permissions + // Get current permissions permissions, err := s.getConnectionPermission(newEntries.DatasourceID) if err != nil { - log.Errorf("connection permission could not be retrieved, cannot update permissions") + slog.Error("connection permission could not be retrieved, cannot update permissions") continue } success := true - //Delete datasource Permissions + // Delete datasource Permissions for _, p := range permissions.GetPayload().Permissions { success = s.deleteConnectionPermission(p.ID, newEntries.DatasourceID) } if !success { - log.Errorf("Failed to delete previous data, cannot update permissions") + slog.Error("Failed to delete previous data, cannot update permissions") continue } @@ -126,16 +128,16 @@ func (s *DashNGoImpl) UploadConnectionPermissions(filter filters.Filter) []strin if entry.BuiltInRole != "" { p.SetBuiltinRole(tools.PtrOf(entry.BuiltInRole)) } - err = s.extended.AddConnectionPermission(p) + _, err = s.GetClient().DatasourcePermissions.AddPermission(p) if err != nil { - log.Errorf("Failed to update folder permissions") + slog.Error("Failed to update folder permissions") } else { dataFiles = append(dataFiles, fileLocation) } } } - log.Infof("Removing all previous permissions and re-applying") + slog.Info("Removing all previous permissions and re-applying") return dataFiles } @@ -161,22 +163,17 @@ func (s *DashNGoImpl) DeleteAllConnectionPermissions(filter filters.Filter) []st // deleteConnectionPermission delete a given permission associated with a given datasourceId func (s *DashNGoImpl) deleteConnectionPermission(permissionId int64, datasourceId int64) bool { - deleteMe := datasource_permissions.NewDeletePermissionsParams() - 
deleteMe.PermissionID = fmt.Sprintf("%d", permissionId) - deleteMe.DatasourceID = fmt.Sprintf("%d", datasourceId) - resp, err := s.client.DatasourcePermissions.DeletePermissions(deleteMe, s.getAuth()) + permissionIdStr := fmt.Sprintf("%d", permissionId) + connectionId := fmt.Sprintf("%d", datasourceId) + resp, err := s.GetClient().DatasourcePermissions.DeletePermissions(permissionIdStr, connectionId) if err != nil { return false } - log.Debugf("%d permission has been removed associated with datasource %d: %s", permissionId, datasourceId, resp.GetPayload().Message) - + slog.Debug("permission has been removed associated with datasource", "permissionId", permissionId, "datasourceId", datasourceId, "response", resp.GetPayload().Message) + return true } // getConnectionPermission Get all permissions for a given connection func (s *DashNGoImpl) getConnectionPermission(id int64) (*datasource_permissions.GetAllPermissionsOK, error) { - p := datasource_permissions.NewGetAllPermissionsParams() - //p.DatasourceID = fmt.Sprintf("%d", connection.ID) - p.DatasourceID = fmt.Sprintf("%d", id) - return s.client.DatasourcePermissions.GetAllPermissions(p, s.getAuth()) + return s.GetClient().DatasourcePermissions.GetAllPermissions(fmt.Sprintf("%d", id)) } diff --git a/internal/service/connections.go b/internal/service/connections.go index 463bbc19..29e333f4 100644 --- a/internal/service/connections.go +++ b/internal/service/connections.go @@ -5,13 +5,13 @@ import ( "fmt" "github.com/esnet/gdg/internal/config" "github.com/esnet/gdg/internal/service/filters" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/datasources" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" + "github.com/grafana/grafana-openapi-client-go/models" + "log/slog" "path/filepath" "strings" "github.com/gosimple/slug" - log "github.com/sirupsen/logrus" + "log" ) // ConnectionsApi Contract definition @@ -48,7 +48,7 @@ func (s *DashNGoImpl) ListConnections(filter filters.Filter) 
[]models.DataSource log.Fatalf("Failed to switch organization ID %d: ", s.grafanaConf.OrganizationId) } - ds, err := s.client.Datasources.GetDataSources(datasources.NewGetDataSourcesParams(), s.getAuth()) + ds, err := s.GetClient().Datasources.GetDataSources() if err != nil { panic(err) } @@ -57,7 +57,7 @@ func (s *DashNGoImpl) ListConnections(filter filters.Filter) []models.DataSource dsSettings := s.grafanaConf.GetDataSourceSettings() for _, item := range ds.GetPayload() { if dsSettings.FiltersEnabled() && dsSettings.IsExcluded(item) { - log.Debugf("Skipping data source: %s since it fails filter checks with dataType of: %s", item.Name, item.Type) + slog.Debug("Skipping data source, since it fails datatype filter checks", "datasource", item.Name, "datatype", item.Type) continue } if filter.ValidateAll(map[filters.FilterType]string{filters.Name: GetSlug(item.Name)}) { @@ -80,14 +80,14 @@ func (s *DashNGoImpl) DownloadConnections(filter filters.Filter) []string { dsListing = s.ListConnections(filter) for _, ds := range dsListing { if dsPacked, err = json.MarshalIndent(ds, "", " "); err != nil { - log.Errorf("%s for %s\n", err, ds.Name) + slog.Error("unable to marshall file", "datasource", ds.Name, "err", err) continue } dsPath := buildResourcePath(slug.Make(ds.Name), config.ConnectionResource) if err = s.storage.WriteFile(dsPath, dsPacked); err != nil { - log.Errorf("%s for %s\n", err, slug.Make(ds.Name)) + slog.Error("Unable to write file", "filename", slug.Make(ds.Name), "err", err) } else { dataFiles = append(dataFiles, dsPath) } @@ -100,12 +100,9 @@ func (s *DashNGoImpl) DeleteAllConnections(filter filters.Filter) []string { var ds []string = make([]string, 0) items := s.ListConnections(filter) for _, item := range items { - p := datasources.NewDeleteDataSourceByIDParams() - p.ID = fmt.Sprintf("%d", item.ID) - - dsItem, err := s.client.Datasources.DeleteDataSourceByID(p, s.getAuth()) + dsItem, err := 
s.GetClient().Datasources.DeleteDataSourceByID(fmt.Sprintf("%d", item.ID)) if err != nil { - log.Warningf("Failed to delete datasource: %s, response: %s", item.Name, dsItem.Error()) + slog.Warn("Failed to delete datasource", "datasource", item.Name, "err", dsItem.Error()) continue } ds = append(ds, item.Name) @@ -113,17 +110,17 @@ func (s *DashNGoImpl) DeleteAllConnections(filter filters.Filter) []string { return ds } -// UploadConnections exports all datasources to grafana using the credentials configured in config file. +// UploadConnections exports all connections to grafana using the credentials configured in config file. func (s *DashNGoImpl) UploadConnections(filter filters.Filter) []string { var dsListing []models.DataSourceListItemDTO var exported []string - log.Infof("Reading files from folder: %s", config.Config().GetDefaultGrafanaConfig().GetPath(config.ConnectionResource)) + slog.Info("Reading files from folder", "folder", config.Config().GetDefaultGrafanaConfig().GetPath(config.ConnectionResource)) filesInDir, err := s.storage.FindAllFiles(config.Config().GetDefaultGrafanaConfig().GetPath(config.ConnectionResource), false) if err != nil { - log.WithError(err).Errorf("failed to list files in directory for datasources") + slog.Error("failed to list files in directory for datasources", "err", err) } dsListing = s.ListConnections(filter) @@ -134,13 +131,13 @@ func (s *DashNGoImpl) UploadConnections(filter filters.Filter) []string { fileLocation := filepath.Join(config.Config().GetDefaultGrafanaConfig().GetPath(config.ConnectionResource), file) if strings.HasSuffix(file, ".json") { if rawDS, err = s.storage.ReadFile(fileLocation); err != nil { - log.WithError(err).Errorf("failed to read file: %s", fileLocation) + slog.Error("failed to read file", "filename", fileLocation, "err", err) continue } var newDS models.AddDataSourceCommand if err = json.Unmarshal(rawDS, &newDS); err != nil { - log.WithError(err).Errorf("failed to unmarshall file: %s", fileLocation) 
+ slog.Error("failed to unmarshall file", "filename", fileLocation, "err", err) continue } @@ -148,45 +145,42 @@ func (s *DashNGoImpl) UploadConnections(filter filters.Filter) []string { continue } dsConfig := s.grafanaConf - var creds *config.GrafanaConnection - if newDS.BasicAuth { - creds, err = dsConfig.GetCredentials(newDS) - if err != nil { //Attempt to get Credentials by URL regex - log.Warn("DataSource has Auth enabled but has no valid Credentials that could be retrieved. Please check your configuration and try again.") - } - } else { - creds = nil + secureLocation := config.Config().GetDefaultGrafanaConfig().GetPath(config.SecureSecretsResource) + credentials, err := dsConfig.GetCredentials(newDS, secureLocation) + if err != nil { //Attempt to get Credentials by URL regex + slog.Warn("DataSource has no secureData configured. Please check your configuration.") } if dsSettings.FiltersEnabled() && dsSettings.IsExcluded(newDS) { - log.Debugf("Skipping local JSON file since source: %s since it fails filter checks with dataType of: %s", newDS.Name, newDS.Type) + slog.Debug("Skipping local JSON file since source fails datatype filter checks", "datasource", newDS.Name, "datatype", newDS.Type) continue } - if creds != nil { - user := creds.User - var secureData = make(map[string]string) - newDS.BasicAuthUser = user - secureData["basicAuthPassword"] = creds.Password - newDS.SecureJSONData = secureData + if credentials != nil { + //Sets basic auth if secureData contains it + if credentials.User() != "" && (*credentials)["basicAuthPassword"] != "" { + newDS.BasicAuthUser = credentials.User() + newDS.BasicAuth = true + } + //Pass any secure data that GDG is configured to use + newDS.SecureJSONData = *credentials } else { + //if credentials are nil, then basicAuth has to be false newDS.BasicAuth = false } for _, existingDS := range dsListing { if existingDS.Name == newDS.Name { - deleteParam := datasources.NewDeleteDataSourceByIDParams() - deleteParam.ID = 
fmt.Sprintf("%d", existingDS.ID) - if _, err := s.client.Datasources.DeleteDataSourceByID(deleteParam, s.getAuth()); err != nil { - log.Errorf("error on deleting datasource %s with %s", newDS.Name, err) + if _, err := s.GetClient().Datasources.DeleteDataSourceByID(fmt.Sprintf("%d", existingDS.ID)); err != nil { + slog.Error("error on deleting datasource", "datasource", newDS.Name, "err", err) } break } } - p := datasources.NewAddDataSourceParams().WithBody(&newDS) - if createStatus, err := s.client.Datasources.AddDataSource(p, s.getAuth()); err != nil { - log.Errorf("error on importing datasource %s with %s (%s)", newDS.Name, err, createStatus.Error()) + + if createStatus, err := s.GetClient().Datasources.AddDataSource(&newDS); err != nil { + slog.Error("error on importing datasource", "datasource", newDS.Name, "err", err, "createError", createStatus.Error()) } else { exported = append(exported, fileLocation) } diff --git a/internal/service/contract.go b/internal/service/contract.go index 73af32fb..9b5e094d 100644 --- a/internal/service/contract.go +++ b/internal/service/contract.go @@ -4,10 +4,8 @@ import ( "context" "github.com/esnet/gdg/internal/api" "github.com/esnet/gdg/internal/config" - "github.com/esnet/grafana-swagger-api-golang/goclient/client" - log "github.com/sirupsen/logrus" "github.com/spf13/viper" - + "log/slog" "sync" ) @@ -32,7 +30,6 @@ var ( ) type DashNGoImpl struct { - client *client.GrafanaHTTPAPI extended *api.ExtendedApi grafanaConf *config.GrafanaConfig @@ -51,7 +48,7 @@ func NewDashNGoImpl() *DashNGoImpl { func newInstance() *DashNGoImpl { obj := &DashNGoImpl{} obj.grafanaConf = config.Config().GetDefaultGrafanaConfig() - obj.configRef = config.Config().ViperConfig() + obj.configRef = config.Config().GetViperConfig(config.ViperGdgConfig) obj.Login() obj.debug = config.Config().IsDebug() @@ -77,7 +74,7 @@ func configureStorage(obj *DashNGoImpl) { { obj.storage, err = NewCloudStorage(ctx) if err != nil { - log.Warn("falling back on Local 
Storage, Cloud storage configuration error") + slog.Warn("falling back on Local Storage, Cloud storage configuration error") obj.storage = NewLocalStorage(ctx) } } diff --git a/internal/service/dashboards.go b/internal/service/dashboards.go index cf9a12be..0963e8a5 100644 --- a/internal/service/dashboards.go +++ b/internal/service/dashboards.go @@ -5,19 +5,20 @@ import ( "fmt" "github.com/esnet/gdg/internal/config" "github.com/esnet/gdg/internal/service/filters" - gapi "github.com/esnet/grafana-swagger-api-golang" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/dashboards" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/folders" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/search" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" - "golang.org/x/exp/slices" + "github.com/esnet/gdg/internal/tools" + "github.com/grafana/grafana-openapi-client-go/client/dashboards" + "github.com/grafana/grafana-openapi-client-go/client/search" + "github.com/grafana/grafana-openapi-client-go/models" + "github.com/tidwall/pretty" + "golang.org/x/exp/maps" + "log" + "log/slog" "path/filepath" "regexp" + "slices" + "sort" "strings" - "github.com/tidwall/pretty" - - log "github.com/sirupsen/logrus" "github.com/thoas/go-funk" ) @@ -33,7 +34,7 @@ type DashboardsApi interface { func (s *DashNGoImpl) getDashboardByUid(uid string) (*models.DashboardFullWithMeta, error) { params := dashboards.NewGetDashboardByUIDParams() params.UID = uid - data, err := s.client.Dashboards.GetDashboardByUID(params, s.getAuth()) + data, err := s.GetClient().Dashboards.GetDashboardByUID(uid) if err != nil { return nil, err } @@ -43,18 +44,20 @@ func (s *DashNGoImpl) getDashboardByUid(uid string) (*models.DashboardFullWithMe func NewDashboardFilter(entries ...string) filters.Filter { if len(entries) != 3 { - log.Fatal("Unable to create a valid Dashboard Filter, aborting.") + log.Fatalf("Unable to create a valid Dashboard Filter, aborting.") } folderFilter 
:= entries[0] dashboardFilter := entries[1] tagsFilter := entries[2] + if tagsFilter == "" { + tagsFilter = "[]" + } filterObj := filters.NewBaseFilter() filterObj.AddFilter(filters.FolderFilter, folderFilter) filterObj.AddFilter(filters.DashFilter, dashboardFilter) filterObj.AddFilter(filters.TagsFilter, tagsFilter) quoteRegex, _ := regexp.Compile("['\"]+") - filterObj.AddRegex(filters.TagsFilter, quoteRegex) filterObj.AddRegex(filters.FolderFilter, quoteRegex) //Add Folder Validation filterObj.AddValidation(filters.FolderFilter, func(i interface{}) bool { @@ -74,25 +77,6 @@ func NewDashboardFilter(entries ...string) filters.Filter { } }) - //Add Tag Validation - filterObj.AddValidation(filters.TagsFilter, func(i interface{}) bool { - val, ok := i.(map[filters.FilterType]string) - if !ok { - return ok - } - - //Check Tags - if tagsFilter, ok = val[filters.TagsFilter]; ok { - if filterObj.GetFilter(filters.TagsFilter) == "" { - return true - } - return tagsFilter == filterObj.GetFilter(filters.TagsFilter) - } else { - return true - } - //Check Dashboard - - }) //Add DashValidation filterObj.AddValidation(filters.DashFilter, func(i interface{}) bool { val, ok := i.(map[filters.FilterType]string) @@ -122,39 +106,55 @@ func (s *DashNGoImpl) ListDashboards(filterReq filters.Filter) []*models.Hit { filterReq = NewDashboardFilter("", "", "") } - var boardsList = make([]*models.Hit, 0) var boardLinks = make([]*models.Hit, 0) + var deduplicatedLinks = make(map[int64]*models.Hit) var page uint = 1 var limit uint = 5000 // Upper bound of Grafana API call var tagsParams = make([]string, 0) - if !config.Config().GetDefaultGrafanaConfig().GetFilterOverrides().IgnoreDashboardFilters { - tagsParams = append(tagsParams, filterReq.GetEntity(filters.TagsFilter)...) - } + tagsParams = append(tagsParams, filterReq.GetEntity(filters.TagsFilter)...) 
- for { - searchParams := search.NewSearchParams() - searchParams.Tag = tagsParams - searchParams.Limit = gapi.ToPtr(int64(limit)) - searchParams.Page = gapi.ToPtr(int64(page)) - searchParams.Type = gapi.ToPtr(searchTypeDashboard) + retrieve := func(tag string) { + for { + searchParams := search.NewSearchParams() + if tag != "" { + searchParams.Tag = []string{tag} + } + searchParams.Limit = tools.PtrOf(int64(limit)) + searchParams.Page = tools.PtrOf(int64(page)) + searchParams.Type = tools.PtrOf(searchTypeDashboard) - pageBoardLinks, err := s.client.Search.Search(searchParams, s.getAuth()) - if err != nil { - log.Fatal("Failed to retrieve dashboards", err) + pageBoardLinks, err := s.GetClient().Search.Search(searchParams) + if err != nil { + log.Fatal("Failed to retrieve dashboards", err) + } + boardLinks = append(boardLinks, pageBoardLinks.GetPayload()...) + if len(pageBoardLinks.GetPayload()) < int(limit) { + break + } + page += 1 } - boardLinks = append(boardLinks, pageBoardLinks.GetPayload()...) 
- if len(pageBoardLinks.GetPayload()) < int(limit) { - break + } + if len(tagsParams) == 0 { + retrieve("") + } else { + for _, tag := range tagsParams { + slog.Info("retrieving dashboard by tag", slog.String("tag", tag)) + retrieve(tag) } - page += 1 } folderFilters := filterReq.GetEntity(filters.FolderFilter) var validFolder bool var validUid bool - for _, link := range boardLinks { + for ndx, link := range boardLinks { + link.Slug = updateSlug(link.URI) + _, ok := deduplicatedLinks[link.ID] + if ok { + slog.Debug("duplicate board, skipping ") + continue + } validFolder = false if config.Config().GetDefaultGrafanaConfig().GetFilterOverrides().IgnoreDashboardFilters { validFolder = true @@ -167,7 +167,6 @@ func (s *DashNGoImpl) ListDashboards(filterReq filters.Filter) []*models.Hit { if !validFolder { continue } - link.Slug = updateSlug(link.URI) validUid = filterReq.GetFilter(filters.DashFilter) == "" || link.Slug == filterReq.GetFilter(filters.DashFilter) if link.FolderID == 0 { @@ -175,11 +174,16 @@ func (s *DashNGoImpl) ListDashboards(filterReq filters.Filter) []*models.Hit { } if validFolder && validUid { - boardsList = append(boardsList, link) + deduplicatedLinks[link.ID] = boardLinks[ndx] } } - return boardsList + boardLinks = maps.Values(deduplicatedLinks) + sort.Slice(boardLinks, func(i, j int) bool { + return boardLinks[i].ID < boardLinks[j].ID + }) + + return boardLinks } @@ -195,23 +199,20 @@ func (s *DashNGoImpl) DownloadDashboards(filter filters.Filter) []string { boardLinks = s.ListDashboards(filter) var boards []string for _, link := range boardLinks { - dp := dashboards.NewGetDashboardByUIDParams() - dp.UID = link.UID - - if metaData, err = s.client.Dashboards.GetDashboardByUID(dp, s.getAuth()); err != nil { - log.Errorf("%s for %s\n", err, link.URI) + if metaData, err = s.GetClient().Dashboards.GetDashboardByUID(link.UID); err != nil { + slog.Error("unable to get Dashboard by UID", "err", err, "Dashboard-URI", link.URI) continue } rawBoard, err = 
json.Marshal(metaData.Payload.Dashboard) if err != nil { - log.Errorf("unable to serialize dashboard %s", dp.UID) + slog.Error("unable to serialize dashboard", "dashboard", link.UID) continue } - fileName := fmt.Sprintf("%s/%s.json", buildResourceFolder(link.FolderTitle, config.DashboardResource), metaData.Payload.Meta.Slug) + fileName := fmt.Sprintf("%s/%s.json", BuildResourceFolder(link.FolderTitle, config.DashboardResource), metaData.Payload.Meta.Slug) if err = s.storage.WriteFile(fileName, pretty.Pretty(rawBoard)); err != nil { - log.Errorf("%s for %s\n", err, metaData.Payload.Meta.Slug) + slog.Error("Unable to save dashboard to file\n", "err", err, "dashboard", metaData.Payload.Meta.Slug) } else { boards = append(boards, fileName) } @@ -222,11 +223,10 @@ func (s *DashNGoImpl) DownloadDashboards(filter filters.Filter) []string { // createFolder Creates a new folder with the given name. func (s *DashNGoImpl) createdFolder(folderName string) (int64, error) { - createdFolderRequest := folders.NewCreateFolderParams() - createdFolderRequest.Body = &models.CreateFolderCommand{ + request := &models.CreateFolderCommand{ Title: folderName, } - folder, err := s.client.Folders.CreateFolder(createdFolderRequest, s.getAuth()) + folder, err := s.GetClient().Folders.CreateFolder(request) if err != nil { return 0, err } @@ -246,7 +246,7 @@ func (s *DashNGoImpl) UploadDashboards(filterReq filters.Filter) { path := config.Config().GetDefaultGrafanaConfig().GetPath(config.DashboardResource) filesInDir, err := s.storage.FindAllFiles(path, true) if err != nil { - log.WithError(err).Fatal("unable to find any files to export from storage engine") + log.Fatalf("unable to find any files to export from storage engine, err: %v", err) } //Delete all dashboards that match prior to import s.DeleteAllDashboards(filterReq) @@ -263,24 +263,49 @@ func (s *DashNGoImpl) UploadDashboards(filterReq filters.Filter) { baseFile = strings.ReplaceAll(baseFile, ".json", "") if !strings.HasSuffix(file, 
".json") { - log.Warnf("Only json files are supported, skipping %s", file) + slog.Warn("Only json files are supported, skipping", "filename", file) continue } if rawBoard, err = s.storage.ReadFile(file); err != nil { - log.Println(err) + slog.Warn("Unable to read file", "filename", file, "err", err) continue } var board = make(map[string]interface{}) if err = json.Unmarshal(rawBoard, &board); err != nil { - log.WithError(err).Printf("Failed to unmarshall file: %s", file) + slog.Warn("Failed to unmarshall file", "filename", file) continue } + //Extract Tags + if filterVal := filterReq.GetFilter(filters.TagsFilter); filterVal != "[]" { + var boardTags []string + for _, val := range board["tags"].([]interface{}) { + boardTags = append(boardTags, val.(string)) + } + var requestedSlices []string + err = json.Unmarshal([]byte(filterVal), &requestedSlices) + if err != nil { + slog.Warn("unable to decode json of requested tags") + requestedSlices = []string{} + } + valid := false + for _, val := range requestedSlices { + if slices.Contains(boardTags, val) { + valid = true + break + } + } + if !valid { + slog.Debug("board fails tag filter, ignoring board", slog.Any("title", board["title"])) + continue + } + + } //Extract Folder Name based on path folderName, err = getFolderFromResourcePath(s.grafanaConf.Storage, file, config.DashboardResource) if err != nil { - log.Warnf("unable to determine dashboard folder name, falling back on default") + slog.Warn("unable to determine dashboard folder name, falling back on default") } if folderName == "" || folderName == DefaultFolderName { @@ -288,7 +313,7 @@ func (s *DashNGoImpl) UploadDashboards(filterReq filters.Filter) { folderName = DefaultFolderName } if !slices.Contains(validFolders, folderName) && !config.Config().GetDefaultGrafanaConfig().GetFilterOverrides().IgnoreDashboardFilters { - log.Debugf("Skipping file %s, doesn't match any valid folders", file) + slog.Debug("Skipping file since it doesn't match any valid folders", 
"filename", file) continue } validateMap := map[filters.FilterType]string{filters.FolderFilter: folderName, filters.DashFilter: baseFile} @@ -316,20 +341,23 @@ func (s *DashNGoImpl) UploadDashboards(filterReq filters.Filter) { } } - data := make(map[string]interface{}, 0) + data := make(map[string]interface{}) err = json.Unmarshal(rawBoard, &data) + if err != nil { + slog.Error("Unable to marshall data to valid JSON, skipping import", slog.Any("data", rawBoard)) + continue + } //zero out ID. Can't create a new dashboard if an ID already exists. delete(data, "id") - importDashReq := dashboards.NewImportDashboardParams() - importDashReq.Body = &models.ImportDashboardRequest{ + importDashReq := &models.ImportDashboardRequest{ FolderID: folderId, Overwrite: true, Dashboard: data, } - if _, exportError := s.client.Dashboards.ImportDashboard(importDashReq, s.getAuth()); exportError != nil { - log.WithError(err).Printf("error on Exporting dashboard %s", file) + if _, exportError := s.GetClient().Dashboards.ImportDashboard(importDashReq); exportError != nil { + slog.Info("error on Exporting dashboard", "dashboard-filename", file, "err", exportError) continue } @@ -344,11 +372,11 @@ func (s *DashNGoImpl) DeleteAllDashboards(filter filters.Filter) []string { items := s.ListDashboards(filter) for _, item := range items { if filter.ValidateAll(map[filters.FilterType]string{filters.FolderFilter: item.FolderTitle, filters.DashFilter: item.Slug}) { - dp := dashboards.NewDeleteDashboardByUIDParams() - dp.UID = item.UID - _, err := s.client.Dashboards.DeleteDashboardByUID(dp, s.getAuth()) + _, err := s.GetClient().Dashboards.DeleteDashboardByUID(item.UID) if err == nil { dashboardListing = append(dashboardListing, item.Title) + } else { + slog.Warn("Unable to remove dashboard", slog.String("title", item.Title), slog.String("uid", item.UID)) } } } diff --git a/internal/service/filters/filters.go b/internal/service/filters/filters.go index 4ab457c2..e8f64b28 100644 --- 
a/internal/service/filters/filters.go +++ b/internal/service/filters/filters.go @@ -1,8 +1,10 @@ package filters import ( + "encoding/json" "github.com/esnet/gdg/internal/config" - log "github.com/sirupsen/logrus" + "log/slog" + "github.com/thoas/go-funk" "regexp" "strings" @@ -63,7 +65,7 @@ func (s *BaseFilter) AddRegex(name FilterType, pattern *regexp.Regexp) { name = DefaultFilter } if pattern == nil { - log.Warnf("invalid pattern received, cannot set filter pattern for entity name: %s", name) + slog.Warn("invalid pattern received, cannot set filter pattern for entity name", "entityName", name) return } s.validationPatterns[name] = pattern @@ -90,6 +92,12 @@ func (s *BaseFilter) GetEntity(name FilterType) []string { } switch name { case TagsFilter: + entityFilter := s.GetFilter(name) + var result []string + err := json.Unmarshal([]byte(entityFilter), &result) + if err == nil { + return result + } return s.getEntities(TagsFilter, []string{}) case FolderFilter: return s.getEntities(FolderFilter, config.Config().GetDefaultGrafanaConfig().GetMonitoredFolders()) diff --git a/internal/service/folders.go b/internal/service/folders.go index 1babac1d..aa673f67 100644 --- a/internal/service/folders.go +++ b/internal/service/folders.go @@ -6,14 +6,15 @@ import ( "fmt" "github.com/esnet/gdg/internal/config" "github.com/esnet/gdg/internal/service/filters" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/folder_permissions" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/folders" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/search" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" "github.com/gosimple/slug" - log "github.com/sirupsen/logrus" + "github.com/grafana/grafana-openapi-client-go/client/folder_permissions" + "github.com/grafana/grafana-openapi-client-go/client/folders" + "github.com/grafana/grafana-openapi-client-go/client/search" + "github.com/grafana/grafana-openapi-client-go/models" 
"github.com/tidwall/gjson" "golang.org/x/exp/slices" + "log" + "log/slog" "path/filepath" "strings" ) @@ -59,7 +60,7 @@ func (s *DashNGoImpl) checkFolderName(folderName string) bool { // DownloadFolderPermissions downloads all the current folder permissions based on filter. func (s *DashNGoImpl) DownloadFolderPermissions(filter filters.Filter) []string { - log.Infof("Downloading folder permissions") + slog.Info("Downloading folder permissions") var ( dsPacked []byte err error @@ -68,12 +69,12 @@ func (s *DashNGoImpl) DownloadFolderPermissions(filter filters.Filter) []string currentPermissions := s.ListFolderPermissions(filter) for folder, permission := range currentPermissions { if dsPacked, err = json.MarshalIndent(permission, "", " "); err != nil { - log.Errorf("%s for %s Permissions\n", err, folder.Title) + slog.Error("Unable to marshall file", "err", err, "folderName", folder.Title) continue } dsPath := buildResourcePath(slug.Make(folder.Title), config.FolderPermissionResource) if err = s.storage.WriteFile(dsPath, dsPacked); err != nil { - log.Errorf("%s for %s\n", err.Error(), slug.Make(folder.Title)) + slog.Error("Unable to write file", "err", err.Error(), "filename", slug.Make(folder.Title)) } else { dataFiles = append(dataFiles, dsPath) } @@ -91,13 +92,13 @@ func (s *DashNGoImpl) UploadFolderPermissions(filter filters.Filter) []string { ) filesInDir, err := s.storage.FindAllFiles(config.Config().GetDefaultGrafanaConfig().GetPath(config.FolderPermissionResource), false) if err != nil { - log.WithError(err).Fatal("Failed to read folders permission imports") + log.Fatalf("Failed to read folders permission imports, %v", err) } for _, file := range filesInDir { fileLocation := filepath.Join(config.Config().GetDefaultGrafanaConfig().GetPath(config.FolderPermissionResource), file) if strings.HasSuffix(file, ".json") { if rawFolder, err = s.storage.ReadFile(fileLocation); err != nil { - log.WithError(err).Errorf("failed to read file %s", fileLocation) + 
slog.Error("failed to read file", "filename", fileLocation, "err", err) continue } } @@ -106,25 +107,22 @@ func (s *DashNGoImpl) UploadFolderPermissions(filter filters.Filter) []string { newEntries := make([]*models.DashboardACLUpdateItem, 0) err = json.Unmarshal(rawFolder, &newEntries) if err != nil { - log.Warnf("Failed to Decode payload for %s", fileLocation) + slog.Warn("Failed to Decode payload file", "filename", fileLocation) continue } payload := &models.UpdateDashboardACLCommand{ Items: newEntries, } - p := folder_permissions.NewUpdateFolderPermissionsParams() - p.FolderUID = uid.String() - p.Body = payload - _, err := s.client.FolderPermissions.UpdateFolderPermissions(p, s.getAuth()) + _, err := s.GetClient().FolderPermissions.UpdateFolderPermissions(uid.String(), payload) if err != nil { - log.Errorf("Failed to update folder permissions") + slog.Error("Failed to update folder permissions") } else { dataFiles = append(dataFiles, fileLocation) } } - log.Infof("Patching server with local folder permissions") + slog.Info("Patching server with local folder permissions") return dataFiles } @@ -137,21 +135,18 @@ func (s *DashNGoImpl) ListFolderPermissions(filter filters.Filter) map[*models.H r := make(map[*models.Hit][]*models.DashboardACLInfoDTO, 0) for ndx, foldersEntry := range foldersList { - p := folder_permissions.NewGetFolderPermissionListParams() - p.FolderUID = foldersEntry.UID - results, err := s.client.FolderPermissions.GetFolderPermissionList(p, s.getAuth()) + results, err := s.GetClient().FolderPermissions.GetFolderPermissionList(foldersEntry.UID) if err != nil { - msg := fmt.Sprintf("Unable to get folder permissions for folderUID: %s", p.FolderUID) + msg := fmt.Sprintf("Unable to get folder permissions for folderUID: %s", foldersEntry.UID) var getFolderPermissionListInternalServerError *folder_permissions.GetFolderPermissionListInternalServerError switch { case errors.As(err, &getFolderPermissionListInternalServerError): var castError 
*folder_permissions.GetFolderPermissionListInternalServerError errors.As(err, &castError) - log.WithField("message", *castError.GetPayload().Message). - WithError(err).Error(msg) + slog.Error(msg, "message", *castError.GetPayload().Message, "err", err) default: - log.WithError(err).Error(msg) + slog.Error(msg, "err", err) } } else { r[foldersList[ndx]] = results.GetPayload() @@ -169,7 +164,7 @@ func (s *DashNGoImpl) ListFolder(filter filters.Filter) []*models.Hit { } p := search.NewSearchParams() p.Type = &searchTypeFolder - folderListing, err := s.client.Search.Search(p, s.getAuth()) + folderListing, err := s.GetClient().Search.Search(p) folderListing.GetPayload() if err != nil { log.Fatal("unable to retrieve folder list.") @@ -179,13 +174,13 @@ func (s *DashNGoImpl) ListFolder(filter filters.Filter) []*models.Hit { valid := s.checkFolderName(val.Title) if filter == nil { if !valid { - log.Warningf("Folder '%s' has an invalid character and is not supported. Path seperators are not allowed", val.Title) + slog.Warn("Folder has an invalid character and is not supported. Path separators are not allowed", "folderName", val.Title) continue } result = append(result, folderListing.GetPayload()[ndx]) } else if filter.ValidateAll(map[filters.FilterType]string{filters.FolderFilter: val.Title}) { if !valid { - log.Warningf("Folder '%s' has an invalid character and is not supported. Path seperators are not allowed", val.Title) + slog.Warn("Folder has an invalid character and is not supported. 
Path separators are not allowed", "folderName", val.Title) continue } result = append(result, folderListing.GetPayload()[ndx]) @@ -206,12 +201,12 @@ func (s *DashNGoImpl) DownloadFolders(filter filters.Filter) []string { folderListing := s.ListFolder(filter) for _, folder := range folderListing { if dsPacked, err = json.MarshalIndent(folder, "", " "); err != nil { - log.Errorf("%s for %s\n", err, folder.Title) + slog.Error("Unable to serialize data to JSON", "err", err, "folderName", folder.Title) continue } dsPath := buildResourcePath(slug.Make(folder.Title), config.FolderResource) if err = s.storage.WriteFile(dsPath, dsPacked); err != nil { - log.Errorf("%s for %s\n", err.Error(), slug.Make(folder.Title)) + slog.Error("Unable to write file.", "err", err.Error(), "folderName", slug.Make(folder.Title)) } else { dataFiles = append(dataFiles, dsPath) } @@ -228,7 +223,7 @@ func (s *DashNGoImpl) UploadFolders(filter filters.Filter) []string { ) filesInDir, err := s.storage.FindAllFiles(config.Config().GetDefaultGrafanaConfig().GetPath(config.FolderResource), false) if err != nil { - log.WithError(err).Fatal("Failed to read folders imports") + log.Fatalf("Failed to read folders imports, %v", err) } folderItems := s.ListFolder(filter) @@ -236,23 +231,25 @@ func (s *DashNGoImpl) UploadFolders(filter filters.Filter) []string { fileLocation := filepath.Join(config.Config().GetDefaultGrafanaConfig().GetPath(config.FolderResource), file) if strings.HasSuffix(file, ".json") { if rawFolder, err = s.storage.ReadFile(fileLocation); err != nil { - log.WithError(err).Errorf("failed to read file %s", fileLocation) + slog.Error("failed to read file", "filename", fileLocation, "err", err) continue } } + var newFolder models.CreateFolderCommand + //var newFolder models.CreateFolderCommand if err = json.Unmarshal(rawFolder, &newFolder); err != nil { - log.WithError(err).Warn("failed to unmarshall folder") + slog.Warn("failed to unmarshall folder", "err", err) continue } if 
!s.checkFolderName(newFolder.Title) { - log.Warningf("Folder '%s' has an invalid character and is not supported, skipping folder", newFolder.Title) + slog.Warn("Folder has an invalid character and is not supported, skipping folder", "folderName", newFolder.Title) continue } skipCreate := false for _, existingFolder := range folderItems { if existingFolder.UID == newFolder.UID { - log.Warnf("Folder '%s' already exists, skipping", existingFolder.Title) + slog.Warn("Folder already exists, skipping", "folderName", existingFolder.Title) skipCreate = true } @@ -262,9 +259,9 @@ func (s *DashNGoImpl) UploadFolders(filter filters.Filter) []string { } params := folders.NewCreateFolderParams() params.Body = &newFolder - f, err := s.client.Folders.CreateFolder(params, s.getAuth()) + f, err := s.GetClient().Folders.CreateFolder(&newFolder) if err != nil { - log.Errorf("failed to create folder %s", newFolder.Title) + slog.Error("failed to create folder.", "folderName", newFolder.Title, "err", err) continue } result = append(result, f.Payload.Title) @@ -273,14 +270,14 @@ func (s *DashNGoImpl) UploadFolders(filter filters.Filter) []string { return result } -// DeleteAllFolder deletes all the matching folders from grafana +// DeleteAllFolders deletes all the matching folders from grafana func (s *DashNGoImpl) DeleteAllFolders(filter filters.Filter) []string { var result []string folderListing := s.ListFolder(filter) for _, folder := range folderListing { params := folders.NewDeleteFolderParams() params.FolderUID = folder.UID - _, err := s.client.Folders.DeleteFolder(params, s.getAuth()) + _, err := s.GetClient().Folders.DeleteFolder(params) if err == nil { result = append(result, folder.Title) } diff --git a/internal/service/libraryelements.go b/internal/service/libraryelements.go index 532caa15..f79e5fca 100644 --- a/internal/service/libraryelements.go +++ b/internal/service/libraryelements.go @@ -2,17 +2,19 @@ package service import ( "encoding/json" + "errors" "fmt" 
"github.com/esnet/gdg/internal/config" "github.com/esnet/gdg/internal/service/filters" "github.com/esnet/gdg/internal/tools" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/library_elements" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" "github.com/gosimple/slug" - log "github.com/sirupsen/logrus" + "github.com/grafana/grafana-openapi-client-go/client/library_elements" + "github.com/grafana/grafana-openapi-client-go/models" "github.com/tidwall/gjson" "golang.org/x/exp/maps" "golang.org/x/exp/slices" + "log" + "log/slog" "strings" ) @@ -30,9 +32,7 @@ const ( ) func (s *DashNGoImpl) ListLibraryElementsConnections(filter filters.Filter, connectionID string) []*models.DashboardFullWithMeta { - params := library_elements.NewGetLibraryElementConnectionsParams() - params.SetLibraryElementUID(connectionID) - payload, err := s.client.LibraryElements.GetLibraryElementConnections(params, s.getAuth()) + payload, err := s.GetClient().LibraryElements.GetLibraryElementConnections(connectionID) if err != nil { log.Fatalf("unable to retrieve a valid connection for %s", connectionID) } @@ -41,7 +41,7 @@ func (s *DashNGoImpl) ListLibraryElementsConnections(filter filters.Filter, conn for _, item := range payload.GetPayload().Result { dashboard, err := s.getDashboardByUid(item.ConnectionUID) if err != nil { - log.WithField("UID", item.ConnectionUID).Errorf("failed to retrieve linked Dashboard") + slog.Error("failed to retrieve linked Dashboard", "uid", item.ConnectionUID) } results = append(results, dashboard) } @@ -74,9 +74,9 @@ func (s *DashNGoImpl) ListLibraryElements(filter filters.Filter) []*models.Libra params := library_elements.NewGetLibraryElementsParams() params.FolderFilter = &folderList params.Kind = tools.PtrOf(listLibraryPanels) - libraryElements, err := s.client.LibraryElements.GetLibraryElements(params, s.getAuth()) + libraryElements, err := s.GetClient().LibraryElements.GetLibraryElements(params) if err != nil { - 
log.WithError(err).Fatal("Unable to list Library Elements") + log.Fatalf("Unable to list Library Elements %v", err) } return libraryElements.GetPayload().Result.Elements @@ -95,7 +95,7 @@ func (s *DashNGoImpl) DownloadLibraryElements(filter filters.Filter) []string { listing = s.ListLibraryElements(filter) for _, item := range listing { if dsPacked, err = json.MarshalIndent(item, "", " "); err != nil { - log.Errorf("%s for %s\n", err, item.Name) + slog.Error("Unable to serialize object", "err", err, "library-element", item.Name) continue } folderName := DefaultFolderName @@ -104,10 +104,10 @@ func (s *DashNGoImpl) DownloadLibraryElements(filter filters.Filter) []string { folderName = val } - libraryPath := fmt.Sprintf("%s/%s.json", buildResourceFolder(folderName, config.LibraryElementResource), slug.Make(item.Name)) + libraryPath := fmt.Sprintf("%s/%s.json", BuildResourceFolder(folderName, config.LibraryElementResource), slug.Make(item.Name)) if err = s.storage.WriteFile(libraryPath, dsPacked); err != nil { - log.Errorf("%s for %s\n", err, slug.Make(item.Name)) + slog.Error("Unable to write file", "err", err, "library-element", slug.Make(item.Name)) } else { dataFiles = append(dataFiles, libraryPath) } @@ -124,7 +124,7 @@ func (s *DashNGoImpl) UploadLibraryElements(filter filters.Filter) []string { libraryUID string ) - log.Infof("Reading files from folder: %s", config.Config().GetDefaultGrafanaConfig().GetPath(config.LibraryElementResource)) + slog.Info("Reading files from folder", "folder", config.Config().GetDefaultGrafanaConfig().GetPath(config.LibraryElementResource)) filesInDir, err := s.storage.FindAllFiles(config.Config().GetDefaultGrafanaConfig().GetPath(config.LibraryElementResource), true) currentLibElements := s.ListLibraryElements(filter) @@ -135,14 +135,14 @@ func (s *DashNGoImpl) UploadLibraryElements(filter filters.Filter) []string { } if err != nil { - log.WithError(err).Errorf("failed to list files in directory for library elements") + 
slog.Error("failed to list files in directory for library elements", "err", err) } for _, file := range filesInDir { fileLocation := file if strings.HasSuffix(file, ".json") { if rawLibraryElement, err = s.storage.ReadFile(fileLocation); err != nil { - log.WithError(err).Errorf("failed to read file: %s", fileLocation) + slog.Error("failed to read file", "file", fileLocation, "err", err) continue } @@ -151,38 +151,35 @@ func (s *DashNGoImpl) UploadLibraryElements(filter filters.Filter) []string { if Results[0].Exists() { folderName = Results[0].String() } else { - log.Errorf("Unable to determine folder name of library component, skipping %s", file) + slog.Error("Unable to determine folder name of library component, skipping.", "filename", file) continue } //Get UID if Results[1].Exists() { libraryUID = Results[1].String() } else { - log.Errorf("Unable to determine the library panel UID, %s, attempting to export anyways", file) - //continue + slog.Error("Unable to determine the library panel UID, attempting to export anyways", "filename", file) } if _, ok := libMapping[libraryUID]; ok { - log.Warnf("Library already exists, skipping %s", file) + slog.Warn("Library already exists, skipping", "filename", file) continue } if !slices.Contains(config.Config().GetDefaultGrafanaConfig().GetMonitoredFolders(), folderName) { - log.WithField("folder", folderName).WithField("file", file).Warn("Skipping since requested file is not in a folder gdg is configured to manage") + slog.Warn("Skipping since requested file is not in a folder gdg is configured to manage", "folder", folderName, "file", file) continue } var newLibraryRequest models.CreateLibraryElementCommand if err = json.Unmarshal(rawLibraryElement, &newLibraryRequest); err != nil { - log.WithError(err).Errorf("failed to unmarshall file: %s", fileLocation) + slog.Error("failed to unmarshall file", "filename", fileLocation, "err", err) continue } - params := library_elements.NewCreateLibraryElementParams() - params.Body = 
&newLibraryRequest - entity, err := s.client.LibraryElements.CreateLibraryElement(params, s.getAuth()) + entity, err := s.GetClient().LibraryElements.CreateLibraryElement(&newLibraryRequest) if err != nil { - log.WithError(err).Errorf("Failed to create library element") + slog.Error("Failed to create library element", "err", err) } else { exported = append(exported, entity.Payload.Result.Name) } @@ -197,17 +194,16 @@ func (s *DashNGoImpl) DeleteAllLibraryElements(filter filters.Filter) []string { libraryElements := s.ListLibraryElements(filter) for _, element := range libraryElements { - params := library_elements.NewDeleteLibraryElementByUIDParams() - params.SetLibraryElementUID(element.UID) - _, err := s.client.LibraryElements.DeleteLibraryElementByUID(params, s.getAuth()) + _, err := s.GetClient().LibraryElements.DeleteLibraryElementByUID(element.UID) if err != nil { - var logEntry *log.Entry - if serr, ok := err.(*library_elements.DeleteLibraryElementByUIDForbidden); ok { - logEntry = log.WithField("ErrorMessage", *serr.GetPayload().Message) - } else { - log.WithError(err) + logEntries := make([]interface{}, 0) + var serr *library_elements.DeleteLibraryElementByUIDForbidden + if errors.As(err, &serr) { + logEntries = append(logEntries, []interface{}{"ErrorMessage", *serr.GetPayload().Message}...) } - logEntry.Errorf("Failed to delete library panel titled: %s", element.Name) + + logEntries = append(logEntries, []interface{}{"panel", element.Name}...) + slog.Error("Failed to delete library panel", logEntries...) 
continue } entries = append(entries, element.Name) diff --git a/internal/service/login.go b/internal/service/login.go index e22beef1..d469df00 100644 --- a/internal/service/login.go +++ b/internal/service/login.go @@ -2,16 +2,15 @@ package service import ( "crypto/tls" + "log" + "net/http" + "net/url" + "github.com/esnet/gdg/internal/api" "github.com/esnet/gdg/internal/config" - gapi "github.com/esnet/grafana-swagger-api-golang" - "github.com/go-openapi/runtime/client" - "net/url" + "github.com/go-openapi/strfmt" - gclient "github.com/esnet/grafana-swagger-api-golang/goclient/client" - "github.com/go-openapi/runtime" - log "github.com/sirupsen/logrus" - "net/http" + "github.com/grafana/grafana-openapi-client-go/client" ) // AuthenticationApi Contract definition @@ -21,67 +20,93 @@ type AuthenticationApi interface { Login() } -// Login Logs into grafana returning a legacyClient instance using Token or Basic Auth +// Login sets admin flag and provisions the Extended API for calls unsupported by the OpenAPI spec. func (s *DashNGoImpl) Login() { var err error - u, err := url.Parse(s.grafanaConf.URL) - if err != nil { - log.Fatal("invalid Grafana URL") - } - httpClient := &http.Client{} - if config.Config().IgnoreSSL() { - httpClient = ignoreSSLErrors() - } - - runtimeClient := client.NewWithClient(u.Host, "/api", []string{u.Scheme}, httpClient) - s.client = gclient.New(runtimeClient, nil) + //Will only succeed for BasicAuth userInfo, err := s.GetUserInfo() - //Sets state based on user permissions + // Sets state based on user permissions if err == nil { s.grafanaConf.SetAdmin(userInfo.IsGrafanaAdmin) } s.extended = api.NewExtendedApi() - } -// getGrafanaAdminAuth returns a runtime.ClientAuthInfoWriter that represents a Grafana Admin -func (s *DashNGoImpl) getGrafanaAdminAuth() runtime.ClientAuthInfoWriter { - if !s.grafanaConf.IsAdminEnabled() || s.grafanaConf.UserName == "" { - log.Fatal("Unable to get Grafana Admin Auth. 
") - } - - return s.getBasicAuth() +func ignoreSSL(transportConfig *client.TransportConfig) { + _, clientTransport := ignoreSSLErrors() + transportConfig.TLSConfig = clientTransport.TLSClientConfig } -// getBasicAuth returns a valid user/password auth -func (s *DashNGoImpl) getBasicAuth() runtime.ClientAuthInfoWriter { +type NewClientOpts func(transportConfig *client.TransportConfig) - return &gapi.BasicAuthenticator{ - Username: s.grafanaConf.UserName, - Password: s.grafanaConf.Password, +func (s *DashNGoImpl) getNewClient(opts ...NewClientOpts) (*client.GrafanaHTTPAPI, *client.TransportConfig) { + var err error + u, err := url.Parse(s.grafanaConf.URL) + if err != nil { + log.Fatal("invalid Grafana URL") + } + path, err := url.JoinPath(u.Path, "api") + if err != nil { + log.Fatal("invalid Grafana URL Path") + } + httpConfig := &client.TransportConfig{ + Host: u.Host, + BasePath: path, + Schemes: []string{u.Scheme}, + // NumRetries: 3, + } + // Sets Organization one client if one is configured + if s.grafanaConf.OrganizationId != 0 { + opts = append(opts, func(clientCfg *client.TransportConfig) { + clientCfg.OrgID = s.grafanaConf.OrganizationId + }) + } + for _, opt := range opts { + if opt != nil { + opt(httpConfig) + } + } + if config.Config().IgnoreSSL() { + ignoreSSL(httpConfig) } + return client.NewHTTPClientWithConfig(strfmt.Default, httpConfig), httpConfig } -// getAuth returns token if present or basic auth -func (s *DashNGoImpl) getAuth() runtime.ClientAuthInfoWriter { +// GetClient Returns a new defaultClient given token precedence over Basic Auth +func (s *DashNGoImpl) GetClient() *client.GrafanaHTTPAPI { if s.grafanaConf.APIToken != "" { - return &gapi.APIKeyAuthenticator{ - APIKey: s.grafanaConf.APIToken, - } - + grafanaClient, _ := s.getNewClient(func(clientCfg *client.TransportConfig) { + clientCfg.APIKey = s.grafanaConf.APIToken + }) + return grafanaClient } else { - return s.getBasicAuth() + return s.GetBasicAuthClient() } } +// GetAdminClient 
Returns the admin defaultClient if one is configured +func (s *DashNGoImpl) GetAdminClient() *client.GrafanaHTTPAPI { + if !s.grafanaConf.IsAdminEnabled() || s.grafanaConf.UserName == "" { + log.Fatal("Unable to get Grafana Admin SecureData. ") + } + return s.GetBasicAuthClient() +} + +// GetBasicAuthClient returns a basic auth grafana API Client +func (s *DashNGoImpl) GetBasicAuthClient() *client.GrafanaHTTPAPI { + grafanaClient, _ := s.getNewClient(func(clientCfg *client.TransportConfig) { + clientCfg.BasicAuth = url.UserPassword(s.grafanaConf.UserName, s.grafanaConf.Password) + }) + return grafanaClient +} + // ignoreSSLErrors when called replaces the default http legacyClient to ignore invalid SSL issues. // only to be used for testing, highly discouraged in production. -func ignoreSSLErrors() *http.Client { +func ignoreSSLErrors() (*http.Client, *http.Transport) { customTransport := http.DefaultTransport.(*http.Transport).Clone() customTransport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true} httpclient := &http.Client{Transport: customTransport} - return httpclient - + return httpclient, customTransport } diff --git a/internal/service/mocks/AlertNotificationsApi.go b/internal/service/mocks/AlertNotificationsApi.go index b275ee1c..aba1e429 100644 --- a/internal/service/mocks/AlertNotificationsApi.go +++ b/internal/service/mocks/AlertNotificationsApi.go @@ -1,9 +1,9 @@ -// Code generated by mockery v2.34.0. DO NOT EDIT. +// Code generated by mockery v2.36.0. DO NOT EDIT. 
package mocks import ( - models "github.com/esnet/grafana-swagger-api-golang/goclient/models" + models "github.com/grafana/grafana-openapi-client-go/models" mock "github.com/stretchr/testify/mock" ) @@ -12,6 +12,14 @@ type AlertNotificationsApi struct { mock.Mock } +type AlertNotificationsApi_Expecter struct { + mock *mock.Mock +} + +func (_m *AlertNotificationsApi) EXPECT() *AlertNotificationsApi_Expecter { + return &AlertNotificationsApi_Expecter{mock: &_m.Mock} +} + // DeleteAllAlertNotifications provides a mock function with given fields: func (_m *AlertNotificationsApi) DeleteAllAlertNotifications() []string { ret := _m.Called() @@ -28,6 +36,33 @@ func (_m *AlertNotificationsApi) DeleteAllAlertNotifications() []string { return r0 } +// AlertNotificationsApi_DeleteAllAlertNotifications_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllAlertNotifications' +type AlertNotificationsApi_DeleteAllAlertNotifications_Call struct { + *mock.Call +} + +// DeleteAllAlertNotifications is a helper method to define mock.On call +func (_e *AlertNotificationsApi_Expecter) DeleteAllAlertNotifications() *AlertNotificationsApi_DeleteAllAlertNotifications_Call { + return &AlertNotificationsApi_DeleteAllAlertNotifications_Call{Call: _e.mock.On("DeleteAllAlertNotifications")} +} + +func (_c *AlertNotificationsApi_DeleteAllAlertNotifications_Call) Run(run func()) *AlertNotificationsApi_DeleteAllAlertNotifications_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *AlertNotificationsApi_DeleteAllAlertNotifications_Call) Return(_a0 []string) *AlertNotificationsApi_DeleteAllAlertNotifications_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *AlertNotificationsApi_DeleteAllAlertNotifications_Call) RunAndReturn(run func() []string) *AlertNotificationsApi_DeleteAllAlertNotifications_Call { + _c.Call.Return(run) + return _c +} + // DownloadAlertNotifications provides a mock function with given 
fields: func (_m *AlertNotificationsApi) DownloadAlertNotifications() []string { ret := _m.Called() @@ -44,6 +79,33 @@ func (_m *AlertNotificationsApi) DownloadAlertNotifications() []string { return r0 } +// AlertNotificationsApi_DownloadAlertNotifications_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadAlertNotifications' +type AlertNotificationsApi_DownloadAlertNotifications_Call struct { + *mock.Call +} + +// DownloadAlertNotifications is a helper method to define mock.On call +func (_e *AlertNotificationsApi_Expecter) DownloadAlertNotifications() *AlertNotificationsApi_DownloadAlertNotifications_Call { + return &AlertNotificationsApi_DownloadAlertNotifications_Call{Call: _e.mock.On("DownloadAlertNotifications")} +} + +func (_c *AlertNotificationsApi_DownloadAlertNotifications_Call) Run(run func()) *AlertNotificationsApi_DownloadAlertNotifications_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *AlertNotificationsApi_DownloadAlertNotifications_Call) Return(_a0 []string) *AlertNotificationsApi_DownloadAlertNotifications_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *AlertNotificationsApi_DownloadAlertNotifications_Call) RunAndReturn(run func() []string) *AlertNotificationsApi_DownloadAlertNotifications_Call { + _c.Call.Return(run) + return _c +} + // ListAlertNotifications provides a mock function with given fields: func (_m *AlertNotificationsApi) ListAlertNotifications() []*models.AlertNotification { ret := _m.Called() @@ -60,6 +122,33 @@ func (_m *AlertNotificationsApi) ListAlertNotifications() []*models.AlertNotific return r0 } +// AlertNotificationsApi_ListAlertNotifications_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListAlertNotifications' +type AlertNotificationsApi_ListAlertNotifications_Call struct { + *mock.Call +} + +// ListAlertNotifications is a helper method to define mock.On call +func (_e 
*AlertNotificationsApi_Expecter) ListAlertNotifications() *AlertNotificationsApi_ListAlertNotifications_Call { + return &AlertNotificationsApi_ListAlertNotifications_Call{Call: _e.mock.On("ListAlertNotifications")} +} + +func (_c *AlertNotificationsApi_ListAlertNotifications_Call) Run(run func()) *AlertNotificationsApi_ListAlertNotifications_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *AlertNotificationsApi_ListAlertNotifications_Call) Return(_a0 []*models.AlertNotification) *AlertNotificationsApi_ListAlertNotifications_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *AlertNotificationsApi_ListAlertNotifications_Call) RunAndReturn(run func() []*models.AlertNotification) *AlertNotificationsApi_ListAlertNotifications_Call { + _c.Call.Return(run) + return _c +} + // UploadAlertNotifications provides a mock function with given fields: func (_m *AlertNotificationsApi) UploadAlertNotifications() []string { ret := _m.Called() @@ -76,6 +165,33 @@ func (_m *AlertNotificationsApi) UploadAlertNotifications() []string { return r0 } +// AlertNotificationsApi_UploadAlertNotifications_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadAlertNotifications' +type AlertNotificationsApi_UploadAlertNotifications_Call struct { + *mock.Call +} + +// UploadAlertNotifications is a helper method to define mock.On call +func (_e *AlertNotificationsApi_Expecter) UploadAlertNotifications() *AlertNotificationsApi_UploadAlertNotifications_Call { + return &AlertNotificationsApi_UploadAlertNotifications_Call{Call: _e.mock.On("UploadAlertNotifications")} +} + +func (_c *AlertNotificationsApi_UploadAlertNotifications_Call) Run(run func()) *AlertNotificationsApi_UploadAlertNotifications_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *AlertNotificationsApi_UploadAlertNotifications_Call) Return(_a0 []string) *AlertNotificationsApi_UploadAlertNotifications_Call { 
+ _c.Call.Return(_a0) + return _c +} + +func (_c *AlertNotificationsApi_UploadAlertNotifications_Call) RunAndReturn(run func() []string) *AlertNotificationsApi_UploadAlertNotifications_Call { + _c.Call.Return(run) + return _c +} + // NewAlertNotificationsApi creates a new instance of AlertNotificationsApi. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewAlertNotificationsApi(t interface { diff --git a/internal/service/mocks/AuthenticationApi.go b/internal/service/mocks/AuthenticationApi.go index eb33f295..a453a7a3 100644 --- a/internal/service/mocks/AuthenticationApi.go +++ b/internal/service/mocks/AuthenticationApi.go @@ -1,12 +1,12 @@ -// Code generated by mockery v2.34.0. DO NOT EDIT. +// Code generated by mockery v2.36.0. DO NOT EDIT. package mocks import ( - api "github.com/esnet/gdg/internal/api" + models "github.com/grafana/grafana-openapi-client-go/models" mock "github.com/stretchr/testify/mock" - models "github.com/esnet/grafana-swagger-api-golang/goclient/models" + types "github.com/esnet/gdg/internal/types" ) // AuthenticationApi is an autogenerated mock type for the AuthenticationApi type @@ -14,6 +14,14 @@ type AuthenticationApi struct { mock.Mock } +type AuthenticationApi_Expecter struct { + mock *mock.Mock +} + +func (_m *AuthenticationApi) EXPECT() *AuthenticationApi_Expecter { + return &AuthenticationApi_Expecter{mock: &_m.Mock} +} + // CreateAPIKey provides a mock function with given fields: name, role, expiration func (_m *AuthenticationApi) CreateAPIKey(name string, role string, expiration int64) (*models.NewAPIKeyResult, error) { ret := _m.Called(name, role, expiration) @@ -40,6 +48,36 @@ func (_m *AuthenticationApi) CreateAPIKey(name string, role string, expiration i return r0, r1 } +// AuthenticationApi_CreateAPIKey_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateAPIKey' 
+type AuthenticationApi_CreateAPIKey_Call struct { + *mock.Call +} + +// CreateAPIKey is a helper method to define mock.On call +// - name string +// - role string +// - expiration int64 +func (_e *AuthenticationApi_Expecter) CreateAPIKey(name interface{}, role interface{}, expiration interface{}) *AuthenticationApi_CreateAPIKey_Call { + return &AuthenticationApi_CreateAPIKey_Call{Call: _e.mock.On("CreateAPIKey", name, role, expiration)} +} + +func (_c *AuthenticationApi_CreateAPIKey_Call) Run(run func(name string, role string, expiration int64)) *AuthenticationApi_CreateAPIKey_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(string), args[2].(int64)) + }) + return _c +} + +func (_c *AuthenticationApi_CreateAPIKey_Call) Return(_a0 *models.NewAPIKeyResult, _a1 error) *AuthenticationApi_CreateAPIKey_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *AuthenticationApi_CreateAPIKey_Call) RunAndReturn(run func(string, string, int64) (*models.NewAPIKeyResult, error)) *AuthenticationApi_CreateAPIKey_Call { + _c.Call.Return(run) + return _c +} + // CreateServiceAccount provides a mock function with given fields: name, role, expiration func (_m *AuthenticationApi) CreateServiceAccount(name string, role string, expiration int64) (*models.ServiceAccountDTO, error) { ret := _m.Called(name, role, expiration) @@ -66,6 +104,36 @@ func (_m *AuthenticationApi) CreateServiceAccount(name string, role string, expi return r0, r1 } +// AuthenticationApi_CreateServiceAccount_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateServiceAccount' +type AuthenticationApi_CreateServiceAccount_Call struct { + *mock.Call +} + +// CreateServiceAccount is a helper method to define mock.On call +// - name string +// - role string +// - expiration int64 +func (_e *AuthenticationApi_Expecter) CreateServiceAccount(name interface{}, role interface{}, expiration interface{}) 
*AuthenticationApi_CreateServiceAccount_Call { + return &AuthenticationApi_CreateServiceAccount_Call{Call: _e.mock.On("CreateServiceAccount", name, role, expiration)} +} + +func (_c *AuthenticationApi_CreateServiceAccount_Call) Run(run func(name string, role string, expiration int64)) *AuthenticationApi_CreateServiceAccount_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(string), args[2].(int64)) + }) + return _c +} + +func (_c *AuthenticationApi_CreateServiceAccount_Call) Return(_a0 *models.ServiceAccountDTO, _a1 error) *AuthenticationApi_CreateServiceAccount_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *AuthenticationApi_CreateServiceAccount_Call) RunAndReturn(run func(string, string, int64) (*models.ServiceAccountDTO, error)) *AuthenticationApi_CreateServiceAccount_Call { + _c.Call.Return(run) + return _c +} + // CreateServiceAccountToken provides a mock function with given fields: name, role, expiration func (_m *AuthenticationApi) CreateServiceAccountToken(name int64, role string, expiration int64) (*models.NewAPIKeyResult, error) { ret := _m.Called(name, role, expiration) @@ -92,6 +160,36 @@ func (_m *AuthenticationApi) CreateServiceAccountToken(name int64, role string, return r0, r1 } +// AuthenticationApi_CreateServiceAccountToken_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateServiceAccountToken' +type AuthenticationApi_CreateServiceAccountToken_Call struct { + *mock.Call +} + +// CreateServiceAccountToken is a helper method to define mock.On call +// - name int64 +// - role string +// - expiration int64 +func (_e *AuthenticationApi_Expecter) CreateServiceAccountToken(name interface{}, role interface{}, expiration interface{}) *AuthenticationApi_CreateServiceAccountToken_Call { + return &AuthenticationApi_CreateServiceAccountToken_Call{Call: _e.mock.On("CreateServiceAccountToken", name, role, expiration)} +} + +func (_c 
*AuthenticationApi_CreateServiceAccountToken_Call) Run(run func(name int64, role string, expiration int64)) *AuthenticationApi_CreateServiceAccountToken_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64), args[1].(string), args[2].(int64)) + }) + return _c +} + +func (_c *AuthenticationApi_CreateServiceAccountToken_Call) Return(_a0 *models.NewAPIKeyResult, _a1 error) *AuthenticationApi_CreateServiceAccountToken_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *AuthenticationApi_CreateServiceAccountToken_Call) RunAndReturn(run func(int64, string, int64) (*models.NewAPIKeyResult, error)) *AuthenticationApi_CreateServiceAccountToken_Call { + _c.Call.Return(run) + return _c +} + // DeleteAllServiceAccounts provides a mock function with given fields: func (_m *AuthenticationApi) DeleteAllServiceAccounts() []string { ret := _m.Called() @@ -108,6 +206,33 @@ func (_m *AuthenticationApi) DeleteAllServiceAccounts() []string { return r0 } +// AuthenticationApi_DeleteAllServiceAccounts_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllServiceAccounts' +type AuthenticationApi_DeleteAllServiceAccounts_Call struct { + *mock.Call +} + +// DeleteAllServiceAccounts is a helper method to define mock.On call +func (_e *AuthenticationApi_Expecter) DeleteAllServiceAccounts() *AuthenticationApi_DeleteAllServiceAccounts_Call { + return &AuthenticationApi_DeleteAllServiceAccounts_Call{Call: _e.mock.On("DeleteAllServiceAccounts")} +} + +func (_c *AuthenticationApi_DeleteAllServiceAccounts_Call) Run(run func()) *AuthenticationApi_DeleteAllServiceAccounts_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *AuthenticationApi_DeleteAllServiceAccounts_Call) Return(_a0 []string) *AuthenticationApi_DeleteAllServiceAccounts_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *AuthenticationApi_DeleteAllServiceAccounts_Call) RunAndReturn(run func() []string) 
*AuthenticationApi_DeleteAllServiceAccounts_Call { + _c.Call.Return(run) + return _c +} + // DeleteAllTokens provides a mock function with given fields: func (_m *AuthenticationApi) DeleteAllTokens() []string { ret := _m.Called() @@ -124,6 +249,33 @@ func (_m *AuthenticationApi) DeleteAllTokens() []string { return r0 } +// AuthenticationApi_DeleteAllTokens_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllTokens' +type AuthenticationApi_DeleteAllTokens_Call struct { + *mock.Call +} + +// DeleteAllTokens is a helper method to define mock.On call +func (_e *AuthenticationApi_Expecter) DeleteAllTokens() *AuthenticationApi_DeleteAllTokens_Call { + return &AuthenticationApi_DeleteAllTokens_Call{Call: _e.mock.On("DeleteAllTokens")} +} + +func (_c *AuthenticationApi_DeleteAllTokens_Call) Run(run func()) *AuthenticationApi_DeleteAllTokens_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *AuthenticationApi_DeleteAllTokens_Call) Return(_a0 []string) *AuthenticationApi_DeleteAllTokens_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *AuthenticationApi_DeleteAllTokens_Call) RunAndReturn(run func() []string) *AuthenticationApi_DeleteAllTokens_Call { + _c.Call.Return(run) + return _c +} + // DeleteServiceAccountTokens provides a mock function with given fields: serviceId func (_m *AuthenticationApi) DeleteServiceAccountTokens(serviceId int64) []string { ret := _m.Called(serviceId) @@ -140,6 +292,34 @@ func (_m *AuthenticationApi) DeleteServiceAccountTokens(serviceId int64) []strin return r0 } +// AuthenticationApi_DeleteServiceAccountTokens_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteServiceAccountTokens' +type AuthenticationApi_DeleteServiceAccountTokens_Call struct { + *mock.Call +} + +// DeleteServiceAccountTokens is a helper method to define mock.On call +// - serviceId int64 +func (_e *AuthenticationApi_Expecter) 
DeleteServiceAccountTokens(serviceId interface{}) *AuthenticationApi_DeleteServiceAccountTokens_Call { + return &AuthenticationApi_DeleteServiceAccountTokens_Call{Call: _e.mock.On("DeleteServiceAccountTokens", serviceId)} +} + +func (_c *AuthenticationApi_DeleteServiceAccountTokens_Call) Run(run func(serviceId int64)) *AuthenticationApi_DeleteServiceAccountTokens_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64)) + }) + return _c +} + +func (_c *AuthenticationApi_DeleteServiceAccountTokens_Call) Return(_a0 []string) *AuthenticationApi_DeleteServiceAccountTokens_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *AuthenticationApi_DeleteServiceAccountTokens_Call) RunAndReturn(run func(int64) []string) *AuthenticationApi_DeleteServiceAccountTokens_Call { + _c.Call.Return(run) + return _c +} + // ListAPIKeys provides a mock function with given fields: func (_m *AuthenticationApi) ListAPIKeys() []*models.APIKeyDTO { ret := _m.Called() @@ -156,22 +336,76 @@ func (_m *AuthenticationApi) ListAPIKeys() []*models.APIKeyDTO { return r0 } +// AuthenticationApi_ListAPIKeys_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListAPIKeys' +type AuthenticationApi_ListAPIKeys_Call struct { + *mock.Call +} + +// ListAPIKeys is a helper method to define mock.On call +func (_e *AuthenticationApi_Expecter) ListAPIKeys() *AuthenticationApi_ListAPIKeys_Call { + return &AuthenticationApi_ListAPIKeys_Call{Call: _e.mock.On("ListAPIKeys")} +} + +func (_c *AuthenticationApi_ListAPIKeys_Call) Run(run func()) *AuthenticationApi_ListAPIKeys_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *AuthenticationApi_ListAPIKeys_Call) Return(_a0 []*models.APIKeyDTO) *AuthenticationApi_ListAPIKeys_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *AuthenticationApi_ListAPIKeys_Call) RunAndReturn(run func() []*models.APIKeyDTO) *AuthenticationApi_ListAPIKeys_Call { + _c.Call.Return(run) + return 
_c +} + // ListServiceAccounts provides a mock function with given fields: -func (_m *AuthenticationApi) ListServiceAccounts() []*api.ServiceAccountDTOWithTokens { +func (_m *AuthenticationApi) ListServiceAccounts() []*types.ServiceAccountDTOWithTokens { ret := _m.Called() - var r0 []*api.ServiceAccountDTOWithTokens - if rf, ok := ret.Get(0).(func() []*api.ServiceAccountDTOWithTokens); ok { + var r0 []*types.ServiceAccountDTOWithTokens + if rf, ok := ret.Get(0).(func() []*types.ServiceAccountDTOWithTokens); ok { r0 = rf() } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]*api.ServiceAccountDTOWithTokens) + r0 = ret.Get(0).([]*types.ServiceAccountDTOWithTokens) } } return r0 } +// AuthenticationApi_ListServiceAccounts_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListServiceAccounts' +type AuthenticationApi_ListServiceAccounts_Call struct { + *mock.Call +} + +// ListServiceAccounts is a helper method to define mock.On call +func (_e *AuthenticationApi_Expecter) ListServiceAccounts() *AuthenticationApi_ListServiceAccounts_Call { + return &AuthenticationApi_ListServiceAccounts_Call{Call: _e.mock.On("ListServiceAccounts")} +} + +func (_c *AuthenticationApi_ListServiceAccounts_Call) Run(run func()) *AuthenticationApi_ListServiceAccounts_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *AuthenticationApi_ListServiceAccounts_Call) Return(_a0 []*types.ServiceAccountDTOWithTokens) *AuthenticationApi_ListServiceAccounts_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *AuthenticationApi_ListServiceAccounts_Call) RunAndReturn(run func() []*types.ServiceAccountDTOWithTokens) *AuthenticationApi_ListServiceAccounts_Call { + _c.Call.Return(run) + return _c +} + // ListServiceAccountsTokens provides a mock function with given fields: id func (_m *AuthenticationApi) ListServiceAccountsTokens(id int64) ([]*models.TokenDTO, error) { ret := _m.Called(id) @@ -198,11 +432,66 @@ func (_m 
*AuthenticationApi) ListServiceAccountsTokens(id int64) ([]*models.Toke return r0, r1 } +// AuthenticationApi_ListServiceAccountsTokens_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListServiceAccountsTokens' +type AuthenticationApi_ListServiceAccountsTokens_Call struct { + *mock.Call +} + +// ListServiceAccountsTokens is a helper method to define mock.On call +// - id int64 +func (_e *AuthenticationApi_Expecter) ListServiceAccountsTokens(id interface{}) *AuthenticationApi_ListServiceAccountsTokens_Call { + return &AuthenticationApi_ListServiceAccountsTokens_Call{Call: _e.mock.On("ListServiceAccountsTokens", id)} +} + +func (_c *AuthenticationApi_ListServiceAccountsTokens_Call) Run(run func(id int64)) *AuthenticationApi_ListServiceAccountsTokens_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64)) + }) + return _c +} + +func (_c *AuthenticationApi_ListServiceAccountsTokens_Call) Return(_a0 []*models.TokenDTO, _a1 error) *AuthenticationApi_ListServiceAccountsTokens_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *AuthenticationApi_ListServiceAccountsTokens_Call) RunAndReturn(run func(int64) ([]*models.TokenDTO, error)) *AuthenticationApi_ListServiceAccountsTokens_Call { + _c.Call.Return(run) + return _c +} + // Login provides a mock function with given fields: func (_m *AuthenticationApi) Login() { _m.Called() } +// AuthenticationApi_Login_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Login' +type AuthenticationApi_Login_Call struct { + *mock.Call +} + +// Login is a helper method to define mock.On call +func (_e *AuthenticationApi_Expecter) Login() *AuthenticationApi_Login_Call { + return &AuthenticationApi_Login_Call{Call: _e.mock.On("Login")} +} + +func (_c *AuthenticationApi_Login_Call) Run(run func()) *AuthenticationApi_Login_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c 
*AuthenticationApi_Login_Call) Return() *AuthenticationApi_Login_Call { + _c.Call.Return() + return _c +} + +func (_c *AuthenticationApi_Login_Call) RunAndReturn(run func()) *AuthenticationApi_Login_Call { + _c.Call.Return(run) + return _c +} + // NewAuthenticationApi creates a new instance of AuthenticationApi. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewAuthenticationApi(t interface { diff --git a/internal/service/mocks/ConnectionPermissions.go b/internal/service/mocks/ConnectionPermissions.go index 2cae8024..e665a561 100644 --- a/internal/service/mocks/ConnectionPermissions.go +++ b/internal/service/mocks/ConnectionPermissions.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.34.0. DO NOT EDIT. +// Code generated by mockery v2.36.0. DO NOT EDIT. package mocks @@ -6,7 +6,7 @@ import ( filters "github.com/esnet/gdg/internal/service/filters" mock "github.com/stretchr/testify/mock" - models "github.com/esnet/grafana-swagger-api-golang/goclient/models" + models "github.com/grafana/grafana-openapi-client-go/models" ) // ConnectionPermissions is an autogenerated mock type for the ConnectionPermissions type @@ -14,6 +14,14 @@ type ConnectionPermissions struct { mock.Mock } +type ConnectionPermissions_Expecter struct { + mock *mock.Mock +} + +func (_m *ConnectionPermissions) EXPECT() *ConnectionPermissions_Expecter { + return &ConnectionPermissions_Expecter{mock: &_m.Mock} +} + // DeleteAllConnectionPermissions provides a mock function with given fields: filter func (_m *ConnectionPermissions) DeleteAllConnectionPermissions(filter filters.Filter) []string { ret := _m.Called(filter) @@ -30,6 +38,34 @@ func (_m *ConnectionPermissions) DeleteAllConnectionPermissions(filter filters.F return r0 } +// ConnectionPermissions_DeleteAllConnectionPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 
'DeleteAllConnectionPermissions' +type ConnectionPermissions_DeleteAllConnectionPermissions_Call struct { + *mock.Call +} + +// DeleteAllConnectionPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *ConnectionPermissions_Expecter) DeleteAllConnectionPermissions(filter interface{}) *ConnectionPermissions_DeleteAllConnectionPermissions_Call { + return &ConnectionPermissions_DeleteAllConnectionPermissions_Call{Call: _e.mock.On("DeleteAllConnectionPermissions", filter)} +} + +func (_c *ConnectionPermissions_DeleteAllConnectionPermissions_Call) Run(run func(filter filters.Filter)) *ConnectionPermissions_DeleteAllConnectionPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *ConnectionPermissions_DeleteAllConnectionPermissions_Call) Return(_a0 []string) *ConnectionPermissions_DeleteAllConnectionPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ConnectionPermissions_DeleteAllConnectionPermissions_Call) RunAndReturn(run func(filters.Filter) []string) *ConnectionPermissions_DeleteAllConnectionPermissions_Call { + _c.Call.Return(run) + return _c +} + // DownloadConnectionPermissions provides a mock function with given fields: filter func (_m *ConnectionPermissions) DownloadConnectionPermissions(filter filters.Filter) []string { ret := _m.Called(filter) @@ -46,6 +82,34 @@ func (_m *ConnectionPermissions) DownloadConnectionPermissions(filter filters.Fi return r0 } +// ConnectionPermissions_DownloadConnectionPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadConnectionPermissions' +type ConnectionPermissions_DownloadConnectionPermissions_Call struct { + *mock.Call +} + +// DownloadConnectionPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *ConnectionPermissions_Expecter) DownloadConnectionPermissions(filter interface{}) 
*ConnectionPermissions_DownloadConnectionPermissions_Call { + return &ConnectionPermissions_DownloadConnectionPermissions_Call{Call: _e.mock.On("DownloadConnectionPermissions", filter)} +} + +func (_c *ConnectionPermissions_DownloadConnectionPermissions_Call) Run(run func(filter filters.Filter)) *ConnectionPermissions_DownloadConnectionPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *ConnectionPermissions_DownloadConnectionPermissions_Call) Return(_a0 []string) *ConnectionPermissions_DownloadConnectionPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ConnectionPermissions_DownloadConnectionPermissions_Call) RunAndReturn(run func(filters.Filter) []string) *ConnectionPermissions_DownloadConnectionPermissions_Call { + _c.Call.Return(run) + return _c +} + // ListConnectionPermissions provides a mock function with given fields: filter func (_m *ConnectionPermissions) ListConnectionPermissions(filter filters.Filter) map[*models.DataSourceListItemDTO]*models.DataSourcePermissionsDTO { ret := _m.Called(filter) @@ -62,6 +126,34 @@ func (_m *ConnectionPermissions) ListConnectionPermissions(filter filters.Filter return r0 } +// ConnectionPermissions_ListConnectionPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListConnectionPermissions' +type ConnectionPermissions_ListConnectionPermissions_Call struct { + *mock.Call +} + +// ListConnectionPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *ConnectionPermissions_Expecter) ListConnectionPermissions(filter interface{}) *ConnectionPermissions_ListConnectionPermissions_Call { + return &ConnectionPermissions_ListConnectionPermissions_Call{Call: _e.mock.On("ListConnectionPermissions", filter)} +} + +func (_c *ConnectionPermissions_ListConnectionPermissions_Call) Run(run func(filter filters.Filter)) 
*ConnectionPermissions_ListConnectionPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *ConnectionPermissions_ListConnectionPermissions_Call) Return(_a0 map[*models.DataSourceListItemDTO]*models.DataSourcePermissionsDTO) *ConnectionPermissions_ListConnectionPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ConnectionPermissions_ListConnectionPermissions_Call) RunAndReturn(run func(filters.Filter) map[*models.DataSourceListItemDTO]*models.DataSourcePermissionsDTO) *ConnectionPermissions_ListConnectionPermissions_Call { + _c.Call.Return(run) + return _c +} + // UploadConnectionPermissions provides a mock function with given fields: filter func (_m *ConnectionPermissions) UploadConnectionPermissions(filter filters.Filter) []string { ret := _m.Called(filter) @@ -78,6 +170,34 @@ func (_m *ConnectionPermissions) UploadConnectionPermissions(filter filters.Filt return r0 } +// ConnectionPermissions_UploadConnectionPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadConnectionPermissions' +type ConnectionPermissions_UploadConnectionPermissions_Call struct { + *mock.Call +} + +// UploadConnectionPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *ConnectionPermissions_Expecter) UploadConnectionPermissions(filter interface{}) *ConnectionPermissions_UploadConnectionPermissions_Call { + return &ConnectionPermissions_UploadConnectionPermissions_Call{Call: _e.mock.On("UploadConnectionPermissions", filter)} +} + +func (_c *ConnectionPermissions_UploadConnectionPermissions_Call) Run(run func(filter filters.Filter)) *ConnectionPermissions_UploadConnectionPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *ConnectionPermissions_UploadConnectionPermissions_Call) Return(_a0 []string) 
*ConnectionPermissions_UploadConnectionPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ConnectionPermissions_UploadConnectionPermissions_Call) RunAndReturn(run func(filters.Filter) []string) *ConnectionPermissions_UploadConnectionPermissions_Call { + _c.Call.Return(run) + return _c +} + // NewConnectionPermissions creates a new instance of ConnectionPermissions. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewConnectionPermissions(t interface { diff --git a/internal/service/mocks/ConnectionsApi.go b/internal/service/mocks/ConnectionsApi.go index e1df4016..18e08cb6 100644 --- a/internal/service/mocks/ConnectionsApi.go +++ b/internal/service/mocks/ConnectionsApi.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.34.0. DO NOT EDIT. +// Code generated by mockery v2.36.0. DO NOT EDIT. package mocks @@ -6,7 +6,7 @@ import ( filters "github.com/esnet/gdg/internal/service/filters" mock "github.com/stretchr/testify/mock" - models "github.com/esnet/grafana-swagger-api-golang/goclient/models" + models "github.com/grafana/grafana-openapi-client-go/models" ) // ConnectionsApi is an autogenerated mock type for the ConnectionsApi type @@ -14,6 +14,14 @@ type ConnectionsApi struct { mock.Mock } +type ConnectionsApi_Expecter struct { + mock *mock.Mock +} + +func (_m *ConnectionsApi) EXPECT() *ConnectionsApi_Expecter { + return &ConnectionsApi_Expecter{mock: &_m.Mock} +} + // DeleteAllConnectionPermissions provides a mock function with given fields: filter func (_m *ConnectionsApi) DeleteAllConnectionPermissions(filter filters.Filter) []string { ret := _m.Called(filter) @@ -30,6 +38,34 @@ func (_m *ConnectionsApi) DeleteAllConnectionPermissions(filter filters.Filter) return r0 } +// ConnectionsApi_DeleteAllConnectionPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 
'DeleteAllConnectionPermissions' +type ConnectionsApi_DeleteAllConnectionPermissions_Call struct { + *mock.Call +} + +// DeleteAllConnectionPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *ConnectionsApi_Expecter) DeleteAllConnectionPermissions(filter interface{}) *ConnectionsApi_DeleteAllConnectionPermissions_Call { + return &ConnectionsApi_DeleteAllConnectionPermissions_Call{Call: _e.mock.On("DeleteAllConnectionPermissions", filter)} +} + +func (_c *ConnectionsApi_DeleteAllConnectionPermissions_Call) Run(run func(filter filters.Filter)) *ConnectionsApi_DeleteAllConnectionPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *ConnectionsApi_DeleteAllConnectionPermissions_Call) Return(_a0 []string) *ConnectionsApi_DeleteAllConnectionPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ConnectionsApi_DeleteAllConnectionPermissions_Call) RunAndReturn(run func(filters.Filter) []string) *ConnectionsApi_DeleteAllConnectionPermissions_Call { + _c.Call.Return(run) + return _c +} + // DeleteAllConnections provides a mock function with given fields: filter func (_m *ConnectionsApi) DeleteAllConnections(filter filters.Filter) []string { ret := _m.Called(filter) @@ -46,6 +82,34 @@ func (_m *ConnectionsApi) DeleteAllConnections(filter filters.Filter) []string { return r0 } +// ConnectionsApi_DeleteAllConnections_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllConnections' +type ConnectionsApi_DeleteAllConnections_Call struct { + *mock.Call +} + +// DeleteAllConnections is a helper method to define mock.On call +// - filter filters.Filter +func (_e *ConnectionsApi_Expecter) DeleteAllConnections(filter interface{}) *ConnectionsApi_DeleteAllConnections_Call { + return &ConnectionsApi_DeleteAllConnections_Call{Call: _e.mock.On("DeleteAllConnections", filter)} +} + +func (_c 
*ConnectionsApi_DeleteAllConnections_Call) Run(run func(filter filters.Filter)) *ConnectionsApi_DeleteAllConnections_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *ConnectionsApi_DeleteAllConnections_Call) Return(_a0 []string) *ConnectionsApi_DeleteAllConnections_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ConnectionsApi_DeleteAllConnections_Call) RunAndReturn(run func(filters.Filter) []string) *ConnectionsApi_DeleteAllConnections_Call { + _c.Call.Return(run) + return _c +} + // DownloadConnectionPermissions provides a mock function with given fields: filter func (_m *ConnectionsApi) DownloadConnectionPermissions(filter filters.Filter) []string { ret := _m.Called(filter) @@ -62,6 +126,34 @@ func (_m *ConnectionsApi) DownloadConnectionPermissions(filter filters.Filter) [ return r0 } +// ConnectionsApi_DownloadConnectionPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadConnectionPermissions' +type ConnectionsApi_DownloadConnectionPermissions_Call struct { + *mock.Call +} + +// DownloadConnectionPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *ConnectionsApi_Expecter) DownloadConnectionPermissions(filter interface{}) *ConnectionsApi_DownloadConnectionPermissions_Call { + return &ConnectionsApi_DownloadConnectionPermissions_Call{Call: _e.mock.On("DownloadConnectionPermissions", filter)} +} + +func (_c *ConnectionsApi_DownloadConnectionPermissions_Call) Run(run func(filter filters.Filter)) *ConnectionsApi_DownloadConnectionPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *ConnectionsApi_DownloadConnectionPermissions_Call) Return(_a0 []string) *ConnectionsApi_DownloadConnectionPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ConnectionsApi_DownloadConnectionPermissions_Call) RunAndReturn(run 
func(filters.Filter) []string) *ConnectionsApi_DownloadConnectionPermissions_Call { + _c.Call.Return(run) + return _c +} + // DownloadConnections provides a mock function with given fields: filter func (_m *ConnectionsApi) DownloadConnections(filter filters.Filter) []string { ret := _m.Called(filter) @@ -78,6 +170,34 @@ func (_m *ConnectionsApi) DownloadConnections(filter filters.Filter) []string { return r0 } +// ConnectionsApi_DownloadConnections_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadConnections' +type ConnectionsApi_DownloadConnections_Call struct { + *mock.Call +} + +// DownloadConnections is a helper method to define mock.On call +// - filter filters.Filter +func (_e *ConnectionsApi_Expecter) DownloadConnections(filter interface{}) *ConnectionsApi_DownloadConnections_Call { + return &ConnectionsApi_DownloadConnections_Call{Call: _e.mock.On("DownloadConnections", filter)} +} + +func (_c *ConnectionsApi_DownloadConnections_Call) Run(run func(filter filters.Filter)) *ConnectionsApi_DownloadConnections_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *ConnectionsApi_DownloadConnections_Call) Return(_a0 []string) *ConnectionsApi_DownloadConnections_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ConnectionsApi_DownloadConnections_Call) RunAndReturn(run func(filters.Filter) []string) *ConnectionsApi_DownloadConnections_Call { + _c.Call.Return(run) + return _c +} + // ListConnectionPermissions provides a mock function with given fields: filter func (_m *ConnectionsApi) ListConnectionPermissions(filter filters.Filter) map[*models.DataSourceListItemDTO]*models.DataSourcePermissionsDTO { ret := _m.Called(filter) @@ -94,6 +214,34 @@ func (_m *ConnectionsApi) ListConnectionPermissions(filter filters.Filter) map[* return r0 } +// ConnectionsApi_ListConnectionPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit 
version for method 'ListConnectionPermissions' +type ConnectionsApi_ListConnectionPermissions_Call struct { + *mock.Call +} + +// ListConnectionPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *ConnectionsApi_Expecter) ListConnectionPermissions(filter interface{}) *ConnectionsApi_ListConnectionPermissions_Call { + return &ConnectionsApi_ListConnectionPermissions_Call{Call: _e.mock.On("ListConnectionPermissions", filter)} +} + +func (_c *ConnectionsApi_ListConnectionPermissions_Call) Run(run func(filter filters.Filter)) *ConnectionsApi_ListConnectionPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *ConnectionsApi_ListConnectionPermissions_Call) Return(_a0 map[*models.DataSourceListItemDTO]*models.DataSourcePermissionsDTO) *ConnectionsApi_ListConnectionPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ConnectionsApi_ListConnectionPermissions_Call) RunAndReturn(run func(filters.Filter) map[*models.DataSourceListItemDTO]*models.DataSourcePermissionsDTO) *ConnectionsApi_ListConnectionPermissions_Call { + _c.Call.Return(run) + return _c +} + // ListConnections provides a mock function with given fields: filter func (_m *ConnectionsApi) ListConnections(filter filters.Filter) []models.DataSourceListItemDTO { ret := _m.Called(filter) @@ -110,6 +258,34 @@ func (_m *ConnectionsApi) ListConnections(filter filters.Filter) []models.DataSo return r0 } +// ConnectionsApi_ListConnections_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListConnections' +type ConnectionsApi_ListConnections_Call struct { + *mock.Call +} + +// ListConnections is a helper method to define mock.On call +// - filter filters.Filter +func (_e *ConnectionsApi_Expecter) ListConnections(filter interface{}) *ConnectionsApi_ListConnections_Call { + return &ConnectionsApi_ListConnections_Call{Call: _e.mock.On("ListConnections", 
filter)} +} + +func (_c *ConnectionsApi_ListConnections_Call) Run(run func(filter filters.Filter)) *ConnectionsApi_ListConnections_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *ConnectionsApi_ListConnections_Call) Return(_a0 []models.DataSourceListItemDTO) *ConnectionsApi_ListConnections_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ConnectionsApi_ListConnections_Call) RunAndReturn(run func(filters.Filter) []models.DataSourceListItemDTO) *ConnectionsApi_ListConnections_Call { + _c.Call.Return(run) + return _c +} + // UploadConnectionPermissions provides a mock function with given fields: filter func (_m *ConnectionsApi) UploadConnectionPermissions(filter filters.Filter) []string { ret := _m.Called(filter) @@ -126,6 +302,34 @@ func (_m *ConnectionsApi) UploadConnectionPermissions(filter filters.Filter) []s return r0 } +// ConnectionsApi_UploadConnectionPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadConnectionPermissions' +type ConnectionsApi_UploadConnectionPermissions_Call struct { + *mock.Call +} + +// UploadConnectionPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *ConnectionsApi_Expecter) UploadConnectionPermissions(filter interface{}) *ConnectionsApi_UploadConnectionPermissions_Call { + return &ConnectionsApi_UploadConnectionPermissions_Call{Call: _e.mock.On("UploadConnectionPermissions", filter)} +} + +func (_c *ConnectionsApi_UploadConnectionPermissions_Call) Run(run func(filter filters.Filter)) *ConnectionsApi_UploadConnectionPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *ConnectionsApi_UploadConnectionPermissions_Call) Return(_a0 []string) *ConnectionsApi_UploadConnectionPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ConnectionsApi_UploadConnectionPermissions_Call) 
RunAndReturn(run func(filters.Filter) []string) *ConnectionsApi_UploadConnectionPermissions_Call { + _c.Call.Return(run) + return _c +} + // UploadConnections provides a mock function with given fields: filter func (_m *ConnectionsApi) UploadConnections(filter filters.Filter) []string { ret := _m.Called(filter) @@ -142,6 +346,34 @@ func (_m *ConnectionsApi) UploadConnections(filter filters.Filter) []string { return r0 } +// ConnectionsApi_UploadConnections_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadConnections' +type ConnectionsApi_UploadConnections_Call struct { + *mock.Call +} + +// UploadConnections is a helper method to define mock.On call +// - filter filters.Filter +func (_e *ConnectionsApi_Expecter) UploadConnections(filter interface{}) *ConnectionsApi_UploadConnections_Call { + return &ConnectionsApi_UploadConnections_Call{Call: _e.mock.On("UploadConnections", filter)} +} + +func (_c *ConnectionsApi_UploadConnections_Call) Run(run func(filter filters.Filter)) *ConnectionsApi_UploadConnections_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *ConnectionsApi_UploadConnections_Call) Return(_a0 []string) *ConnectionsApi_UploadConnections_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ConnectionsApi_UploadConnections_Call) RunAndReturn(run func(filters.Filter) []string) *ConnectionsApi_UploadConnections_Call { + _c.Call.Return(run) + return _c +} + // NewConnectionsApi creates a new instance of ConnectionsApi. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
func NewConnectionsApi(t interface { diff --git a/internal/service/mocks/DashboardsApi.go b/internal/service/mocks/DashboardsApi.go index 5251e2c1..57b44ed2 100644 --- a/internal/service/mocks/DashboardsApi.go +++ b/internal/service/mocks/DashboardsApi.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.34.0. DO NOT EDIT. +// Code generated by mockery v2.36.0. DO NOT EDIT. package mocks @@ -6,7 +6,7 @@ import ( filters "github.com/esnet/gdg/internal/service/filters" mock "github.com/stretchr/testify/mock" - models "github.com/esnet/grafana-swagger-api-golang/goclient/models" + models "github.com/grafana/grafana-openapi-client-go/models" ) // DashboardsApi is an autogenerated mock type for the DashboardsApi type @@ -14,6 +14,14 @@ type DashboardsApi struct { mock.Mock } +type DashboardsApi_Expecter struct { + mock *mock.Mock +} + +func (_m *DashboardsApi) EXPECT() *DashboardsApi_Expecter { + return &DashboardsApi_Expecter{mock: &_m.Mock} +} + // DeleteAllDashboards provides a mock function with given fields: filter func (_m *DashboardsApi) DeleteAllDashboards(filter filters.Filter) []string { ret := _m.Called(filter) @@ -30,6 +38,34 @@ func (_m *DashboardsApi) DeleteAllDashboards(filter filters.Filter) []string { return r0 } +// DashboardsApi_DeleteAllDashboards_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllDashboards' +type DashboardsApi_DeleteAllDashboards_Call struct { + *mock.Call +} + +// DeleteAllDashboards is a helper method to define mock.On call +// - filter filters.Filter +func (_e *DashboardsApi_Expecter) DeleteAllDashboards(filter interface{}) *DashboardsApi_DeleteAllDashboards_Call { + return &DashboardsApi_DeleteAllDashboards_Call{Call: _e.mock.On("DeleteAllDashboards", filter)} +} + +func (_c *DashboardsApi_DeleteAllDashboards_Call) Run(run func(filter filters.Filter)) *DashboardsApi_DeleteAllDashboards_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return 
_c +} + +func (_c *DashboardsApi_DeleteAllDashboards_Call) Return(_a0 []string) *DashboardsApi_DeleteAllDashboards_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *DashboardsApi_DeleteAllDashboards_Call) RunAndReturn(run func(filters.Filter) []string) *DashboardsApi_DeleteAllDashboards_Call { + _c.Call.Return(run) + return _c +} + // DownloadDashboards provides a mock function with given fields: filter func (_m *DashboardsApi) DownloadDashboards(filter filters.Filter) []string { ret := _m.Called(filter) @@ -46,6 +82,34 @@ func (_m *DashboardsApi) DownloadDashboards(filter filters.Filter) []string { return r0 } +// DashboardsApi_DownloadDashboards_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadDashboards' +type DashboardsApi_DownloadDashboards_Call struct { + *mock.Call +} + +// DownloadDashboards is a helper method to define mock.On call +// - filter filters.Filter +func (_e *DashboardsApi_Expecter) DownloadDashboards(filter interface{}) *DashboardsApi_DownloadDashboards_Call { + return &DashboardsApi_DownloadDashboards_Call{Call: _e.mock.On("DownloadDashboards", filter)} +} + +func (_c *DashboardsApi_DownloadDashboards_Call) Run(run func(filter filters.Filter)) *DashboardsApi_DownloadDashboards_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *DashboardsApi_DownloadDashboards_Call) Return(_a0 []string) *DashboardsApi_DownloadDashboards_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *DashboardsApi_DownloadDashboards_Call) RunAndReturn(run func(filters.Filter) []string) *DashboardsApi_DownloadDashboards_Call { + _c.Call.Return(run) + return _c +} + // ListDashboards provides a mock function with given fields: filter func (_m *DashboardsApi) ListDashboards(filter filters.Filter) []*models.Hit { ret := _m.Called(filter) @@ -62,11 +126,67 @@ func (_m *DashboardsApi) ListDashboards(filter filters.Filter) []*models.Hit { return r0 } +// 
DashboardsApi_ListDashboards_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListDashboards' +type DashboardsApi_ListDashboards_Call struct { + *mock.Call +} + +// ListDashboards is a helper method to define mock.On call +// - filter filters.Filter +func (_e *DashboardsApi_Expecter) ListDashboards(filter interface{}) *DashboardsApi_ListDashboards_Call { + return &DashboardsApi_ListDashboards_Call{Call: _e.mock.On("ListDashboards", filter)} +} + +func (_c *DashboardsApi_ListDashboards_Call) Run(run func(filter filters.Filter)) *DashboardsApi_ListDashboards_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *DashboardsApi_ListDashboards_Call) Return(_a0 []*models.Hit) *DashboardsApi_ListDashboards_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *DashboardsApi_ListDashboards_Call) RunAndReturn(run func(filters.Filter) []*models.Hit) *DashboardsApi_ListDashboards_Call { + _c.Call.Return(run) + return _c +} + // UploadDashboards provides a mock function with given fields: filter func (_m *DashboardsApi) UploadDashboards(filter filters.Filter) { _m.Called(filter) } +// DashboardsApi_UploadDashboards_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadDashboards' +type DashboardsApi_UploadDashboards_Call struct { + *mock.Call +} + +// UploadDashboards is a helper method to define mock.On call +// - filter filters.Filter +func (_e *DashboardsApi_Expecter) UploadDashboards(filter interface{}) *DashboardsApi_UploadDashboards_Call { + return &DashboardsApi_UploadDashboards_Call{Call: _e.mock.On("UploadDashboards", filter)} +} + +func (_c *DashboardsApi_UploadDashboards_Call) Run(run func(filter filters.Filter)) *DashboardsApi_UploadDashboards_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *DashboardsApi_UploadDashboards_Call) Return() 
*DashboardsApi_UploadDashboards_Call { + _c.Call.Return() + return _c +} + +func (_c *DashboardsApi_UploadDashboards_Call) RunAndReturn(run func(filters.Filter)) *DashboardsApi_UploadDashboards_Call { + _c.Call.Return(run) + return _c +} + // NewDashboardsApi creates a new instance of DashboardsApi. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewDashboardsApi(t interface { diff --git a/internal/service/mocks/FoldersApi.go b/internal/service/mocks/FoldersApi.go index d2707d8d..16f70c2f 100644 --- a/internal/service/mocks/FoldersApi.go +++ b/internal/service/mocks/FoldersApi.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.34.0. DO NOT EDIT. +// Code generated by mockery v2.36.0. DO NOT EDIT. package mocks @@ -6,7 +6,7 @@ import ( filters "github.com/esnet/gdg/internal/service/filters" mock "github.com/stretchr/testify/mock" - models "github.com/esnet/grafana-swagger-api-golang/goclient/models" + models "github.com/grafana/grafana-openapi-client-go/models" ) // FoldersApi is an autogenerated mock type for the FoldersApi type @@ -14,6 +14,14 @@ type FoldersApi struct { mock.Mock } +type FoldersApi_Expecter struct { + mock *mock.Mock +} + +func (_m *FoldersApi) EXPECT() *FoldersApi_Expecter { + return &FoldersApi_Expecter{mock: &_m.Mock} +} + // DeleteAllFolders provides a mock function with given fields: filter func (_m *FoldersApi) DeleteAllFolders(filter filters.Filter) []string { ret := _m.Called(filter) @@ -30,6 +38,34 @@ func (_m *FoldersApi) DeleteAllFolders(filter filters.Filter) []string { return r0 } +// FoldersApi_DeleteAllFolders_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllFolders' +type FoldersApi_DeleteAllFolders_Call struct { + *mock.Call +} + +// DeleteAllFolders is a helper method to define mock.On call +// - filter filters.Filter +func (_e *FoldersApi_Expecter) 
DeleteAllFolders(filter interface{}) *FoldersApi_DeleteAllFolders_Call { + return &FoldersApi_DeleteAllFolders_Call{Call: _e.mock.On("DeleteAllFolders", filter)} +} + +func (_c *FoldersApi_DeleteAllFolders_Call) Run(run func(filter filters.Filter)) *FoldersApi_DeleteAllFolders_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *FoldersApi_DeleteAllFolders_Call) Return(_a0 []string) *FoldersApi_DeleteAllFolders_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *FoldersApi_DeleteAllFolders_Call) RunAndReturn(run func(filters.Filter) []string) *FoldersApi_DeleteAllFolders_Call { + _c.Call.Return(run) + return _c +} + // DownloadFolderPermissions provides a mock function with given fields: filter func (_m *FoldersApi) DownloadFolderPermissions(filter filters.Filter) []string { ret := _m.Called(filter) @@ -46,6 +82,34 @@ func (_m *FoldersApi) DownloadFolderPermissions(filter filters.Filter) []string return r0 } +// FoldersApi_DownloadFolderPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadFolderPermissions' +type FoldersApi_DownloadFolderPermissions_Call struct { + *mock.Call +} + +// DownloadFolderPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *FoldersApi_Expecter) DownloadFolderPermissions(filter interface{}) *FoldersApi_DownloadFolderPermissions_Call { + return &FoldersApi_DownloadFolderPermissions_Call{Call: _e.mock.On("DownloadFolderPermissions", filter)} +} + +func (_c *FoldersApi_DownloadFolderPermissions_Call) Run(run func(filter filters.Filter)) *FoldersApi_DownloadFolderPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *FoldersApi_DownloadFolderPermissions_Call) Return(_a0 []string) *FoldersApi_DownloadFolderPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c 
*FoldersApi_DownloadFolderPermissions_Call) RunAndReturn(run func(filters.Filter) []string) *FoldersApi_DownloadFolderPermissions_Call { + _c.Call.Return(run) + return _c +} + // DownloadFolders provides a mock function with given fields: filter func (_m *FoldersApi) DownloadFolders(filter filters.Filter) []string { ret := _m.Called(filter) @@ -62,6 +126,34 @@ func (_m *FoldersApi) DownloadFolders(filter filters.Filter) []string { return r0 } +// FoldersApi_DownloadFolders_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadFolders' +type FoldersApi_DownloadFolders_Call struct { + *mock.Call +} + +// DownloadFolders is a helper method to define mock.On call +// - filter filters.Filter +func (_e *FoldersApi_Expecter) DownloadFolders(filter interface{}) *FoldersApi_DownloadFolders_Call { + return &FoldersApi_DownloadFolders_Call{Call: _e.mock.On("DownloadFolders", filter)} +} + +func (_c *FoldersApi_DownloadFolders_Call) Run(run func(filter filters.Filter)) *FoldersApi_DownloadFolders_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *FoldersApi_DownloadFolders_Call) Return(_a0 []string) *FoldersApi_DownloadFolders_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *FoldersApi_DownloadFolders_Call) RunAndReturn(run func(filters.Filter) []string) *FoldersApi_DownloadFolders_Call { + _c.Call.Return(run) + return _c +} + // ListFolder provides a mock function with given fields: filter func (_m *FoldersApi) ListFolder(filter filters.Filter) []*models.Hit { ret := _m.Called(filter) @@ -78,6 +170,34 @@ func (_m *FoldersApi) ListFolder(filter filters.Filter) []*models.Hit { return r0 } +// FoldersApi_ListFolder_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListFolder' +type FoldersApi_ListFolder_Call struct { + *mock.Call +} + +// ListFolder is a helper method to define mock.On call +// - filter filters.Filter 
+func (_e *FoldersApi_Expecter) ListFolder(filter interface{}) *FoldersApi_ListFolder_Call { + return &FoldersApi_ListFolder_Call{Call: _e.mock.On("ListFolder", filter)} +} + +func (_c *FoldersApi_ListFolder_Call) Run(run func(filter filters.Filter)) *FoldersApi_ListFolder_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *FoldersApi_ListFolder_Call) Return(_a0 []*models.Hit) *FoldersApi_ListFolder_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *FoldersApi_ListFolder_Call) RunAndReturn(run func(filters.Filter) []*models.Hit) *FoldersApi_ListFolder_Call { + _c.Call.Return(run) + return _c +} + // ListFolderPermissions provides a mock function with given fields: filter func (_m *FoldersApi) ListFolderPermissions(filter filters.Filter) map[*models.Hit][]*models.DashboardACLInfoDTO { ret := _m.Called(filter) @@ -94,6 +214,34 @@ func (_m *FoldersApi) ListFolderPermissions(filter filters.Filter) map[*models.H return r0 } +// FoldersApi_ListFolderPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListFolderPermissions' +type FoldersApi_ListFolderPermissions_Call struct { + *mock.Call +} + +// ListFolderPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *FoldersApi_Expecter) ListFolderPermissions(filter interface{}) *FoldersApi_ListFolderPermissions_Call { + return &FoldersApi_ListFolderPermissions_Call{Call: _e.mock.On("ListFolderPermissions", filter)} +} + +func (_c *FoldersApi_ListFolderPermissions_Call) Run(run func(filter filters.Filter)) *FoldersApi_ListFolderPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *FoldersApi_ListFolderPermissions_Call) Return(_a0 map[*models.Hit][]*models.DashboardACLInfoDTO) *FoldersApi_ListFolderPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c 
*FoldersApi_ListFolderPermissions_Call) RunAndReturn(run func(filters.Filter) map[*models.Hit][]*models.DashboardACLInfoDTO) *FoldersApi_ListFolderPermissions_Call { + _c.Call.Return(run) + return _c +} + // UploadFolderPermissions provides a mock function with given fields: filter func (_m *FoldersApi) UploadFolderPermissions(filter filters.Filter) []string { ret := _m.Called(filter) @@ -110,6 +258,34 @@ func (_m *FoldersApi) UploadFolderPermissions(filter filters.Filter) []string { return r0 } +// FoldersApi_UploadFolderPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadFolderPermissions' +type FoldersApi_UploadFolderPermissions_Call struct { + *mock.Call +} + +// UploadFolderPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *FoldersApi_Expecter) UploadFolderPermissions(filter interface{}) *FoldersApi_UploadFolderPermissions_Call { + return &FoldersApi_UploadFolderPermissions_Call{Call: _e.mock.On("UploadFolderPermissions", filter)} +} + +func (_c *FoldersApi_UploadFolderPermissions_Call) Run(run func(filter filters.Filter)) *FoldersApi_UploadFolderPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *FoldersApi_UploadFolderPermissions_Call) Return(_a0 []string) *FoldersApi_UploadFolderPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *FoldersApi_UploadFolderPermissions_Call) RunAndReturn(run func(filters.Filter) []string) *FoldersApi_UploadFolderPermissions_Call { + _c.Call.Return(run) + return _c +} + // UploadFolders provides a mock function with given fields: filter func (_m *FoldersApi) UploadFolders(filter filters.Filter) []string { ret := _m.Called(filter) @@ -126,6 +302,34 @@ func (_m *FoldersApi) UploadFolders(filter filters.Filter) []string { return r0 } +// FoldersApi_UploadFolders_Call is a *mock.Call that shadows Run/Return methods with type explicit version for 
method 'UploadFolders' +type FoldersApi_UploadFolders_Call struct { + *mock.Call +} + +// UploadFolders is a helper method to define mock.On call +// - filter filters.Filter +func (_e *FoldersApi_Expecter) UploadFolders(filter interface{}) *FoldersApi_UploadFolders_Call { + return &FoldersApi_UploadFolders_Call{Call: _e.mock.On("UploadFolders", filter)} +} + +func (_c *FoldersApi_UploadFolders_Call) Run(run func(filter filters.Filter)) *FoldersApi_UploadFolders_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *FoldersApi_UploadFolders_Call) Return(_a0 []string) *FoldersApi_UploadFolders_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *FoldersApi_UploadFolders_Call) RunAndReturn(run func(filters.Filter) []string) *FoldersApi_UploadFolders_Call { + _c.Call.Return(run) + return _c +} + // NewFoldersApi creates a new instance of FoldersApi. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewFoldersApi(t interface { diff --git a/internal/service/mocks/GrafanaService.go b/internal/service/mocks/GrafanaService.go index 1c9c22fc..552c46b0 100644 --- a/internal/service/mocks/GrafanaService.go +++ b/internal/service/mocks/GrafanaService.go @@ -1,13 +1,14 @@ -// Code generated by mockery v2.34.0. DO NOT EDIT. +// Code generated by mockery v2.36.0. DO NOT EDIT. 
package mocks import ( - api "github.com/esnet/gdg/internal/api" filters "github.com/esnet/gdg/internal/service/filters" mock "github.com/stretchr/testify/mock" - models "github.com/esnet/grafana-swagger-api-golang/goclient/models" + models "github.com/grafana/grafana-openapi-client-go/models" + + types "github.com/esnet/gdg/internal/types" ) // GrafanaService is an autogenerated mock type for the GrafanaService type @@ -15,6 +16,14 @@ type GrafanaService struct { mock.Mock } +type GrafanaService_Expecter struct { + mock *mock.Mock +} + +func (_m *GrafanaService) EXPECT() *GrafanaService_Expecter { + return &GrafanaService_Expecter{mock: &_m.Mock} +} + // AddUserToOrg provides a mock function with given fields: role, userId, orgId func (_m *GrafanaService) AddUserToOrg(role string, userId int64, orgId int64) error { ret := _m.Called(role, userId, orgId) @@ -29,6 +38,36 @@ func (_m *GrafanaService) AddUserToOrg(role string, userId int64, orgId int64) e return r0 } +// GrafanaService_AddUserToOrg_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddUserToOrg' +type GrafanaService_AddUserToOrg_Call struct { + *mock.Call +} + +// AddUserToOrg is a helper method to define mock.On call +// - role string +// - userId int64 +// - orgId int64 +func (_e *GrafanaService_Expecter) AddUserToOrg(role interface{}, userId interface{}, orgId interface{}) *GrafanaService_AddUserToOrg_Call { + return &GrafanaService_AddUserToOrg_Call{Call: _e.mock.On("AddUserToOrg", role, userId, orgId)} +} + +func (_c *GrafanaService_AddUserToOrg_Call) Run(run func(role string, userId int64, orgId int64)) *GrafanaService_AddUserToOrg_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(int64), args[2].(int64)) + }) + return _c +} + +func (_c *GrafanaService_AddUserToOrg_Call) Return(_a0 error) *GrafanaService_AddUserToOrg_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_AddUserToOrg_Call) RunAndReturn(run 
func(string, int64, int64) error) *GrafanaService_AddUserToOrg_Call { + _c.Call.Return(run) + return _c +} + // CreateAPIKey provides a mock function with given fields: name, role, expiration func (_m *GrafanaService) CreateAPIKey(name string, role string, expiration int64) (*models.NewAPIKeyResult, error) { ret := _m.Called(name, role, expiration) @@ -55,6 +94,36 @@ func (_m *GrafanaService) CreateAPIKey(name string, role string, expiration int6 return r0, r1 } +// GrafanaService_CreateAPIKey_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateAPIKey' +type GrafanaService_CreateAPIKey_Call struct { + *mock.Call +} + +// CreateAPIKey is a helper method to define mock.On call +// - name string +// - role string +// - expiration int64 +func (_e *GrafanaService_Expecter) CreateAPIKey(name interface{}, role interface{}, expiration interface{}) *GrafanaService_CreateAPIKey_Call { + return &GrafanaService_CreateAPIKey_Call{Call: _e.mock.On("CreateAPIKey", name, role, expiration)} +} + +func (_c *GrafanaService_CreateAPIKey_Call) Run(run func(name string, role string, expiration int64)) *GrafanaService_CreateAPIKey_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(string), args[2].(int64)) + }) + return _c +} + +func (_c *GrafanaService_CreateAPIKey_Call) Return(_a0 *models.NewAPIKeyResult, _a1 error) *GrafanaService_CreateAPIKey_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *GrafanaService_CreateAPIKey_Call) RunAndReturn(run func(string, string, int64) (*models.NewAPIKeyResult, error)) *GrafanaService_CreateAPIKey_Call { + _c.Call.Return(run) + return _c +} + // CreateServiceAccount provides a mock function with given fields: name, role, expiration func (_m *GrafanaService) CreateServiceAccount(name string, role string, expiration int64) (*models.ServiceAccountDTO, error) { ret := _m.Called(name, role, expiration) @@ -81,6 +150,36 @@ func (_m *GrafanaService) 
CreateServiceAccount(name string, role string, expirat return r0, r1 } +// GrafanaService_CreateServiceAccount_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateServiceAccount' +type GrafanaService_CreateServiceAccount_Call struct { + *mock.Call +} + +// CreateServiceAccount is a helper method to define mock.On call +// - name string +// - role string +// - expiration int64 +func (_e *GrafanaService_Expecter) CreateServiceAccount(name interface{}, role interface{}, expiration interface{}) *GrafanaService_CreateServiceAccount_Call { + return &GrafanaService_CreateServiceAccount_Call{Call: _e.mock.On("CreateServiceAccount", name, role, expiration)} +} + +func (_c *GrafanaService_CreateServiceAccount_Call) Run(run func(name string, role string, expiration int64)) *GrafanaService_CreateServiceAccount_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(string), args[2].(int64)) + }) + return _c +} + +func (_c *GrafanaService_CreateServiceAccount_Call) Return(_a0 *models.ServiceAccountDTO, _a1 error) *GrafanaService_CreateServiceAccount_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *GrafanaService_CreateServiceAccount_Call) RunAndReturn(run func(string, string, int64) (*models.ServiceAccountDTO, error)) *GrafanaService_CreateServiceAccount_Call { + _c.Call.Return(run) + return _c +} + // CreateServiceAccountToken provides a mock function with given fields: name, role, expiration func (_m *GrafanaService) CreateServiceAccountToken(name int64, role string, expiration int64) (*models.NewAPIKeyResult, error) { ret := _m.Called(name, role, expiration) @@ -107,6 +206,36 @@ func (_m *GrafanaService) CreateServiceAccountToken(name int64, role string, exp return r0, r1 } +// GrafanaService_CreateServiceAccountToken_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateServiceAccountToken' +type GrafanaService_CreateServiceAccountToken_Call 
struct { + *mock.Call +} + +// CreateServiceAccountToken is a helper method to define mock.On call +// - name int64 +// - role string +// - expiration int64 +func (_e *GrafanaService_Expecter) CreateServiceAccountToken(name interface{}, role interface{}, expiration interface{}) *GrafanaService_CreateServiceAccountToken_Call { + return &GrafanaService_CreateServiceAccountToken_Call{Call: _e.mock.On("CreateServiceAccountToken", name, role, expiration)} +} + +func (_c *GrafanaService_CreateServiceAccountToken_Call) Run(run func(name int64, role string, expiration int64)) *GrafanaService_CreateServiceAccountToken_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64), args[1].(string), args[2].(int64)) + }) + return _c +} + +func (_c *GrafanaService_CreateServiceAccountToken_Call) Return(_a0 *models.NewAPIKeyResult, _a1 error) *GrafanaService_CreateServiceAccountToken_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *GrafanaService_CreateServiceAccountToken_Call) RunAndReturn(run func(int64, string, int64) (*models.NewAPIKeyResult, error)) *GrafanaService_CreateServiceAccountToken_Call { + _c.Call.Return(run) + return _c +} + // DeleteAllAlertNotifications provides a mock function with given fields: func (_m *GrafanaService) DeleteAllAlertNotifications() []string { ret := _m.Called() @@ -123,6 +252,33 @@ func (_m *GrafanaService) DeleteAllAlertNotifications() []string { return r0 } +// GrafanaService_DeleteAllAlertNotifications_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllAlertNotifications' +type GrafanaService_DeleteAllAlertNotifications_Call struct { + *mock.Call +} + +// DeleteAllAlertNotifications is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) DeleteAllAlertNotifications() *GrafanaService_DeleteAllAlertNotifications_Call { + return &GrafanaService_DeleteAllAlertNotifications_Call{Call: _e.mock.On("DeleteAllAlertNotifications")} +} + +func (_c 
*GrafanaService_DeleteAllAlertNotifications_Call) Run(run func()) *GrafanaService_DeleteAllAlertNotifications_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GrafanaService_DeleteAllAlertNotifications_Call) Return(_a0 []string) *GrafanaService_DeleteAllAlertNotifications_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DeleteAllAlertNotifications_Call) RunAndReturn(run func() []string) *GrafanaService_DeleteAllAlertNotifications_Call { + _c.Call.Return(run) + return _c +} + // DeleteAllConnectionPermissions provides a mock function with given fields: filter func (_m *GrafanaService) DeleteAllConnectionPermissions(filter filters.Filter) []string { ret := _m.Called(filter) @@ -139,6 +295,34 @@ func (_m *GrafanaService) DeleteAllConnectionPermissions(filter filters.Filter) return r0 } +// GrafanaService_DeleteAllConnectionPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllConnectionPermissions' +type GrafanaService_DeleteAllConnectionPermissions_Call struct { + *mock.Call +} + +// DeleteAllConnectionPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) DeleteAllConnectionPermissions(filter interface{}) *GrafanaService_DeleteAllConnectionPermissions_Call { + return &GrafanaService_DeleteAllConnectionPermissions_Call{Call: _e.mock.On("DeleteAllConnectionPermissions", filter)} +} + +func (_c *GrafanaService_DeleteAllConnectionPermissions_Call) Run(run func(filter filters.Filter)) *GrafanaService_DeleteAllConnectionPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_DeleteAllConnectionPermissions_Call) Return(_a0 []string) *GrafanaService_DeleteAllConnectionPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DeleteAllConnectionPermissions_Call) RunAndReturn(run 
func(filters.Filter) []string) *GrafanaService_DeleteAllConnectionPermissions_Call { + _c.Call.Return(run) + return _c +} + // DeleteAllConnections provides a mock function with given fields: filter func (_m *GrafanaService) DeleteAllConnections(filter filters.Filter) []string { ret := _m.Called(filter) @@ -155,6 +339,34 @@ func (_m *GrafanaService) DeleteAllConnections(filter filters.Filter) []string { return r0 } +// GrafanaService_DeleteAllConnections_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllConnections' +type GrafanaService_DeleteAllConnections_Call struct { + *mock.Call +} + +// DeleteAllConnections is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) DeleteAllConnections(filter interface{}) *GrafanaService_DeleteAllConnections_Call { + return &GrafanaService_DeleteAllConnections_Call{Call: _e.mock.On("DeleteAllConnections", filter)} +} + +func (_c *GrafanaService_DeleteAllConnections_Call) Run(run func(filter filters.Filter)) *GrafanaService_DeleteAllConnections_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_DeleteAllConnections_Call) Return(_a0 []string) *GrafanaService_DeleteAllConnections_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DeleteAllConnections_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_DeleteAllConnections_Call { + _c.Call.Return(run) + return _c +} + // DeleteAllDashboards provides a mock function with given fields: filter func (_m *GrafanaService) DeleteAllDashboards(filter filters.Filter) []string { ret := _m.Called(filter) @@ -171,6 +383,34 @@ func (_m *GrafanaService) DeleteAllDashboards(filter filters.Filter) []string { return r0 } +// GrafanaService_DeleteAllDashboards_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllDashboards' +type 
GrafanaService_DeleteAllDashboards_Call struct { + *mock.Call +} + +// DeleteAllDashboards is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) DeleteAllDashboards(filter interface{}) *GrafanaService_DeleteAllDashboards_Call { + return &GrafanaService_DeleteAllDashboards_Call{Call: _e.mock.On("DeleteAllDashboards", filter)} +} + +func (_c *GrafanaService_DeleteAllDashboards_Call) Run(run func(filter filters.Filter)) *GrafanaService_DeleteAllDashboards_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_DeleteAllDashboards_Call) Return(_a0 []string) *GrafanaService_DeleteAllDashboards_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DeleteAllDashboards_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_DeleteAllDashboards_Call { + _c.Call.Return(run) + return _c +} + // DeleteAllFolders provides a mock function with given fields: filter func (_m *GrafanaService) DeleteAllFolders(filter filters.Filter) []string { ret := _m.Called(filter) @@ -187,6 +427,34 @@ func (_m *GrafanaService) DeleteAllFolders(filter filters.Filter) []string { return r0 } +// GrafanaService_DeleteAllFolders_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllFolders' +type GrafanaService_DeleteAllFolders_Call struct { + *mock.Call +} + +// DeleteAllFolders is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) DeleteAllFolders(filter interface{}) *GrafanaService_DeleteAllFolders_Call { + return &GrafanaService_DeleteAllFolders_Call{Call: _e.mock.On("DeleteAllFolders", filter)} +} + +func (_c *GrafanaService_DeleteAllFolders_Call) Run(run func(filter filters.Filter)) *GrafanaService_DeleteAllFolders_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c 
*GrafanaService_DeleteAllFolders_Call) Return(_a0 []string) *GrafanaService_DeleteAllFolders_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DeleteAllFolders_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_DeleteAllFolders_Call { + _c.Call.Return(run) + return _c +} + // DeleteAllLibraryElements provides a mock function with given fields: filter func (_m *GrafanaService) DeleteAllLibraryElements(filter filters.Filter) []string { ret := _m.Called(filter) @@ -203,6 +471,34 @@ func (_m *GrafanaService) DeleteAllLibraryElements(filter filters.Filter) []stri return r0 } +// GrafanaService_DeleteAllLibraryElements_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllLibraryElements' +type GrafanaService_DeleteAllLibraryElements_Call struct { + *mock.Call +} + +// DeleteAllLibraryElements is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) DeleteAllLibraryElements(filter interface{}) *GrafanaService_DeleteAllLibraryElements_Call { + return &GrafanaService_DeleteAllLibraryElements_Call{Call: _e.mock.On("DeleteAllLibraryElements", filter)} +} + +func (_c *GrafanaService_DeleteAllLibraryElements_Call) Run(run func(filter filters.Filter)) *GrafanaService_DeleteAllLibraryElements_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_DeleteAllLibraryElements_Call) Return(_a0 []string) *GrafanaService_DeleteAllLibraryElements_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DeleteAllLibraryElements_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_DeleteAllLibraryElements_Call { + _c.Call.Return(run) + return _c +} + // DeleteAllServiceAccounts provides a mock function with given fields: func (_m *GrafanaService) DeleteAllServiceAccounts() []string { ret := _m.Called() @@ -219,6 +515,33 @@ func (_m *GrafanaService) 
DeleteAllServiceAccounts() []string { return r0 } +// GrafanaService_DeleteAllServiceAccounts_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllServiceAccounts' +type GrafanaService_DeleteAllServiceAccounts_Call struct { + *mock.Call +} + +// DeleteAllServiceAccounts is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) DeleteAllServiceAccounts() *GrafanaService_DeleteAllServiceAccounts_Call { + return &GrafanaService_DeleteAllServiceAccounts_Call{Call: _e.mock.On("DeleteAllServiceAccounts")} +} + +func (_c *GrafanaService_DeleteAllServiceAccounts_Call) Run(run func()) *GrafanaService_DeleteAllServiceAccounts_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GrafanaService_DeleteAllServiceAccounts_Call) Return(_a0 []string) *GrafanaService_DeleteAllServiceAccounts_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DeleteAllServiceAccounts_Call) RunAndReturn(run func() []string) *GrafanaService_DeleteAllServiceAccounts_Call { + _c.Call.Return(run) + return _c +} + // DeleteAllTokens provides a mock function with given fields: func (_m *GrafanaService) DeleteAllTokens() []string { ret := _m.Called() @@ -235,6 +558,33 @@ func (_m *GrafanaService) DeleteAllTokens() []string { return r0 } +// GrafanaService_DeleteAllTokens_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllTokens' +type GrafanaService_DeleteAllTokens_Call struct { + *mock.Call +} + +// DeleteAllTokens is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) DeleteAllTokens() *GrafanaService_DeleteAllTokens_Call { + return &GrafanaService_DeleteAllTokens_Call{Call: _e.mock.On("DeleteAllTokens")} +} + +func (_c *GrafanaService_DeleteAllTokens_Call) Run(run func()) *GrafanaService_DeleteAllTokens_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c 
*GrafanaService_DeleteAllTokens_Call) Return(_a0 []string) *GrafanaService_DeleteAllTokens_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DeleteAllTokens_Call) RunAndReturn(run func() []string) *GrafanaService_DeleteAllTokens_Call { + _c.Call.Return(run) + return _c +} + // DeleteAllUsers provides a mock function with given fields: filter func (_m *GrafanaService) DeleteAllUsers(filter filters.Filter) []string { ret := _m.Called(filter) @@ -251,6 +601,34 @@ func (_m *GrafanaService) DeleteAllUsers(filter filters.Filter) []string { return r0 } +// GrafanaService_DeleteAllUsers_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllUsers' +type GrafanaService_DeleteAllUsers_Call struct { + *mock.Call +} + +// DeleteAllUsers is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) DeleteAllUsers(filter interface{}) *GrafanaService_DeleteAllUsers_Call { + return &GrafanaService_DeleteAllUsers_Call{Call: _e.mock.On("DeleteAllUsers", filter)} +} + +func (_c *GrafanaService_DeleteAllUsers_Call) Run(run func(filter filters.Filter)) *GrafanaService_DeleteAllUsers_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_DeleteAllUsers_Call) Return(_a0 []string) *GrafanaService_DeleteAllUsers_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DeleteAllUsers_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_DeleteAllUsers_Call { + _c.Call.Return(run) + return _c +} + // DeleteServiceAccountTokens provides a mock function with given fields: serviceId func (_m *GrafanaService) DeleteServiceAccountTokens(serviceId int64) []string { ret := _m.Called(serviceId) @@ -267,6 +645,34 @@ func (_m *GrafanaService) DeleteServiceAccountTokens(serviceId int64) []string { return r0 } +// GrafanaService_DeleteServiceAccountTokens_Call is a *mock.Call that shadows 
Run/Return methods with type explicit version for method 'DeleteServiceAccountTokens' +type GrafanaService_DeleteServiceAccountTokens_Call struct { + *mock.Call +} + +// DeleteServiceAccountTokens is a helper method to define mock.On call +// - serviceId int64 +func (_e *GrafanaService_Expecter) DeleteServiceAccountTokens(serviceId interface{}) *GrafanaService_DeleteServiceAccountTokens_Call { + return &GrafanaService_DeleteServiceAccountTokens_Call{Call: _e.mock.On("DeleteServiceAccountTokens", serviceId)} +} + +func (_c *GrafanaService_DeleteServiceAccountTokens_Call) Run(run func(serviceId int64)) *GrafanaService_DeleteServiceAccountTokens_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64)) + }) + return _c +} + +func (_c *GrafanaService_DeleteServiceAccountTokens_Call) Return(_a0 []string) *GrafanaService_DeleteServiceAccountTokens_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DeleteServiceAccountTokens_Call) RunAndReturn(run func(int64) []string) *GrafanaService_DeleteServiceAccountTokens_Call { + _c.Call.Return(run) + return _c +} + // DeleteTeam provides a mock function with given fields: filter func (_m *GrafanaService) DeleteTeam(filter filters.Filter) ([]*models.TeamDTO, error) { ret := _m.Called(filter) @@ -293,6 +699,34 @@ func (_m *GrafanaService) DeleteTeam(filter filters.Filter) ([]*models.TeamDTO, return r0, r1 } +// GrafanaService_DeleteTeam_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteTeam' +type GrafanaService_DeleteTeam_Call struct { + *mock.Call +} + +// DeleteTeam is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) DeleteTeam(filter interface{}) *GrafanaService_DeleteTeam_Call { + return &GrafanaService_DeleteTeam_Call{Call: _e.mock.On("DeleteTeam", filter)} +} + +func (_c *GrafanaService_DeleteTeam_Call) Run(run func(filter filters.Filter)) *GrafanaService_DeleteTeam_Call { + 
_c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_DeleteTeam_Call) Return(_a0 []*models.TeamDTO, _a1 error) *GrafanaService_DeleteTeam_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *GrafanaService_DeleteTeam_Call) RunAndReturn(run func(filters.Filter) ([]*models.TeamDTO, error)) *GrafanaService_DeleteTeam_Call { + _c.Call.Return(run) + return _c +} + // DeleteUserFromOrg provides a mock function with given fields: userId, orgId func (_m *GrafanaService) DeleteUserFromOrg(userId int64, orgId int64) error { ret := _m.Called(userId, orgId) @@ -307,6 +741,35 @@ func (_m *GrafanaService) DeleteUserFromOrg(userId int64, orgId int64) error { return r0 } +// GrafanaService_DeleteUserFromOrg_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteUserFromOrg' +type GrafanaService_DeleteUserFromOrg_Call struct { + *mock.Call +} + +// DeleteUserFromOrg is a helper method to define mock.On call +// - userId int64 +// - orgId int64 +func (_e *GrafanaService_Expecter) DeleteUserFromOrg(userId interface{}, orgId interface{}) *GrafanaService_DeleteUserFromOrg_Call { + return &GrafanaService_DeleteUserFromOrg_Call{Call: _e.mock.On("DeleteUserFromOrg", userId, orgId)} +} + +func (_c *GrafanaService_DeleteUserFromOrg_Call) Run(run func(userId int64, orgId int64)) *GrafanaService_DeleteUserFromOrg_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64), args[1].(int64)) + }) + return _c +} + +func (_c *GrafanaService_DeleteUserFromOrg_Call) Return(_a0 error) *GrafanaService_DeleteUserFromOrg_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DeleteUserFromOrg_Call) RunAndReturn(run func(int64, int64) error) *GrafanaService_DeleteUserFromOrg_Call { + _c.Call.Return(run) + return _c +} + // DownloadAlertNotifications provides a mock function with given fields: func (_m *GrafanaService) DownloadAlertNotifications() []string { 
ret := _m.Called() @@ -323,6 +786,33 @@ func (_m *GrafanaService) DownloadAlertNotifications() []string { return r0 } +// GrafanaService_DownloadAlertNotifications_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadAlertNotifications' +type GrafanaService_DownloadAlertNotifications_Call struct { + *mock.Call +} + +// DownloadAlertNotifications is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) DownloadAlertNotifications() *GrafanaService_DownloadAlertNotifications_Call { + return &GrafanaService_DownloadAlertNotifications_Call{Call: _e.mock.On("DownloadAlertNotifications")} +} + +func (_c *GrafanaService_DownloadAlertNotifications_Call) Run(run func()) *GrafanaService_DownloadAlertNotifications_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GrafanaService_DownloadAlertNotifications_Call) Return(_a0 []string) *GrafanaService_DownloadAlertNotifications_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DownloadAlertNotifications_Call) RunAndReturn(run func() []string) *GrafanaService_DownloadAlertNotifications_Call { + _c.Call.Return(run) + return _c +} + // DownloadConnectionPermissions provides a mock function with given fields: filter func (_m *GrafanaService) DownloadConnectionPermissions(filter filters.Filter) []string { ret := _m.Called(filter) @@ -339,6 +829,34 @@ func (_m *GrafanaService) DownloadConnectionPermissions(filter filters.Filter) [ return r0 } +// GrafanaService_DownloadConnectionPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadConnectionPermissions' +type GrafanaService_DownloadConnectionPermissions_Call struct { + *mock.Call +} + +// DownloadConnectionPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) DownloadConnectionPermissions(filter interface{}) 
*GrafanaService_DownloadConnectionPermissions_Call { + return &GrafanaService_DownloadConnectionPermissions_Call{Call: _e.mock.On("DownloadConnectionPermissions", filter)} +} + +func (_c *GrafanaService_DownloadConnectionPermissions_Call) Run(run func(filter filters.Filter)) *GrafanaService_DownloadConnectionPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_DownloadConnectionPermissions_Call) Return(_a0 []string) *GrafanaService_DownloadConnectionPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DownloadConnectionPermissions_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_DownloadConnectionPermissions_Call { + _c.Call.Return(run) + return _c +} + // DownloadConnections provides a mock function with given fields: filter func (_m *GrafanaService) DownloadConnections(filter filters.Filter) []string { ret := _m.Called(filter) @@ -355,6 +873,34 @@ func (_m *GrafanaService) DownloadConnections(filter filters.Filter) []string { return r0 } +// GrafanaService_DownloadConnections_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadConnections' +type GrafanaService_DownloadConnections_Call struct { + *mock.Call +} + +// DownloadConnections is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) DownloadConnections(filter interface{}) *GrafanaService_DownloadConnections_Call { + return &GrafanaService_DownloadConnections_Call{Call: _e.mock.On("DownloadConnections", filter)} +} + +func (_c *GrafanaService_DownloadConnections_Call) Run(run func(filter filters.Filter)) *GrafanaService_DownloadConnections_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_DownloadConnections_Call) Return(_a0 []string) *GrafanaService_DownloadConnections_Call { + 
_c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DownloadConnections_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_DownloadConnections_Call { + _c.Call.Return(run) + return _c +} + // DownloadDashboards provides a mock function with given fields: filter func (_m *GrafanaService) DownloadDashboards(filter filters.Filter) []string { ret := _m.Called(filter) @@ -371,6 +917,34 @@ func (_m *GrafanaService) DownloadDashboards(filter filters.Filter) []string { return r0 } +// GrafanaService_DownloadDashboards_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadDashboards' +type GrafanaService_DownloadDashboards_Call struct { + *mock.Call +} + +// DownloadDashboards is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) DownloadDashboards(filter interface{}) *GrafanaService_DownloadDashboards_Call { + return &GrafanaService_DownloadDashboards_Call{Call: _e.mock.On("DownloadDashboards", filter)} +} + +func (_c *GrafanaService_DownloadDashboards_Call) Run(run func(filter filters.Filter)) *GrafanaService_DownloadDashboards_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_DownloadDashboards_Call) Return(_a0 []string) *GrafanaService_DownloadDashboards_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DownloadDashboards_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_DownloadDashboards_Call { + _c.Call.Return(run) + return _c +} + // DownloadFolderPermissions provides a mock function with given fields: filter func (_m *GrafanaService) DownloadFolderPermissions(filter filters.Filter) []string { ret := _m.Called(filter) @@ -387,6 +961,34 @@ func (_m *GrafanaService) DownloadFolderPermissions(filter filters.Filter) []str return r0 } +// GrafanaService_DownloadFolderPermissions_Call is a *mock.Call that shadows Run/Return 
methods with type explicit version for method 'DownloadFolderPermissions' +type GrafanaService_DownloadFolderPermissions_Call struct { + *mock.Call +} + +// DownloadFolderPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) DownloadFolderPermissions(filter interface{}) *GrafanaService_DownloadFolderPermissions_Call { + return &GrafanaService_DownloadFolderPermissions_Call{Call: _e.mock.On("DownloadFolderPermissions", filter)} +} + +func (_c *GrafanaService_DownloadFolderPermissions_Call) Run(run func(filter filters.Filter)) *GrafanaService_DownloadFolderPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_DownloadFolderPermissions_Call) Return(_a0 []string) *GrafanaService_DownloadFolderPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DownloadFolderPermissions_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_DownloadFolderPermissions_Call { + _c.Call.Return(run) + return _c +} + // DownloadFolders provides a mock function with given fields: filter func (_m *GrafanaService) DownloadFolders(filter filters.Filter) []string { ret := _m.Called(filter) @@ -403,6 +1005,34 @@ func (_m *GrafanaService) DownloadFolders(filter filters.Filter) []string { return r0 } +// GrafanaService_DownloadFolders_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadFolders' +type GrafanaService_DownloadFolders_Call struct { + *mock.Call +} + +// DownloadFolders is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) DownloadFolders(filter interface{}) *GrafanaService_DownloadFolders_Call { + return &GrafanaService_DownloadFolders_Call{Call: _e.mock.On("DownloadFolders", filter)} +} + +func (_c *GrafanaService_DownloadFolders_Call) Run(run func(filter filters.Filter)) 
*GrafanaService_DownloadFolders_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_DownloadFolders_Call) Return(_a0 []string) *GrafanaService_DownloadFolders_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DownloadFolders_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_DownloadFolders_Call { + _c.Call.Return(run) + return _c +} + // DownloadLibraryElements provides a mock function with given fields: filter func (_m *GrafanaService) DownloadLibraryElements(filter filters.Filter) []string { ret := _m.Called(filter) @@ -419,6 +1049,34 @@ func (_m *GrafanaService) DownloadLibraryElements(filter filters.Filter) []strin return r0 } +// GrafanaService_DownloadLibraryElements_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadLibraryElements' +type GrafanaService_DownloadLibraryElements_Call struct { + *mock.Call +} + +// DownloadLibraryElements is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) DownloadLibraryElements(filter interface{}) *GrafanaService_DownloadLibraryElements_Call { + return &GrafanaService_DownloadLibraryElements_Call{Call: _e.mock.On("DownloadLibraryElements", filter)} +} + +func (_c *GrafanaService_DownloadLibraryElements_Call) Run(run func(filter filters.Filter)) *GrafanaService_DownloadLibraryElements_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_DownloadLibraryElements_Call) Return(_a0 []string) *GrafanaService_DownloadLibraryElements_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DownloadLibraryElements_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_DownloadLibraryElements_Call { + _c.Call.Return(run) + return _c +} + // DownloadOrganizations provides a mock function with given fields: func (_m 
*GrafanaService) DownloadOrganizations() []string { ret := _m.Called() @@ -435,6 +1093,33 @@ func (_m *GrafanaService) DownloadOrganizations() []string { return r0 } +// GrafanaService_DownloadOrganizations_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadOrganizations' +type GrafanaService_DownloadOrganizations_Call struct { + *mock.Call +} + +// DownloadOrganizations is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) DownloadOrganizations() *GrafanaService_DownloadOrganizations_Call { + return &GrafanaService_DownloadOrganizations_Call{Call: _e.mock.On("DownloadOrganizations")} +} + +func (_c *GrafanaService_DownloadOrganizations_Call) Run(run func()) *GrafanaService_DownloadOrganizations_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GrafanaService_DownloadOrganizations_Call) Return(_a0 []string) *GrafanaService_DownloadOrganizations_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DownloadOrganizations_Call) RunAndReturn(run func() []string) *GrafanaService_DownloadOrganizations_Call { + _c.Call.Return(run) + return _c +} + // DownloadTeams provides a mock function with given fields: filter func (_m *GrafanaService) DownloadTeams(filter filters.Filter) map[*models.TeamDTO][]*models.TeamMemberDTO { ret := _m.Called(filter) @@ -451,6 +1136,34 @@ func (_m *GrafanaService) DownloadTeams(filter filters.Filter) map[*models.TeamD return r0 } +// GrafanaService_DownloadTeams_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadTeams' +type GrafanaService_DownloadTeams_Call struct { + *mock.Call +} + +// DownloadTeams is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) DownloadTeams(filter interface{}) *GrafanaService_DownloadTeams_Call { + return &GrafanaService_DownloadTeams_Call{Call: _e.mock.On("DownloadTeams", filter)} +} + 
+func (_c *GrafanaService_DownloadTeams_Call) Run(run func(filter filters.Filter)) *GrafanaService_DownloadTeams_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_DownloadTeams_Call) Return(_a0 map[*models.TeamDTO][]*models.TeamMemberDTO) *GrafanaService_DownloadTeams_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DownloadTeams_Call) RunAndReturn(run func(filters.Filter) map[*models.TeamDTO][]*models.TeamMemberDTO) *GrafanaService_DownloadTeams_Call { + _c.Call.Return(run) + return _c +} + // DownloadUsers provides a mock function with given fields: filter func (_m *GrafanaService) DownloadUsers(filter filters.Filter) []string { ret := _m.Called(filter) @@ -467,7 +1180,35 @@ func (_m *GrafanaService) DownloadUsers(filter filters.Filter) []string { return r0 } -// GetServerInfo provides a mock function with given fields: +// GrafanaService_DownloadUsers_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadUsers' +type GrafanaService_DownloadUsers_Call struct { + *mock.Call +} + +// DownloadUsers is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) DownloadUsers(filter interface{}) *GrafanaService_DownloadUsers_Call { + return &GrafanaService_DownloadUsers_Call{Call: _e.mock.On("DownloadUsers", filter)} +} + +func (_c *GrafanaService_DownloadUsers_Call) Run(run func(filter filters.Filter)) *GrafanaService_DownloadUsers_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_DownloadUsers_Call) Return(_a0 []string) *GrafanaService_DownloadUsers_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_DownloadUsers_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_DownloadUsers_Call { + _c.Call.Return(run) + return _c +} + +// GetServerInfo provides a mock 
function with given fields: func (_m *GrafanaService) GetServerInfo() map[string]interface{} { ret := _m.Called() @@ -483,6 +1224,33 @@ func (_m *GrafanaService) GetServerInfo() map[string]interface{} { return r0 } +// GrafanaService_GetServerInfo_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetServerInfo' +type GrafanaService_GetServerInfo_Call struct { + *mock.Call +} + +// GetServerInfo is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) GetServerInfo() *GrafanaService_GetServerInfo_Call { + return &GrafanaService_GetServerInfo_Call{Call: _e.mock.On("GetServerInfo")} +} + +func (_c *GrafanaService_GetServerInfo_Call) Run(run func()) *GrafanaService_GetServerInfo_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GrafanaService_GetServerInfo_Call) Return(_a0 map[string]interface{}) *GrafanaService_GetServerInfo_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_GetServerInfo_Call) RunAndReturn(run func() map[string]interface{}) *GrafanaService_GetServerInfo_Call { + _c.Call.Return(run) + return _c +} + // GetTokenOrganization provides a mock function with given fields: func (_m *GrafanaService) GetTokenOrganization() *models.OrgDetailsDTO { ret := _m.Called() @@ -499,6 +1267,33 @@ func (_m *GrafanaService) GetTokenOrganization() *models.OrgDetailsDTO { return r0 } +// GrafanaService_GetTokenOrganization_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetTokenOrganization' +type GrafanaService_GetTokenOrganization_Call struct { + *mock.Call +} + +// GetTokenOrganization is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) GetTokenOrganization() *GrafanaService_GetTokenOrganization_Call { + return &GrafanaService_GetTokenOrganization_Call{Call: _e.mock.On("GetTokenOrganization")} +} + +func (_c *GrafanaService_GetTokenOrganization_Call) Run(run func()) 
*GrafanaService_GetTokenOrganization_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GrafanaService_GetTokenOrganization_Call) Return(_a0 *models.OrgDetailsDTO) *GrafanaService_GetTokenOrganization_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_GetTokenOrganization_Call) RunAndReturn(run func() *models.OrgDetailsDTO) *GrafanaService_GetTokenOrganization_Call { + _c.Call.Return(run) + return _c +} + // GetUserInfo provides a mock function with given fields: func (_m *GrafanaService) GetUserInfo() (*models.UserProfileDTO, error) { ret := _m.Called() @@ -525,6 +1320,33 @@ func (_m *GrafanaService) GetUserInfo() (*models.UserProfileDTO, error) { return r0, r1 } +// GrafanaService_GetUserInfo_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetUserInfo' +type GrafanaService_GetUserInfo_Call struct { + *mock.Call +} + +// GetUserInfo is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) GetUserInfo() *GrafanaService_GetUserInfo_Call { + return &GrafanaService_GetUserInfo_Call{Call: _e.mock.On("GetUserInfo")} +} + +func (_c *GrafanaService_GetUserInfo_Call) Run(run func()) *GrafanaService_GetUserInfo_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GrafanaService_GetUserInfo_Call) Return(_a0 *models.UserProfileDTO, _a1 error) *GrafanaService_GetUserInfo_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *GrafanaService_GetUserInfo_Call) RunAndReturn(run func() (*models.UserProfileDTO, error)) *GrafanaService_GetUserInfo_Call { + _c.Call.Return(run) + return _c +} + // GetUserOrganization provides a mock function with given fields: func (_m *GrafanaService) GetUserOrganization() *models.OrgDetailsDTO { ret := _m.Called() @@ -541,11 +1363,65 @@ func (_m *GrafanaService) GetUserOrganization() *models.OrgDetailsDTO { return r0 } +// GrafanaService_GetUserOrganization_Call is a *mock.Call that 
shadows Run/Return methods with type explicit version for method 'GetUserOrganization' +type GrafanaService_GetUserOrganization_Call struct { + *mock.Call +} + +// GetUserOrganization is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) GetUserOrganization() *GrafanaService_GetUserOrganization_Call { + return &GrafanaService_GetUserOrganization_Call{Call: _e.mock.On("GetUserOrganization")} +} + +func (_c *GrafanaService_GetUserOrganization_Call) Run(run func()) *GrafanaService_GetUserOrganization_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GrafanaService_GetUserOrganization_Call) Return(_a0 *models.OrgDetailsDTO) *GrafanaService_GetUserOrganization_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_GetUserOrganization_Call) RunAndReturn(run func() *models.OrgDetailsDTO) *GrafanaService_GetUserOrganization_Call { + _c.Call.Return(run) + return _c +} + // InitOrganizations provides a mock function with given fields: func (_m *GrafanaService) InitOrganizations() { _m.Called() } +// GrafanaService_InitOrganizations_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'InitOrganizations' +type GrafanaService_InitOrganizations_Call struct { + *mock.Call +} + +// InitOrganizations is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) InitOrganizations() *GrafanaService_InitOrganizations_Call { + return &GrafanaService_InitOrganizations_Call{Call: _e.mock.On("InitOrganizations")} +} + +func (_c *GrafanaService_InitOrganizations_Call) Run(run func()) *GrafanaService_InitOrganizations_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GrafanaService_InitOrganizations_Call) Return() *GrafanaService_InitOrganizations_Call { + _c.Call.Return() + return _c +} + +func (_c *GrafanaService_InitOrganizations_Call) RunAndReturn(run func()) *GrafanaService_InitOrganizations_Call { + 
_c.Call.Return(run) + return _c +} + // ListAPIKeys provides a mock function with given fields: func (_m *GrafanaService) ListAPIKeys() []*models.APIKeyDTO { ret := _m.Called() @@ -562,6 +1438,33 @@ func (_m *GrafanaService) ListAPIKeys() []*models.APIKeyDTO { return r0 } +// GrafanaService_ListAPIKeys_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListAPIKeys' +type GrafanaService_ListAPIKeys_Call struct { + *mock.Call +} + +// ListAPIKeys is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) ListAPIKeys() *GrafanaService_ListAPIKeys_Call { + return &GrafanaService_ListAPIKeys_Call{Call: _e.mock.On("ListAPIKeys")} +} + +func (_c *GrafanaService_ListAPIKeys_Call) Run(run func()) *GrafanaService_ListAPIKeys_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GrafanaService_ListAPIKeys_Call) Return(_a0 []*models.APIKeyDTO) *GrafanaService_ListAPIKeys_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_ListAPIKeys_Call) RunAndReturn(run func() []*models.APIKeyDTO) *GrafanaService_ListAPIKeys_Call { + _c.Call.Return(run) + return _c +} + // ListAlertNotifications provides a mock function with given fields: func (_m *GrafanaService) ListAlertNotifications() []*models.AlertNotification { ret := _m.Called() @@ -578,6 +1481,33 @@ func (_m *GrafanaService) ListAlertNotifications() []*models.AlertNotification { return r0 } +// GrafanaService_ListAlertNotifications_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListAlertNotifications' +type GrafanaService_ListAlertNotifications_Call struct { + *mock.Call +} + +// ListAlertNotifications is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) ListAlertNotifications() *GrafanaService_ListAlertNotifications_Call { + return &GrafanaService_ListAlertNotifications_Call{Call: _e.mock.On("ListAlertNotifications")} +} + +func (_c 
*GrafanaService_ListAlertNotifications_Call) Run(run func()) *GrafanaService_ListAlertNotifications_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GrafanaService_ListAlertNotifications_Call) Return(_a0 []*models.AlertNotification) *GrafanaService_ListAlertNotifications_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_ListAlertNotifications_Call) RunAndReturn(run func() []*models.AlertNotification) *GrafanaService_ListAlertNotifications_Call { + _c.Call.Return(run) + return _c +} + // ListConnectionPermissions provides a mock function with given fields: filter func (_m *GrafanaService) ListConnectionPermissions(filter filters.Filter) map[*models.DataSourceListItemDTO]*models.DataSourcePermissionsDTO { ret := _m.Called(filter) @@ -594,6 +1524,34 @@ func (_m *GrafanaService) ListConnectionPermissions(filter filters.Filter) map[* return r0 } +// GrafanaService_ListConnectionPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListConnectionPermissions' +type GrafanaService_ListConnectionPermissions_Call struct { + *mock.Call +} + +// ListConnectionPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) ListConnectionPermissions(filter interface{}) *GrafanaService_ListConnectionPermissions_Call { + return &GrafanaService_ListConnectionPermissions_Call{Call: _e.mock.On("ListConnectionPermissions", filter)} +} + +func (_c *GrafanaService_ListConnectionPermissions_Call) Run(run func(filter filters.Filter)) *GrafanaService_ListConnectionPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_ListConnectionPermissions_Call) Return(_a0 map[*models.DataSourceListItemDTO]*models.DataSourcePermissionsDTO) *GrafanaService_ListConnectionPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c 
*GrafanaService_ListConnectionPermissions_Call) RunAndReturn(run func(filters.Filter) map[*models.DataSourceListItemDTO]*models.DataSourcePermissionsDTO) *GrafanaService_ListConnectionPermissions_Call { + _c.Call.Return(run) + return _c +} + // ListConnections provides a mock function with given fields: filter func (_m *GrafanaService) ListConnections(filter filters.Filter) []models.DataSourceListItemDTO { ret := _m.Called(filter) @@ -610,6 +1568,34 @@ func (_m *GrafanaService) ListConnections(filter filters.Filter) []models.DataSo return r0 } +// GrafanaService_ListConnections_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListConnections' +type GrafanaService_ListConnections_Call struct { + *mock.Call +} + +// ListConnections is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) ListConnections(filter interface{}) *GrafanaService_ListConnections_Call { + return &GrafanaService_ListConnections_Call{Call: _e.mock.On("ListConnections", filter)} +} + +func (_c *GrafanaService_ListConnections_Call) Run(run func(filter filters.Filter)) *GrafanaService_ListConnections_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_ListConnections_Call) Return(_a0 []models.DataSourceListItemDTO) *GrafanaService_ListConnections_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_ListConnections_Call) RunAndReturn(run func(filters.Filter) []models.DataSourceListItemDTO) *GrafanaService_ListConnections_Call { + _c.Call.Return(run) + return _c +} + // ListDashboards provides a mock function with given fields: filter func (_m *GrafanaService) ListDashboards(filter filters.Filter) []*models.Hit { ret := _m.Called(filter) @@ -626,6 +1612,34 @@ func (_m *GrafanaService) ListDashboards(filter filters.Filter) []*models.Hit { return r0 } +// GrafanaService_ListDashboards_Call is a *mock.Call that 
shadows Run/Return methods with type explicit version for method 'ListDashboards' +type GrafanaService_ListDashboards_Call struct { + *mock.Call +} + +// ListDashboards is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) ListDashboards(filter interface{}) *GrafanaService_ListDashboards_Call { + return &GrafanaService_ListDashboards_Call{Call: _e.mock.On("ListDashboards", filter)} +} + +func (_c *GrafanaService_ListDashboards_Call) Run(run func(filter filters.Filter)) *GrafanaService_ListDashboards_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_ListDashboards_Call) Return(_a0 []*models.Hit) *GrafanaService_ListDashboards_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_ListDashboards_Call) RunAndReturn(run func(filters.Filter) []*models.Hit) *GrafanaService_ListDashboards_Call { + _c.Call.Return(run) + return _c +} + // ListFolder provides a mock function with given fields: filter func (_m *GrafanaService) ListFolder(filter filters.Filter) []*models.Hit { ret := _m.Called(filter) @@ -642,6 +1656,34 @@ func (_m *GrafanaService) ListFolder(filter filters.Filter) []*models.Hit { return r0 } +// GrafanaService_ListFolder_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListFolder' +type GrafanaService_ListFolder_Call struct { + *mock.Call +} + +// ListFolder is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) ListFolder(filter interface{}) *GrafanaService_ListFolder_Call { + return &GrafanaService_ListFolder_Call{Call: _e.mock.On("ListFolder", filter)} +} + +func (_c *GrafanaService_ListFolder_Call) Run(run func(filter filters.Filter)) *GrafanaService_ListFolder_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_ListFolder_Call) Return(_a0 
[]*models.Hit) *GrafanaService_ListFolder_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_ListFolder_Call) RunAndReturn(run func(filters.Filter) []*models.Hit) *GrafanaService_ListFolder_Call { + _c.Call.Return(run) + return _c +} + // ListFolderPermissions provides a mock function with given fields: filter func (_m *GrafanaService) ListFolderPermissions(filter filters.Filter) map[*models.Hit][]*models.DashboardACLInfoDTO { ret := _m.Called(filter) @@ -658,6 +1700,34 @@ func (_m *GrafanaService) ListFolderPermissions(filter filters.Filter) map[*mode return r0 } +// GrafanaService_ListFolderPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListFolderPermissions' +type GrafanaService_ListFolderPermissions_Call struct { + *mock.Call +} + +// ListFolderPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) ListFolderPermissions(filter interface{}) *GrafanaService_ListFolderPermissions_Call { + return &GrafanaService_ListFolderPermissions_Call{Call: _e.mock.On("ListFolderPermissions", filter)} +} + +func (_c *GrafanaService_ListFolderPermissions_Call) Run(run func(filter filters.Filter)) *GrafanaService_ListFolderPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_ListFolderPermissions_Call) Return(_a0 map[*models.Hit][]*models.DashboardACLInfoDTO) *GrafanaService_ListFolderPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_ListFolderPermissions_Call) RunAndReturn(run func(filters.Filter) map[*models.Hit][]*models.DashboardACLInfoDTO) *GrafanaService_ListFolderPermissions_Call { + _c.Call.Return(run) + return _c +} + // ListLibraryElements provides a mock function with given fields: filter func (_m *GrafanaService) ListLibraryElements(filter filters.Filter) []*models.LibraryElementDTO { ret := _m.Called(filter) 
@@ -674,6 +1744,34 @@ func (_m *GrafanaService) ListLibraryElements(filter filters.Filter) []*models.L return r0 } +// GrafanaService_ListLibraryElements_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListLibraryElements' +type GrafanaService_ListLibraryElements_Call struct { + *mock.Call +} + +// ListLibraryElements is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) ListLibraryElements(filter interface{}) *GrafanaService_ListLibraryElements_Call { + return &GrafanaService_ListLibraryElements_Call{Call: _e.mock.On("ListLibraryElements", filter)} +} + +func (_c *GrafanaService_ListLibraryElements_Call) Run(run func(filter filters.Filter)) *GrafanaService_ListLibraryElements_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_ListLibraryElements_Call) Return(_a0 []*models.LibraryElementDTO) *GrafanaService_ListLibraryElements_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_ListLibraryElements_Call) RunAndReturn(run func(filters.Filter) []*models.LibraryElementDTO) *GrafanaService_ListLibraryElements_Call { + _c.Call.Return(run) + return _c +} + // ListLibraryElementsConnections provides a mock function with given fields: filter, connectionID func (_m *GrafanaService) ListLibraryElementsConnections(filter filters.Filter, connectionID string) []*models.DashboardFullWithMeta { ret := _m.Called(filter, connectionID) @@ -690,6 +1788,35 @@ func (_m *GrafanaService) ListLibraryElementsConnections(filter filters.Filter, return r0 } +// GrafanaService_ListLibraryElementsConnections_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListLibraryElementsConnections' +type GrafanaService_ListLibraryElementsConnections_Call struct { + *mock.Call +} + +// ListLibraryElementsConnections is a helper method to define mock.On call +// - filter 
filters.Filter +// - connectionID string +func (_e *GrafanaService_Expecter) ListLibraryElementsConnections(filter interface{}, connectionID interface{}) *GrafanaService_ListLibraryElementsConnections_Call { + return &GrafanaService_ListLibraryElementsConnections_Call{Call: _e.mock.On("ListLibraryElementsConnections", filter, connectionID)} +} + +func (_c *GrafanaService_ListLibraryElementsConnections_Call) Run(run func(filter filters.Filter, connectionID string)) *GrafanaService_ListLibraryElementsConnections_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter), args[1].(string)) + }) + return _c +} + +func (_c *GrafanaService_ListLibraryElementsConnections_Call) Return(_a0 []*models.DashboardFullWithMeta) *GrafanaService_ListLibraryElementsConnections_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_ListLibraryElementsConnections_Call) RunAndReturn(run func(filters.Filter, string) []*models.DashboardFullWithMeta) *GrafanaService_ListLibraryElementsConnections_Call { + _c.Call.Return(run) + return _c +} + // ListOrgUsers provides a mock function with given fields: orgId func (_m *GrafanaService) ListOrgUsers(orgId int64) []*models.OrgUserDTO { ret := _m.Called(orgId) @@ -706,6 +1833,34 @@ func (_m *GrafanaService) ListOrgUsers(orgId int64) []*models.OrgUserDTO { return r0 } +// GrafanaService_ListOrgUsers_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListOrgUsers' +type GrafanaService_ListOrgUsers_Call struct { + *mock.Call +} + +// ListOrgUsers is a helper method to define mock.On call +// - orgId int64 +func (_e *GrafanaService_Expecter) ListOrgUsers(orgId interface{}) *GrafanaService_ListOrgUsers_Call { + return &GrafanaService_ListOrgUsers_Call{Call: _e.mock.On("ListOrgUsers", orgId)} +} + +func (_c *GrafanaService_ListOrgUsers_Call) Run(run func(orgId int64)) *GrafanaService_ListOrgUsers_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64)) + }) + 
return _c +} + +func (_c *GrafanaService_ListOrgUsers_Call) Return(_a0 []*models.OrgUserDTO) *GrafanaService_ListOrgUsers_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_ListOrgUsers_Call) RunAndReturn(run func(int64) []*models.OrgUserDTO) *GrafanaService_ListOrgUsers_Call { + _c.Call.Return(run) + return _c +} + // ListOrganizations provides a mock function with given fields: func (_m *GrafanaService) ListOrganizations() []*models.OrgDTO { ret := _m.Called() @@ -722,22 +1877,76 @@ func (_m *GrafanaService) ListOrganizations() []*models.OrgDTO { return r0 } +// GrafanaService_ListOrganizations_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListOrganizations' +type GrafanaService_ListOrganizations_Call struct { + *mock.Call +} + +// ListOrganizations is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) ListOrganizations() *GrafanaService_ListOrganizations_Call { + return &GrafanaService_ListOrganizations_Call{Call: _e.mock.On("ListOrganizations")} +} + +func (_c *GrafanaService_ListOrganizations_Call) Run(run func()) *GrafanaService_ListOrganizations_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GrafanaService_ListOrganizations_Call) Return(_a0 []*models.OrgDTO) *GrafanaService_ListOrganizations_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_ListOrganizations_Call) RunAndReturn(run func() []*models.OrgDTO) *GrafanaService_ListOrganizations_Call { + _c.Call.Return(run) + return _c +} + // ListServiceAccounts provides a mock function with given fields: -func (_m *GrafanaService) ListServiceAccounts() []*api.ServiceAccountDTOWithTokens { +func (_m *GrafanaService) ListServiceAccounts() []*types.ServiceAccountDTOWithTokens { ret := _m.Called() - var r0 []*api.ServiceAccountDTOWithTokens - if rf, ok := ret.Get(0).(func() []*api.ServiceAccountDTOWithTokens); ok { + var r0 []*types.ServiceAccountDTOWithTokens + 
if rf, ok := ret.Get(0).(func() []*types.ServiceAccountDTOWithTokens); ok { r0 = rf() } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]*api.ServiceAccountDTOWithTokens) + r0 = ret.Get(0).([]*types.ServiceAccountDTOWithTokens) } } return r0 } +// GrafanaService_ListServiceAccounts_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListServiceAccounts' +type GrafanaService_ListServiceAccounts_Call struct { + *mock.Call +} + +// ListServiceAccounts is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) ListServiceAccounts() *GrafanaService_ListServiceAccounts_Call { + return &GrafanaService_ListServiceAccounts_Call{Call: _e.mock.On("ListServiceAccounts")} +} + +func (_c *GrafanaService_ListServiceAccounts_Call) Run(run func()) *GrafanaService_ListServiceAccounts_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GrafanaService_ListServiceAccounts_Call) Return(_a0 []*types.ServiceAccountDTOWithTokens) *GrafanaService_ListServiceAccounts_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_ListServiceAccounts_Call) RunAndReturn(run func() []*types.ServiceAccountDTOWithTokens) *GrafanaService_ListServiceAccounts_Call { + _c.Call.Return(run) + return _c +} + // ListServiceAccountsTokens provides a mock function with given fields: id func (_m *GrafanaService) ListServiceAccountsTokens(id int64) ([]*models.TokenDTO, error) { ret := _m.Called(id) @@ -764,6 +1973,34 @@ func (_m *GrafanaService) ListServiceAccountsTokens(id int64) ([]*models.TokenDT return r0, r1 } +// GrafanaService_ListServiceAccountsTokens_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListServiceAccountsTokens' +type GrafanaService_ListServiceAccountsTokens_Call struct { + *mock.Call +} + +// ListServiceAccountsTokens is a helper method to define mock.On call +// - id int64 +func (_e *GrafanaService_Expecter) ListServiceAccountsTokens(id 
interface{}) *GrafanaService_ListServiceAccountsTokens_Call { + return &GrafanaService_ListServiceAccountsTokens_Call{Call: _e.mock.On("ListServiceAccountsTokens", id)} +} + +func (_c *GrafanaService_ListServiceAccountsTokens_Call) Run(run func(id int64)) *GrafanaService_ListServiceAccountsTokens_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64)) + }) + return _c +} + +func (_c *GrafanaService_ListServiceAccountsTokens_Call) Return(_a0 []*models.TokenDTO, _a1 error) *GrafanaService_ListServiceAccountsTokens_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *GrafanaService_ListServiceAccountsTokens_Call) RunAndReturn(run func(int64) ([]*models.TokenDTO, error)) *GrafanaService_ListServiceAccountsTokens_Call { + _c.Call.Return(run) + return _c +} + // ListTeams provides a mock function with given fields: filter func (_m *GrafanaService) ListTeams(filter filters.Filter) map[*models.TeamDTO][]*models.TeamMemberDTO { ret := _m.Called(filter) @@ -780,6 +2017,34 @@ func (_m *GrafanaService) ListTeams(filter filters.Filter) map[*models.TeamDTO][ return r0 } +// GrafanaService_ListTeams_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListTeams' +type GrafanaService_ListTeams_Call struct { + *mock.Call +} + +// ListTeams is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) ListTeams(filter interface{}) *GrafanaService_ListTeams_Call { + return &GrafanaService_ListTeams_Call{Call: _e.mock.On("ListTeams", filter)} +} + +func (_c *GrafanaService_ListTeams_Call) Run(run func(filter filters.Filter)) *GrafanaService_ListTeams_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_ListTeams_Call) Return(_a0 map[*models.TeamDTO][]*models.TeamMemberDTO) *GrafanaService_ListTeams_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_ListTeams_Call) RunAndReturn(run 
func(filters.Filter) map[*models.TeamDTO][]*models.TeamMemberDTO) *GrafanaService_ListTeams_Call { + _c.Call.Return(run) + return _c +} + // ListUsers provides a mock function with given fields: filter func (_m *GrafanaService) ListUsers(filter filters.Filter) []*models.UserSearchHitDTO { ret := _m.Called(filter) @@ -796,11 +2061,66 @@ func (_m *GrafanaService) ListUsers(filter filters.Filter) []*models.UserSearchH return r0 } +// GrafanaService_ListUsers_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListUsers' +type GrafanaService_ListUsers_Call struct { + *mock.Call +} + +// ListUsers is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) ListUsers(filter interface{}) *GrafanaService_ListUsers_Call { + return &GrafanaService_ListUsers_Call{Call: _e.mock.On("ListUsers", filter)} +} + +func (_c *GrafanaService_ListUsers_Call) Run(run func(filter filters.Filter)) *GrafanaService_ListUsers_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_ListUsers_Call) Return(_a0 []*models.UserSearchHitDTO) *GrafanaService_ListUsers_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_ListUsers_Call) RunAndReturn(run func(filters.Filter) []*models.UserSearchHitDTO) *GrafanaService_ListUsers_Call { + _c.Call.Return(run) + return _c +} + // Login provides a mock function with given fields: func (_m *GrafanaService) Login() { _m.Called() } +// GrafanaService_Login_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Login' +type GrafanaService_Login_Call struct { + *mock.Call +} + +// Login is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) Login() *GrafanaService_Login_Call { + return &GrafanaService_Login_Call{Call: _e.mock.On("Login")} +} + +func (_c *GrafanaService_Login_Call) Run(run func()) *GrafanaService_Login_Call { + 
_c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GrafanaService_Login_Call) Return() *GrafanaService_Login_Call { + _c.Call.Return() + return _c +} + +func (_c *GrafanaService_Login_Call) RunAndReturn(run func()) *GrafanaService_Login_Call { + _c.Call.Return(run) + return _c +} + // PromoteUser provides a mock function with given fields: userLogin func (_m *GrafanaService) PromoteUser(userLogin string) (string, error) { ret := _m.Called(userLogin) @@ -825,6 +2145,34 @@ func (_m *GrafanaService) PromoteUser(userLogin string) (string, error) { return r0, r1 } +// GrafanaService_PromoteUser_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PromoteUser' +type GrafanaService_PromoteUser_Call struct { + *mock.Call +} + +// PromoteUser is a helper method to define mock.On call +// - userLogin string +func (_e *GrafanaService_Expecter) PromoteUser(userLogin interface{}) *GrafanaService_PromoteUser_Call { + return &GrafanaService_PromoteUser_Call{Call: _e.mock.On("PromoteUser", userLogin)} +} + +func (_c *GrafanaService_PromoteUser_Call) Run(run func(userLogin string)) *GrafanaService_PromoteUser_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string)) + }) + return _c +} + +func (_c *GrafanaService_PromoteUser_Call) Return(_a0 string, _a1 error) *GrafanaService_PromoteUser_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *GrafanaService_PromoteUser_Call) RunAndReturn(run func(string) (string, error)) *GrafanaService_PromoteUser_Call { + _c.Call.Return(run) + return _c +} + // SetOrganization provides a mock function with given fields: id func (_m *GrafanaService) SetOrganization(id int64) error { ret := _m.Called(id) @@ -839,6 +2187,34 @@ func (_m *GrafanaService) SetOrganization(id int64) error { return r0 } +// GrafanaService_SetOrganization_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetOrganization' +type 
GrafanaService_SetOrganization_Call struct { + *mock.Call +} + +// SetOrganization is a helper method to define mock.On call +// - id int64 +func (_e *GrafanaService_Expecter) SetOrganization(id interface{}) *GrafanaService_SetOrganization_Call { + return &GrafanaService_SetOrganization_Call{Call: _e.mock.On("SetOrganization", id)} +} + +func (_c *GrafanaService_SetOrganization_Call) Run(run func(id int64)) *GrafanaService_SetOrganization_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64)) + }) + return _c +} + +func (_c *GrafanaService_SetOrganization_Call) Return(_a0 error) *GrafanaService_SetOrganization_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_SetOrganization_Call) RunAndReturn(run func(int64) error) *GrafanaService_SetOrganization_Call { + _c.Call.Return(run) + return _c +} + // SetUserOrganizations provides a mock function with given fields: id func (_m *GrafanaService) SetUserOrganizations(id int64) error { ret := _m.Called(id) @@ -853,6 +2229,34 @@ func (_m *GrafanaService) SetUserOrganizations(id int64) error { return r0 } +// GrafanaService_SetUserOrganizations_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetUserOrganizations' +type GrafanaService_SetUserOrganizations_Call struct { + *mock.Call +} + +// SetUserOrganizations is a helper method to define mock.On call +// - id int64 +func (_e *GrafanaService_Expecter) SetUserOrganizations(id interface{}) *GrafanaService_SetUserOrganizations_Call { + return &GrafanaService_SetUserOrganizations_Call{Call: _e.mock.On("SetUserOrganizations", id)} +} + +func (_c *GrafanaService_SetUserOrganizations_Call) Run(run func(id int64)) *GrafanaService_SetUserOrganizations_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64)) + }) + return _c +} + +func (_c *GrafanaService_SetUserOrganizations_Call) Return(_a0 error) *GrafanaService_SetUserOrganizations_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c 
*GrafanaService_SetUserOrganizations_Call) RunAndReturn(run func(int64) error) *GrafanaService_SetUserOrganizations_Call { + _c.Call.Return(run) + return _c +} + // UpdateUserInOrg provides a mock function with given fields: role, userId, orgId func (_m *GrafanaService) UpdateUserInOrg(role string, userId int64, orgId int64) error { ret := _m.Called(role, userId, orgId) @@ -867,6 +2271,36 @@ func (_m *GrafanaService) UpdateUserInOrg(role string, userId int64, orgId int64 return r0 } +// GrafanaService_UpdateUserInOrg_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateUserInOrg' +type GrafanaService_UpdateUserInOrg_Call struct { + *mock.Call +} + +// UpdateUserInOrg is a helper method to define mock.On call +// - role string +// - userId int64 +// - orgId int64 +func (_e *GrafanaService_Expecter) UpdateUserInOrg(role interface{}, userId interface{}, orgId interface{}) *GrafanaService_UpdateUserInOrg_Call { + return &GrafanaService_UpdateUserInOrg_Call{Call: _e.mock.On("UpdateUserInOrg", role, userId, orgId)} +} + +func (_c *GrafanaService_UpdateUserInOrg_Call) Run(run func(role string, userId int64, orgId int64)) *GrafanaService_UpdateUserInOrg_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(int64), args[2].(int64)) + }) + return _c +} + +func (_c *GrafanaService_UpdateUserInOrg_Call) Return(_a0 error) *GrafanaService_UpdateUserInOrg_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_UpdateUserInOrg_Call) RunAndReturn(run func(string, int64, int64) error) *GrafanaService_UpdateUserInOrg_Call { + _c.Call.Return(run) + return _c +} + // UploadAlertNotifications provides a mock function with given fields: func (_m *GrafanaService) UploadAlertNotifications() []string { ret := _m.Called() @@ -883,6 +2317,33 @@ func (_m *GrafanaService) UploadAlertNotifications() []string { return r0 } +// GrafanaService_UploadAlertNotifications_Call is a *mock.Call that shadows 
Run/Return methods with type explicit version for method 'UploadAlertNotifications' +type GrafanaService_UploadAlertNotifications_Call struct { + *mock.Call +} + +// UploadAlertNotifications is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) UploadAlertNotifications() *GrafanaService_UploadAlertNotifications_Call { + return &GrafanaService_UploadAlertNotifications_Call{Call: _e.mock.On("UploadAlertNotifications")} +} + +func (_c *GrafanaService_UploadAlertNotifications_Call) Run(run func()) *GrafanaService_UploadAlertNotifications_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GrafanaService_UploadAlertNotifications_Call) Return(_a0 []string) *GrafanaService_UploadAlertNotifications_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_UploadAlertNotifications_Call) RunAndReturn(run func() []string) *GrafanaService_UploadAlertNotifications_Call { + _c.Call.Return(run) + return _c +} + // UploadConnectionPermissions provides a mock function with given fields: filter func (_m *GrafanaService) UploadConnectionPermissions(filter filters.Filter) []string { ret := _m.Called(filter) @@ -899,6 +2360,34 @@ func (_m *GrafanaService) UploadConnectionPermissions(filter filters.Filter) []s return r0 } +// GrafanaService_UploadConnectionPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadConnectionPermissions' +type GrafanaService_UploadConnectionPermissions_Call struct { + *mock.Call +} + +// UploadConnectionPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) UploadConnectionPermissions(filter interface{}) *GrafanaService_UploadConnectionPermissions_Call { + return &GrafanaService_UploadConnectionPermissions_Call{Call: _e.mock.On("UploadConnectionPermissions", filter)} +} + +func (_c *GrafanaService_UploadConnectionPermissions_Call) Run(run func(filter filters.Filter)) 
*GrafanaService_UploadConnectionPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_UploadConnectionPermissions_Call) Return(_a0 []string) *GrafanaService_UploadConnectionPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_UploadConnectionPermissions_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_UploadConnectionPermissions_Call { + _c.Call.Return(run) + return _c +} + // UploadConnections provides a mock function with given fields: filter func (_m *GrafanaService) UploadConnections(filter filters.Filter) []string { ret := _m.Called(filter) @@ -915,11 +2404,67 @@ func (_m *GrafanaService) UploadConnections(filter filters.Filter) []string { return r0 } +// GrafanaService_UploadConnections_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadConnections' +type GrafanaService_UploadConnections_Call struct { + *mock.Call +} + +// UploadConnections is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) UploadConnections(filter interface{}) *GrafanaService_UploadConnections_Call { + return &GrafanaService_UploadConnections_Call{Call: _e.mock.On("UploadConnections", filter)} +} + +func (_c *GrafanaService_UploadConnections_Call) Run(run func(filter filters.Filter)) *GrafanaService_UploadConnections_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_UploadConnections_Call) Return(_a0 []string) *GrafanaService_UploadConnections_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_UploadConnections_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_UploadConnections_Call { + _c.Call.Return(run) + return _c +} + // UploadDashboards provides a mock function with given fields: filter func (_m *GrafanaService) 
UploadDashboards(filter filters.Filter) { _m.Called(filter) } +// GrafanaService_UploadDashboards_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadDashboards' +type GrafanaService_UploadDashboards_Call struct { + *mock.Call +} + +// UploadDashboards is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) UploadDashboards(filter interface{}) *GrafanaService_UploadDashboards_Call { + return &GrafanaService_UploadDashboards_Call{Call: _e.mock.On("UploadDashboards", filter)} +} + +func (_c *GrafanaService_UploadDashboards_Call) Run(run func(filter filters.Filter)) *GrafanaService_UploadDashboards_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_UploadDashboards_Call) Return() *GrafanaService_UploadDashboards_Call { + _c.Call.Return() + return _c +} + +func (_c *GrafanaService_UploadDashboards_Call) RunAndReturn(run func(filters.Filter)) *GrafanaService_UploadDashboards_Call { + _c.Call.Return(run) + return _c +} + // UploadFolderPermissions provides a mock function with given fields: filter func (_m *GrafanaService) UploadFolderPermissions(filter filters.Filter) []string { ret := _m.Called(filter) @@ -936,6 +2481,34 @@ func (_m *GrafanaService) UploadFolderPermissions(filter filters.Filter) []strin return r0 } +// GrafanaService_UploadFolderPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadFolderPermissions' +type GrafanaService_UploadFolderPermissions_Call struct { + *mock.Call +} + +// UploadFolderPermissions is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) UploadFolderPermissions(filter interface{}) *GrafanaService_UploadFolderPermissions_Call { + return &GrafanaService_UploadFolderPermissions_Call{Call: _e.mock.On("UploadFolderPermissions", filter)} +} + +func (_c 
*GrafanaService_UploadFolderPermissions_Call) Run(run func(filter filters.Filter)) *GrafanaService_UploadFolderPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_UploadFolderPermissions_Call) Return(_a0 []string) *GrafanaService_UploadFolderPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_UploadFolderPermissions_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_UploadFolderPermissions_Call { + _c.Call.Return(run) + return _c +} + // UploadFolders provides a mock function with given fields: filter func (_m *GrafanaService) UploadFolders(filter filters.Filter) []string { ret := _m.Called(filter) @@ -952,6 +2525,34 @@ func (_m *GrafanaService) UploadFolders(filter filters.Filter) []string { return r0 } +// GrafanaService_UploadFolders_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadFolders' +type GrafanaService_UploadFolders_Call struct { + *mock.Call +} + +// UploadFolders is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) UploadFolders(filter interface{}) *GrafanaService_UploadFolders_Call { + return &GrafanaService_UploadFolders_Call{Call: _e.mock.On("UploadFolders", filter)} +} + +func (_c *GrafanaService_UploadFolders_Call) Run(run func(filter filters.Filter)) *GrafanaService_UploadFolders_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_UploadFolders_Call) Return(_a0 []string) *GrafanaService_UploadFolders_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_UploadFolders_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_UploadFolders_Call { + _c.Call.Return(run) + return _c +} + // UploadLibraryElements provides a mock function with given fields: filter func (_m *GrafanaService) 
UploadLibraryElements(filter filters.Filter) []string { ret := _m.Called(filter) @@ -968,6 +2569,34 @@ func (_m *GrafanaService) UploadLibraryElements(filter filters.Filter) []string return r0 } +// GrafanaService_UploadLibraryElements_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadLibraryElements' +type GrafanaService_UploadLibraryElements_Call struct { + *mock.Call +} + +// UploadLibraryElements is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) UploadLibraryElements(filter interface{}) *GrafanaService_UploadLibraryElements_Call { + return &GrafanaService_UploadLibraryElements_Call{Call: _e.mock.On("UploadLibraryElements", filter)} +} + +func (_c *GrafanaService_UploadLibraryElements_Call) Run(run func(filter filters.Filter)) *GrafanaService_UploadLibraryElements_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_UploadLibraryElements_Call) Return(_a0 []string) *GrafanaService_UploadLibraryElements_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_UploadLibraryElements_Call) RunAndReturn(run func(filters.Filter) []string) *GrafanaService_UploadLibraryElements_Call { + _c.Call.Return(run) + return _c +} + // UploadOrganizations provides a mock function with given fields: func (_m *GrafanaService) UploadOrganizations() []string { ret := _m.Called() @@ -984,6 +2613,33 @@ func (_m *GrafanaService) UploadOrganizations() []string { return r0 } +// GrafanaService_UploadOrganizations_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadOrganizations' +type GrafanaService_UploadOrganizations_Call struct { + *mock.Call +} + +// UploadOrganizations is a helper method to define mock.On call +func (_e *GrafanaService_Expecter) UploadOrganizations() *GrafanaService_UploadOrganizations_Call { + return 
&GrafanaService_UploadOrganizations_Call{Call: _e.mock.On("UploadOrganizations")} +} + +func (_c *GrafanaService_UploadOrganizations_Call) Run(run func()) *GrafanaService_UploadOrganizations_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GrafanaService_UploadOrganizations_Call) Return(_a0 []string) *GrafanaService_UploadOrganizations_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_UploadOrganizations_Call) RunAndReturn(run func() []string) *GrafanaService_UploadOrganizations_Call { + _c.Call.Return(run) + return _c +} + // UploadTeams provides a mock function with given fields: filter func (_m *GrafanaService) UploadTeams(filter filters.Filter) map[*models.TeamDTO][]*models.TeamMemberDTO { ret := _m.Called(filter) @@ -1000,6 +2656,34 @@ func (_m *GrafanaService) UploadTeams(filter filters.Filter) map[*models.TeamDTO return r0 } +// GrafanaService_UploadTeams_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadTeams' +type GrafanaService_UploadTeams_Call struct { + *mock.Call +} + +// UploadTeams is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) UploadTeams(filter interface{}) *GrafanaService_UploadTeams_Call { + return &GrafanaService_UploadTeams_Call{Call: _e.mock.On("UploadTeams", filter)} +} + +func (_c *GrafanaService_UploadTeams_Call) Run(run func(filter filters.Filter)) *GrafanaService_UploadTeams_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_UploadTeams_Call) Return(_a0 map[*models.TeamDTO][]*models.TeamMemberDTO) *GrafanaService_UploadTeams_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_UploadTeams_Call) RunAndReturn(run func(filters.Filter) map[*models.TeamDTO][]*models.TeamMemberDTO) *GrafanaService_UploadTeams_Call { + _c.Call.Return(run) + return _c +} + // UploadUsers provides a 
mock function with given fields: filter func (_m *GrafanaService) UploadUsers(filter filters.Filter) []models.UserProfileDTO { ret := _m.Called(filter) @@ -1016,6 +2700,34 @@ func (_m *GrafanaService) UploadUsers(filter filters.Filter) []models.UserProfil return r0 } +// GrafanaService_UploadUsers_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadUsers' +type GrafanaService_UploadUsers_Call struct { + *mock.Call +} + +// UploadUsers is a helper method to define mock.On call +// - filter filters.Filter +func (_e *GrafanaService_Expecter) UploadUsers(filter interface{}) *GrafanaService_UploadUsers_Call { + return &GrafanaService_UploadUsers_Call{Call: _e.mock.On("UploadUsers", filter)} +} + +func (_c *GrafanaService_UploadUsers_Call) Run(run func(filter filters.Filter)) *GrafanaService_UploadUsers_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *GrafanaService_UploadUsers_Call) Return(_a0 []models.UserProfileDTO) *GrafanaService_UploadUsers_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GrafanaService_UploadUsers_Call) RunAndReturn(run func(filters.Filter) []models.UserProfileDTO) *GrafanaService_UploadUsers_Call { + _c.Call.Return(run) + return _c +} + // NewGrafanaService creates a new instance of GrafanaService. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewGrafanaService(t interface { diff --git a/internal/service/mocks/LibraryElementsApi.go b/internal/service/mocks/LibraryElementsApi.go index 278f6b83..339ee80b 100644 --- a/internal/service/mocks/LibraryElementsApi.go +++ b/internal/service/mocks/LibraryElementsApi.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.34.0. DO NOT EDIT. +// Code generated by mockery v2.36.0. DO NOT EDIT. 
package mocks @@ -6,7 +6,7 @@ import ( filters "github.com/esnet/gdg/internal/service/filters" mock "github.com/stretchr/testify/mock" - models "github.com/esnet/grafana-swagger-api-golang/goclient/models" + models "github.com/grafana/grafana-openapi-client-go/models" ) // LibraryElementsApi is an autogenerated mock type for the LibraryElementsApi type @@ -14,6 +14,14 @@ type LibraryElementsApi struct { mock.Mock } +type LibraryElementsApi_Expecter struct { + mock *mock.Mock +} + +func (_m *LibraryElementsApi) EXPECT() *LibraryElementsApi_Expecter { + return &LibraryElementsApi_Expecter{mock: &_m.Mock} +} + // DeleteAllLibraryElements provides a mock function with given fields: filter func (_m *LibraryElementsApi) DeleteAllLibraryElements(filter filters.Filter) []string { ret := _m.Called(filter) @@ -30,6 +38,34 @@ func (_m *LibraryElementsApi) DeleteAllLibraryElements(filter filters.Filter) [] return r0 } +// LibraryElementsApi_DeleteAllLibraryElements_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllLibraryElements' +type LibraryElementsApi_DeleteAllLibraryElements_Call struct { + *mock.Call +} + +// DeleteAllLibraryElements is a helper method to define mock.On call +// - filter filters.Filter +func (_e *LibraryElementsApi_Expecter) DeleteAllLibraryElements(filter interface{}) *LibraryElementsApi_DeleteAllLibraryElements_Call { + return &LibraryElementsApi_DeleteAllLibraryElements_Call{Call: _e.mock.On("DeleteAllLibraryElements", filter)} +} + +func (_c *LibraryElementsApi_DeleteAllLibraryElements_Call) Run(run func(filter filters.Filter)) *LibraryElementsApi_DeleteAllLibraryElements_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *LibraryElementsApi_DeleteAllLibraryElements_Call) Return(_a0 []string) *LibraryElementsApi_DeleteAllLibraryElements_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c 
*LibraryElementsApi_DeleteAllLibraryElements_Call) RunAndReturn(run func(filters.Filter) []string) *LibraryElementsApi_DeleteAllLibraryElements_Call { + _c.Call.Return(run) + return _c +} + // DownloadLibraryElements provides a mock function with given fields: filter func (_m *LibraryElementsApi) DownloadLibraryElements(filter filters.Filter) []string { ret := _m.Called(filter) @@ -46,6 +82,34 @@ func (_m *LibraryElementsApi) DownloadLibraryElements(filter filters.Filter) []s return r0 } +// LibraryElementsApi_DownloadLibraryElements_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadLibraryElements' +type LibraryElementsApi_DownloadLibraryElements_Call struct { + *mock.Call +} + +// DownloadLibraryElements is a helper method to define mock.On call +// - filter filters.Filter +func (_e *LibraryElementsApi_Expecter) DownloadLibraryElements(filter interface{}) *LibraryElementsApi_DownloadLibraryElements_Call { + return &LibraryElementsApi_DownloadLibraryElements_Call{Call: _e.mock.On("DownloadLibraryElements", filter)} +} + +func (_c *LibraryElementsApi_DownloadLibraryElements_Call) Run(run func(filter filters.Filter)) *LibraryElementsApi_DownloadLibraryElements_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *LibraryElementsApi_DownloadLibraryElements_Call) Return(_a0 []string) *LibraryElementsApi_DownloadLibraryElements_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *LibraryElementsApi_DownloadLibraryElements_Call) RunAndReturn(run func(filters.Filter) []string) *LibraryElementsApi_DownloadLibraryElements_Call { + _c.Call.Return(run) + return _c +} + // ListLibraryElements provides a mock function with given fields: filter func (_m *LibraryElementsApi) ListLibraryElements(filter filters.Filter) []*models.LibraryElementDTO { ret := _m.Called(filter) @@ -62,6 +126,34 @@ func (_m *LibraryElementsApi) ListLibraryElements(filter filters.Filter) 
[]*mode return r0 } +// LibraryElementsApi_ListLibraryElements_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListLibraryElements' +type LibraryElementsApi_ListLibraryElements_Call struct { + *mock.Call +} + +// ListLibraryElements is a helper method to define mock.On call +// - filter filters.Filter +func (_e *LibraryElementsApi_Expecter) ListLibraryElements(filter interface{}) *LibraryElementsApi_ListLibraryElements_Call { + return &LibraryElementsApi_ListLibraryElements_Call{Call: _e.mock.On("ListLibraryElements", filter)} +} + +func (_c *LibraryElementsApi_ListLibraryElements_Call) Run(run func(filter filters.Filter)) *LibraryElementsApi_ListLibraryElements_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *LibraryElementsApi_ListLibraryElements_Call) Return(_a0 []*models.LibraryElementDTO) *LibraryElementsApi_ListLibraryElements_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *LibraryElementsApi_ListLibraryElements_Call) RunAndReturn(run func(filters.Filter) []*models.LibraryElementDTO) *LibraryElementsApi_ListLibraryElements_Call { + _c.Call.Return(run) + return _c +} + // ListLibraryElementsConnections provides a mock function with given fields: filter, connectionID func (_m *LibraryElementsApi) ListLibraryElementsConnections(filter filters.Filter, connectionID string) []*models.DashboardFullWithMeta { ret := _m.Called(filter, connectionID) @@ -78,6 +170,35 @@ func (_m *LibraryElementsApi) ListLibraryElementsConnections(filter filters.Filt return r0 } +// LibraryElementsApi_ListLibraryElementsConnections_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListLibraryElementsConnections' +type LibraryElementsApi_ListLibraryElementsConnections_Call struct { + *mock.Call +} + +// ListLibraryElementsConnections is a helper method to define mock.On call +// - filter filters.Filter +// - connectionID string 
+func (_e *LibraryElementsApi_Expecter) ListLibraryElementsConnections(filter interface{}, connectionID interface{}) *LibraryElementsApi_ListLibraryElementsConnections_Call { + return &LibraryElementsApi_ListLibraryElementsConnections_Call{Call: _e.mock.On("ListLibraryElementsConnections", filter, connectionID)} +} + +func (_c *LibraryElementsApi_ListLibraryElementsConnections_Call) Run(run func(filter filters.Filter, connectionID string)) *LibraryElementsApi_ListLibraryElementsConnections_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter), args[1].(string)) + }) + return _c +} + +func (_c *LibraryElementsApi_ListLibraryElementsConnections_Call) Return(_a0 []*models.DashboardFullWithMeta) *LibraryElementsApi_ListLibraryElementsConnections_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *LibraryElementsApi_ListLibraryElementsConnections_Call) RunAndReturn(run func(filters.Filter, string) []*models.DashboardFullWithMeta) *LibraryElementsApi_ListLibraryElementsConnections_Call { + _c.Call.Return(run) + return _c +} + // UploadLibraryElements provides a mock function with given fields: filter func (_m *LibraryElementsApi) UploadLibraryElements(filter filters.Filter) []string { ret := _m.Called(filter) @@ -94,6 +215,34 @@ func (_m *LibraryElementsApi) UploadLibraryElements(filter filters.Filter) []str return r0 } +// LibraryElementsApi_UploadLibraryElements_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadLibraryElements' +type LibraryElementsApi_UploadLibraryElements_Call struct { + *mock.Call +} + +// UploadLibraryElements is a helper method to define mock.On call +// - filter filters.Filter +func (_e *LibraryElementsApi_Expecter) UploadLibraryElements(filter interface{}) *LibraryElementsApi_UploadLibraryElements_Call { + return &LibraryElementsApi_UploadLibraryElements_Call{Call: _e.mock.On("UploadLibraryElements", filter)} +} + +func (_c 
*LibraryElementsApi_UploadLibraryElements_Call) Run(run func(filter filters.Filter)) *LibraryElementsApi_UploadLibraryElements_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *LibraryElementsApi_UploadLibraryElements_Call) Return(_a0 []string) *LibraryElementsApi_UploadLibraryElements_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *LibraryElementsApi_UploadLibraryElements_Call) RunAndReturn(run func(filters.Filter) []string) *LibraryElementsApi_UploadLibraryElements_Call { + _c.Call.Return(run) + return _c +} + // NewLibraryElementsApi creates a new instance of LibraryElementsApi. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewLibraryElementsApi(t interface { diff --git a/internal/service/mocks/OrganizationsApi.go b/internal/service/mocks/OrganizationsApi.go index c0b00ff7..aaff266a 100644 --- a/internal/service/mocks/OrganizationsApi.go +++ b/internal/service/mocks/OrganizationsApi.go @@ -1,9 +1,9 @@ -// Code generated by mockery v2.34.0. DO NOT EDIT. +// Code generated by mockery v2.36.0. DO NOT EDIT. 
package mocks import ( - models "github.com/esnet/grafana-swagger-api-golang/goclient/models" + models "github.com/grafana/grafana-openapi-client-go/models" mock "github.com/stretchr/testify/mock" ) @@ -12,6 +12,14 @@ type OrganizationsApi struct { mock.Mock } +type OrganizationsApi_Expecter struct { + mock *mock.Mock +} + +func (_m *OrganizationsApi) EXPECT() *OrganizationsApi_Expecter { + return &OrganizationsApi_Expecter{mock: &_m.Mock} +} + // AddUserToOrg provides a mock function with given fields: role, userId, orgId func (_m *OrganizationsApi) AddUserToOrg(role string, userId int64, orgId int64) error { ret := _m.Called(role, userId, orgId) @@ -26,6 +34,36 @@ func (_m *OrganizationsApi) AddUserToOrg(role string, userId int64, orgId int64) return r0 } +// OrganizationsApi_AddUserToOrg_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddUserToOrg' +type OrganizationsApi_AddUserToOrg_Call struct { + *mock.Call +} + +// AddUserToOrg is a helper method to define mock.On call +// - role string +// - userId int64 +// - orgId int64 +func (_e *OrganizationsApi_Expecter) AddUserToOrg(role interface{}, userId interface{}, orgId interface{}) *OrganizationsApi_AddUserToOrg_Call { + return &OrganizationsApi_AddUserToOrg_Call{Call: _e.mock.On("AddUserToOrg", role, userId, orgId)} +} + +func (_c *OrganizationsApi_AddUserToOrg_Call) Run(run func(role string, userId int64, orgId int64)) *OrganizationsApi_AddUserToOrg_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(int64), args[2].(int64)) + }) + return _c +} + +func (_c *OrganizationsApi_AddUserToOrg_Call) Return(_a0 error) *OrganizationsApi_AddUserToOrg_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *OrganizationsApi_AddUserToOrg_Call) RunAndReturn(run func(string, int64, int64) error) *OrganizationsApi_AddUserToOrg_Call { + _c.Call.Return(run) + return _c +} + // DeleteUserFromOrg provides a mock function with given fields: userId, orgId 
func (_m *OrganizationsApi) DeleteUserFromOrg(userId int64, orgId int64) error { ret := _m.Called(userId, orgId) @@ -40,6 +78,35 @@ func (_m *OrganizationsApi) DeleteUserFromOrg(userId int64, orgId int64) error { return r0 } +// OrganizationsApi_DeleteUserFromOrg_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteUserFromOrg' +type OrganizationsApi_DeleteUserFromOrg_Call struct { + *mock.Call +} + +// DeleteUserFromOrg is a helper method to define mock.On call +// - userId int64 +// - orgId int64 +func (_e *OrganizationsApi_Expecter) DeleteUserFromOrg(userId interface{}, orgId interface{}) *OrganizationsApi_DeleteUserFromOrg_Call { + return &OrganizationsApi_DeleteUserFromOrg_Call{Call: _e.mock.On("DeleteUserFromOrg", userId, orgId)} +} + +func (_c *OrganizationsApi_DeleteUserFromOrg_Call) Run(run func(userId int64, orgId int64)) *OrganizationsApi_DeleteUserFromOrg_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64), args[1].(int64)) + }) + return _c +} + +func (_c *OrganizationsApi_DeleteUserFromOrg_Call) Return(_a0 error) *OrganizationsApi_DeleteUserFromOrg_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *OrganizationsApi_DeleteUserFromOrg_Call) RunAndReturn(run func(int64, int64) error) *OrganizationsApi_DeleteUserFromOrg_Call { + _c.Call.Return(run) + return _c +} + // DownloadOrganizations provides a mock function with given fields: func (_m *OrganizationsApi) DownloadOrganizations() []string { ret := _m.Called() @@ -56,6 +123,33 @@ func (_m *OrganizationsApi) DownloadOrganizations() []string { return r0 } +// OrganizationsApi_DownloadOrganizations_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadOrganizations' +type OrganizationsApi_DownloadOrganizations_Call struct { + *mock.Call +} + +// DownloadOrganizations is a helper method to define mock.On call +func (_e *OrganizationsApi_Expecter) DownloadOrganizations() 
*OrganizationsApi_DownloadOrganizations_Call { + return &OrganizationsApi_DownloadOrganizations_Call{Call: _e.mock.On("DownloadOrganizations")} +} + +func (_c *OrganizationsApi_DownloadOrganizations_Call) Run(run func()) *OrganizationsApi_DownloadOrganizations_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *OrganizationsApi_DownloadOrganizations_Call) Return(_a0 []string) *OrganizationsApi_DownloadOrganizations_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *OrganizationsApi_DownloadOrganizations_Call) RunAndReturn(run func() []string) *OrganizationsApi_DownloadOrganizations_Call { + _c.Call.Return(run) + return _c +} + // GetTokenOrganization provides a mock function with given fields: func (_m *OrganizationsApi) GetTokenOrganization() *models.OrgDetailsDTO { ret := _m.Called() @@ -72,6 +166,33 @@ func (_m *OrganizationsApi) GetTokenOrganization() *models.OrgDetailsDTO { return r0 } +// OrganizationsApi_GetTokenOrganization_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetTokenOrganization' +type OrganizationsApi_GetTokenOrganization_Call struct { + *mock.Call +} + +// GetTokenOrganization is a helper method to define mock.On call +func (_e *OrganizationsApi_Expecter) GetTokenOrganization() *OrganizationsApi_GetTokenOrganization_Call { + return &OrganizationsApi_GetTokenOrganization_Call{Call: _e.mock.On("GetTokenOrganization")} +} + +func (_c *OrganizationsApi_GetTokenOrganization_Call) Run(run func()) *OrganizationsApi_GetTokenOrganization_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *OrganizationsApi_GetTokenOrganization_Call) Return(_a0 *models.OrgDetailsDTO) *OrganizationsApi_GetTokenOrganization_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *OrganizationsApi_GetTokenOrganization_Call) RunAndReturn(run func() *models.OrgDetailsDTO) *OrganizationsApi_GetTokenOrganization_Call { + _c.Call.Return(run) + return _c 
+} + // GetUserOrganization provides a mock function with given fields: func (_m *OrganizationsApi) GetUserOrganization() *models.OrgDetailsDTO { ret := _m.Called() @@ -88,11 +209,65 @@ func (_m *OrganizationsApi) GetUserOrganization() *models.OrgDetailsDTO { return r0 } +// OrganizationsApi_GetUserOrganization_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetUserOrganization' +type OrganizationsApi_GetUserOrganization_Call struct { + *mock.Call +} + +// GetUserOrganization is a helper method to define mock.On call +func (_e *OrganizationsApi_Expecter) GetUserOrganization() *OrganizationsApi_GetUserOrganization_Call { + return &OrganizationsApi_GetUserOrganization_Call{Call: _e.mock.On("GetUserOrganization")} +} + +func (_c *OrganizationsApi_GetUserOrganization_Call) Run(run func()) *OrganizationsApi_GetUserOrganization_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *OrganizationsApi_GetUserOrganization_Call) Return(_a0 *models.OrgDetailsDTO) *OrganizationsApi_GetUserOrganization_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *OrganizationsApi_GetUserOrganization_Call) RunAndReturn(run func() *models.OrgDetailsDTO) *OrganizationsApi_GetUserOrganization_Call { + _c.Call.Return(run) + return _c +} + // InitOrganizations provides a mock function with given fields: func (_m *OrganizationsApi) InitOrganizations() { _m.Called() } +// OrganizationsApi_InitOrganizations_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'InitOrganizations' +type OrganizationsApi_InitOrganizations_Call struct { + *mock.Call +} + +// InitOrganizations is a helper method to define mock.On call +func (_e *OrganizationsApi_Expecter) InitOrganizations() *OrganizationsApi_InitOrganizations_Call { + return &OrganizationsApi_InitOrganizations_Call{Call: _e.mock.On("InitOrganizations")} +} + +func (_c *OrganizationsApi_InitOrganizations_Call) Run(run func()) 
*OrganizationsApi_InitOrganizations_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *OrganizationsApi_InitOrganizations_Call) Return() *OrganizationsApi_InitOrganizations_Call { + _c.Call.Return() + return _c +} + +func (_c *OrganizationsApi_InitOrganizations_Call) RunAndReturn(run func()) *OrganizationsApi_InitOrganizations_Call { + _c.Call.Return(run) + return _c +} + // ListOrgUsers provides a mock function with given fields: orgId func (_m *OrganizationsApi) ListOrgUsers(orgId int64) []*models.OrgUserDTO { ret := _m.Called(orgId) @@ -109,6 +284,34 @@ func (_m *OrganizationsApi) ListOrgUsers(orgId int64) []*models.OrgUserDTO { return r0 } +// OrganizationsApi_ListOrgUsers_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListOrgUsers' +type OrganizationsApi_ListOrgUsers_Call struct { + *mock.Call +} + +// ListOrgUsers is a helper method to define mock.On call +// - orgId int64 +func (_e *OrganizationsApi_Expecter) ListOrgUsers(orgId interface{}) *OrganizationsApi_ListOrgUsers_Call { + return &OrganizationsApi_ListOrgUsers_Call{Call: _e.mock.On("ListOrgUsers", orgId)} +} + +func (_c *OrganizationsApi_ListOrgUsers_Call) Run(run func(orgId int64)) *OrganizationsApi_ListOrgUsers_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64)) + }) + return _c +} + +func (_c *OrganizationsApi_ListOrgUsers_Call) Return(_a0 []*models.OrgUserDTO) *OrganizationsApi_ListOrgUsers_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *OrganizationsApi_ListOrgUsers_Call) RunAndReturn(run func(int64) []*models.OrgUserDTO) *OrganizationsApi_ListOrgUsers_Call { + _c.Call.Return(run) + return _c +} + // ListOrganizations provides a mock function with given fields: func (_m *OrganizationsApi) ListOrganizations() []*models.OrgDTO { ret := _m.Called() @@ -125,6 +328,33 @@ func (_m *OrganizationsApi) ListOrganizations() []*models.OrgDTO { return r0 } +// 
OrganizationsApi_ListOrganizations_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListOrganizations' +type OrganizationsApi_ListOrganizations_Call struct { + *mock.Call +} + +// ListOrganizations is a helper method to define mock.On call +func (_e *OrganizationsApi_Expecter) ListOrganizations() *OrganizationsApi_ListOrganizations_Call { + return &OrganizationsApi_ListOrganizations_Call{Call: _e.mock.On("ListOrganizations")} +} + +func (_c *OrganizationsApi_ListOrganizations_Call) Run(run func()) *OrganizationsApi_ListOrganizations_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *OrganizationsApi_ListOrganizations_Call) Return(_a0 []*models.OrgDTO) *OrganizationsApi_ListOrganizations_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *OrganizationsApi_ListOrganizations_Call) RunAndReturn(run func() []*models.OrgDTO) *OrganizationsApi_ListOrganizations_Call { + _c.Call.Return(run) + return _c +} + // SetOrganization provides a mock function with given fields: id func (_m *OrganizationsApi) SetOrganization(id int64) error { ret := _m.Called(id) @@ -139,6 +369,34 @@ func (_m *OrganizationsApi) SetOrganization(id int64) error { return r0 } +// OrganizationsApi_SetOrganization_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetOrganization' +type OrganizationsApi_SetOrganization_Call struct { + *mock.Call +} + +// SetOrganization is a helper method to define mock.On call +// - id int64 +func (_e *OrganizationsApi_Expecter) SetOrganization(id interface{}) *OrganizationsApi_SetOrganization_Call { + return &OrganizationsApi_SetOrganization_Call{Call: _e.mock.On("SetOrganization", id)} +} + +func (_c *OrganizationsApi_SetOrganization_Call) Run(run func(id int64)) *OrganizationsApi_SetOrganization_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64)) + }) + return _c +} + +func (_c *OrganizationsApi_SetOrganization_Call) 
Return(_a0 error) *OrganizationsApi_SetOrganization_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *OrganizationsApi_SetOrganization_Call) RunAndReturn(run func(int64) error) *OrganizationsApi_SetOrganization_Call { + _c.Call.Return(run) + return _c +} + // SetUserOrganizations provides a mock function with given fields: id func (_m *OrganizationsApi) SetUserOrganizations(id int64) error { ret := _m.Called(id) @@ -153,6 +411,34 @@ func (_m *OrganizationsApi) SetUserOrganizations(id int64) error { return r0 } +// OrganizationsApi_SetUserOrganizations_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetUserOrganizations' +type OrganizationsApi_SetUserOrganizations_Call struct { + *mock.Call +} + +// SetUserOrganizations is a helper method to define mock.On call +// - id int64 +func (_e *OrganizationsApi_Expecter) SetUserOrganizations(id interface{}) *OrganizationsApi_SetUserOrganizations_Call { + return &OrganizationsApi_SetUserOrganizations_Call{Call: _e.mock.On("SetUserOrganizations", id)} +} + +func (_c *OrganizationsApi_SetUserOrganizations_Call) Run(run func(id int64)) *OrganizationsApi_SetUserOrganizations_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64)) + }) + return _c +} + +func (_c *OrganizationsApi_SetUserOrganizations_Call) Return(_a0 error) *OrganizationsApi_SetUserOrganizations_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *OrganizationsApi_SetUserOrganizations_Call) RunAndReturn(run func(int64) error) *OrganizationsApi_SetUserOrganizations_Call { + _c.Call.Return(run) + return _c +} + // UpdateUserInOrg provides a mock function with given fields: role, userId, orgId func (_m *OrganizationsApi) UpdateUserInOrg(role string, userId int64, orgId int64) error { ret := _m.Called(role, userId, orgId) @@ -167,6 +453,36 @@ func (_m *OrganizationsApi) UpdateUserInOrg(role string, userId int64, orgId int return r0 } +// OrganizationsApi_UpdateUserInOrg_Call is a *mock.Call that 
shadows Run/Return methods with type explicit version for method 'UpdateUserInOrg' +type OrganizationsApi_UpdateUserInOrg_Call struct { + *mock.Call +} + +// UpdateUserInOrg is a helper method to define mock.On call +// - role string +// - userId int64 +// - orgId int64 +func (_e *OrganizationsApi_Expecter) UpdateUserInOrg(role interface{}, userId interface{}, orgId interface{}) *OrganizationsApi_UpdateUserInOrg_Call { + return &OrganizationsApi_UpdateUserInOrg_Call{Call: _e.mock.On("UpdateUserInOrg", role, userId, orgId)} +} + +func (_c *OrganizationsApi_UpdateUserInOrg_Call) Run(run func(role string, userId int64, orgId int64)) *OrganizationsApi_UpdateUserInOrg_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(int64), args[2].(int64)) + }) + return _c +} + +func (_c *OrganizationsApi_UpdateUserInOrg_Call) Return(_a0 error) *OrganizationsApi_UpdateUserInOrg_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *OrganizationsApi_UpdateUserInOrg_Call) RunAndReturn(run func(string, int64, int64) error) *OrganizationsApi_UpdateUserInOrg_Call { + _c.Call.Return(run) + return _c +} + // UploadOrganizations provides a mock function with given fields: func (_m *OrganizationsApi) UploadOrganizations() []string { ret := _m.Called() @@ -183,6 +499,33 @@ func (_m *OrganizationsApi) UploadOrganizations() []string { return r0 } +// OrganizationsApi_UploadOrganizations_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadOrganizations' +type OrganizationsApi_UploadOrganizations_Call struct { + *mock.Call +} + +// UploadOrganizations is a helper method to define mock.On call +func (_e *OrganizationsApi_Expecter) UploadOrganizations() *OrganizationsApi_UploadOrganizations_Call { + return &OrganizationsApi_UploadOrganizations_Call{Call: _e.mock.On("UploadOrganizations")} +} + +func (_c *OrganizationsApi_UploadOrganizations_Call) Run(run func()) *OrganizationsApi_UploadOrganizations_Call { + 
_c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *OrganizationsApi_UploadOrganizations_Call) Return(_a0 []string) *OrganizationsApi_UploadOrganizations_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *OrganizationsApi_UploadOrganizations_Call) RunAndReturn(run func() []string) *OrganizationsApi_UploadOrganizations_Call { + _c.Call.Return(run) + return _c +} + // NewOrganizationsApi creates a new instance of OrganizationsApi. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewOrganizationsApi(t interface { diff --git a/internal/service/mocks/ServiceAccountApi.go b/internal/service/mocks/ServiceAccountApi.go index f6c6b631..35184051 100644 --- a/internal/service/mocks/ServiceAccountApi.go +++ b/internal/service/mocks/ServiceAccountApi.go @@ -1,12 +1,12 @@ -// Code generated by mockery v2.34.0. DO NOT EDIT. +// Code generated by mockery v2.36.0. DO NOT EDIT. 
package mocks import ( - api "github.com/esnet/gdg/internal/api" + models "github.com/grafana/grafana-openapi-client-go/models" mock "github.com/stretchr/testify/mock" - models "github.com/esnet/grafana-swagger-api-golang/goclient/models" + types "github.com/esnet/gdg/internal/types" ) // ServiceAccountApi is an autogenerated mock type for the ServiceAccountApi type @@ -14,6 +14,14 @@ type ServiceAccountApi struct { mock.Mock } +type ServiceAccountApi_Expecter struct { + mock *mock.Mock +} + +func (_m *ServiceAccountApi) EXPECT() *ServiceAccountApi_Expecter { + return &ServiceAccountApi_Expecter{mock: &_m.Mock} +} + // CreateServiceAccount provides a mock function with given fields: name, role, expiration func (_m *ServiceAccountApi) CreateServiceAccount(name string, role string, expiration int64) (*models.ServiceAccountDTO, error) { ret := _m.Called(name, role, expiration) @@ -40,6 +48,36 @@ func (_m *ServiceAccountApi) CreateServiceAccount(name string, role string, expi return r0, r1 } +// ServiceAccountApi_CreateServiceAccount_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateServiceAccount' +type ServiceAccountApi_CreateServiceAccount_Call struct { + *mock.Call +} + +// CreateServiceAccount is a helper method to define mock.On call +// - name string +// - role string +// - expiration int64 +func (_e *ServiceAccountApi_Expecter) CreateServiceAccount(name interface{}, role interface{}, expiration interface{}) *ServiceAccountApi_CreateServiceAccount_Call { + return &ServiceAccountApi_CreateServiceAccount_Call{Call: _e.mock.On("CreateServiceAccount", name, role, expiration)} +} + +func (_c *ServiceAccountApi_CreateServiceAccount_Call) Run(run func(name string, role string, expiration int64)) *ServiceAccountApi_CreateServiceAccount_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(string), args[2].(int64)) + }) + return _c +} + +func (_c *ServiceAccountApi_CreateServiceAccount_Call) 
Return(_a0 *models.ServiceAccountDTO, _a1 error) *ServiceAccountApi_CreateServiceAccount_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *ServiceAccountApi_CreateServiceAccount_Call) RunAndReturn(run func(string, string, int64) (*models.ServiceAccountDTO, error)) *ServiceAccountApi_CreateServiceAccount_Call { + _c.Call.Return(run) + return _c +} + // CreateServiceAccountToken provides a mock function with given fields: name, role, expiration func (_m *ServiceAccountApi) CreateServiceAccountToken(name int64, role string, expiration int64) (*models.NewAPIKeyResult, error) { ret := _m.Called(name, role, expiration) @@ -66,6 +104,36 @@ func (_m *ServiceAccountApi) CreateServiceAccountToken(name int64, role string, return r0, r1 } +// ServiceAccountApi_CreateServiceAccountToken_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateServiceAccountToken' +type ServiceAccountApi_CreateServiceAccountToken_Call struct { + *mock.Call +} + +// CreateServiceAccountToken is a helper method to define mock.On call +// - name int64 +// - role string +// - expiration int64 +func (_e *ServiceAccountApi_Expecter) CreateServiceAccountToken(name interface{}, role interface{}, expiration interface{}) *ServiceAccountApi_CreateServiceAccountToken_Call { + return &ServiceAccountApi_CreateServiceAccountToken_Call{Call: _e.mock.On("CreateServiceAccountToken", name, role, expiration)} +} + +func (_c *ServiceAccountApi_CreateServiceAccountToken_Call) Run(run func(name int64, role string, expiration int64)) *ServiceAccountApi_CreateServiceAccountToken_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64), args[1].(string), args[2].(int64)) + }) + return _c +} + +func (_c *ServiceAccountApi_CreateServiceAccountToken_Call) Return(_a0 *models.NewAPIKeyResult, _a1 error) *ServiceAccountApi_CreateServiceAccountToken_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *ServiceAccountApi_CreateServiceAccountToken_Call) 
RunAndReturn(run func(int64, string, int64) (*models.NewAPIKeyResult, error)) *ServiceAccountApi_CreateServiceAccountToken_Call { + _c.Call.Return(run) + return _c +} + // DeleteAllServiceAccounts provides a mock function with given fields: func (_m *ServiceAccountApi) DeleteAllServiceAccounts() []string { ret := _m.Called() @@ -82,6 +150,33 @@ func (_m *ServiceAccountApi) DeleteAllServiceAccounts() []string { return r0 } +// ServiceAccountApi_DeleteAllServiceAccounts_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllServiceAccounts' +type ServiceAccountApi_DeleteAllServiceAccounts_Call struct { + *mock.Call +} + +// DeleteAllServiceAccounts is a helper method to define mock.On call +func (_e *ServiceAccountApi_Expecter) DeleteAllServiceAccounts() *ServiceAccountApi_DeleteAllServiceAccounts_Call { + return &ServiceAccountApi_DeleteAllServiceAccounts_Call{Call: _e.mock.On("DeleteAllServiceAccounts")} +} + +func (_c *ServiceAccountApi_DeleteAllServiceAccounts_Call) Run(run func()) *ServiceAccountApi_DeleteAllServiceAccounts_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *ServiceAccountApi_DeleteAllServiceAccounts_Call) Return(_a0 []string) *ServiceAccountApi_DeleteAllServiceAccounts_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ServiceAccountApi_DeleteAllServiceAccounts_Call) RunAndReturn(run func() []string) *ServiceAccountApi_DeleteAllServiceAccounts_Call { + _c.Call.Return(run) + return _c +} + // DeleteServiceAccountTokens provides a mock function with given fields: serviceId func (_m *ServiceAccountApi) DeleteServiceAccountTokens(serviceId int64) []string { ret := _m.Called(serviceId) @@ -98,22 +193,77 @@ func (_m *ServiceAccountApi) DeleteServiceAccountTokens(serviceId int64) []strin return r0 } +// ServiceAccountApi_DeleteServiceAccountTokens_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 
'DeleteServiceAccountTokens' +type ServiceAccountApi_DeleteServiceAccountTokens_Call struct { + *mock.Call +} + +// DeleteServiceAccountTokens is a helper method to define mock.On call +// - serviceId int64 +func (_e *ServiceAccountApi_Expecter) DeleteServiceAccountTokens(serviceId interface{}) *ServiceAccountApi_DeleteServiceAccountTokens_Call { + return &ServiceAccountApi_DeleteServiceAccountTokens_Call{Call: _e.mock.On("DeleteServiceAccountTokens", serviceId)} +} + +func (_c *ServiceAccountApi_DeleteServiceAccountTokens_Call) Run(run func(serviceId int64)) *ServiceAccountApi_DeleteServiceAccountTokens_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64)) + }) + return _c +} + +func (_c *ServiceAccountApi_DeleteServiceAccountTokens_Call) Return(_a0 []string) *ServiceAccountApi_DeleteServiceAccountTokens_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ServiceAccountApi_DeleteServiceAccountTokens_Call) RunAndReturn(run func(int64) []string) *ServiceAccountApi_DeleteServiceAccountTokens_Call { + _c.Call.Return(run) + return _c +} + // ListServiceAccounts provides a mock function with given fields: -func (_m *ServiceAccountApi) ListServiceAccounts() []*api.ServiceAccountDTOWithTokens { +func (_m *ServiceAccountApi) ListServiceAccounts() []*types.ServiceAccountDTOWithTokens { ret := _m.Called() - var r0 []*api.ServiceAccountDTOWithTokens - if rf, ok := ret.Get(0).(func() []*api.ServiceAccountDTOWithTokens); ok { + var r0 []*types.ServiceAccountDTOWithTokens + if rf, ok := ret.Get(0).(func() []*types.ServiceAccountDTOWithTokens); ok { r0 = rf() } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]*api.ServiceAccountDTOWithTokens) + r0 = ret.Get(0).([]*types.ServiceAccountDTOWithTokens) } } return r0 } +// ServiceAccountApi_ListServiceAccounts_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListServiceAccounts' +type ServiceAccountApi_ListServiceAccounts_Call struct { + *mock.Call +} + +// 
ListServiceAccounts is a helper method to define mock.On call +func (_e *ServiceAccountApi_Expecter) ListServiceAccounts() *ServiceAccountApi_ListServiceAccounts_Call { + return &ServiceAccountApi_ListServiceAccounts_Call{Call: _e.mock.On("ListServiceAccounts")} +} + +func (_c *ServiceAccountApi_ListServiceAccounts_Call) Run(run func()) *ServiceAccountApi_ListServiceAccounts_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *ServiceAccountApi_ListServiceAccounts_Call) Return(_a0 []*types.ServiceAccountDTOWithTokens) *ServiceAccountApi_ListServiceAccounts_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *ServiceAccountApi_ListServiceAccounts_Call) RunAndReturn(run func() []*types.ServiceAccountDTOWithTokens) *ServiceAccountApi_ListServiceAccounts_Call { + _c.Call.Return(run) + return _c +} + // ListServiceAccountsTokens provides a mock function with given fields: id func (_m *ServiceAccountApi) ListServiceAccountsTokens(id int64) ([]*models.TokenDTO, error) { ret := _m.Called(id) @@ -140,6 +290,34 @@ func (_m *ServiceAccountApi) ListServiceAccountsTokens(id int64) ([]*models.Toke return r0, r1 } +// ServiceAccountApi_ListServiceAccountsTokens_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListServiceAccountsTokens' +type ServiceAccountApi_ListServiceAccountsTokens_Call struct { + *mock.Call +} + +// ListServiceAccountsTokens is a helper method to define mock.On call +// - id int64 +func (_e *ServiceAccountApi_Expecter) ListServiceAccountsTokens(id interface{}) *ServiceAccountApi_ListServiceAccountsTokens_Call { + return &ServiceAccountApi_ListServiceAccountsTokens_Call{Call: _e.mock.On("ListServiceAccountsTokens", id)} +} + +func (_c *ServiceAccountApi_ListServiceAccountsTokens_Call) Run(run func(id int64)) *ServiceAccountApi_ListServiceAccountsTokens_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64)) + }) + return _c +} + +func (_c 
*ServiceAccountApi_ListServiceAccountsTokens_Call) Return(_a0 []*models.TokenDTO, _a1 error) *ServiceAccountApi_ListServiceAccountsTokens_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *ServiceAccountApi_ListServiceAccountsTokens_Call) RunAndReturn(run func(int64) ([]*models.TokenDTO, error)) *ServiceAccountApi_ListServiceAccountsTokens_Call { + _c.Call.Return(run) + return _c +} + // NewServiceAccountApi creates a new instance of ServiceAccountApi. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewServiceAccountApi(t interface { diff --git a/internal/service/mocks/Storage.go b/internal/service/mocks/Storage.go index 40f0a5ab..18fac206 100644 --- a/internal/service/mocks/Storage.go +++ b/internal/service/mocks/Storage.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.34.0. DO NOT EDIT. +// Code generated by mockery v2.36.0. DO NOT EDIT. package mocks @@ -9,6 +9,14 @@ type Storage struct { mock.Mock } +type Storage_Expecter struct { + mock *mock.Mock +} + +func (_m *Storage) EXPECT() *Storage_Expecter { + return &Storage_Expecter{mock: &_m.Mock} +} + // FindAllFiles provides a mock function with given fields: folder, fullPath func (_m *Storage) FindAllFiles(folder string, fullPath bool) ([]string, error) { ret := _m.Called(folder, fullPath) @@ -35,6 +43,35 @@ func (_m *Storage) FindAllFiles(folder string, fullPath bool) ([]string, error) return r0, r1 } +// Storage_FindAllFiles_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'FindAllFiles' +type Storage_FindAllFiles_Call struct { + *mock.Call +} + +// FindAllFiles is a helper method to define mock.On call +// - folder string +// - fullPath bool +func (_e *Storage_Expecter) FindAllFiles(folder interface{}, fullPath interface{}) *Storage_FindAllFiles_Call { + return &Storage_FindAllFiles_Call{Call: _e.mock.On("FindAllFiles", folder, 
fullPath)} +} + +func (_c *Storage_FindAllFiles_Call) Run(run func(folder string, fullPath bool)) *Storage_FindAllFiles_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(bool)) + }) + return _c +} + +func (_c *Storage_FindAllFiles_Call) Return(_a0 []string, _a1 error) *Storage_FindAllFiles_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Storage_FindAllFiles_Call) RunAndReturn(run func(string, bool) ([]string, error)) *Storage_FindAllFiles_Call { + _c.Call.Return(run) + return _c +} + // Name provides a mock function with given fields: func (_m *Storage) Name() string { ret := _m.Called() @@ -49,6 +86,33 @@ func (_m *Storage) Name() string { return r0 } +// Storage_Name_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Name' +type Storage_Name_Call struct { + *mock.Call +} + +// Name is a helper method to define mock.On call +func (_e *Storage_Expecter) Name() *Storage_Name_Call { + return &Storage_Name_Call{Call: _e.mock.On("Name")} +} + +func (_c *Storage_Name_Call) Run(run func()) *Storage_Name_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *Storage_Name_Call) Return(_a0 string) *Storage_Name_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Storage_Name_Call) RunAndReturn(run func() string) *Storage_Name_Call { + _c.Call.Return(run) + return _c +} + // ReadFile provides a mock function with given fields: filename func (_m *Storage) ReadFile(filename string) ([]byte, error) { ret := _m.Called(filename) @@ -75,6 +139,34 @@ func (_m *Storage) ReadFile(filename string) ([]byte, error) { return r0, r1 } +// Storage_ReadFile_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ReadFile' +type Storage_ReadFile_Call struct { + *mock.Call +} + +// ReadFile is a helper method to define mock.On call +// - filename string +func (_e *Storage_Expecter) ReadFile(filename interface{}) 
*Storage_ReadFile_Call { + return &Storage_ReadFile_Call{Call: _e.mock.On("ReadFile", filename)} +} + +func (_c *Storage_ReadFile_Call) Run(run func(filename string)) *Storage_ReadFile_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string)) + }) + return _c +} + +func (_c *Storage_ReadFile_Call) Return(_a0 []byte, _a1 error) *Storage_ReadFile_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Storage_ReadFile_Call) RunAndReturn(run func(string) ([]byte, error)) *Storage_ReadFile_Call { + _c.Call.Return(run) + return _c +} + // WriteFile provides a mock function with given fields: filename, data func (_m *Storage) WriteFile(filename string, data []byte) error { ret := _m.Called(filename, data) @@ -89,6 +181,35 @@ func (_m *Storage) WriteFile(filename string, data []byte) error { return r0 } +// Storage_WriteFile_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'WriteFile' +type Storage_WriteFile_Call struct { + *mock.Call +} + +// WriteFile is a helper method to define mock.On call +// - filename string +// - data []byte +func (_e *Storage_Expecter) WriteFile(filename interface{}, data interface{}) *Storage_WriteFile_Call { + return &Storage_WriteFile_Call{Call: _e.mock.On("WriteFile", filename, data)} +} + +func (_c *Storage_WriteFile_Call) Run(run func(filename string, data []byte)) *Storage_WriteFile_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].([]byte)) + }) + return _c +} + +func (_c *Storage_WriteFile_Call) Return(_a0 error) *Storage_WriteFile_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Storage_WriteFile_Call) RunAndReturn(run func(string, []byte) error) *Storage_WriteFile_Call { + _c.Call.Return(run) + return _c +} + // NewStorage creates a new instance of Storage. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
func NewStorage(t interface { diff --git a/internal/service/mocks/TeamsApi.go b/internal/service/mocks/TeamsApi.go index 59cc906b..c279ce0f 100644 --- a/internal/service/mocks/TeamsApi.go +++ b/internal/service/mocks/TeamsApi.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.34.0. DO NOT EDIT. +// Code generated by mockery v2.36.0. DO NOT EDIT. package mocks @@ -6,7 +6,7 @@ import ( filters "github.com/esnet/gdg/internal/service/filters" mock "github.com/stretchr/testify/mock" - models "github.com/esnet/grafana-swagger-api-golang/goclient/models" + models "github.com/grafana/grafana-openapi-client-go/models" ) // TeamsApi is an autogenerated mock type for the TeamsApi type @@ -14,6 +14,14 @@ type TeamsApi struct { mock.Mock } +type TeamsApi_Expecter struct { + mock *mock.Mock +} + +func (_m *TeamsApi) EXPECT() *TeamsApi_Expecter { + return &TeamsApi_Expecter{mock: &_m.Mock} +} + // DeleteTeam provides a mock function with given fields: filter func (_m *TeamsApi) DeleteTeam(filter filters.Filter) ([]*models.TeamDTO, error) { ret := _m.Called(filter) @@ -40,6 +48,34 @@ func (_m *TeamsApi) DeleteTeam(filter filters.Filter) ([]*models.TeamDTO, error) return r0, r1 } +// TeamsApi_DeleteTeam_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteTeam' +type TeamsApi_DeleteTeam_Call struct { + *mock.Call +} + +// DeleteTeam is a helper method to define mock.On call +// - filter filters.Filter +func (_e *TeamsApi_Expecter) DeleteTeam(filter interface{}) *TeamsApi_DeleteTeam_Call { + return &TeamsApi_DeleteTeam_Call{Call: _e.mock.On("DeleteTeam", filter)} +} + +func (_c *TeamsApi_DeleteTeam_Call) Run(run func(filter filters.Filter)) *TeamsApi_DeleteTeam_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *TeamsApi_DeleteTeam_Call) Return(_a0 []*models.TeamDTO, _a1 error) *TeamsApi_DeleteTeam_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c 
*TeamsApi_DeleteTeam_Call) RunAndReturn(run func(filters.Filter) ([]*models.TeamDTO, error)) *TeamsApi_DeleteTeam_Call { + _c.Call.Return(run) + return _c +} + // DownloadTeams provides a mock function with given fields: filter func (_m *TeamsApi) DownloadTeams(filter filters.Filter) map[*models.TeamDTO][]*models.TeamMemberDTO { ret := _m.Called(filter) @@ -56,6 +92,34 @@ func (_m *TeamsApi) DownloadTeams(filter filters.Filter) map[*models.TeamDTO][]* return r0 } +// TeamsApi_DownloadTeams_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadTeams' +type TeamsApi_DownloadTeams_Call struct { + *mock.Call +} + +// DownloadTeams is a helper method to define mock.On call +// - filter filters.Filter +func (_e *TeamsApi_Expecter) DownloadTeams(filter interface{}) *TeamsApi_DownloadTeams_Call { + return &TeamsApi_DownloadTeams_Call{Call: _e.mock.On("DownloadTeams", filter)} +} + +func (_c *TeamsApi_DownloadTeams_Call) Run(run func(filter filters.Filter)) *TeamsApi_DownloadTeams_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *TeamsApi_DownloadTeams_Call) Return(_a0 map[*models.TeamDTO][]*models.TeamMemberDTO) *TeamsApi_DownloadTeams_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *TeamsApi_DownloadTeams_Call) RunAndReturn(run func(filters.Filter) map[*models.TeamDTO][]*models.TeamMemberDTO) *TeamsApi_DownloadTeams_Call { + _c.Call.Return(run) + return _c +} + // ListTeams provides a mock function with given fields: filter func (_m *TeamsApi) ListTeams(filter filters.Filter) map[*models.TeamDTO][]*models.TeamMemberDTO { ret := _m.Called(filter) @@ -72,6 +136,34 @@ func (_m *TeamsApi) ListTeams(filter filters.Filter) map[*models.TeamDTO][]*mode return r0 } +// TeamsApi_ListTeams_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListTeams' +type TeamsApi_ListTeams_Call struct { + *mock.Call +} + +// ListTeams is a 
helper method to define mock.On call +// - filter filters.Filter +func (_e *TeamsApi_Expecter) ListTeams(filter interface{}) *TeamsApi_ListTeams_Call { + return &TeamsApi_ListTeams_Call{Call: _e.mock.On("ListTeams", filter)} +} + +func (_c *TeamsApi_ListTeams_Call) Run(run func(filter filters.Filter)) *TeamsApi_ListTeams_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *TeamsApi_ListTeams_Call) Return(_a0 map[*models.TeamDTO][]*models.TeamMemberDTO) *TeamsApi_ListTeams_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *TeamsApi_ListTeams_Call) RunAndReturn(run func(filters.Filter) map[*models.TeamDTO][]*models.TeamMemberDTO) *TeamsApi_ListTeams_Call { + _c.Call.Return(run) + return _c +} + // UploadTeams provides a mock function with given fields: filter func (_m *TeamsApi) UploadTeams(filter filters.Filter) map[*models.TeamDTO][]*models.TeamMemberDTO { ret := _m.Called(filter) @@ -88,6 +180,34 @@ func (_m *TeamsApi) UploadTeams(filter filters.Filter) map[*models.TeamDTO][]*mo return r0 } +// TeamsApi_UploadTeams_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadTeams' +type TeamsApi_UploadTeams_Call struct { + *mock.Call +} + +// UploadTeams is a helper method to define mock.On call +// - filter filters.Filter +func (_e *TeamsApi_Expecter) UploadTeams(filter interface{}) *TeamsApi_UploadTeams_Call { + return &TeamsApi_UploadTeams_Call{Call: _e.mock.On("UploadTeams", filter)} +} + +func (_c *TeamsApi_UploadTeams_Call) Run(run func(filter filters.Filter)) *TeamsApi_UploadTeams_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *TeamsApi_UploadTeams_Call) Return(_a0 map[*models.TeamDTO][]*models.TeamMemberDTO) *TeamsApi_UploadTeams_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *TeamsApi_UploadTeams_Call) RunAndReturn(run func(filters.Filter) 
map[*models.TeamDTO][]*models.TeamMemberDTO) *TeamsApi_UploadTeams_Call { + _c.Call.Return(run) + return _c +} + // NewTeamsApi creates a new instance of TeamsApi. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewTeamsApi(t interface { diff --git a/internal/service/mocks/TokenApi.go b/internal/service/mocks/TokenApi.go index 22d58574..852dfc88 100644 --- a/internal/service/mocks/TokenApi.go +++ b/internal/service/mocks/TokenApi.go @@ -1,9 +1,9 @@ -// Code generated by mockery v2.34.0. DO NOT EDIT. +// Code generated by mockery v2.36.0. DO NOT EDIT. package mocks import ( - models "github.com/esnet/grafana-swagger-api-golang/goclient/models" + models "github.com/grafana/grafana-openapi-client-go/models" mock "github.com/stretchr/testify/mock" ) @@ -12,6 +12,14 @@ type TokenApi struct { mock.Mock } +type TokenApi_Expecter struct { + mock *mock.Mock +} + +func (_m *TokenApi) EXPECT() *TokenApi_Expecter { + return &TokenApi_Expecter{mock: &_m.Mock} +} + // CreateAPIKey provides a mock function with given fields: name, role, expiration func (_m *TokenApi) CreateAPIKey(name string, role string, expiration int64) (*models.NewAPIKeyResult, error) { ret := _m.Called(name, role, expiration) @@ -38,6 +46,36 @@ func (_m *TokenApi) CreateAPIKey(name string, role string, expiration int64) (*m return r0, r1 } +// TokenApi_CreateAPIKey_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateAPIKey' +type TokenApi_CreateAPIKey_Call struct { + *mock.Call +} + +// CreateAPIKey is a helper method to define mock.On call +// - name string +// - role string +// - expiration int64 +func (_e *TokenApi_Expecter) CreateAPIKey(name interface{}, role interface{}, expiration interface{}) *TokenApi_CreateAPIKey_Call { + return &TokenApi_CreateAPIKey_Call{Call: _e.mock.On("CreateAPIKey", name, role, expiration)} +} + +func (_c 
*TokenApi_CreateAPIKey_Call) Run(run func(name string, role string, expiration int64)) *TokenApi_CreateAPIKey_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(string), args[2].(int64)) + }) + return _c +} + +func (_c *TokenApi_CreateAPIKey_Call) Return(_a0 *models.NewAPIKeyResult, _a1 error) *TokenApi_CreateAPIKey_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *TokenApi_CreateAPIKey_Call) RunAndReturn(run func(string, string, int64) (*models.NewAPIKeyResult, error)) *TokenApi_CreateAPIKey_Call { + _c.Call.Return(run) + return _c +} + // DeleteAllTokens provides a mock function with given fields: func (_m *TokenApi) DeleteAllTokens() []string { ret := _m.Called() @@ -54,6 +92,33 @@ func (_m *TokenApi) DeleteAllTokens() []string { return r0 } +// TokenApi_DeleteAllTokens_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllTokens' +type TokenApi_DeleteAllTokens_Call struct { + *mock.Call +} + +// DeleteAllTokens is a helper method to define mock.On call +func (_e *TokenApi_Expecter) DeleteAllTokens() *TokenApi_DeleteAllTokens_Call { + return &TokenApi_DeleteAllTokens_Call{Call: _e.mock.On("DeleteAllTokens")} +} + +func (_c *TokenApi_DeleteAllTokens_Call) Run(run func()) *TokenApi_DeleteAllTokens_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *TokenApi_DeleteAllTokens_Call) Return(_a0 []string) *TokenApi_DeleteAllTokens_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *TokenApi_DeleteAllTokens_Call) RunAndReturn(run func() []string) *TokenApi_DeleteAllTokens_Call { + _c.Call.Return(run) + return _c +} + // ListAPIKeys provides a mock function with given fields: func (_m *TokenApi) ListAPIKeys() []*models.APIKeyDTO { ret := _m.Called() @@ -70,6 +135,33 @@ func (_m *TokenApi) ListAPIKeys() []*models.APIKeyDTO { return r0 } +// TokenApi_ListAPIKeys_Call is a *mock.Call that shadows Run/Return methods with type explicit version 
for method 'ListAPIKeys' +type TokenApi_ListAPIKeys_Call struct { + *mock.Call +} + +// ListAPIKeys is a helper method to define mock.On call +func (_e *TokenApi_Expecter) ListAPIKeys() *TokenApi_ListAPIKeys_Call { + return &TokenApi_ListAPIKeys_Call{Call: _e.mock.On("ListAPIKeys")} +} + +func (_c *TokenApi_ListAPIKeys_Call) Run(run func()) *TokenApi_ListAPIKeys_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *TokenApi_ListAPIKeys_Call) Return(_a0 []*models.APIKeyDTO) *TokenApi_ListAPIKeys_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *TokenApi_ListAPIKeys_Call) RunAndReturn(run func() []*models.APIKeyDTO) *TokenApi_ListAPIKeys_Call { + _c.Call.Return(run) + return _c +} + // NewTokenApi creates a new instance of TokenApi. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewTokenApi(t interface { diff --git a/internal/service/mocks/UsersApi.go b/internal/service/mocks/UsersApi.go index 7f3d0103..9d90968a 100644 --- a/internal/service/mocks/UsersApi.go +++ b/internal/service/mocks/UsersApi.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.34.0. DO NOT EDIT. +// Code generated by mockery v2.36.0. DO NOT EDIT. 
package mocks @@ -6,7 +6,7 @@ import ( filters "github.com/esnet/gdg/internal/service/filters" mock "github.com/stretchr/testify/mock" - models "github.com/esnet/grafana-swagger-api-golang/goclient/models" + models "github.com/grafana/grafana-openapi-client-go/models" ) // UsersApi is an autogenerated mock type for the UsersApi type @@ -14,6 +14,14 @@ type UsersApi struct { mock.Mock } +type UsersApi_Expecter struct { + mock *mock.Mock +} + +func (_m *UsersApi) EXPECT() *UsersApi_Expecter { + return &UsersApi_Expecter{mock: &_m.Mock} +} + // DeleteAllUsers provides a mock function with given fields: filter func (_m *UsersApi) DeleteAllUsers(filter filters.Filter) []string { ret := _m.Called(filter) @@ -30,6 +38,34 @@ func (_m *UsersApi) DeleteAllUsers(filter filters.Filter) []string { return r0 } +// UsersApi_DeleteAllUsers_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAllUsers' +type UsersApi_DeleteAllUsers_Call struct { + *mock.Call +} + +// DeleteAllUsers is a helper method to define mock.On call +// - filter filters.Filter +func (_e *UsersApi_Expecter) DeleteAllUsers(filter interface{}) *UsersApi_DeleteAllUsers_Call { + return &UsersApi_DeleteAllUsers_Call{Call: _e.mock.On("DeleteAllUsers", filter)} +} + +func (_c *UsersApi_DeleteAllUsers_Call) Run(run func(filter filters.Filter)) *UsersApi_DeleteAllUsers_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *UsersApi_DeleteAllUsers_Call) Return(_a0 []string) *UsersApi_DeleteAllUsers_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *UsersApi_DeleteAllUsers_Call) RunAndReturn(run func(filters.Filter) []string) *UsersApi_DeleteAllUsers_Call { + _c.Call.Return(run) + return _c +} + // DownloadUsers provides a mock function with given fields: filter func (_m *UsersApi) DownloadUsers(filter filters.Filter) []string { ret := _m.Called(filter) @@ -46,6 +82,34 @@ func (_m *UsersApi) DownloadUsers(filter 
filters.Filter) []string { return r0 } +// UsersApi_DownloadUsers_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadUsers' +type UsersApi_DownloadUsers_Call struct { + *mock.Call +} + +// DownloadUsers is a helper method to define mock.On call +// - filter filters.Filter +func (_e *UsersApi_Expecter) DownloadUsers(filter interface{}) *UsersApi_DownloadUsers_Call { + return &UsersApi_DownloadUsers_Call{Call: _e.mock.On("DownloadUsers", filter)} +} + +func (_c *UsersApi_DownloadUsers_Call) Run(run func(filter filters.Filter)) *UsersApi_DownloadUsers_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *UsersApi_DownloadUsers_Call) Return(_a0 []string) *UsersApi_DownloadUsers_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *UsersApi_DownloadUsers_Call) RunAndReturn(run func(filters.Filter) []string) *UsersApi_DownloadUsers_Call { + _c.Call.Return(run) + return _c +} + // GetUserInfo provides a mock function with given fields: func (_m *UsersApi) GetUserInfo() (*models.UserProfileDTO, error) { ret := _m.Called() @@ -72,6 +136,33 @@ func (_m *UsersApi) GetUserInfo() (*models.UserProfileDTO, error) { return r0, r1 } +// UsersApi_GetUserInfo_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetUserInfo' +type UsersApi_GetUserInfo_Call struct { + *mock.Call +} + +// GetUserInfo is a helper method to define mock.On call +func (_e *UsersApi_Expecter) GetUserInfo() *UsersApi_GetUserInfo_Call { + return &UsersApi_GetUserInfo_Call{Call: _e.mock.On("GetUserInfo")} +} + +func (_c *UsersApi_GetUserInfo_Call) Run(run func()) *UsersApi_GetUserInfo_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *UsersApi_GetUserInfo_Call) Return(_a0 *models.UserProfileDTO, _a1 error) *UsersApi_GetUserInfo_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *UsersApi_GetUserInfo_Call) 
RunAndReturn(run func() (*models.UserProfileDTO, error)) *UsersApi_GetUserInfo_Call { + _c.Call.Return(run) + return _c +} + // ListUsers provides a mock function with given fields: filter func (_m *UsersApi) ListUsers(filter filters.Filter) []*models.UserSearchHitDTO { ret := _m.Called(filter) @@ -88,6 +179,34 @@ func (_m *UsersApi) ListUsers(filter filters.Filter) []*models.UserSearchHitDTO return r0 } +// UsersApi_ListUsers_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListUsers' +type UsersApi_ListUsers_Call struct { + *mock.Call +} + +// ListUsers is a helper method to define mock.On call +// - filter filters.Filter +func (_e *UsersApi_Expecter) ListUsers(filter interface{}) *UsersApi_ListUsers_Call { + return &UsersApi_ListUsers_Call{Call: _e.mock.On("ListUsers", filter)} +} + +func (_c *UsersApi_ListUsers_Call) Run(run func(filter filters.Filter)) *UsersApi_ListUsers_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *UsersApi_ListUsers_Call) Return(_a0 []*models.UserSearchHitDTO) *UsersApi_ListUsers_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *UsersApi_ListUsers_Call) RunAndReturn(run func(filters.Filter) []*models.UserSearchHitDTO) *UsersApi_ListUsers_Call { + _c.Call.Return(run) + return _c +} + // PromoteUser provides a mock function with given fields: userLogin func (_m *UsersApi) PromoteUser(userLogin string) (string, error) { ret := _m.Called(userLogin) @@ -112,6 +231,34 @@ func (_m *UsersApi) PromoteUser(userLogin string) (string, error) { return r0, r1 } +// UsersApi_PromoteUser_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PromoteUser' +type UsersApi_PromoteUser_Call struct { + *mock.Call +} + +// PromoteUser is a helper method to define mock.On call +// - userLogin string +func (_e *UsersApi_Expecter) PromoteUser(userLogin interface{}) *UsersApi_PromoteUser_Call { + return 
&UsersApi_PromoteUser_Call{Call: _e.mock.On("PromoteUser", userLogin)} +} + +func (_c *UsersApi_PromoteUser_Call) Run(run func(userLogin string)) *UsersApi_PromoteUser_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string)) + }) + return _c +} + +func (_c *UsersApi_PromoteUser_Call) Return(_a0 string, _a1 error) *UsersApi_PromoteUser_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *UsersApi_PromoteUser_Call) RunAndReturn(run func(string) (string, error)) *UsersApi_PromoteUser_Call { + _c.Call.Return(run) + return _c +} + // UploadUsers provides a mock function with given fields: filter func (_m *UsersApi) UploadUsers(filter filters.Filter) []models.UserProfileDTO { ret := _m.Called(filter) @@ -128,6 +275,34 @@ func (_m *UsersApi) UploadUsers(filter filters.Filter) []models.UserProfileDTO { return r0 } +// UsersApi_UploadUsers_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadUsers' +type UsersApi_UploadUsers_Call struct { + *mock.Call +} + +// UploadUsers is a helper method to define mock.On call +// - filter filters.Filter +func (_e *UsersApi_Expecter) UploadUsers(filter interface{}) *UsersApi_UploadUsers_Call { + return &UsersApi_UploadUsers_Call{Call: _e.mock.On("UploadUsers", filter)} +} + +func (_c *UsersApi_UploadUsers_Call) Run(run func(filter filters.Filter)) *UsersApi_UploadUsers_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(filters.Filter)) + }) + return _c +} + +func (_c *UsersApi_UploadUsers_Call) Return(_a0 []models.UserProfileDTO) *UsersApi_UploadUsers_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *UsersApi_UploadUsers_Call) RunAndReturn(run func(filters.Filter) []models.UserProfileDTO) *UsersApi_UploadUsers_Call { + _c.Call.Return(run) + return _c +} + // NewUsersApi creates a new instance of UsersApi. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. 
// The first argument is typically a *testing.T value. func NewUsersApi(t interface { diff --git a/internal/service/organizations.go b/internal/service/organizations.go index 96d94a84..fa2c512b 100644 --- a/internal/service/organizations.go +++ b/internal/service/organizations.go @@ -5,16 +5,14 @@ import ( "errors" "fmt" "github.com/esnet/gdg/internal/config" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/org" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/signed_in_user" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" - "github.com/go-openapi/runtime" "github.com/gosimple/slug" + "github.com/grafana/grafana-openapi-client-go/client" + "github.com/grafana/grafana-openapi-client-go/client/orgs" + "github.com/grafana/grafana-openapi-client-go/models" + "log" + "log/slog" "path/filepath" "strings" - - "github.com/esnet/grafana-swagger-api-golang/goclient/client/orgs" - log "github.com/sirupsen/logrus" ) // OrganizationsApi Contract definition @@ -61,9 +59,7 @@ func (s *DashNGoImpl) InitOrganizations() { // getOrganizations returns organization for a given id. 
func (s *DashNGoImpl) getOrganization(id int64) (*models.OrgDetailsDTO, error) { - params := orgs.NewGetOrgByIDParams() - params.OrgID = id - data, err := s.client.Orgs.GetOrgByID(params, s.getAuth()) + data, err := s.GetClient().Orgs.GetOrgByID(id) if err != nil { return nil, err } @@ -76,7 +72,7 @@ func (s *DashNGoImpl) getOrganization(id int64) (*models.OrgDetailsDTO, error) { func (s *DashNGoImpl) SetOrganization(id int64) error { //Removes Org filter if id <= 1 { - log.Warnf("organization is not a valid value, resetting to default value of 1.") + slog.Warn("organization is not a valid value, resetting to default value of 1.") s.grafanaConf.OrganizationId = 1 } @@ -100,11 +96,11 @@ func (s *DashNGoImpl) SetOrganization(id int64) error { // ListOrganizations List all dashboards func (s *DashNGoImpl) ListOrganizations() []*models.OrgDTO { if !s.grafanaConf.IsAdminEnabled() { - log.Errorf("No valid Grafana Admin configured, cannot retrieve Organizations List") + slog.Error("No valid Grafana Admin configured, cannot retrieve Organizations List") return nil } - orgList, err := s.client.Orgs.SearchOrgs(orgs.NewSearchOrgsParams(), s.getGrafanaAdminAuth()) + orgList, err := s.GetAdminClient().Orgs.SearchOrgs(orgs.NewSearchOrgsParams()) if err != nil { var swaggerErr *orgs.SearchOrgsForbidden msg := "Cannot retrieve Orgs, you need additional permissions" @@ -112,9 +108,9 @@ func (s *DashNGoImpl) ListOrganizations() []*models.OrgDTO { case errors.As(err, &swaggerErr): var castError *orgs.SearchOrgsForbidden errors.As(err, &castError) - log.WithField("message", *castError.GetPayload().Message).Fatal(msg) + log.Fatalf("%s, message:%s", msg, *castError.GetPayload().Message) default: - log.WithError(err).Fatal(msg) + log.Fatalf("%s, err: %v", msg, err) } } return orgList.GetPayload() @@ -123,7 +119,7 @@ func (s *DashNGoImpl) ListOrganizations() []*models.OrgDTO { // DownloadOrganizations Download organizations func (s *DashNGoImpl) DownloadOrganizations() []string { if 
!s.grafanaConf.IsAdminEnabled() { - log.Errorf("No valid Grafana Admin configured, cannot retrieve Organizations") + slog.Error("No valid Grafana Admin configured, cannot retrieve Organizations") return nil } var ( @@ -135,12 +131,12 @@ func (s *DashNGoImpl) DownloadOrganizations() []string { orgsListing := s.ListOrganizations() for _, organisation := range orgsListing { if dsPacked, err = json.MarshalIndent(organisation, "", " "); err != nil { - log.Errorf("%s for %s\n", err, organisation.Name) + slog.Error("Unable to serialize organization object", "err", err, "organization", organisation.Name) continue } dsPath := buildResourcePath(slug.Make(organisation.Name), config.OrganizationResource) if err = s.storage.WriteFile(dsPath, dsPacked); err != nil { - log.Errorf("%s for %s\n", err.Error(), slug.Make(organisation.Name)) + slog.Error("Unable to write file", "err", err.Error(), "organization", slug.Make(organisation.Name)) } else { dataFiles = append(dataFiles, dsPath) } @@ -152,7 +148,7 @@ func (s *DashNGoImpl) DownloadOrganizations() []string { // UploadOrganizations Upload organizations to Grafana func (s *DashNGoImpl) UploadOrganizations() []string { if !s.grafanaConf.IsAdminEnabled() { - log.Errorf("No valid Grafana Admin configured, cannot upload Organizations") + slog.Error("No valid Grafana Admin configured, cannot upload Organizations") return nil } var ( @@ -161,7 +157,7 @@ func (s *DashNGoImpl) UploadOrganizations() []string { ) filesInDir, err := s.storage.FindAllFiles(config.Config().GetDefaultGrafanaConfig().GetPath(config.OrganizationResource), false) if err != nil { - log.WithError(err).Fatal("Failed to read folders imports") + log.Fatalf("Failed to read folders imports, err: %v", err) } orgListing := s.ListOrganizations() orgMap := map[string]bool{} @@ -173,25 +169,23 @@ func (s *DashNGoImpl) UploadOrganizations() []string { fileLocation := filepath.Join(config.Config().GetDefaultGrafanaConfig().GetPath(config.OrganizationResource), file) if 
strings.HasSuffix(file, ".json") { if rawFolder, err = s.storage.ReadFile(fileLocation); err != nil { - log.WithError(err).Errorf("failed to read file %s", fileLocation) + slog.Error("failed to read file", "filename", fileLocation, "err", err) continue } } var newOrg models.CreateOrgCommand if err = json.Unmarshal(rawFolder, &newOrg); err != nil { - log.WithError(err).Warn("failed to unmarshall folder") + slog.Warn("failed to unmarshall folder", "err", err) continue } if _, ok := orgMap[newOrg.Name]; ok { - log.Infof("Organization %s already exists, skipping", newOrg.Name) + slog.Info("Organization already exists, skipping", "organization", newOrg.Name) continue } - params := orgs.NewCreateOrgParams() - params.Body = &newOrg - _, err = s.client.Orgs.CreateOrg(params, s.getBasicAuth()) + _, err = s.GetBasicAuthClient().Orgs.CreateOrg(&newOrg) if err != nil { - log.Errorf("failed to create folder %s", newOrg.Name) + slog.Error("failed to create folder", "organization", newOrg.Name) continue } result = append(result, newOrg.Name) @@ -203,22 +197,22 @@ func (s *DashNGoImpl) UploadOrganizations() []string { // SwitchOrganization switch organization context func (s *DashNGoImpl) SwitchOrganization(id int64) error { if !s.grafanaConf.IsBasicAuth() { - log.Warnf("Basic auth required for Org switching. Ignoring Org setting and continuing") + slog.Warn("Basic auth required for Org switching. Ignoring Org setting and continuing") return nil } valid := false if id > 1 { var orgsPayload []*models.OrgDTO - orgList, err := s.client.Orgs.SearchOrgs(orgs.NewSearchOrgsParams(), s.getBasicAuth()) + orgList, err := s.GetBasicAuthClient().Orgs.SearchOrgs(orgs.NewSearchOrgsParams()) if err != nil { - log.Warn("Error fetch organizations requires (SuperAdmin Basic Auth), assuming valid ID was requested. Cannot validate OrgId") + slog.Warn("Error fetch organizations requires (SuperAdmin Basic SecureData), assuming valid ID was requested. 
Cannot validate OrgId") valid = true orgsPayload = make([]*models.OrgDTO, 0) } else { orgsPayload = orgList.GetPayload() } for _, orgEntry := range orgsPayload { - log.Debugf("%d %s\n", orgEntry.ID, orgEntry.Name) + slog.Debug("", "orgID", orgEntry.ID, "OrgName", orgEntry.Name) if orgEntry.ID == s.grafanaConf.GetOrganizationId() { valid = true break @@ -237,11 +231,9 @@ func (s *DashNGoImpl) SwitchOrganization(id int64) error { log.Fatalf("The Specified OrgId does not match any existing organization. Please check your configuration and try again.") } - params := signed_in_user.NewUserSetUsingOrgParams() - params.OrgID = id - status, err := s.client.SignedInUser.UserSetUsingOrg(params, s.getBasicAuth()) + status, err := s.GetBasicAuthClient().SignedInUser.UserSetUsingOrg(id) if err != nil { - log.WithError(err).Fatalf("%s for %v\n", err, status) + log.Fatalf("%s for %v\n", err, status) return err } @@ -250,47 +242,43 @@ func (s *DashNGoImpl) SwitchOrganization(id int64) error { // GetUserOrganization returns the organizations the user is a member of. func (s *DashNGoImpl) GetUserOrganization() *models.OrgDetailsDTO { - return s.getAssociatedActiveOrg(s.getBasicAuth()) + return s.getAssociatedActiveOrg(s.GetBasicAuthClient()) } // GetTokenOrganization returns the organizations associated with the given token. (This property is immutable) func (s *DashNGoImpl) GetTokenOrganization() *models.OrgDetailsDTO { - return s.getAssociatedActiveOrg(s.getAuth()) + return s.getAssociatedActiveOrg(s.GetClient()) } // getAssociatedActiveOrg returns the Org associated with the given authentication mechanism. 
-func (s *DashNGoImpl) getAssociatedActiveOrg(auth runtime.ClientAuthInfoWriter) *models.OrgDetailsDTO { - p := org.NewGetCurrentOrgParams() - payload, err := s.client.Org.GetCurrentOrg(p, auth) +func (s *DashNGoImpl) getAssociatedActiveOrg(apiClient *client.GrafanaHTTPAPI) *models.OrgDetailsDTO { + payload, err := apiClient.Org.GetCurrentOrg() if err != nil { - log.WithError(err).Fatal("Unable to retrieve current organization") + log.Fatalf("Unable to retrieve current organization, err: %v", err) } return payload.GetPayload() } func (s *DashNGoImpl) SetUserOrganizations(id int64) error { - p := signed_in_user.NewUserSetUsingOrgParams() - p.OrgID = id - payload, err := s.client.SignedInUser.UserSetUsingOrg(p, s.getBasicAuth()) + payload, err := s.GetBasicAuthClient().SignedInUser.UserSetUsingOrg(id) if err == nil { - log.Debugf(payload.GetPayload().Message) + slog.Debug(payload.GetPayload().Message) } return err } func (s *DashNGoImpl) UpdateCurrentOrganization(name string) error { - p := org.NewUpdateCurrentOrgParams() - p.Body = &models.UpdateOrgForm{Name: name} - _, err := s.client.Org.UpdateCurrentOrg(p, s.getAuth()) + p := &models.UpdateOrgForm{Name: name} + _, err := s.GetClient().Org.UpdateCurrentOrg(p) return err } func (s *DashNGoImpl) ListOrgUsers(orgId int64) []*models.OrgUserDTO { p := orgs.NewGetOrgUsersParams() p.OrgID = orgId - resp, err := s.client.Orgs.GetOrgUsers(p, s.getGrafanaAdminAuth()) + resp, err := s.GetAdminClient().Orgs.GetOrgUsers(orgId) if err != nil { - log.WithError(err).Fatal("failed to get org users") + log.Fatalf("failed to get org users, err: %v", err) } return resp.GetPayload() } @@ -300,13 +288,11 @@ func (s *DashNGoImpl) AddUserToOrg(role string, userId, orgId int64) error { if err != nil { return fmt.Errorf("failed to retrieve user with Id: %d", userId) } - p := orgs.NewAddOrgUserParams() - p.OrgID = orgId - p.Body = &models.AddOrgUserCommand{ + request := &models.AddOrgUserCommand{ LoginOrEmail: userInfo.Login, Role: role, } 
- _, err = s.client.Orgs.AddOrgUser(p, s.getGrafanaAdminAuth()) + _, err = s.GetAdminClient().Orgs.AddOrgUser(orgId, request) return err } @@ -314,7 +300,7 @@ func (s *DashNGoImpl) DeleteUserFromOrg(userId, orgId int64) error { p := orgs.NewRemoveOrgUserParams() p.OrgID = orgId p.UserID = userId - _, err := s.client.Orgs.RemoveOrgUser(p, s.getGrafanaAdminAuth()) + _, err := s.GetAdminClient().Orgs.RemoveOrgUser(userId, orgId) return err } @@ -325,6 +311,6 @@ func (s *DashNGoImpl) UpdateUserInOrg(role string, userId, orgId int64) error { p.Body = &models.UpdateOrgUserCommand{ Role: role, } - _, err := s.client.Orgs.UpdateOrgUser(p, s.getGrafanaAdminAuth()) + _, err := s.GetAdminClient().Orgs.UpdateOrgUser(p) return err } diff --git a/internal/service/server.go b/internal/service/server.go index 85ac5c52..13048de2 100644 --- a/internal/service/server.go +++ b/internal/service/server.go @@ -1,14 +1,14 @@ package service import ( - log "github.com/sirupsen/logrus" + "log" ) // GetServerInfo returns basic Grafana Server info func (s *DashNGoImpl) GetServerInfo() map[string]interface{} { t, err := s.extended.Health() if err != nil { - log.Panic("Unable to get server health info") + log.Fatalf("Unable to get server health info, err: %v", err) } result := make(map[string]interface{}) result["Database"] = t.Database diff --git a/internal/service/serviceaccounts.go b/internal/service/serviceaccounts.go index 69e1408c..acaf30c5 100644 --- a/internal/service/serviceaccounts.go +++ b/internal/service/serviceaccounts.go @@ -2,17 +2,18 @@ package service import ( "fmt" + "github.com/esnet/gdg/internal/types" + "log/slog" - "github.com/esnet/gdg/internal/api" "github.com/esnet/gdg/internal/tools" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/service_accounts" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" + "github.com/grafana/grafana-openapi-client-go/client/service_accounts" + "github.com/grafana/grafana-openapi-client-go/models" 
"github.com/samber/lo" - log "github.com/sirupsen/logrus" + "log" ) type ServiceAccountApi interface { - ListServiceAccounts() []*api.ServiceAccountDTOWithTokens + ListServiceAccounts() []*types.ServiceAccountDTOWithTokens ListServiceAccountsTokens(id int64) ([]*models.TokenDTO, error) DeleteAllServiceAccounts() []string DeleteServiceAccountTokens(serviceId int64) []string @@ -26,11 +27,10 @@ func (s *DashNGoImpl) CreateServiceAccount(name, role string, expiration int64) Name: name, Role: role, } - data, err := s.client.ServiceAccounts.CreateServiceAccount(p, s.getAuth()) + data, err := s.GetClient().ServiceAccounts.CreateServiceAccount(p) if err != nil { - log.WithField("serivceName", name). - WithField("role", role). - Fatal("unable to create a service request") + log.Fatalf("unable to create a service request, serviceName: %s, role: %s", name, role) + } return data.GetPayload(), nil @@ -43,28 +43,27 @@ func (s *DashNGoImpl) CreateServiceAccountToken(serviceAccountId int64, name str SecondsToLive: expiration, } p.ServiceAccountID = serviceAccountId - token, err := s.client.ServiceAccounts.CreateToken(p, s.getAuth()) + token, err := s.GetClient().ServiceAccounts.CreateToken(p) if err != nil { - log.Error(err.Error()) - log.Fatalf("unable to create token '%s' for service account ID: %d", name, serviceAccountId) + log.Fatalf("unable to create token '%s' for service account ID: %d, err: %v", name, serviceAccountId, err) } return token.GetPayload(), nil } -func (s *DashNGoImpl) ListServiceAccounts() []*api.ServiceAccountDTOWithTokens { +func (s *DashNGoImpl) ListServiceAccounts() []*types.ServiceAccountDTOWithTokens { p := service_accounts.NewSearchOrgServiceAccountsWithPagingParams() p.Disabled = tools.PtrOf(false) p.Perpage = tools.PtrOf(int64(5000)) - resp, err := s.client.ServiceAccounts.SearchOrgServiceAccountsWithPaging(p, s.getAuth()) + resp, err := s.GetClient().ServiceAccounts.SearchOrgServiceAccountsWithPaging(p) if err != nil { log.Fatal("unable to retrieve 
service accounts") } data := resp.GetPayload() - result := lo.Map(data.ServiceAccounts, func(entity *models.ServiceAccountDTO, _ int) *api.ServiceAccountDTOWithTokens { - t := api.ServiceAccountDTOWithTokens{ + result := lo.Map(data.ServiceAccounts, func(entity *models.ServiceAccountDTO, _ int) *types.ServiceAccountDTOWithTokens { + t := types.ServiceAccountDTOWithTokens{ ServiceAccount: entity, } return &t @@ -73,7 +72,7 @@ func (s *DashNGoImpl) ListServiceAccounts() []*api.ServiceAccountDTOWithTokens { if item.ServiceAccount.Tokens > 0 { item.Tokens, err = s.ListServiceAccountsTokens(item.ServiceAccount.ID) if err != nil { - log.Warnf("failed to retrieve tokens for service account %d", item.ServiceAccount.ID) + slog.Warn("failed to retrieve tokens for service account", "serviceAccountId", item.ServiceAccount.ID) } } @@ -83,28 +82,24 @@ func (s *DashNGoImpl) ListServiceAccounts() []*api.ServiceAccountDTOWithTokens { } func (s *DashNGoImpl) ListServiceAccountsTokens(id int64) ([]*models.TokenDTO, error) { - - p := service_accounts.NewListTokensParams() - p.ServiceAccountID = id - response, err := s.extended.ListTokens(p) + response, err := s.GetClient().ServiceAccounts.ListTokens(id) if err != nil { return nil, fmt.Errorf("failed to retrieve service account for %d response", id) } - return response, nil + return response.GetPayload(), nil } func (s *DashNGoImpl) DeleteAllServiceAccounts() []string { var accountNames []string accounts := s.ListServiceAccounts() for _, account := range accounts { - p := service_accounts.NewDeleteServiceAccountParams() - p.ServiceAccountID = account.ServiceAccount.ID - _, err := s.client.ServiceAccounts.DeleteServiceAccount(p, s.getAuth()) + accountId := account.ServiceAccount.ID + _, err := s.GetClient().ServiceAccounts.DeleteServiceAccount(accountId) if err != nil { - log.Warnf("Failed to delete service account %d", p.ServiceAccountID) + slog.Warn("Failed to delete service account", "ServiceAccountId", accountId) } else { - 
accountNames = append(accountNames, fmt.Sprintf("service account %d has been deleted", p.ServiceAccountID)) + accountNames = append(accountNames, fmt.Sprintf("service account %d has been deleted", accountId)) } } @@ -119,12 +114,9 @@ func (s *DashNGoImpl) DeleteServiceAccountTokens(serviceId int64) []string { } for _, token := range tokens { - p := service_accounts.NewDeleteTokenParams() - p.TokenID = token.ID - p.ServiceAccountID = serviceId - _, err := s.client.ServiceAccounts.DeleteToken(p, s.getAuth()) + _, err := s.GetClient().ServiceAccounts.DeleteToken(token.ID, serviceId) if err != nil { - log.Errorf("unable to delete token ID: %d", token.ID) + slog.Error("unable to delete token", "tokenID", token.ID) continue } result = append(result, token.Name) diff --git a/internal/service/storage_cloud.go b/internal/service/storage_cloud.go index 52dab9ea..a8a09f4b 100644 --- a/internal/service/storage_cloud.go +++ b/internal/service/storage_cloud.go @@ -8,9 +8,10 @@ import ( "github.com/aws/aws-sdk-go/aws/credentials" "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/s3" - log "github.com/sirupsen/logrus" "gocloud.dev/blob" "gocloud.dev/blob/s3blob" + "log" + "log/slog" "os" "path" "path/filepath" @@ -105,7 +106,7 @@ func (s *CloudStorage) FindAllFiles(folder string, fullPath bool) ([]string, err if strings.Contains(obj.Key, folderName) { fileList = append(fileList, obj.Key) } else { - log.Debugf("%s does not match folder path", obj.Key) + slog.Debug("key does not match folder path", "key", obj.Key) } } else { fileList = append(fileList, filepath.Base(obj.Key)) @@ -161,9 +162,9 @@ func NewCloudStorage(c context.Context) (Storage, error) { //attempt to create bucket _, err := client.CreateBucket(&m) if err != nil { - log.Warnf("%s bucket already exists or cannot be created", *m.Bucket) + slog.Warn("bucket already exists or cannot be created", "bucket", *m.Bucket) } else { - log.Infof("bucket %s has been created", *m.Bucket) + slog.Info("bucket 
has been created", "bucket", *m.Bucket) } }) @@ -176,7 +177,7 @@ func NewCloudStorage(c context.Context) (Storage, error) { } if err != nil { - log.WithError(err).WithField("Msg", errorMsg).Fatal("unable to connect to cloud provider") + log.Fatalf("unable to connect to cloud provider, err: %v, message: %s", err, errorMsg) } entity := &CloudStorage{ diff --git a/internal/service/storage_local.go b/internal/service/storage_local.go index 43617b17..49f70453 100644 --- a/internal/service/storage_local.go +++ b/internal/service/storage_local.go @@ -3,13 +3,12 @@ package service import ( "context" "errors" + "log/slog" "os" "path/filepath" - log "github.com/sirupsen/logrus" "gocloud.dev/blob" "gocloud.dev/blob/fileblob" - _ "gocloud.dev/blob/fileblob" ) // LocalStorage default storage engine @@ -52,7 +51,7 @@ func (s *LocalStorage) WriteFile(filename string, data []byte) error { if err == nil { //Remove attribute file being generated by local storage attrFile := filename + ".attrs" - log.Debugf("Removing file %s", attrFile) + slog.Debug("Removing file", "file", attrFile) defer os.Remove(attrFile) } diff --git a/internal/service/teams.go b/internal/service/teams.go index ced5b7a8..6dc5c175 100644 --- a/internal/service/teams.go +++ b/internal/service/teams.go @@ -5,16 +5,17 @@ import ( "fmt" "github.com/esnet/gdg/internal/config" "github.com/esnet/gdg/internal/service/filters" + "log/slog" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/teams" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" + "github.com/grafana/grafana-openapi-client-go/client/teams" + "github.com/grafana/grafana-openapi-client-go/models" "golang.org/x/exp/maps" "strings" "encoding/json" "path/filepath" - log "github.com/sirupsen/logrus" + "log" ) type TeamsApi interface { @@ -59,34 +60,32 @@ func NewTeamFilter(entries ...string) filters.Filter { func (s *DashNGoImpl) DownloadTeams(filter filters.Filter) map[*models.TeamDTO][]*models.TeamMemberDTO { teamListing := 
maps.Keys(s.ListTeams(filter)) importedTeams := make(map[*models.TeamDTO][]*models.TeamMemberDTO) - teamPath := buildResourceFolder("", config.TeamResource) + teamPath := BuildResourceFolder("", config.TeamResource) for ndx, team := range teamListing { //Teams teamFileName := filepath.Join(teamPath, GetSlug(team.Name), "team.json") teamData, err := json.MarshalIndent(&teamListing[ndx], "", "\t") if err != nil { - log.Errorf("could not serialize team object for team name: %s", team.Name) + slog.Error("could not serialize team object for team name", "teamName", team.Name) continue } //Members memberFileName := filepath.Join(teamPath, GetSlug(team.Name), "members.json") - p := teams.NewGetTeamMembersParams() - p.TeamID = fmt.Sprintf("%d", team.ID) - members, err := s.client.Teams.GetTeamMembers(p, s.getAuth()) + members, err := s.GetClient().Teams.GetTeamMembers(fmt.Sprintf("%d", team.ID)) if err != nil { - log.Errorf("could not get team members object for team name: %s", team.Name) + slog.Error("could not get team members object for team name", "teamName", team.Name) continue } membersData, err := json.MarshalIndent(members.GetPayload(), "", "\t") if err != nil { - log.Errorf("could not serialize team members object for team name: %s", team.Name) + slog.Error("could not serialize team members object for team name", "teamName", team.Name) continue } //Writing Files if err = s.storage.WriteFile(teamFileName, teamData); err != nil { - log.WithError(err).Errorf("for %s\n", team.Name) + slog.Error("could not write file", "teamName", team.Name, "err", err) } else if err = s.storage.WriteFile(memberFileName, membersData); err != nil { - log.WithError(err).Errorf("for %s\n", team.Name) + slog.Error("could not write team members file", "teamName", team.Name, "err", err) } else { importedTeams[team] = members.GetPayload() } @@ -98,7 +97,7 @@ func (s *DashNGoImpl) DownloadTeams(filter filters.Filter) map[*models.TeamDTO][ func (s *DashNGoImpl) UploadTeams(filter filters.Filter) 
map[*models.TeamDTO][]*models.TeamMemberDTO { filesInDir, err := s.storage.FindAllFiles(config.Config().GetDefaultGrafanaConfig().GetPath(config.TeamResource), true) if err != nil { - log.WithError(err).Errorf("failed to list files in directory for teams") + slog.Error("failed to list files in directory for teams", "err", err) } exportedTeams := make(map[*models.TeamDTO][]*models.TeamMemberDTO) //Clear previous data. @@ -111,23 +110,21 @@ func (s *DashNGoImpl) UploadTeams(filter filters.Filter) map[*models.TeamDTO][]* //Export Team var rawTeam []byte if rawTeam, err = s.storage.ReadFile(fileLocation); err != nil { - log.WithError(err).Errorf("failed to read file: %s", fileLocation) + slog.Error("failed to read file", "filename", fileLocation, "err", err) continue } var newTeam *models.TeamDTO if err = json.Unmarshal(rawTeam, &newTeam); err != nil { - log.WithError(err).Errorf("failed to unmarshal file: %s", fileLocation) + slog.Error("failed to unmarshal file", "filename", fileLocation, "err", err) continue } - p := teams.NewCreateTeamParams() - p.Body = &models.CreateTeamCommand{ + p := &models.CreateTeamCommand{ Name: newTeam.Name, Email: newTeam.Email, } - teamCreated, err := s.client.Teams.CreateTeam(p, s.getAuth()) + teamCreated, err := s.GetClient().Teams.CreateTeam(p) if err != nil { - log.WithError(err).Errorf("failed to create team for file: %s", fileLocation) - continue + slog.Error("failed to create team for file", "filename", fileLocation, "err", err) } newTeam.ID = teamCreated.GetPayload().TeamID @@ -137,22 +134,22 @@ func (s *DashNGoImpl) UploadTeams(filter filters.Filter) map[*models.TeamDTO][]* teamMemberLocation := filepath.Join(config.Config().GetDefaultGrafanaConfig().GetPath(config.TeamResource), GetSlug(newTeam.Name), "members.json") if rawMembers, err = s.storage.ReadFile(teamMemberLocation); err != nil { - log.WithError(err).Errorf("failed to find team members: %s", fileLocation) + slog.Error("failed to find team members", "filename", 
fileLocation, "err", err) continue } var newMembers []*models.TeamMemberDTO if err = json.Unmarshal(rawMembers, &newMembers); err != nil { - log.WithError(err).Errorf("failed to unmarshal file: %s", fileLocation) + slog.Error("failed to unmarshal file", "filename", fileLocation, "err", err) continue } for _, member := range newMembers { if s.isAdminUser(member.UserID, member.Name) { - log.Warnf("skipping admin user, already added when new team is created") + slog.Warn("skipping admin user, already added when new team is created") continue } _, err := s.addTeamMember(newTeam, member) if err != nil { - log.WithError(err).Errorf("failed to create team member for team %s with ID %d", newTeam.Name, member.UserID) + slog.Error("failed to create team member for team", "teamName", newTeam.Name, "MemberID", member.UserID, "err", err) } else { currentMembers = append(currentMembers, member) } @@ -169,7 +166,7 @@ func (s *DashNGoImpl) ListTeams(filter filters.Filter) map[*models.TeamDTO][]*mo var pageSize int64 = 99999 p := teams.NewSearchTeamsParams() p.Perpage = &pageSize - data, err := s.client.Teams.SearchTeams(p, s.getAuth()) + data, err := s.GetClient().Teams.SearchTeams(p) if err != nil { log.Fatal("unable to list teams") } @@ -217,11 +214,9 @@ func (s *DashNGoImpl) DeleteTeam(filter filters.Filter) ([]*models.TeamDTO, erro if filter != nil && !filter.ValidateAll(team.Name) { continue } - p := teams.NewDeleteTeamByIDParams() - p.TeamID = fmt.Sprintf("%d", team.ID) - _, err := s.client.Teams.DeleteTeamByID(p, s.getAuth()) + _, err := s.GetClient().Teams.DeleteTeamByID(fmt.Sprintf("%d", team.ID)) if err != nil { - log.Errorf("failed to delete team: '%s'", team.Name) + slog.Error("failed to delete team", "teamName", team.Name) continue } result = append(result, team) @@ -233,9 +228,7 @@ func (s *DashNGoImpl) DeleteTeam(filter filters.Filter) ([]*models.TeamDTO, erro // List Team Members of specific Team func (s *DashNGoImpl) listTeamMembers(filter filters.Filter, teamID 
int64) []*models.TeamMemberDTO { teamIDStr := fmt.Sprintf("%d", teamID) - fetchTeamParam := teams.NewGetTeamMembersParams() - fetchTeamParam.TeamID = teamIDStr - members, err := s.client.Teams.GetTeamMembers(fetchTeamParam, s.getAuth()) + members, err := s.GetClient().Teams.GetTeamMembers(teamIDStr) if err != nil { log.Fatal(fmt.Errorf("team: '%d' could not be found", teamID)) } @@ -260,14 +253,12 @@ func (s *DashNGoImpl) addTeamMember(team *models.TeamDTO, userDTO *models.TeamMe if user == nil { log.Fatal(fmt.Errorf("user: '%s' could not be found", userDTO.Login)) } - p := teams.NewAddTeamMemberParams() - p.TeamID = fmt.Sprintf("%d", team.ID) - p.Body = &models.AddTeamMemberCommand{UserID: user.ID} - msg, err := s.client.Teams.AddTeamMember(p, s.getAuth()) + body := &models.AddTeamMemberCommand{UserID: user.ID} + msg, err := s.GetClient().Teams.AddTeamMember(fmt.Sprintf("%d", team.ID), body) if err != nil { - log.Info(err.Error()) + slog.Info(err.Error()) errorMsg := fmt.Sprintf("failed to add member '%s' to team '%s'", userDTO.Login, team.Name) - log.Error(errorMsg) + slog.Error(errorMsg) return "", errors.New(errorMsg) } if userDTO.Permission == AdminUserPermission { @@ -275,11 +266,11 @@ func (s *DashNGoImpl) addTeamMember(team *models.TeamDTO, userDTO *models.TeamMe adminPatch.TeamID = fmt.Sprintf("%d", team.ID) adminPatch.UserID = userDTO.UserID adminPatch.Body = &models.UpdateTeamMemberCommand{Permission: AdminUserPermission} - response, err := s.client.Teams.UpdateTeamMember(adminPatch, s.getAuth()) + response, err := s.GetClient().Teams.UpdateTeamMember(adminPatch) if err != nil { return "", err } - log.WithField("message", response.GetPayload().Message).Debugf("Updated permissions for user %s on team %s", userDTO.Name, team.Name) + slog.Debug("Updated permissions for user on team ", "username", userDTO.Name, "teamName", team.Name, "message", response.GetPayload().Message) } return msg.GetPayload().Message, nil diff --git a/internal/service/tokens.go 
b/internal/service/tokens.go index 99ace136..7c6d4fbf 100644 --- a/internal/service/tokens.go +++ b/internal/service/tokens.go @@ -2,10 +2,11 @@ package service import ( "fmt" + "log/slog" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/api_keys" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" - log "github.com/sirupsen/logrus" + "github.com/grafana/grafana-openapi-client-go/client/api_keys" + "github.com/grafana/grafana-openapi-client-go/models" + "log" ) type TokenApi interface { @@ -17,7 +18,7 @@ type TokenApi interface { // ListAPIKeys returns a list of all known API Keys and service accounts func (s *DashNGoImpl) ListAPIKeys() []*models.APIKeyDTO { params := api_keys.NewGetAPIkeysParams() - keys, err := s.client.APIKeys.GetAPIkeys(params, s.getBasicAuth()) + keys, err := s.GetBasicAuthClient().APIKeys.GetAPIkeys(params) if err != nil { log.Fatal("unable to list API Keys") } @@ -31,7 +32,7 @@ func (s *DashNGoImpl) DeleteAllTokens() []string { for _, key := range keys { err := s.deleteAPIKey(key.ID) if err != nil { - log.Warnf("Failed to delete API key %d named %s", key.ID, key.Name) + slog.Warn("Failed to delete API key", "APIKeyID", key.ID, "APIKey", key.Name) continue } deleted = append(deleted, key.Name) @@ -42,15 +43,14 @@ func (s *DashNGoImpl) DeleteAllTokens() []string { // CreateAPIKey create a new key for the given role and expiration specified func (s *DashNGoImpl) CreateAPIKey(name, role string, expiration int64) (*models.NewAPIKeyResult, error) { - p := api_keys.NewAddAPIkeyParams() - p.Body = &models.AddCommand{ + p := &models.AddAPIKeyCommand{ Name: name, Role: role, } if expiration != 0 { - p.Body.SecondsToLive = expiration + p.SecondsToLive = expiration } - newKey, err := s.client.APIKeys.AddAPIkey(p, s.getAuth()) + newKey, err := s.GetClient().APIKeys.AddAPIkey(p) if err != nil { return nil, fmt.Errorf("unable to create a new API Key") } @@ -58,9 +58,7 @@ func (s *DashNGoImpl) CreateAPIKey(name, role string, 
expiration int64) (*models } func (s *DashNGoImpl) deleteAPIKey(id int64) error { - p := api_keys.NewDeleteAPIkeyParams() - p.ID = id - _, err := s.client.APIKeys.DeleteAPIkey(p, s.getAuth()) + _, err := s.GetClient().APIKeys.DeleteAPIkey(id) if err != nil { return fmt.Errorf("failed to delete API Key: %d", id) } diff --git a/internal/service/user.go b/internal/service/user.go index 719033dd..c336bbd7 100644 --- a/internal/service/user.go +++ b/internal/service/user.go @@ -7,14 +7,14 @@ import ( "fmt" "github.com/esnet/gdg/internal/config" "github.com/esnet/gdg/internal/service/filters" - gapi "github.com/esnet/grafana-swagger-api-golang" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/admin_users" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/signed_in_user" - "github.com/esnet/grafana-swagger-api-golang/goclient/client/users" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" + "github.com/esnet/gdg/internal/tools" "github.com/gosimple/slug" - log "github.com/sirupsen/logrus" + "github.com/grafana/grafana-openapi-client-go/client/admin_users" + "github.com/grafana/grafana-openapi-client-go/client/users" + "github.com/grafana/grafana-openapi-client-go/models" "github.com/tidwall/pretty" + "log" + "log/slog" "path/filepath" "strings" ) @@ -70,8 +70,7 @@ func DefaultUserPassword(username string) string { // GetUserInfo get signed-in user info, requires Basic authentication func (s *DashNGoImpl) GetUserInfo() (*models.UserProfileDTO, error) { - p := signed_in_user.NewGetSignedInUserParams() - userInfo, err := s.client.SignedInUser.GetSignedInUser(p, s.getBasicAuth()) + userInfo, err := s.GetBasicAuthClient().SignedInUser.GetSignedInUser() if err == nil { return userInfo.GetPayload(), err } @@ -88,20 +87,20 @@ func (s *DashNGoImpl) DownloadUsers(filter filters.Filter) []string { userListing := s.ListUsers(filter) var importedUsers []string - userPath := buildResourceFolder("", config.UserResource) + userPath := 
BuildResourceFolder("", config.UserResource) for ndx, user := range userListing { if s.isAdminUser(user.ID, user.Name) { - log.Info("Skipping admin super user") + slog.Info("Skipping admin super user") continue } fileName := filepath.Join(userPath, fmt.Sprintf("%s.json", GetSlug(user.Login))) userData, err = json.Marshal(&userListing[ndx]) if err != nil { - log.Errorf("could not serialize user object for userId: %d", user.ID) + slog.Error("could not serialize user object for userId", "userID", user.ID) continue } if err = s.storage.WriteFile(fileName, pretty.Pretty(userData)); err != nil { - log.WithError(err).Errorf("for %s\n", user.Login) + slog.Error("Failed to write file", "filename", user.Login, "err", err) } else { importedUsers = append(importedUsers, fileName) } @@ -119,7 +118,7 @@ func (s *DashNGoImpl) isAdminUser(id int64, name string) bool { func (s *DashNGoImpl) UploadUsers(filter filters.Filter) []models.UserProfileDTO { filesInDir, err := s.storage.FindAllFiles(config.Config().GetDefaultGrafanaConfig().GetPath(config.UserResource), false) if err != nil { - log.WithError(err).Errorf("failed to list files in directory for userListings") + slog.Error("failed to list files in directory for userListings", "err", err) } var userListings []models.UserProfileDTO var rawUser []byte @@ -136,11 +135,11 @@ func (s *DashNGoImpl) UploadUsers(filter filters.Filter) []models.UserProfileDTO fileLocation := filepath.Join(config.Config().GetDefaultGrafanaConfig().GetPath(config.UserResource), file) if strings.HasSuffix(file, ".json") { if rawUser, err = s.storage.ReadFile(fileLocation); err != nil { - log.WithError(err).Errorf("failed to read file: %s", fileLocation) + slog.Error("failed to read file", "filename", fileLocation, "err", err) continue } if val, ok := currentUsers[filepath.Base(file)]; ok { - log.Warnf("User %s already exist, skipping", val.Login) + slog.Warn("User already exist, skipping", "username", val.Login) continue } var newUser 
models.AdminCreateUserForm @@ -150,37 +149,35 @@ func (s *DashNGoImpl) UploadUsers(filter filters.Filter) []models.UserProfileDTO var data = make(map[string]interface{}, 0) if err = json.Unmarshal(rawUser, &data); err != nil { - log.WithError(err).Errorf("failed to unmarshall file: %s", fileLocation) + slog.Error("failed to unmarshall file", "filename", fileLocation, "err", err) continue } data["password"] = password //Get raw version of payload once more with password if rawUser, err = json.Marshal(data); err != nil { - log.WithError(err).Errorf("failed to marshall file: %s to include password", fileLocation) + slog.Error("failed to marshall file to include password", "filename", fileLocation, "err", err) } if err = json.Unmarshal(rawUser, &newUser); err != nil { - log.WithError(err).Errorf("failed to unmarshall file: %s", fileLocation) + slog.Error("failed to unmarshall file", "filename", fileLocation, "err", err) continue } if newUser.Name == "admin" { - log.Info("Skipping admin user") + slog.Info("Skipping admin user") continue } params := admin_users.NewAdminCreateUserParams() params.Body = &newUser - userCreated, err := s.client.AdminUsers.AdminCreateUser(params, s.getBasicAuth()) + userCreated, err := s.GetBasicAuthClient().AdminUsers.AdminCreateUser(&newUser) if err != nil { - log.WithError(err).Errorf("Failed to create user for file: %s", fileLocation) + slog.Error("Failed to create user for file", "filename", fileLocation, "err", err) continue } - p := users.NewGetUserByIDParams() - p.UserID = userCreated.Payload.ID - resp, err := s.client.Users.GetUserByID(p, s.getBasicAuth()) + resp, err := s.GetBasicAuthClient().Users.GetUserByID(userCreated.Payload.ID) if err != nil { - log.Errorf("unable to read user: %s, ID: %d back from grafana", newUser.Email, userCreated.Payload.ID) + slog.Error("unable to read user back from grafana", "username", newUser.Email, "userID", userCreated.GetPayload().ID) continue } userListings = append(userListings, *resp.Payload) 
@@ -197,13 +194,13 @@ func (s *DashNGoImpl) ListUsers(filter filters.Filter) []*models.UserSearchHitDT } var filteredUsers []*models.UserSearchHitDTO params := users.NewSearchUsersParams() - params.Page = gapi.ToPtr(int64(1)) - params.Perpage = gapi.ToPtr(int64(5000)) - usersList, err := s.extended.SearchUsers(params) + params.Page = tools.PtrOf(int64(1)) + params.Perpage = tools.PtrOf(int64(5000)) + usersList, err := s.GetClient().Users.SearchUsers(params) if err != nil { log.Fatal(err.Error()) } - for _, entry := range usersList { + for _, entry := range usersList.GetPayload() { if len(entry.AuthLabels) == 0 { filteredUsers = append(filteredUsers, entry) } else if filter.ValidateAll(map[filters.FilterType]string{filters.AuthLabel: entry.AuthLabels[0]}) { @@ -219,13 +216,11 @@ func (s *DashNGoImpl) DeleteAllUsers(filter filters.Filter) []string { var deletedUsers []string for _, user := range userListing { if s.isAdminUser(user.ID, user.Name) { - log.Info("Skipping admin user") + slog.Info("Skipping admin user") continue } - params := admin_users.NewAdminDeleteUserParams() - params.UserID = user.ID - _, err := s.client.AdminUsers.AdminDeleteUser(params, s.getBasicAuth()) + _, err := s.GetBasicAuthClient().AdminUsers.AdminDeleteUser(user.ID) if err == nil { deletedUsers = append(deletedUsers, user.Email) } @@ -251,17 +248,12 @@ func (s *DashNGoImpl) PromoteUser(userLogin string) (string, error) { if user == nil { return "", fmt.Errorf("user: '%s' could not be found", userLogin) } + requestBody := &models.AdminUpdateUserPermissionsForm{IsGrafanaAdmin: true} - promoteUserParam := admin_users.NewAdminUpdateUserPermissionsParams() - promoteUserParam.UserID = user.ID - promoteUserParam.Body = &models.AdminUpdateUserPermissionsForm{ - IsGrafanaAdmin: true, - } - - msg, err := s.client.AdminUsers.AdminUpdateUserPermissions(promoteUserParam, s.getBasicAuth()) + msg, err := s.GetBasicAuthClient().AdminUsers.AdminUpdateUserPermissions(user.ID, requestBody) if err != nil {
errorMsg := fmt.Sprintf("failed to promote user: '%s'", userLogin) - log.Error(errorMsg) + slog.Error("failed to promote user", "username", userLogin, "err", err) return "", errors.New(errorMsg) } @@ -271,9 +263,7 @@ func (s *DashNGoImpl) PromoteUser(userLogin string) (string, error) { // getUserById get the user by ID func (s *DashNGoImpl) getUserById(userId int64) (*models.UserProfileDTO, error) { - p := users.NewGetUserByIDParams() - p.UserID = userId - resp, err := s.client.Users.GetUserByID(p, s.getAuth()) + resp, err := s.GetClient().Users.GetUserByID(userId) if err != nil { return nil, err } diff --git a/internal/templating/templating.go b/internal/templating/templating.go new file mode 100644 index 00000000..cf044638 --- /dev/null +++ b/internal/templating/templating.go @@ -0,0 +1,132 @@ +package templating + +import ( + "fmt" + "github.com/Masterminds/sprig/v3" + "github.com/esnet/gdg/internal/config" + "github.com/esnet/gdg/internal/service" + "github.com/esnet/gdg/internal/tools" + "log/slog" + "os" + "strings" + "text/template" +) + +type Templating interface { + Generate(templateName string) (map[string][]string, error) + ListTemplates() []string +} + +type templateImpl struct { +} + +func NewTemplate() Templating { + return &templateImpl{} +} + +var fns = template.FuncMap{ + "ToSlug": service.GetSlug, + "QuotedStringJoin": func(arr []interface{}) string { + result := "" + for ndx, item := range arr { + if len(arr)-1 == ndx { + result += fmt.Sprintf("\"%v\"", item) + } else { + result += fmt.Sprintf("\"%v\",", item) + } + } + + return result + }, +} + +func (t *templateImpl) ListTemplates() []string { + cfg := config.Config() + var result []string + entities := cfg.GetTemplateConfig().Entities.Dashboards + for _, entry := range entities { + result = append(result, entry.TemplateName) + } + + return result +} + +func (t *templateImpl) Generate(templateName string) (map[string][]string, error) { + result := make(map[string][]string) + //Remove extension 
if included + templateName = strings.ReplaceAll(templateName, ".go.tmpl", "") + cfg := config.Config() + var entities []config.TemplateDashboards + entities = cfg.GetTemplateConfig().Entities.Dashboards + if templateName != "" { + entity, ok := cfg.GetTemplateConfig().GetTemplate(templateName) + if ok { + entities = append(entities, *entity) + } + } + for _, entity := range entities { + result[entity.TemplateName] = make([]string, 0) + slog.Info("Processing template:", slog.String("template", entity.TemplateName)) + tmplPath := cfg.GetDefaultGrafanaConfig().GetPath(config.TemplatesResource) + fileLocation := fmt.Sprintf("%s/%s.go.tmpl", tmplPath, entity.TemplateName) + _, err := os.Stat(fileLocation) + if err != nil { + slog.Error("Processing template, file could not be found", "template", entity.TemplateName, "file", fileLocation) + slog.Warn("Continuing to process remaining templates") + continue + } + templateData, err := os.ReadFile(fileLocation) + if err != nil { + slog.Error("unable to open file", slog.Any("file", fileLocation)) + slog.Warn("Continuing to process remaining templates") + continue + } + for _, outputEntity := range entity.DashboardEntities { + grafana := cfg.GetDefaultGrafanaConfig() + slog.Debug("Creating a new template", slog.String("folder", outputEntity.Folder), slog.Int64("orgId", outputEntity.OrgId), slog.Any("data", outputEntity.TemplateData)) + grafana.OrganizationId = outputEntity.OrgId + outputPath := service.BuildResourceFolder(outputEntity.Folder, config.DashboardResource) + //Merge two maps. + tmpl, err := template.New("").Funcs(fns).Parse(string(templateData)) + if err != nil { + slog.Error("unable to parse template") + } + + //Create new file. 
+ tools.CreateDestinationPath(outputPath) + dashboardName := entity.TemplateName + if outputEntity.DashboardName != "" { + dashboardName = service.GetSlug(outputEntity.DashboardName) + } + f, err := os.Create(fmt.Sprintf("%s/%s.json", outputPath, dashboardName)) + if err != nil { + slog.Error("unable to create file: ", slog.Any("err", err)) + result[entity.TemplateName] = append(result[entity.TemplateName], err.Error()) + continue + } + slog.Debug("Writing data to destination", "output", f.Name()) + result[entity.TemplateName] = append(result[entity.TemplateName], f.Name()) + defer func() { + err = f.Close() + if err != nil { + slog.Warn("failed to close template file", "filename", f.Name()) + } + }() + + err = tmpl.Execute(f, outputEntity.TemplateData) // merge. + if err != nil { + slog.Error("execute", "err", err) + result[entity.TemplateName] = append(result[entity.TemplateName], err.Error()) + continue + } + } + } + return result, nil + +} + +func init() { + for key, value := range sprig.TxtFuncMap() { + fns[key] = value + } +} diff --git a/internal/templating/templating_test.go b/internal/templating/templating_test.go new file mode 100644 index 00000000..14d3ea4a --- /dev/null +++ b/internal/templating/templating_test.go @@ -0,0 +1,58 @@ +package templating + +import ( + "github.com/esnet/gdg/internal/config" + "github.com/esnet/gdg/internal/service" + "github.com/stretchr/testify/assert" + "github.com/tidwall/gjson" + "os" + "slices" + "strings" + "testing" +) + +func TestGenerate(t *testing.T) { + //Setup + dir, err := os.Getwd() + assert.Nil(t, err) + if strings.Contains(dir, "templating") { + os.Chdir("../..") + } + config.InitConfig("testing.yml", "") + config.InitTemplateConfig("templates-example") + template := NewTemplate() + data, err := template.Generate("template_example") + assert.Nil(t, err) + assert.Equal(t, len(data), 1) + generatedFiles := data["template_example"] + assert.True(t, slices.Contains(generatedFiles, 
"test/data/org_2/dashboards/General/testing-foobar.json")) + assert.True(t, slices.Contains(generatedFiles, "test/data/org_3/dashboards/Testing/template_example.json")) + //Remove output to avoid conflicting with other tests + defer func() { + os.Remove(generatedFiles[0]) + os.Remove(generatedFiles[1]) + }() + + //Obtain first Config and validate output. + cfg := config.Config().GetTemplateConfig() + templateCfg := cfg.Entities.Dashboards[0].DashboardEntities[0] + rawData, err := os.ReadFile("test/data/org_2/dashboards/General/testing-foobar.json") + assert.Nil(t, err) + parser := gjson.ParseBytes(rawData) + val := parser.Get("annotations.list.0.hashKey") + assert.True(t, val.Exists()) + expected := service.GetSlug(templateCfg.TemplateData["title"].(string)) + val = parser.Get("annotations.list.0.datasource") + expected = "elasticsearch" + assert.Equal(t, val.String(), expected) + expected = service.GetSlug(templateCfg.TemplateData["title"].(string)) + valArray := parser.Get("panels.0.link_text").Array() + val = parser.Get("panels.0.link_url.0") + lightsources := templateCfg.TemplateData["lightsources"].([]interface{}) + for ndx, entry := range valArray { + assert.Equal(t, entry.String(), lightsources[ndx].(string)) + assert.True(t, strings.Contains(val.String(), entry.String())) + + } + +} diff --git a/internal/tools/generics_tooling.go b/internal/tools/generics_tooling.go index 209e1e7d..579c9e32 100644 --- a/internal/tools/generics_tooling.go +++ b/internal/tools/generics_tooling.go @@ -1,6 +1,10 @@ package tools -import "encoding/json" +import ( + "encoding/json" + "log" + "os" +) func PtrOf[T any](value T) *T { return &value @@ -19,3 +23,11 @@ func DeepCopy[T any](value T) (*T, error) { return clone, nil } + +// CreateDestinationPath Handle osMkdir Errors +func CreateDestinationPath(v string) { + err := os.MkdirAll(v, 0750) + if err != nil { + log.Fatalf("unable to create path %s, err: %s", v, err.Error()) + } +} diff --git a/internal/tools/prompt_helpers.go 
b/internal/tools/prompt_helpers.go index 11b5143f..80f3f5bd 100644 --- a/internal/tools/prompt_helpers.go +++ b/internal/tools/prompt_helpers.go @@ -3,8 +3,9 @@ package tools import ( "bufio" "fmt" - log "github.com/sirupsen/logrus" "golang.org/x/exp/slices" + "log" + "log/slog" "os" "strings" ) @@ -27,7 +28,7 @@ func GetUserConfirmation(msg, error string, terminate bool) bool { ans, _ := r.ReadString('\n') ans = strings.ToLower(ans) if !slices.Contains(validResponse, rune(ans[0])) { - log.Error("Invalid response, please try again. Only [yes/no] are supported") + slog.Error("Invalid response, please try again. Only [yes/no] are supported") continue } //Validate Response diff --git a/internal/types/models.go b/internal/types/models.go new file mode 100644 index 00000000..b9d8289b --- /dev/null +++ b/internal/types/models.go @@ -0,0 +1,8 @@ +package types + +import "github.com/grafana/grafana-openapi-client-go/models" + +type ServiceAccountDTOWithTokens struct { + ServiceAccount *models.ServiceAccountDTO + Tokens []*models.TokenDTO +} diff --git a/internal/version/version.go b/internal/version/version.go index 65ea1c88..f63c92c8 100644 --- a/internal/version/version.go +++ b/internal/version/version.go @@ -9,7 +9,7 @@ import ( var GitCommit string // Version returns the main version number that is being run at the moment. 
-const Version = "0.5.1" +var Version = "0.5.1" // BuildDate returns the date the binary was built var BuildDate = "" diff --git a/test/cloud_integration_test.go b/test/cloud_integration_test.go index f76bf8fd..dde17228 100644 --- a/test/cloud_integration_test.go +++ b/test/cloud_integration_test.go @@ -2,9 +2,9 @@ package test import ( "github.com/esnet/gdg/internal/service" - log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" _ "gocloud.dev/blob/memblob" + "log/slog" "os" "testing" ) @@ -15,8 +15,12 @@ func TestCloudDataSourceCRUD(t *testing.T) { } apiClient, _, cleanup := initTest(t, nil) - defer cleanup() - + defer func() { + cleanErr := cleanup() + if cleanErr != nil { + slog.Error("unable to clean up after test", slog.Any("err", cleanErr)) + } + }() //Wipe all data from grafana dsFilter := service.NewConnectionFilter("") apiClient.DeleteAllConnections(dsFilter) @@ -27,10 +31,10 @@ func TestCloudDataSourceCRUD(t *testing.T) { SetupCloudFunction([]string{"s3", "testing"}) //SetupCloudFunction(apiClient, []string{"mem", "testing"}) - log.Info("Importing DataSources") + slog.Info("Importing DataSources") dsStringList := apiClient.DownloadConnections(dsFilter) //Saving to S3 assert.Equal(t, len(dsList), len(dsStringList)) - log.Info("Deleting DataSources") + slog.Info("Deleting DataSources") deleteDSList := apiClient.DeleteAllConnections(dsFilter) // Cleaning up Grafana assert.Equal(t, len(deleteDSList), len(dsStringList)) dsList = apiClient.ListConnections(dsFilter) @@ -52,8 +56,12 @@ func TestDashboardCloudCRUD(t *testing.T) { assert.Nil(t, err, "Failed to set context name via env to testing") apiClient, _, cleanup := initTest(t, nil) - defer cleanup() - + defer func() { + cleanErr := cleanup() + if cleanErr != nil { + slog.Error("unable to clean up after test", slog.Any("err", cleanErr)) + } + }() //Wipe all data from grafana dashFilter := service.NewDashboardFilter("", "", "") apiClient.DeleteAllDashboards(dashFilter) @@ -62,14 +70,13 @@ 
func TestDashboardCloudCRUD(t *testing.T) { boards := apiClient.ListDashboards(dashFilter) assert.True(t, len(boards) > 0) - //SetupCloudFunction(apiClient, []string{"mem", "testing"}) _, apiClient = SetupCloudFunction([]string{"s3", "testing"}) //At this point all operations are reading/writing from Minio - log.Info("Importing Dashboards") + slog.Info("Importing Dashboards") list := apiClient.DownloadDashboards(dashFilter) //Saving to S3 assert.Equal(t, len(list), len(boards)) - log.Info("Deleting Dashboards") // Clearing Grafana + slog.Info("Deleting Dashboards") // Clearing Grafana deleteList := apiClient.DeleteAllDashboards(dashFilter) assert.Equal(t, len(list), len(deleteList)) boards = apiClient.ListDashboards(dashFilter) diff --git a/test/common_test.go b/test/common_test.go index cacb4591..2274c735 100644 --- a/test/common_test.go +++ b/test/common_test.go @@ -6,11 +6,14 @@ import ( "github.com/esnet/gdg/internal/config" "github.com/esnet/gdg/internal/service" "github.com/google/uuid" - "github.com/ory/dockertest/v3" - log "github.com/sirupsen/logrus" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + "golang.org/x/exp/maps" "gopkg.in/yaml.v3" - "net" + "log" + "log/slog" "os" + "slices" "sync" "time" @@ -21,97 +24,113 @@ import ( "github.com/stretchr/testify/assert" ) -var minioPortResource *dockertest.Resource -var grafanaResource *dockertest.Resource +var minioContainer testcontainers.Container +var grafnaContainer testcontainers.Container -func setupMinioContainer(pool *dockertest.Pool, wg *sync.WaitGroup) { +type Containers struct { + Cancel context.CancelFunc + Container testcontainers.Container +} + +func setupMinioContainer(wg *sync.WaitGroup, channels chan Containers) { // pulls an image, creates a container based on it and runs it defer wg.Done() - resource, err := pool.Run("bitnami/minio", "latest", - []string{"MINIO_ROOT_USER=test", "MINIO_ROOT_PASSWORD=secretsss"}) + + ctx := 
context.Background() + req := testcontainers.ContainerRequest{ + Image: "bitnami/minio:latest", + ExposedPorts: []string{"9000/tcp"}, + Env: map[string]string{"MINIO_ROOT_USER": "test", "MINIO_ROOT_PASSWORD": "secretsss"}, + WaitingFor: wait.ForListeningPort("9000/tcp"), + } + minioC, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: req, + Started: true, + }) if err != nil { - log.Fatalf("Could not start resource: %s", err) + panic(err) } - minioPortResource = resource - validatePort(resource, 5*time.Second, []string{"9000"}, "Unable to connect to minio container. Cannot run test") - log.Info("Minio container is up and running") - -} - -func validatePort(resource *dockertest.Resource, delay time.Duration, ports []string, errorMsg string) { - time.Sleep(delay) - for _, port := range ports { - timeout := time.Second - actualPort := resource.GetPort(fmt.Sprintf("%s/tcp", port)) - conn, err := net.DialTimeout("tcp", net.JoinHostPort("localhost", actualPort), timeout) - if err != nil { - fmt.Println("Connecting error:", err) - log.Fatalf(errorMsg) - } - - if conn != nil { - defer conn.Close() + slog.Info("Minio container is up and running") + cancel := func() { + if err := minioC.Terminate(ctx); err != nil { + panic(err) + } else { + slog.Info("Minio container has been terminated") } } + result := Containers{ + Cancel: cancel, + Container: minioC, + } + channels <- result } -func setupGrafanaContainer(pool *dockertest.Pool, wg *sync.WaitGroup) { +func setupGrafanaContainer(wg *sync.WaitGroup, channels chan Containers) { // pulls an image, creates a container based on it and runs it defer wg.Done() - resource, err := pool.Run("grafana/grafana", "10.0.0-ubuntu", - []string{"GF_INSTALL_PLUGINS=grafana-googlesheets-datasource", "GF_AUTH_ANONYMOUS_ENABLED=true"}) - if err != nil { - log.Fatalf("Could not start resource: %s", err) + ctx := context.Background() + req := testcontainers.ContainerRequest{ + Image: 
"grafana/grafana:10.0.0-ubuntu", + ExposedPorts: []string{"3000/tcp"}, + Env: map[string]string{ + "GF_INSTALL_PLUGINS": "grafana-googlesheets-datasource", + "GF_AUTH_ANONYMOUS_ENABLED": "true", + }, + WaitingFor: wait.ForListeningPort("3000/tcp"), } - grafanaResource = resource - - validatePort(resource, 5*time.Second, []string{"3000"}, "Unable to connect to grafana container. Cannot run test") - - log.Info("Grafana container is up and running") -} - -func setupDockerTest() *dockertest.Pool { - // uses a sensible default on windows (tcp/http) and linux/osx (socket) - pool, err := dockertest.NewPool("") + grafanaC, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: req, + Started: true, + }) if err != nil { - log.Fatalf("Could not construct pool: %s", err) + panic(err) } - // uses pool to try to connect to Docker - err = pool.Client.Ping() - if err != nil { - log.Fatalf("Could not connect to Docker: %s", err) + cancel := func() { + if err := grafanaC.Terminate(ctx); err != nil { + panic(err) + } else { + slog.Info("Grafana Container has been terminated") + } } - - return pool + result := Containers{ + Cancel: cancel, + Container: grafanaC, + } + channels <- result } func TestMain(m *testing.M) { - pool := setupDockerTest() - var wg *sync.WaitGroup = new(sync.WaitGroup) + channels := make(chan Containers, 2) + var wg = new(sync.WaitGroup) wg.Add(2) - log.Infof("Starting at: %s", time.Now().String()) - go setupMinioContainer(pool, wg) - go setupGrafanaContainer(pool, wg) + slog.Info("Starting at", "time", time.Now().String()) + go setupMinioContainer(wg, channels) + go setupGrafanaContainer(wg, channels) wg.Wait() - log.Infof("Ending at: %s", time.Now().String()) - - exitVal := m.Run() + close(channels) + slog.Info("Ending at", "end", time.Now().String()) - // You can't defer this because os.Exit doesn't care for defer - for _, resource := range []*dockertest.Resource{minioPortResource, grafanaResource} { - if 
resource == nil { - log.Warning("No resource set, skipping cleanup") + for entry := range channels { + defer entry.Cancel() + str, err := entry.Container.Ports(context.Background()) + if err != nil { + slog.Error("unable to obtain bound ports for container") continue } - if err := pool.Purge(resource); err != nil { - log.Fatalf("Could not purge resource: %s", err) - } else { - log.Info("Resource has been purged") + keys := maps.Keys(str) + if slices.Contains(keys, "9000/tcp") { + minioContainer = entry.Container + } + if slices.Contains(keys, "3000/tcp") { + grafnaContainer = entry.Container + } + } + exitVal := m.Run() os.Exit(exitVal) } @@ -166,13 +185,12 @@ func createSimpleClient(t *testing.T, cfgName *string) (service.GrafanaService, *cfgName = "testing.yml" } - actualPort := grafanaResource.GetPort(fmt.Sprintf("%s/tcp", "3000")) - err := os.Setenv("GDG_CONTEXTS__TESTING__URL", fmt.Sprintf("http://localhost:%s", actualPort)) - + actualPort, err := grafnaContainer.Endpoint(context.Background(), "") + err = os.Setenv("GDG_CONTEXTS__TESTING__URL", fmt.Sprintf("http://%s", actualPort)) assert.Nil(t, err) config.InitConfig(*cfgName, "'") - conf := config.Config().ViperConfig() + conf := config.Config().GetViperConfig(config.ViperGdgConfig) assert.NotNil(t, conf) //Hack for Local testing contextName := conf.GetString("context_name") @@ -183,7 +201,7 @@ func createSimpleClient(t *testing.T, cfgName *string) (service.GrafanaService, if strings.Contains(path, "test") { err := os.Chdir("..") if err != nil { - log.Warning("unable to set directory to parent") + slog.Warn("unable to set directory to parent") } } return client, conf @@ -193,7 +211,7 @@ func SetupCloudFunction(params []string) (context.Context, service.GrafanaServic _ = os.Setenv(service.InitBucket, "true") bucketName := params[1] - actualPort := minioPortResource.GetPort(fmt.Sprintf("%s/tcp", "9000")) + actualPort, err := minioContainer.Endpoint(context.Background(), "") var m = map[string]string{ 
service.InitBucket: "true", service.CloudType: params[0], @@ -203,11 +221,11 @@ func SetupCloudFunction(params []string) (context.Context, service.GrafanaServic service.BucketName: bucketName, service.Kind: "cloud", service.Custom: "true", - service.Endpoint: fmt.Sprintf("http://localhost:%s", actualPort), + service.Endpoint: fmt.Sprintf("http://%s", actualPort), service.SSLEnabled: "false", } - cfgObj := config.Config().GetAppConfig() + cfgObj := config.Config().GetGDGConfig() defaultCfg := config.Config().GetDefaultGrafanaConfig() defaultCfg.Storage = "test" cfgObj.StorageEngine["test"] = m diff --git a/test/connections_integration_test.go b/test/connections_integration_test.go index 18307413..210b37d2 100644 --- a/test/connections_integration_test.go +++ b/test/connections_integration_test.go @@ -3,10 +3,11 @@ package test import ( "github.com/esnet/gdg/internal/config" "github.com/esnet/gdg/internal/service" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" + + "github.com/grafana/grafana-openapi-client-go/models" + "log/slog" "testing" - log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" ) @@ -16,11 +17,16 @@ func TestConnectionsCRUD(t *testing.T) { } apiClient, _, cleanup := initTest(t, nil) - defer cleanup() + defer func() { + cleanErr := cleanup() + if cleanErr != nil { + slog.Error("unable to clean up after test", slog.Any("err", cleanErr)) + } + }() filtersEntity := service.NewConnectionFilter("") - log.Info("Exporting all connections") + slog.Info("Exporting all connections") apiClient.UploadConnections(filtersEntity) - log.Info("Listing all connections") + slog.Info("Listing all connections") dataSources := apiClient.ListConnections(filtersEntity) assert.Equal(t, len(dataSources), 3) var dsItem *models.DataSourceListItemDTO @@ -33,13 +39,13 @@ func TestConnectionsCRUD(t *testing.T) { assert.NotNil(t, dsItem) validateConnection(t, *dsItem) //Import Dashboards - log.Info("Importing connections") + slog.Info("Importing 
connections") list := apiClient.DownloadConnections(filtersEntity) assert.Equal(t, len(list), len(dataSources)) - log.Info("Deleting connections") + slog.Info("Deleting connections") deleteList := apiClient.DeleteAllConnections(filtersEntity) assert.Equal(t, len(deleteList), len(dataSources)) - log.Info("List connections again") + slog.Info("List connections again") dataSources = apiClient.ListConnections(filtersEntity) assert.Equal(t, len(dataSources), 0) } @@ -49,10 +55,15 @@ func TestConnectionFilter(t *testing.T) { if testing.Short() { t.Skip("skipping integration test") } - _, _, clean := initTest(t, nil) - defer clean() + _, _, cleanup := initTest(t, nil) + defer func() { + cleanErr := cleanup() + if cleanErr != nil { + slog.Error("unable to clean up after test", slog.Any("err", cleanErr)) + } + }() - testingContext := config.Config().GetAppConfig().GetContexts()["testing"] + testingContext := config.Config().GetGDGConfig().GetContexts()["testing"] testingContext.GetDataSourceSettings().FilterRules = []config.MatchingRule{ { Field: "name", @@ -64,15 +75,15 @@ func TestConnectionFilter(t *testing.T) { Regex: "elasticsearch|globalnoc-tsds-datasource", }, } - testingContext = config.Config().GetAppConfig().GetContexts()["testing"] + testingContext = config.Config().GetGDGConfig().GetContexts()["testing"] _ = testingContext apiClient := service.NewApiService("dummy") filtersEntity := service.NewConnectionFilter("") - log.Info("Exporting all connections") + slog.Info("Exporting all connections") apiClient.UploadConnections(filtersEntity) - log.Info("Listing all connections") + slog.Info("Listing all connections") dataSources := apiClient.ListConnections(filtersEntity) assert.Equal(t, len(dataSources), 2) var dsItem *models.DataSourceListItemDTO @@ -85,13 +96,13 @@ func TestConnectionFilter(t *testing.T) { assert.NotNil(t, dsItem) validateConnection(t, *dsItem) //Import Dashboards - log.Info("Importing connections") + slog.Info("Importing connections") list := 
apiClient.DownloadConnections(filtersEntity) assert.Equal(t, len(list), len(dataSources)) - log.Info("Deleting connections") + slog.Info("Deleting connections") deleteList := apiClient.DeleteAllConnections(filtersEntity) assert.Equal(t, len(deleteList), len(dataSources)) - log.Info("List connections again") + slog.Info("List connections again") dataSources = apiClient.ListConnections(filtersEntity) assert.Equal(t, len(dataSources), 0) } diff --git a/test/dashboard_integration_test.go b/test/dashboard_integration_test.go index 1010d5c5..ed2015b6 100644 --- a/test/dashboard_integration_test.go +++ b/test/dashboard_integration_test.go @@ -1,39 +1,42 @@ package test import ( + "encoding/json" "github.com/esnet/gdg/internal/config" "github.com/esnet/gdg/internal/service" "github.com/esnet/gdg/internal/service/filters" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" + "github.com/grafana/grafana-openapi-client-go/models" + "os" "strings" "testing" - log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" - "golang.org/x/exp/slices" + "log/slog" + "slices" ) -//TODO: with full CRUD. 
-// - Add single dashboard test -d <> -// - Add Folder dashboard test -f <> - func TestDashboardCRUD(t *testing.T) { if testing.Short() { t.Skip("skipping integration test") } apiClient, _, cleanup := initTest(t, nil) - defer cleanup() + defer func() { + err := cleanup() + if err != nil { + slog.Warn("Unable to clean up after dashboard tests") + } + }() filtersEntity := service.NewDashboardFilter("", "", "") - log.Info("Exporting all dashboards") + slog.Info("Exporting all dashboards") apiClient.UploadDashboards(filtersEntity) - log.Info("Listing all dashboards") + slog.Info("Listing all dashboards") boards := apiClient.ListDashboards(filtersEntity) - log.Infof("Imported %d dashboards", len(boards)) + slog.Info("Imported dashboards", "count", len(boards)) ignoredSkipped := true var generalBoard *models.Hit var otherBoard *models.Hit for ndx, board := range boards { - log.Infof(board.Slug) + slog.Info(board.Slug) if board.Slug == "latency-patterns" { ignoredSkipped = false } @@ -49,18 +52,92 @@ func TestDashboardCRUD(t *testing.T) { assert.True(t, ignoredSkipped) validateGeneralBoard(t, generalBoard) validateOtherBoard(t, otherBoard) + //Validate filters + + filterFolder := service.NewDashboardFilter("Other", "", "") + boards = apiClient.ListDashboards(filterFolder) + assert.Equal(t, 8, len(boards)) + dashboardFilter := service.NewDashboardFilter("", "flow-information", "") + boards = apiClient.ListDashboards(dashboardFilter) + assert.Equal(t, 1, len(boards)) + //Import Dashboards - log.Info("Importing Dashboards") + numBoards := 16 + slog.Info("Importing Dashboards") list := apiClient.DownloadDashboards(filtersEntity) - assert.Equal(t, len(list), len(boards)) - log.Info("Deleting Dashboards") + assert.Equal(t, len(list), numBoards) + slog.Info("Deleting Dashboards") deleteList := apiClient.DeleteAllDashboards(filtersEntity) - assert.Equal(t, len(deleteList), len(boards)) - log.Info("List Dashboards again") + assert.Equal(t, len(deleteList), numBoards) + 
slog.Info("List Dashboards again") boards = apiClient.ListDashboards(filtersEntity) assert.Equal(t, len(boards), 0) } +func TestDashboardCRUDTags(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + apiClient, _, cleanup := initTest(t, nil) + defer func() { + err := cleanup() + if err != nil { + slog.Warn("Unable to clean up after dashboard tests") + } + }() + + data, err := json.Marshal([]string{"netsage"}) + assert.NoError(t, err) + filtersEntity := service.NewDashboardFilter("", "", string(data)) + + slog.Info("Uploading all dashboards, filtered by tags") + apiClient.UploadDashboards(filtersEntity) + slog.Info("Listing all dashboards") + boards := apiClient.ListDashboards(filtersEntity) + slog.Info("Removing all dashboards") + assert.Equal(t, 13, len(boards)) + deleteList := apiClient.DeleteAllDashboards(filtersEntity) + assert.Equal(t, 13, len(deleteList)) + //Multiple Tags behavior + slog.Info("Uploading all dashboards, filtered by tags") + data, err = json.Marshal([]string{"flow"}) + assert.NoError(t, err) + filtersEntity = service.NewDashboardFilter("", "", string(data)) + apiClient.UploadDashboards(filtersEntity) + slog.Info("Listing all dashboards") + boards = apiClient.ListDashboards(filtersEntity) + assert.Equal(t, 8, len(boards)) + slog.Info("Removing all dashboards") + deleteList = apiClient.DeleteAllDashboards(filtersEntity) + assert.Equal(t, 8, len(deleteList)) + // + os.Setenv("GDG_CONTEXTS__TESTING__IGNORE_FILTERS", "true") + defer os.Unsetenv("") + apiClient, _ = createSimpleClient(t, nil) + filterNone := service.NewDashboardFilter("", "", "") + apiClient.UploadDashboards(filterNone) + //Listing with no filter + boards = apiClient.ListDashboards(filterNone) + assert.Equal(t, 16, len(boards)) + + data, err = json.Marshal([]string{"flow"}) + assert.NoError(t, err) + filtersEntity = service.NewDashboardFilter("", "", string(data)) + + slog.Info("Listing dashboards by tag") + boards = 
apiClient.ListDashboards(filtersEntity) + assert.Equal(t, 8, len(deleteList)) + //Listing with + data, err = json.Marshal([]string{"flow", "netsage"}) + assert.NoError(t, err) + filtersEntity = service.NewDashboardFilter("", "", string(data)) + + boards = apiClient.ListDashboards(filtersEntity) + assert.Equal(t, 13, len(boards)) + deleteList = apiClient.DeleteAllDashboards(filtersEntity) + assert.Equal(t, 13, len(deleteList)) +} + func TestDashboardTagsFilter(t *testing.T) { if testing.Short() { t.Skip("skipping integration test") @@ -69,30 +146,31 @@ func TestDashboardTagsFilter(t *testing.T) { defer cleanup() emptyFilter := filters.NewBaseFilter() - filtersEntity := service.NewDashboardFilter("", "", "") - filtersEntity.AddFilter(filters.TagsFilter, strings.Join([]string{"flow", "netsage"}, ",")) + data, err := json.Marshal([]string{"flow", "netsage"}) + assert.NoError(t, err) + filtersEntity := service.NewDashboardFilter("", "", string(data)) - log.Info("Exporting all dashboards") + slog.Info("Exporting all dashboards") apiClient.UploadDashboards(emptyFilter) - log.Info("Listing all dashboards") + slog.Info("Listing all dashboards") boards := apiClient.ListDashboards(filtersEntity) - log.Infof("Imported %d dashboards", len(boards)) + slog.Info("Imported %d dashboards", "count", len(boards)) for _, board := range boards { validateTags(t, board) } //Import Dashboards - log.Info("Importing Dashboards") + slog.Info("Importing Dashboards") list := apiClient.DownloadDashboards(filtersEntity) assert.Equal(t, len(list), len(boards)) - log.Info("Deleting Dashboards") + slog.Info("Deleting Dashboards") deleteList := apiClient.DeleteAllDashboards(filtersEntity) assert.Equal(t, len(deleteList), len(boards)) - log.Info("List Dashboards again") + slog.Info("List Dashboards again") boards = apiClient.ListDashboards(filtersEntity) assert.Equal(t, len(boards), 0) } @@ -105,13 +183,14 @@ func TestWildcardFilter(t *testing.T) { // Setup Filters apiClient, _, cleanup := initTest(t, 
nil) defer cleanup() - emptyFilter := filters.NewBaseFilter() + emptyFilter := service.NewDashboardFilter("", "", "") - filtersEntity := service.NewDashboardFilter("", "", "") - filtersEntity.AddFilter(filters.TagsFilter, strings.Join([]string{"flow", "netsage"}, ",")) + data, err := json.Marshal([]string{"flow", "netsage"}) + assert.NoError(t, err) + filtersEntity := service.NewDashboardFilter("", "", string(data)) // Enable Wildcard - testingContext := config.Config().GetAppConfig().GetContexts()["testing"] + testingContext := config.Config().GetGDGConfig().GetContexts()["testing"] testingContext.GetFilterOverrides().IgnoreDashboardFilters = true assert.True(t, testingContext.GetFilterOverrides().IgnoreDashboardFilters) @@ -125,23 +204,23 @@ func TestWildcardFilter(t *testing.T) { assert.Equal(t, len(boards), len(boards_filtered)) // Testing Listing with Wildcard - log.Info("Listing all dashboards without filter") + slog.Info("Listing all dashboards without filter") boards = apiClient.ListDashboards(emptyFilter) - log.Info("Listing all dashboards ignoring filter") + slog.Info("Listing all dashboards ignoring filter") boards_filtered = apiClient.ListDashboards(filtersEntity) - assert.Equal(t, len(boards), len(boards_filtered)) + assert.Equal(t, 14, len(boards_filtered)) - log.Info("Importing Dashboards") + slog.Info("Importing Dashboards") list := apiClient.DownloadDashboards(emptyFilter) assert.Equal(t, len(list), len(boards)) - log.Info("Deleting Dashboards") + slog.Info("Deleting Dashboards") deleteList := apiClient.DeleteAllDashboards(emptyFilter) assert.Equal(t, len(deleteList), len(boards)) - log.Info("List Dashboards again") + slog.Info("List Dashboards again") boards = apiClient.ListDashboards(filtersEntity) assert.Equal(t, len(boards), 0) } @@ -172,7 +251,10 @@ func validateGeneralBoard(t *testing.T, board *models.Hit) { func validateTags(t *testing.T, board *models.Hit) { assert.True(t, board.UID != "") - assert.Equal(t, len(board.Tags), 2) - 
assert.True(t, slices.Contains(board.Tags, "netsage")) - assert.True(t, slices.Contains(board.Tags, "flow")) + assert.True(t, len(board.Tags) > 0) + all_tags := []string{"netsage", "flow"} + for _, tag := range board.Tags { + assert.True(t, slices.Contains(all_tags, tag)) + + } } diff --git a/test/data/org_1/dashboards/General/top-talkers-over-time.json b/test/data/org_1/dashboards/General/top-talkers-over-time.json index ca9fc565..dd893526 100644 --- a/test/data/org_1/dashboards/General/top-talkers-over-time.json +++ b/test/data/org_1/dashboards/General/top-talkers-over-time.json @@ -300,7 +300,7 @@ ], "schemaVersion": 26, "style": "dark", - "tags": [], + "tags": ["netsage", "moo", "flow"], "templating": { "list": [ { diff --git a/test/data/secure/complex.json b/test/data/secure/complex.json new file mode 100644 index 00000000..d16c303c --- /dev/null +++ b/test/data/secure/complex.json @@ -0,0 +1,4 @@ +{ + "user": "test", + "basicAuthPassword": "secret" +} diff --git a/test/data/secure/default.json b/test/data/secure/default.json new file mode 100644 index 00000000..cd499466 --- /dev/null +++ b/test/data/secure/default.json @@ -0,0 +1,4 @@ +{ + "basicAuthPassword": "password", + "user": "user" +} diff --git a/test/data/templates/template_example.go.tmpl b/test/data/templates/template_example.go.tmpl new file mode 100644 index 00000000..a5d949d3 --- /dev/null +++ b/test/data/templates/template_example.go.tmpl @@ -0,0 +1,12785 @@ +{ + "annotations": { + "list": [ + { + "hashKey": "{{ .title | lower | ToSlug }}", + "builtIn": 1, + "datasource": "{{ default "elasticsearch" .datasource }}", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations \u0026 Alerts", + "type": "{{ uuidv4 }}" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 1, + "id": 62, + "iteration": 1618867139860, + "links": [], + "panels": [ + { + "__netsage_template": "navigation", + "choices": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11], + 
"cycleview": true, + "dashboardselection": true, + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "grafanafavorites": true, + "gridPos": { + "h": 3, + "w": 2, + "x": 0, + "y": 0 + }, + "hamburgerPath": "https://portal.netsage.global/hamburger-v4.gif", + "id": 1, + "link_text": [ + {{if .enabledlight}} + {{ range $v := .lightsources}} + "{{ $v }}", + {{ end }} + {{ end }} + ], + "link_url": [ + "{{ .lightsources | join "," }}", + "/grafana/d/000000003/bandwidth-dashboard", + "/grafana/d/xk26IFhmk/flow-data", + "/grafana/d/QfzDJKhik/flow-data-per-organization", + "/grafana/d/-l3_u8nWk/individual-flows", + "/grafana/d/fgrOzz_mk/flow-data-per-country", + "/grafana/d/WNn1qyaiz/flows-by-science-discipline", + "/grafana/d/ie7TeomGz/flow-data-for-projects", + "/grafana/d/b35BWxAZz/top-talkers-over-time", + "/grafana/d/ufIS9W7Zk/science-discipline-patterns", + "/grafana/d/000000004/bandwidth-patterns", + "/grafana/d/CJC1FFhmz/other-flow-stats", + "/grafana/d/VuuXrnPWz/flow-analysis" + ], + "links": [], + "sharescreen": true, + "sideLogoPath": "https://portal.netsage.global/netsage-header-logo.png", + "sidebar": true, + "tablefilters": true, + "title": "", + "topLogoPath": "https://portal.netsage.global/netsage-cropped.png", + "transparent": true, + "type": "netsagenavigation" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 20, + "x": 2, + "y": 0 + }, + "id": 2, + "links": [], + "options": { + "content": "\u003ccenter\u003e\u003ch1\u003e\u003cb\u003eBandwidth Dashboard\u003c/b\u003e\u003c/h1\u003e\u003c/center\u003e\n\u003ccenter\u003eThe map shows the minimum, maximum, and average bandwidth utilization of the circuits and exchange points over the selected time period.\u003c/center\u003e\n\u003ccenter\u003eThe rows below the map show each of the links in more detail, including traffic rate and total volume 
transferred.\u003c/center\u003e\n\u003ccenter\u003eA combined view of the average and maximum bandwidth utilization is shown at the bottom of the page.\u003c/center\u003e\n\u003ccenter\u003eAll times are displayed in browser local time\u003c/center\u003e\n\n\u003c!-- Global site tag (gtag.js) - Google Analytics --\u003e\n\u003cscript async src=\"https://www.googletagmanager.com/gtag/js?id=UA-142763676-1\"\u003e\u003c/script\u003e\n\u003cscript\u003e\n window.dataLayer = window.dataLayer || [];\n function gtag(){dataLayer.push(arguments);}\n gtag('js', new Date());\n\n gtag('config', 'UA-142763676-1');\n\u003c/script\u003e", + "mode": "html" + }, + "pluginVersion": "7.3.3", + "title": "", + "transparent": true, + "type": "text" + }, + { + "cacheTimeout": null, + "datasource": "Netsage TSDS", + "fieldConfig": { + "defaults": { + "custom": {}, + "decimals": 1, + "mappings": [ + { + "id": 0, + "op": "=", + "text": "N/A", + "type": 1, + "value": "null" + } + ], + "nullValueMode": "connected", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "light-blue", + "value": null + }, + { + "color": "light-blue", + "value": 0 + }, + { + "color": "light-blue", + "value": 1 + } + ] + }, + "unit": "bps" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 6, + "x": 0, + "y": 4 + }, + "id": 3, + "interval": null, + "links": [], + "maxDataPoints": 100, + "options": { + "colorMode": "value", + "fieldOptions": { + "calcs": ["mean"] + }, + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "reduceOptions": { + "calcs": ["mean"], + "fields": "", + "values": false + }, + "textMode": "auto" + }, + "pluginVersion": "7.3.3", + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + 
"condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": " ", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": {}, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["Select Metric"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": [""], + "rawQuery": true, + "refId": "A", + "series": "select table", + "target": "get max(aggregate(values.output, $quantify, max)) between ($START,$END) from interface where link_name != null", + "target_alias": "", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "Select Metric", + "op": "", + "right": "" + } + ] + ] + } + ], + "title": "Single Link Max A-Z", + "transparent": true, + "type": "stat" + }, + { + "cacheTimeout": null, + "datasource": "Netsage TSDS", + "fieldConfig": { + "defaults": { + "custom": {}, + "decimals": 1, + "mappings": [ + { + "id": 0, + "op": "=", + "text": "N/A", + "type": 1, + "value": "null" + } + ], + "nullValueMode": "connected", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "light-blue", + "value": null + }, + { + "color": "light-blue", + "value": 0 + }, + { + "color": "light-blue", + "value": 1 + } + ] + }, + "unit": "bps" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 6, + "x": 6, + "y": 4 + }, + "id": 4, + "interval": null, + "links": [], + "maxDataPoints": 100, + "options": { + "colorMode": "value", + "fieldOptions": { + "calcs": 
["mean"] + }, + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "reduceOptions": { + "calcs": ["mean"], + "fields": "", + "values": false + }, + "textMode": "auto" + }, + "pluginVersion": "7.3.3", + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": " ", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": {}, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["Select Metric"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": [""], + "rawQuery": true, + "refId": "A", + "series": "select table", + "target": "get max(aggregate(values.input, $quantify, max)) between ($START,$END) from interface where link_name != null", + "target_alias": "", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "Select Metric", + "op": "", + "right": "" + } + ] + ] + } + ], + "title": "Single Link Max Z-A", + "transparent": true, + "type": "stat" + }, + { + "cacheTimeout": null, + "datasource": "Netsage TSDS", + "fieldConfig": { + "defaults": { + "custom": {}, + "decimals": 2, + "mappings": [ + { + "id": 0, + "op": "=", 
+ "text": "N/A", + "type": 1, + "value": "null" + } + ], + "nullValueMode": "connected", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "light-blue", + "value": null + }, + { + "color": "light-blue", + "value": 0 + }, + { + "color": "light-blue", + "value": 1 + } + ] + }, + "unit": "bps" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 7, + "x": 12, + "y": 4 + }, + "id": 5, + "interval": null, + "links": [], + "maxDataPoints": 100, + "options": { + "colorMode": "value", + "fieldOptions": { + "calcs": ["mean"] + }, + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "reduceOptions": { + "calcs": ["mean"], + "fields": "", + "values": false + }, + "textMode": "auto" + }, + "pluginVersion": "7.3.3", + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": " ", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": {}, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["Select Metric"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": [""], + "rawQuery": true, + "refId": "A", + "series": "select table", + "target": "get input_avg + output_avg as 
values.total_avg from ( get average(aggregate(values.input, $quantify, average)) as input_avg, average(aggregate(values.output, $quantify, average)) as output_avg between ($START,$END) by nothing from interface where link_name != null )", + "target_alias": "", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "Select Metric", + "op": "", + "right": "" + } + ] + ] + } + ], + "title": "Average Across All Links", + "transparent": true, + "type": "stat" + }, + { + "cacheTimeout": null, + "datasource": "Netsage TSDS", + "fieldConfig": { + "defaults": { + "custom": {}, + "mappings": [ + { + "id": 0, + "op": "=", + "text": "N/A", + "type": 1, + "value": "null" + } + ], + "nullValueMode": "connected", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "light-blue", + "value": null + }, + { + "color": "light-blue", + "value": 0 + }, + { + "color": "light-blue", + "value": 1 + } + ] + }, + "unit": "decbytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 5, + "x": 19, + "y": 4 + }, + "id": 6, + "interval": null, + "links": [], + "maxDataPoints": 100, + "options": { + "colorMode": "value", + "fieldOptions": { + "calcs": ["sum"] + }, + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "reduceOptions": { + "calcs": ["sum"], + "fields": "", + "values": false + }, + "textMode": "auto" + }, + "pluginVersion": "7.3.3", + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "", + "align": "", + "bucket": "", + 
"expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": " ", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": {}, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["Select Metric"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": [""], + "rawQuery": true, + "refId": "A", + "series": "select table", + "target": "get total_bytes_sec * 3600 as values.total_bytes from ( get input_bytes_sec_total + output_bytes_sec_total as total_bytes_sec from ( get aggregate(input_bytes_sec, 3600, sum) as input_bytes_sec_total, aggregate(output_bytes_sec, 3600, sum) as output_bytes_sec_total by nothing from ( get aggregate(values.input, 3600, average) / 8 as input_bytes_sec, aggregate(values.output, 3600, average) / 8 as output_bytes_sec between ($START,$END) by link_name from interface where link_name != null ) ) )", + "target_alias": "", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "Select Metric", + "op": "", + "right": "" + } + ] + ] + } + ], + "title": "Total Transferred", + "transparent": true, + "type": "stat" + }, + { + "bing_api_key": "AplW162gFohrU9tZYti5XUCVeCG0ljiq5KgvLQREoqNBCl872zhHUs8PoVZ2j6Fw", + "choices": [1, 2, 4, 5, 7], + "color": { + "cardColor": "#82b5d8", + "colorScale": "linear", + "colorScheme": "interpolateOranges", + "exponent": 0.5, + "fillBackground": false, + "mode": "spectrum" + }, + "colorScheme": "interpolateBlues", + "data": [], + "datasource": "Netsage TSDS", + "downLinkColor": "rgb(200,200,200)", + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 17, + "w": 24, + "x": 
0, + "y": 7 + }, + "hide_layers": false, + "id": 7, + "lat": 10, + "layer": { + "criteria": ["Minimum", "Maximum", "Average", "Current"], + "link": { + "selected": "Current" + }, + "node": { + "selected": "Current" + } + }, + "layers": [ + { + "__baseFactory__": "Layer" + }, + { + "__baseFactory__": "Layer" + }, + { + "__baseFactory__": "Layer" + }, + { + "__baseFactory__": "Layer" + }, + { + "__baseFactory__": "Layer" + } + ], + "legend": { + "adjLoadLegend": { + "horizontal": true + }, + "invert": true, + "legend_colors": [ + "#c8c8ff", + "#c4c4ff", + "#c0c0ff", + "#bcbcff", + "#b8b8ff", + "#b4b4ff", + "#b0b0ff", + "#acacff", + "#a8a8ff", + "#a4a4ff", + "#a0a0ff", + "#9c9cff", + "#9898ff", + "#9494ff", + "#9090ff", + "#8c8cff", + "#8888ff", + "#8484ff", + "#8080ff", + "#7c7cff", + "#7878ff", + "#7474ff", + "#7070ff", + "#6c6cff", + "#6868ff", + "#6464ff", + "#6060ff", + "#5c5cff", + "#5858ff", + "#5454ff", + "#5050ff", + "#4c4cff", + "#4848ff", + "#4444ff", + "#4040ff", + "#3c3cff", + "#3838ff", + "#3434ff", + "#3030ff", + "#2c2cff", + "#2828ff", + "#2424ff", + "#2020ff", + "#1c1cff", + "#1818ff", + "#1414ff", + "#1010ff", + "#0c0cff", + "#0808ff", + "#0404ff" + ], + "mode": "spectrum", + "show": true + }, + "legendTypes": ["opacity", "spectrum", "threshold"], + "line": { + "criteria": ["Minimum", "Maximum", "Average", "Current"], + "selected": "Current" + }, + "links": [], + "lng": -110, + "mapSrc": [ + "{\n \"results\": [\n {\n \"links\": [\n {\n \"endpoints\": [\n {\n \"name\": \"PIREN: Seattle to Oahu 100GE input\",\n \"label\": \"Seattle to Oahu\"\n },\n {\n \"name\": \"PIREN: Seattle to Oahu 100GE output\",\n \"label\": \"Oahu to Seattle\"\n }\n ],\n \"path\": [\n {\n \"lon\": \"-122.3359058\",\n \"lat\": \"47.614848\",\n \"name\": \"Seattle\"\n },\n {\n \"lon\": \"-148.169708\",\n \"lat\": \"37.383253\"\n },\n {\n \"lon\": \"-157.9652284\",\n \"lat\": \"21.4837543\",\n \"name\": \"Oahu\"\n }\n ],\n \"name\": \"PIREN: Seattle to Oahu 100GE\"\n },\n {\n 
\"endpoints\": [\n {\n \"name\": \"PIREN: Los Angeles to Mauna Lani 100GE input\",\n \"label\": \"Los Angeles to Mauna Lani\"\n },\n {\n \"name\": \"PIREN: Los Angeles to Mauna Lani 100GE output\",\n \"label\": \"Mauna Lani to Los Angeles\"\n }\n ],\n \"path\": [\n {\n \"lon\": \"-118.411\",\n \"lat\": \"34.0204989\",\n \"name\": \"Los Angeles\"\n },\n {\n \"lon\": \"-145\",\n \"lat\": \"28\"\n },\n {\n \"lon\": \"-155.8220663\",\n \"lat\": \"20.0233385\",\n \"name\": \"Mauna Lani\"\n }\n ],\n \"name\": \"PIREN: Los Angeles to Mauna Lani 100GE\"\n },\n {\n \"endpoints\": [\n {\n \"name\": \"PIREN: Oahu to Los Angeles 100GE input\",\n \"label\": \"Los Angeles to Oahu\"\n },\n {\n \"name\": \"PIREN: Oahu to Los Angeles 100GE output\",\n \"label\": \"Oahu to Los Angeles\"\n }\n ],\n \"path\": [\n {\n \"lon\": \"-118.411\",\n \"lat\": \"34.0204989\",\n \"name\": \"Los Angeles\"\n },\n {\n \"lon\": \"-147\",\n \"lat\": \"33\"\n },\n {\n \"lon\": \"-157.9652284\",\n \"lat\": \"21.4837543\",\n \"name\": \"Oahu\"\n }\n ],\n \"name\": \"PIREN: Oahu to Los Angeles 100GE\"\n },\n {\n \"endpoints\": [\n {\n \"name\": \"PIREN: Oahu to Sydney 100GE input\",\n \"label\": \"Oahu to Sydney\"\n },\n {\n \"name\": \"PIREN: Oahu to Sydney 100GE output\",\n \"label\": \"Sydney to Oahu\"\n }\n ],\n \"path\": [\n {\n \"lon\": \"-157.9652284\",\n \"lat\": \"21.4837543\",\n \"name\": \"Oahu\"\n },\n {\n \"lon\": \"-185.522049\",\n \"lat\": \"2.218017\"\n },\n {\n \"lon\": \"151.2100445\",\n \"lat\": \"-33.8679519\",\n \"name\": \"Sidney\"\n }\n ],\n \"name\": \"PIREN: Oahu to Sydney 100GE\"\n },\n {\n \"endpoints\": [\n {\n \"name\": \"PIREN: Mauna Lani to Sydney 100GE input\",\n \"label\": \"Mauna Lani to Sydney\"\n },\n {\n \"name\": \"PIREN: Mauna Lani to Sydney 100GE output\",\n \"label\": \"Sydney to Mauna Lani\"\n }\n ],\n \"path\": [\n {\n \"lon\": \"-155.8220663\",\n \"lat\": \"20.0233385\",\n \"name\": \"Mauna Lani\"\n },\n {\n \"lon\": \"185.8654694\",\n \"lat\": \"-58.863023\"\n 
},\n {\n \"lon\": \"151.2100445\",\n \"lat\": \"-33.8679519\",\n \"name\": \"Sidney\"\n }\n ],\n \"name\": \"PIREN: Mauna Lani to Sydney 100GE\"\n },\n {\n \"endpoints\": [\n {\n \"name\": \"PIREN: Oahu to Guam 100GE input\",\n \"label\": \"Guam to Oahu\"\n },\n {\n \"name\": \"PIREN: Oahu to Guam 100GE output\",\n \"label\": \"Oahu to Guam\"\n }\n ],\n \"path\": [\n {\n \"lon\": \"-157.9652284\",\n \"lat\": \"21.4837543\",\n \"name\": \"Oahu\"\n },\n {\n \"lon\": \"-193.823547\",\n \"lat\": \"24.006939\"\n },\n {\n \"lon\": \"144.7937\",\n \"lat\": \"13.4443\",\n \"name\": \"Guam\"\n }\n ],\n \"name\": \"PIREN: Oahu to Guam 100GE\"\n }\n ],\n \"endpoints\": [\n {\n \"lon\": \"-122.3359058\",\n \"lat\": \"47.614848\",\n \"name\": \"Seattle\"\n },\n {\n \"lon\": \"-118.411\",\n \"lat\": \"34.0204989\",\n \"name\": \"Los Angeles\"\n },\n {\n \"lon\": \"-157.9652284\",\n \"lat\": \"21.4837543\",\n \"name\": \"Oahu\"\n },\n {\n \"lon\": \"151.2100445\",\n \"lat\": \"-33.8679519\",\n \"name\": \"Sidney\"\n },\n {\n \"lon\": \"174.8654694\",\n \"lat\": \"-36.863023\",\n \"name\": \"Auckland\"\n },\n {\n \"lon\": \"-155.8220663\",\n \"lat\": \"20.0233385\",\n \"name\": \"Mauna Lani\"\n },\n {\n \"lon\": \"144.7937\",\n \"lat\": \"13.4443\",\n \"name\": \"Guam\"\n }\n ]\n }\n ]\n}", + "{\n \"results\": [\n {\n \"links\": [\n {\n \"endpoints\": [\n {\n \"name\": \"TransPAC: Seattle to Tokyo 100GE input\",\n \"label\": \"Tokyo to Seattle\"\n },\n {\n \"name\": \"TransPAC: Seattle to Tokyo 100GE output\",\n \"label\": \"Seattle to Tokyo\"\n }\n ],\n \"path\": [\n {\n \"lon\": \"139.853142695116\",\n \"order\": \"10\",\n \"lat\": \"35.7653023546885\"\n },\n {\n \"lon\": \" -172.86232\",\n \"lat\": \"47.60894\"\n },\n {\n \"lon\": \"-122.335927373024\",\n \"order\": \"20\",\n \"lat\": \"47.5652166492485\"\n }\n ],\n \"name\": \"TransPAC: Seattle to Tokyo 100GE\"\n }\n ],\n \"endpoints\": [\n {\n \"pop_id\": null,\n \"lon\": \"139.853142695116\",\n \"real_lon\": null,\n 
\"real_lat\": null,\n \"name\": \"TOKY\",\n \"lat\": \"35.7653023546885\"\n },\n {\n \"pop_id\": null,\n \"lon\": \"-122.335927373024\",\n \"real_lon\": null,\n \"real_lat\": null,\n \"name\": \"SEAT-TP\",\n \"lat\": \"47.5652166492485\"\n }\n ]\n }\n ]\n}", + "{\n \"results\": [\n {\n \"links\": [\n {\n \"endpoints\": [\n {\n \"name\": \"NEAAR: New York to London 100GE input\",\n \"label\": \"London to New York\"\n },\n {\n \"name\": \"NEAAR: New York to London 100GE output\",\n \"label\": \"New York to London\"\n }\n ],\n \"path\": [\n {\n \"lon\": \"-74.004561\",\n \"order\": \"10\",\n \"lat\": \"40.72\"\n },\n {\n \"lon\": \"-40.803412\",\n \"lat\": \"42.538777\"\n },\n {\n \"lon\": \"-0.127758\",\n \"order\": \"20\",\n \"lat\": \"51.507351\"\n }\n ],\n \"name\": \"NEAAR: New York to London 100GE\"\n },\n {\n \"endpoints\": [\n {\n \"name\": \"NEAAR: New York to Amsterdam 100GE input\",\n \"label\": \"Amsterdam to New York\"\n },\n {\n \"name\": \"NEAAR: New York to Amsterdam 100GE output\",\n \"label\": \"New York to Amsterdam\"\n }\n ],\n \"path\": [\n {\n \"lon\": \"-74.004561\",\n \"order\": \"10\",\n \"lat\": \"40.72\"\n },\n {\n \"lon\": \"-20.803412\",\n \"lat\": \"59.538777\"\n },\n {\n \"lon\": \"4.897070\",\n \"order\": \"20\",\n \"lat\": \"52.377956\"\n }\n ],\n \"name\": \"NEAAR: New York to Amsterdam 100GE\"\n }\n ],\n \"name\": \"Backbone\",\n \"endpoints\": [\n {\n \"lon\": \"-74.004561\",\n \"real_lon\": \"-74.004561\",\n \"real_lat\": \"40.72\",\n \"name\": \"NEWY32AOA\",\n \"lat\": \"40.72\"\n },\n {\n \"lon\": \"-0.127758\",\n \"real_lon\": null,\n \"real_lat\": null,\n \"name\": \"GEANT London\",\n \"lat\": \"51.507351\"\n },\n {\n \"lon\": \"4.897070\",\n \"real_lon\": null,\n \"real_lat\": null,\n \"name\": \"Netherlight Amsterdam\",\n \"lat\": \"52.377956\"\n }\n ],\n \"network_abbr_name\": \"NEAAR\",\n \"network_name\": \"NEAAR\"\n }\n ]\n}", + "{\n \"results\": [\n {\n \"links\": [\n {\n \"endpoints\": [\n {\n \"name\": \"TransPAC: Hong 
Kong to Guam 10GE input\",\n \"label\": \"Guam to Hong Kong\"\n },\n {\n \"name\": \"TransPAC: Hong Kong to Guam 10GE output\",\n \"label\": \"Hong Kong to Guam\"\n }\n ],\n \"path\": [\n {\n \"lon\": \"114.1095\",\n \"order\": \"10\",\n \"lat\": \"22.3964\"\n },\n {\n \"lon\": \"129.35165\",\n \"lat\": \"21.06656\"\n },\n {\n \"lon\": \"144.7937\",\n \"order\": \"20\",\n \"lat\": \"13.4443\"\n }\n ],\n \"name\": \"TransPAC: Guam to Hong Kong 10GE\"\n }\n ],\n \"endpoints\": [\n {\n \"lon\": \"114.1095\",\n \"lat\": \"22.3964\",\n \"name\": \"Hong Kong\"\n },\n {\n \"lon\": \"144.7937\",\n \"lat\": \"13.4443\",\n \"name\": \"Guam\"\n }\n ],\n \"network_abbr_name\": \"TP\",\n \"network_name\": \"TransPAC\"\n }\n ]\n}", + "{\n \"results\": [\n {\n \"name\": \"Pacific Wave Exchange\",\n \"endpoints\": [\n {\n \"lon\": \"-122.335927373024\",\n \"name\": \"SEATTLE\",\n \"lat\": \"47.5652166492485\"\n },\n {\n \"lon\": \"-118.411\",\n \"lat\": \"34.0204989\",\n \"name\": \"Los Angeles\"\n }\n ],\n \"links\": [\n {\n \"name\": \"Pacific Wave Exchange\",\n \"path\": [\n {\n \"lon\": \"-122.335927373024\",\n \"order\": \"10\",\n \"lat\": \"47.5652166492485\"\n },\n {\n \"lon\": \"-114.335927373024\",\n \"order\": \"20\",\n \"lat\": \"40\"\n },\n {\n \"lon\": \"-115.411\",\n \"order\": \"30\",\n \"lat\": \"20.8\"\n },\n {\n \"lon\": \"-125.411\",\n \"order\": \"40\",\n \"lat\": \"40.0204989\"\n },\n {\n \"lon\": \"-122.335927373024\",\n \"order\": \"50\",\n \"lat\": \"47.5652166492485\"\n }\n ],\n \"endpoints\": [\n {\n \"name\": \"CENIC values.input\",\n \"label\": \"Inbound to CENIC\"\n },\n {\n \"name\": \"CENIC values.output\",\n \"label\": \"Outbound from CENIC\"\n }\n ]\n }\n ]\n }\n ]\n}", + "" + ], + "map_tile_url": "https://stamen-tiles-{s}.a.ssl.fastly.net/toner-lite/{z}/{x}/{y}{r}.png", + "max": [ + "100000000000", + "100000000000", + "100000000000", + "10000000000", + "100000000000" + ], + "min": ["0", "0", "0", "0", "0"], + "name": ["PIREN", "TransPAC", "NEAAR", 
"HONG", "CENIC"], + "nodeFillColor": "#eab839", + "opacityScales": ["linear", "sqrt"], + "opacity_values": [], + "rgb_values": [ + "rgb(200,200,255)", + "rgb(196,196,255)", + "rgb(192,192,255)", + "rgb(188,188,255)", + "rgb(184,184,255)", + "rgb(180,180,255)", + "rgb(176,176,255)", + "rgb(172,172,255)", + "rgb(168,168,255)", + "rgb(164,164,255)", + "rgb(160,160,255)", + "rgb(156,156,255)", + "rgb(152,152,255)", + "rgb(148,148,255)", + "rgb(144,144,255)", + "rgb(140,140,255)", + "rgb(136,136,255)", + "rgb(132,132,255)", + "rgb(128,128,255)", + "rgb(124,124,255)", + "rgb(120,120,255)", + "rgb(116,116,255)", + "rgb(112,112,255)", + "rgb(108,108,255)", + "rgb(104,104,255)", + "rgb(100,100,255)", + "rgb(96,96,255)", + "rgb(92,92,255)", + "rgb(88,88,255)", + "rgb(84,84,255)", + "rgb(80,80,255)", + "rgb(76,76,255)", + "rgb(72,72,255)", + "rgb(68,68,255)", + "rgb(64,64,255)", + "rgb(60,60,255)", + "rgb(56,56,255)", + "rgb(52,52,255)", + "rgb(48,48,255)", + "rgb(44,44,255)", + "rgb(40,40,255)", + "rgb(36,36,255)", + "rgb(32,32,255)", + "rgb(28,28,255)", + "rgb(24,24,255)", + "rgb(20,20,255)", + "rgb(16,16,255)", + "rgb(12,12,255)", + "rgb(8,8,255)", + "rgb(4,4,255)" + ], + "size": ["3", "3", "3", "3", "3"], + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [""], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "input", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + }, + { + "alias": "output", + "align": 
"", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 60, average)": "input", + "aggregate(values.output, 60, average)": "output" + }, + "metricValueAliases": ["AZ", ""], + "metricValues_array": ["input", "Select Metric Value"], + "metric_array": ["link_name", "node"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": false, + "refId": "A", + "series": "interface", + "target": "get link_name, node, aggregate(values.input, 60, average),aggregate(values.output, 60, average) between (1570571291, 1570657691) by link_name from interface where (link_name like \".+\")", + "target_alias": "$link_name $VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "$$hashKey": "object:453", + "left": "link_name", + "op": "like", + "right": ".+" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": {}, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["Select Metric"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "D", + "series": "select table", + "target": "get 
node, intf, aggregate(values.input, $quantify, sum) as values.input, aggregate(values.output, $quantify, sum) as values.output by nothing from ( get node, intf, aggregate(values.input, $quantify, average) as values.input,aggregate(values.output, $quantify, average) as values.output between ($START, $END) by node, intf from interface where (node = \"wrn-albu-sw-3.cenic.net\" and intf = \"ethernet5/1\") or (node = \"snvl2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet6/15\") or (node = \"wrn-albu-sw-4.cenic.net\" and intf = \"ethernet3/1\") or (node = \"snvl2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet8/3\") or (node = \"snvl2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet8/10\") or (node = \"wrn-albu-sw-3.cenic.net\" and intf = \"ethernet7/1\") or (node = \"losa3-pw-sw-1.cenic.net\" and intf = \"ethernet4/1\") or (node = \"wrn-denv-sw-4.cenic.net\" and intf = \"ethernet7/2\") or (node = \"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet15/2\") or (node = \"snvl2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet1/1\") or (node = \"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet4/1\") or (node = \"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet15/8\") or (node = \"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet7/7\") or (node = \"snvl2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet3/1\") or (node = \"wrn-elpa-sw-1.cenic.net\" and intf = \"ethernet1/1\") or (node = \"losa3-pw-sw-1.cenic.net\" and intf = \"ethernet7/1\") or (node = \"snvl2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet7/2\") or (node = \"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet15/5\") or (node = \"wrn-elpa-sw-1.cenic.net\" and intf = \"ethernet5/1\") or (node = \"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet1/2\") or (node = \"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet16/5\") or (node = \"wrn-denv-sw-3.cenic.net\" and intf = \"ethernet7/1\") or (node = \"snvl2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet6/20\") or (node = 
\"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet13/1\") or (node = \"snvl2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet8/5\") or (node = \"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet15/6\") or (node = \"wrn-denv-sw-4.cenic.net\" and intf = \"ethernet7/1\") or (node = \"wrn-albu-sw-4.cenic.net\" and intf = \"ethernet5/1\") or (node = \"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet3/2\") or (node = \"wrn-denv-sw-4.cenic.net\" and intf = \"ethernet1/1\") or (node = \"wrn-denv-sw-3.cenic.net\" and intf = \"ethernet1/2\") or (node = \"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet1/1\") or (node = \"losa3-pw-sw-1.cenic.net\" and intf = \"ethernet7/2\") or (node = \"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet15/7\") or (node = \"snvl2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet8/9\") or (node = \"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet16/7\") or (node = \"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet6/2\") or (node = \"snvl2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet3/2\") or (node = \"wrn-denv-sw-4.cenic.net\" and intf = \"ethernet5/2\") or (node = \"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet4/2\") or (node = \"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet5/2\") or (node = \"losa2-pw-sw-1-mgmt-2.cenic.net\" and intf = \"ethernet9/1\") or (node = \"wrn-denv-sw-3.cenic.net\" and intf = \"ethernet5/2\") )", + "target_alias": "CENIC $VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "Select Metric", + "op": "", + "right": "" + } + ] + ] + } + ], + "threshold_colors": [], + "title": "", + "to_si": 1000000000, + "tooltip": { + "content": "\u003cdiv class= \"wrapper\" style= \"padding:0px; margin:0px; border-radius:0px; \"\u003e \u003ctable id= \"dataTable\"\u003e\n\u003ctr\u003e\n\u003ctd colspan = \"6\" class = \"mainTitle\"\u003e$name\u003c/td\u003e \u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd colspan = \"3\" class = 
\"mainHead borderBot\"\u003e$input.name\u003c/td\u003e\n\u003ctd colspan = \"3\" class = \"mainHead borderBot\"\u003e$output.name\u003c/td\u003e \u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd colspan = \"1\" class = \"headerTxt\"\u003eMin\u003c/td\u003e\n\u003ctd colspan = \"1\" class = \"headerTxt\"\u003eAvg\u003c/td\u003e\n\u003ctd colspan = \"1\" class = \"headerTxt rightBorder\"\u003eMax\u003c/td\u003e \u003ctd colspan = \"1\" class = \"headerTxt\"\u003eMin\u003c/td\u003e\n\u003ctd colspan = \"1\" class = \"headerTxt\"\u003eAvg\u003c/td\u003e\n\u003ctd colspan = \"1\" class = \"headerTxt \"\u003eMax\u003c/td\u003e\n\u003c/tr\u003e \u003ctr\u003e\n\u003ctd colspan = \"1\" class = \"bodyTxt\"\u003e$input.min\u003c/td\u003e\n\u003ctd colspan = \"1\" class = \"bodyTxt\"\u003e$input.avg\u003c/td\u003e\n\u003ctd colspan = \"1\" class = \"bodyTxt rightBorder\"\u003e$input.max\u003c/td\u003e \u003ctd colspan = \"1\" class = \"bodyTxt\"\u003e$output.min\u003c/td\u003e\n\u003ctd colspan = \"1\" class = \"bodyTxt\"\u003e$output.avg\u003c/td\u003e\n\u003ctd colspan = \"1\" class = \"bodyTxt\"\u003e$output.max\u003c/td\u003e\n\u003c/tr\u003e \u003ctr\u003e\n\u003ctd colspan = \"3\" class = \"mainHead rightBorder\"\u003eGb/s\u003c/td\u003e \u003ctd colspan = \"3\" class = \"mainHead\"\u003eGb/s\u003c/td\u003e\n\u003c/tr\u003e \u003c/table\u003e\n\u003cbr/\u003e\n\u003cstyle\u003e .wrapper{ color:snow;\n}\n.mainTitle{\nfont-size: 2em; //border-bottom:1px solid; padding:4px; font-weight:bold; color:orange; border-color:white;\n}\n.headerTxt{ padding:0px;\n}\n.bodyTxt{ font-weight:bold; font-size:2em; padding:0px;\npadding :1px 5px 5px 5px; }\n.mainHead{ font-weight:bold; font-size:1.2em; padding-left:20px; padding-right:20px;\n}\n.borderBot{ //border-bottom:1px solid; color:rgba(18,175,255,1); border-color:white;\n\n}\n.rightBorder{ border-right:0.5px solid; }\n#dataTable{\ntable-layout: fixed;\nwidth: 100%;\ntext-align: center;\n}\n.atlas-info-div{\nborder-radius: 2px; 
background-color:rgba(0,0,0,0.8); }\nhr{ padding:0px; margin:2px; }\n\u003c/style\u003e", + "node_content": "\u003cdiv style=\"text-align: center; font-size: 20px; color:orange; font-weight:bold; \"\u003e $name\n\u003c/div\u003e\n\u003cstyle\u003e\n.atlas-info-div{\nborder-radius: 2px; background-color:rgba(0,0,0,0.8); }\n\u003c/style\u003e", + "show": true, + "showDefault": true, + "showLinkHover": true, + "showNodeHover": true + }, + "transparent": true, + "twin_tubes": false, + "type": "globalnoc-networkmap-panel", + "use_json": true, + "zoom": 2.7 + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 1, + "w": 2, + "x": 0, + "y": 24 + }, + "id": 8, + "links": [], + "options": { + "content": "", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "title": "Link", + "type": "text" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 1, + "w": 14, + "x": 2, + "y": 24 + }, + "id": 9, + "links": [], + "options": { + "content": "", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "title": "Traffic Rate", + "type": "text" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 1, + "w": 8, + "x": 16, + "y": 24 + }, + "id": 10, + "links": [], + "options": { + "content": "", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "title": "Total Volume", + "type": "text" + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 25 + }, + "id": 11, + "panels": [], + "repeat": "links", + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Boca Raton to Sao Paulo 100GE", + "value": "AmLight: Boca Raton to Sao Paulo 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + 
"gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 26 + }, + "id": 12, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Boca Raton to Sao Paulo 100GE", + "value": "AmLight: Boca Raton to Sao Paulo 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 26 + }, + "hiddenSeries": false, + "id": 13, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Boca Raton to Sao Paulo 100GE", + "value": "AmLight: Boca Raton to Sao Paulo 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": 
"elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + 
"target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 26 + }, + "hiddenSeries": false, + "id": 14, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Boca Raton to Sao Paulo 100GE", + "value": "AmLight: Boca Raton to Sao Paulo 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + 
"alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + 
"bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 33 + }, + "id": 97, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Fortaleza to Sao Paulo 100GE", + "value": "AmLight: Fortaleza to 
Sao Paulo 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 34 + }, + "id": 98, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Fortaleza to Sao Paulo 100GE", + "value": "AmLight: Fortaleza to Sao Paulo 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 34 + }, + "hiddenSeries": false, + "id": 99, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Fortaleza to Sao Paulo 100GE", + "value": "AmLight: Fortaleza to Sao Paulo 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": 
"start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": 
"link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 34 + }, + "hiddenSeries": false, + "id": 100, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": 
false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Fortaleza to Sao Paulo 100GE", + "value": "AmLight: Fortaleza to Sao Paulo 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name 
like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + 
"show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 41 + }, + "id": 101, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Miami to Cape Town 100GE", + "value": "AmLight: Miami to Cape Town 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 42 + }, + "id": 102, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Miami to Cape Town 100GE", + "value": "AmLight: Miami to Cape Town 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 42 + }, + "hiddenSeries": false, + "id": 103, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Miami to Cape 
Town 100GE", + "value": "AmLight: Miami to Cape Town 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + 
"condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { 
+ "h": 7, + "w": 8, + "x": 16, + "y": 42 + }, + "hiddenSeries": false, + "id": 104, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Miami to Cape Town 100GE", + "value": "AmLight: Miami to Cape Town 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": 
"1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": 
"individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 49 + }, + "id": 105, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Miami to Fortaleza 100GE", + "value": "AmLight: Miami to Fortaleza 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 50 + }, + "id": 106, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Miami to Fortaleza 100GE", + "value": "AmLight: Miami to Fortaleza 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 50 + }, + "hiddenSeries": false, + "id": 107, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + 
"linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Miami to Fortaleza 100GE", + "value": "AmLight: Miami to Fortaleza 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 
1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + 
"max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 50 + }, + "hiddenSeries": false, + "id": 108, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Miami to Fortaleza 100GE", + "value": "AmLight: Miami to Fortaleza 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + 
"title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where 
((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 57 + }, + "id": 109, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Miami to Santiago 100GE", + "value": "AmLight: Miami to Santiago 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 58 + }, + "id": 110, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Miami to Santiago 100GE", + "value": "AmLight: Miami to Santiago 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": 
{}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 58 + }, + "hiddenSeries": false, + "id": 111, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Miami to Santiago 100GE", + "value": "AmLight: Miami to Santiago 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + 
"metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": 
true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 58 + }, + "hiddenSeries": false, + "id": 112, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Miami to Santiago 100GE", + "value": "AmLight: Miami to Santiago 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + 
"condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + 
"metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 65 + }, + "id": 113, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Miami to Sao Paulo 100GE", + "value": "AmLight: Miami to Sao Paulo 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 66 + }, + "id": 114, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + 
"scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Miami to Sao Paulo 100GE", + "value": "AmLight: Miami to Sao Paulo 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 66 + }, + "hiddenSeries": false, + "id": 115, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Miami to Sao Paulo 100GE", + "value": "AmLight: Miami to Sao Paulo 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + 
"expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from 
interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 66 + }, + "hiddenSeries": false, + "id": 116, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Miami to Sao Paulo 100GE", + "value": "AmLight: Miami to Sao Paulo 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], 
+ "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": 
"average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 73 + }, + "id": 117, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Panama to Puerto Rico 100GE", + "value": "AmLight: Panama to Puerto Rico 100GE" + } + }, + "title": "", + 
"type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 74 + }, + "id": 118, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Panama to Puerto Rico 100GE", + "value": "AmLight: Panama to Puerto Rico 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 74 + }, + "hiddenSeries": false, + "id": 119, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Panama to Puerto Rico 100GE", + "value": "AmLight: Panama to Puerto Rico 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + 
"interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + 
"metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 74 + }, + "hiddenSeries": false, + "id": 120, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 
5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Panama to Puerto Rico 100GE", + "value": "AmLight: Panama to Puerto Rico 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + 
"templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, 
+ "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 81 + }, + "id": 121, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Puerto Rico to Miami 100GE", + "value": "AmLight: Puerto Rico to Miami 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 82 + }, + "id": 122, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Puerto Rico to Miami 100GE", + "value": "AmLight: Puerto Rico to Miami 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 82 + }, + "hiddenSeries": false, + "id": 123, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Puerto Rico to Miami 100GE", + "value": "AmLight: Puerto 
Rico to Miami 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + 
"displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 
82 + }, + "hiddenSeries": false, + "id": 124, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Puerto Rico to Miami 100GE", + "value": "AmLight: Puerto Rico to Miami 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + 
"orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + 
"type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 89 + }, + "id": 125, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Santiago to Panama 100GE", + "value": "AmLight: Santiago to Panama 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 90 + }, + "id": 126, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Santiago to Panama 100GE", + "value": "AmLight: Santiago to Panama 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 90 + }, + "hiddenSeries": false, + "id": 127, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + 
"nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Santiago to Panama 100GE", + "value": "AmLight: Santiago to Panama 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from 
interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + 
"show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 90 + }, + "hiddenSeries": false, + "id": 128, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Santiago to Panama 100GE", + "value": "AmLight: Santiago to Panama 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": 
"Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + 
"target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 97 + }, + "id": 129, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Santiago to Sao Paolo 100GE", + "value": "AmLight: Santiago to Sao Paolo 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 98 + }, + "id": 130, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Santiago to Sao Paolo 100GE", + "value": "AmLight: Santiago to Sao Paolo 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + 
}, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 98 + }, + "hiddenSeries": false, + "id": 131, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Santiago to Sao Paolo 100GE", + "value": "AmLight: Santiago to Sao Paolo 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + 
"metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": 
true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 98 + }, + "hiddenSeries": false, + "id": 132, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "AmLight: Santiago to Sao Paolo 100GE", + "value": "AmLight: Santiago to Sao Paolo 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + 
"condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + 
"metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 105 + }, + "id": 133, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "NEAAR: New York to Amsterdam 100GE", + "value": "NEAAR: New York to Amsterdam 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 106 + }, + "id": 134, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + 
"scopedVars": { + "links": { + "selected": false, + "text": "NEAAR: New York to Amsterdam 100GE", + "value": "NEAAR: New York to Amsterdam 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 106 + }, + "hiddenSeries": false, + "id": 135, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "NEAAR: New York to Amsterdam 100GE", + "value": "NEAAR: New York to Amsterdam 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + 
"expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from 
interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 106 + }, + "hiddenSeries": false, + "id": 136, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "NEAAR: New York to Amsterdam 100GE", + "value": "NEAAR: New York to Amsterdam 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + 
], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": 
"average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 113 + }, + "id": 137, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "NEAAR: New York to London 100GE", + "value": "NEAAR: New York to London 100GE" + } + }, + "title": "", + "type": "row" 
+ }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 114 + }, + "id": 138, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "NEAAR: New York to London 100GE", + "value": "NEAAR: New York to London 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 114 + }, + "hiddenSeries": false, + "id": 139, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "NEAAR: New York to London 100GE", + "value": "NEAAR: New York to London 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, 
+ "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + 
"aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 114 + }, + "hiddenSeries": false, + "id": 140, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + 
"renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "NEAAR: New York to London 100GE", + "value": "NEAAR: New York to London 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + 
"timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + 
{ + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 121 + }, + "id": 141, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Los Angeles to Mauna Lani 100GE", + "value": "PIREN: Los Angeles to Mauna Lani 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 122 + }, + "id": 142, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Los Angeles to Mauna Lani 100GE", + "value": "PIREN: Los Angeles to Mauna Lani 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 122 + }, + "hiddenSeries": false, + "id": 143, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Los Angeles to Mauna Lani 100GE", + "value": "PIREN: Los Angeles to Mauna 
Lani 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": 
"series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 122 + }, + 
"hiddenSeries": false, + "id": 144, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Los Angeles to Mauna Lani 100GE", + "value": "PIREN: Los Angeles to Mauna Lani 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + 
"orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + 
"type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 129 + }, + "id": 145, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Mauna Lani to Sydney 100GE", + "value": "PIREN: Mauna Lani to Sydney 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 130 + }, + "id": 146, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Mauna Lani to Sydney 100GE", + "value": "PIREN: Mauna Lani to Sydney 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 130 + }, + "hiddenSeries": false, + "id": 147, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + 
"nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Mauna Lani to Sydney 100GE", + "value": "PIREN: Mauna Lani to Sydney 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from 
interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + 
"show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 130 + }, + "hiddenSeries": false, + "id": 148, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Mauna Lani to Sydney 100GE", + "value": "PIREN: Mauna Lani to Sydney 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": 
"Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + 
"target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 137 + }, + "id": 149, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Oahu to Guam 100GE", + "value": "PIREN: Oahu to Guam 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 138 + }, + "id": 150, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Oahu to Guam 100GE", + "value": "PIREN: Oahu to Guam 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + 
"fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 138 + }, + "hiddenSeries": false, + "id": 151, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Oahu to Guam 100GE", + "value": "PIREN: Oahu to Guam 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], 
+ "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + 
"type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 138 + }, + "hiddenSeries": false, + "id": 152, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Oahu to Guam 100GE", + "value": "PIREN: Oahu to Guam 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + 
"drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + 
"orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 145 + }, + "id": 153, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Oahu to Los Angeles 100GE", + "value": "PIREN: Oahu to Los Angeles 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 146 + }, + "id": 154, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Oahu to Los Angeles 
100GE", + "value": "PIREN: Oahu to Los Angeles 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 146 + }, + "hiddenSeries": false, + "id": 155, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Oahu to Los Angeles 100GE", + "value": "PIREN: Oahu to Los Angeles 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, 
+ "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": 
"$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 146 + }, + "hiddenSeries": false, + "id": 156, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Oahu to Los Angeles 100GE", + "value": "PIREN: Oahu to Los Angeles 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + 
"aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": 
"input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 153 + }, + "id": 157, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Oahu to Sydney 100GE", + "value": "PIREN: Oahu to Sydney 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + 
"overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 154 + }, + "id": 158, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Oahu to Sydney 100GE", + "value": "PIREN: Oahu to Sydney 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 154 + }, + "hiddenSeries": false, + "id": 159, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Oahu to Sydney 100GE", + "value": "PIREN: Oahu to Sydney 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": 
"nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": 
["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 154 + }, + "hiddenSeries": false, + "id": 160, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + 
"scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Oahu to Sydney 100GE", + "value": "PIREN: Oahu to Sydney 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + 
"right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 161 + }, + "id": 
161, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Seattle to Oahu 100GE", + "value": "PIREN: Seattle to Oahu 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 162 + }, + "id": 162, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Seattle to Oahu 100GE", + "value": "PIREN: Seattle to Oahu 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 162 + }, + "hiddenSeries": false, + "id": 163, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Seattle to Oahu 100GE", + "value": "PIREN: Seattle to Oahu 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": 
false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + 
"percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 162 + }, + "hiddenSeries": false, + "id": 164, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": 
false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "PIREN: Seattle to Oahu 100GE", + "value": "PIREN: Seattle to Oahu 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 
from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + 
"logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 169 + }, + "id": 165, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "TransPAC: Hong Kong to Guam 10GE", + "value": "TransPAC: Hong Kong to Guam 10GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 170 + }, + "id": 166, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "TransPAC: Hong Kong to Guam 10GE", + "value": "TransPAC: Hong Kong to Guam 10GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 170 + }, + "hiddenSeries": false, + "id": 167, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + 
"repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "TransPAC: Hong Kong to Guam 10GE", + "value": "TransPAC: Hong Kong to Guam 10GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + 
"whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage 
TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 170 + }, + "hiddenSeries": false, + "id": 168, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "TransPAC: Hong Kong to Guam 10GE", + "value": "TransPAC: Hong Kong to Guam 10GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + 
"metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + 
"thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 177 + }, + "id": 169, + "panels": [], + "repeatIteration": 1618867139860, + "repeatPanelId": 11, + "scopedVars": { + "links": { + "selected": false, + "text": "TransPAC: Seattle to Tokyo 100GE", + "value": "TransPAC: Seattle to Tokyo 100GE" + } + }, + "title": "", + "type": "row" + }, + { + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 2, + "x": 0, + "y": 178 + }, + "id": 170, + "links": [], + "options": { + "content": "**$links**", + "mode": "markdown" + }, + "pluginVersion": "7.3.3", + "repeatIteration": 1618867139860, + "repeatPanelId": 12, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "TransPAC: Seattle to Tokyo 100GE", + "value": "TransPAC: Seattle to Tokyo 100GE" + } + }, + "title": "", + "transparent": true, + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 14, + "x": 2, + "y": 178 + }, + "hiddenSeries": false, + "id": 171, + "legend": { + "alignAsTable": true, + "avg": 
true, + "current": false, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 13, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "TransPAC: Seattle to Tokyo 100GE", + "value": "TransPAC: Seattle to Tokyo 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "aggregate_all": false, + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 210, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + 
"percentileValue": ["", ""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "aggregate_all": false, + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 210, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "refId": "B", + "series": "interface", + "target": "get link_name, aggregate(values.input, 210, average) between (1570571291, 1570657691) by link_name from interface where ((link_name like \"AmLight: Chile to Sao Paolo 100GE\"))", + "target_alias": "$VALUE", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + 
"format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 178 + }, + "hiddenSeries": false, + "id": 172, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": true, + "min": false, + "show": false, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "repeatIteration": 1618867139860, + "repeatPanelId": 14, + "repeatedByRow": true, + "scopedVars": { + "links": { + "selected": false, + "text": "TransPAC: Seattle to Tokyo 100GE", + "value": "TransPAC: Seattle to Tokyo 100GE" + } + }, + "seriesOverrides": [ + { + "alias": "A-Z", + "color": "#FBAE60" + }, + { + "alias": "Z-A", + "color": "#80B2E7" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average", "average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": 
"", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 310, average)": "A-Z" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "", + "outerGroupOperator": [""], + "percentileValue": ["", ""], + "rawQuery": true, + "refId": "A", + "series": "interface", + "target": "get link_name, values.output * 3600 from (get link_name, aggregate(values.output, 3600, average) / 8 as values.output between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "A-Z", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + }, + { + "aggregate_all": false, + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": true, + "method": "average", + "operation": "* $TIMESPAN", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "hide": false, + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 310, average)": "Z-A" + }, + "metricValueAliases": [""], + "metricValues_array": ["Select Metric Value"], + "metric_array": ["link_name"], + "orderby_field": "", + "outerGroupOperator": [""], + "rawQuery": true, + "refId": "B", + "series": "interface", + "target": "get link_name, 
values.input * 3600 from (get link_name, aggregate(values.input, 3600, average) / 8 as values.input between ($START, $END) by link_name from interface where ((link_name like \"$links\")))", + "target_alias": "Z-A", + "templateVariableValue": [""], + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "left": "link_name", + "op": "like", + "right": "$links" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": ["total"] + }, + "yaxes": [ + { + "format": "decbytes", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 185 + }, + "id": 87, + "panels": [], + "title": "", + "type": "row" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 0, + "fillGradient": 0, + "gridPos": { + "h": 14, + "w": 24, + "x": 0, + "y": 186 + }, + "hiddenSeries": false, + "id": 88, + "legend": { + "avg": true, + "current": false, + "max": false, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + {} + ], + "spaceLength": 10, + "stack": 
false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 60, average)": "A-Z" + }, + "metricValueAliases": ["A-Z"], + "metricValues_array": ["output"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "link_name", + "outerGroupOperator": [""], + "percentileValue": [""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 60, average) between (1570571291, 1570657691) by link_name from interface where (link_name like \".*\") ordered by link_name", + "target_alias": "$VALUES $link_name", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "$$hashKey": "object:284", + "left": "link_name", + "op": "like", + "right": ".*" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "(A to Z) Average Utilization Over Time", + "tooltip": { + "shared": true, + "sort": 2, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", 
+ "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 200 + }, + "id": 89, + "panels": [], + "title": "", + "type": "row" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 0, + "fillGradient": 0, + "gridPos": { + "h": 14, + "w": 24, + "x": 0, + "y": 201 + }, + "hiddenSeries": false, + "id": 90, + "legend": { + "avg": true, + "current": false, + "max": false, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + {} + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["average"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "bucketValue": [], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": false, + "method": "average", + "operation": "", + "percentile": "85", + 
"root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 60, average)": "Z-A" + }, + "metricValueAliases": ["Z-A"], + "metricValues_array": ["input"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "link_name", + "outerGroupOperator": [""], + "percentileValue": [""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.input, 60, average) between (1570571291, 1570657691) by link_name from interface where (link_name like \".*\") ordered by link_name", + "target_alias": "$VALUES $link_name", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "$$hashKey": "object:326", + "left": "link_name", + "op": "like", + "right": ".*" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "(Z to A) Average Utilization Over Time", + "tooltip": { + "shared": true, + "sort": 2, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 215 + }, + "id": 91, + "panels": [], + "title": "", + "type": "row" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + 
"fieldConfig": { + "defaults": { + "custom": {}, + "links": [] + }, + "overrides": [] + }, + "fill": 0, + "fillGradient": 0, + "gridPos": { + "h": 14, + "w": 24, + "x": 0, + "y": 216 + }, + "hiddenSeries": false, + "id": 92, + "legend": { + "avg": false, + "current": false, + "max": true, + "min": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": ["max"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "Z-A", + "align": "", + "bucket": "", + "expanded": false, + "method": "max", + "operation": "", + "percentile": "85", + "root": true, + "target": "output", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.output, 60, max)": "Z-A" + }, + "metricValueAliases": ["A-Z"], + "metricValues_array": ["output"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "link_name", + "outerGroupOperator": [""], + "percentileValue": [""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.output, 60, max) between (1570571291, 1570657691) by link_name from interface 
where (link_name like \".*\") ordered by link_name", + "target_alias": "$VALUES $link_name", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "$$hashKey": "object:457", + "left": "link_name", + "op": "like", + "right": ".*" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "(A to Z) Maximum Utilization Over Time", + "tooltip": { + "shared": true, + "sort": 2, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 230 + }, + "id": 93, + "panels": [], + "title": "", + "type": "row" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Netsage TSDS", + "decimals": 1, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 0, + "fillGradient": 0, + "gridPos": { + "h": 14, + "w": 24, + "x": 0, + "y": 231 + }, + "hiddenSeries": false, + "id": 94, + "legend": { + "avg": false, + "current": false, + "max": true, + "min": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.3", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "aggregate_all": false, + "aggregator": 
["max"], + "bucket": [], + "bucketAggs": [ + { + "field": "start", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "combineAllBy": "nothing", + "condition": [], + "dateFormat": "", + "displayFormat": "series", + "drillDown": [], + "drillDownAlias": "", + "drillDownValue": [], + "dsType": "elasticsearch", + "func": [ + { + "alias": "A-Z", + "align": "", + "bucket": "", + "expanded": false, + "method": "max", + "operation": "", + "percentile": "85", + "root": true, + "target": "input", + "template": "", + "title": "Aggregate", + "type": "Aggregate", + "wrapper": [] + } + ], + "groupby_field": "link_name", + "inlineGroupOperator": [[""]], + "metricValueAliasMappings": { + "aggregate(values.input, 60, max)": "A-Z" + }, + "metricValueAliases": ["Z-A"], + "metricValues_array": ["input"], + "metric_array": ["link_name"], + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "orderby_field": "link_name", + "outerGroupOperator": [""], + "percentileValue": [""], + "refId": "A", + "series": "interface", + "target": "get link_name, aggregate(values.input, 60, max) between (1570571291, 1570657691) by link_name from interface where (link_name like \".*\") ordered by link_name", + "target_alias": "$VALUES $link_name", + "templateVariableValue": [""], + "timeField": "start", + "type": "timeserie", + "whereClauseGroup": [ + [ + { + "$$hashKey": "object:521", + "left": "link_name", + "op": "like", + "right": ".*" + } + ] + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "(Z to A) Maximum Utilization Over Time", + "tooltip": { + "shared": true, + "sort": 2, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bps", + "label": "Rate", + "logBase": 1, + "max": 
null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 245 + }, + "id": 95, + "panels": [], + "title": "", + "type": "row" + }, + { + "content": "\u003cdiv class=\"netsage_footer\"\u003e If you have any questions, concerns, or other issues, feel free to contact us at \u003ca href=\"mailto:netsage@lbl.gov\"\u003enetsage@lbl.gov \u003c/a\u003e Thanks! \u003cimg style=\"margin-left:10px\" src=\"https://www.nsf.gov/images/logos/NSF_4-Color_bitmap_Logo.png\" width=50 height=50\u003e \u003ca href=\"https://www.nsf.gov/awardsearch/showAward?AWD_ID=1540933\"\u003e NSF GRANT 1540933 \u003c/a\u003e \u003c/img\u003e \u003cspan style=\"float:right; position:relative; top:15px\"\u003e To Review the NetSage Data Policy \u003ca href=\"http://www.netsage.global/home/netsage-privacy-policy\"\u003e click here \u003c/a\u003e \u003c/div\u003e\n", + "datasource": null, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 24, + "x": 0, + "y": 246 + }, + "id": 96, + "links": [], + "options": { + "content": "\u003cdiv\u003e If you have any questions, concerns, or other issues, feel free to contact us at \u003ca href=\"mailto:netsage@iu.edu\"\u003enetsage@iu.edu\u003c/a\u003e. Thanks! 
\u003cimg style=\"margin-left:10px\" src=\"https://www.nsf.gov/images/logos/NSF_4-Color_bitmap_Logo.png\" width=50 height=50\u003e \u003ca href=\"https://www.nsf.gov/awardsearch/showAward?AWD_ID=1540933\"\u003e NSF GRANT 1540933 \u003c/a\u003e \u003c/img\u003e \u003cspan style=\"float:right; position:relative; top:15px\"\u003e To Review the NetSage Data Policy \u003ca href=\"http://www.netsage.global/home/netsage-privacy-policy\"\u003e click here \u003c/a\u003e \u003c/div\u003e", + "mode": "html" + }, + "pluginVersion": "7.3.3", + "title": "", + "transparent": true, + "type": "text" + } + ], + "refresh": "", + "schemaVersion": 26, + "style": "dark", + "tags": ["netsage"], + "templating": { + "list": [ + { + "allValue": null, + "current": { + "text": "All", + "value": "$__all" + }, + "datasource": "Netsage TSDS", + "definition": "", + "error": null, + "hide": 2, + "includeAll": true, + "label": "Links", + "multi": true, + "name": "links", + "options": [], + "query": "get link_name between(1451606400, $END) by link_name from interface where link_name != null and link_name not like \"ACE\" limit 100 offset 0 ordered by link_name asc", + "refresh": 2, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-24h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": ["5m", "15m", "1h", "6h", "12h", "24h", "2d", "7d", "30d"] + }, + "timezone": "", + "title": "Bandwidth Dashboard", + "uid": "000000003", + "version": 1 +} diff --git a/test/folder_integration_test.go b/test/folder_integration_test.go index 59a37410..707f7c5b 100644 --- a/test/folder_integration_test.go +++ b/test/folder_integration_test.go @@ -3,8 +3,8 @@ package test import ( "testing" - log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" + "log/slog" ) 
func TestFolderCRUD(t *testing.T) { @@ -13,9 +13,9 @@ func TestFolderCRUD(t *testing.T) { } apiClient, _, cleanup := initTest(t, nil) defer cleanup() - log.Info("Exporting all folders") + slog.Info("Exporting all folders") apiClient.UploadFolders(nil) - log.Info("Listing all Folders") + slog.Info("Listing all Folders") folders := apiClient.ListFolder(nil) assert.Equal(t, len(folders), 2) var firstDsItem = folders[0] @@ -23,13 +23,13 @@ func TestFolderCRUD(t *testing.T) { var secondDsItem = folders[1] assert.Equal(t, secondDsItem.Title, "Other") //Import Folders - log.Info("Importing folders") + slog.Info("Importing folders") list := apiClient.DownloadFolders(nil) assert.Equal(t, len(list), len(folders)) - log.Info("Deleting Folders") + slog.Info("Deleting Folders") deleteList := apiClient.DeleteAllFolders(nil) assert.Equal(t, len(deleteList), len(folders)) - log.Info("List Folders again") + slog.Info("List Folders again") folders = apiClient.ListFolder(nil) assert.Equal(t, len(folders), 0) } diff --git a/test/libraryelements_integration_test.go b/test/libraryelements_integration_test.go index 24f8107d..73c4a1df 100644 --- a/test/libraryelements_integration_test.go +++ b/test/libraryelements_integration_test.go @@ -2,10 +2,10 @@ package test import ( "github.com/esnet/gdg/internal/service" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" "github.com/gosimple/slug" - log "github.com/sirupsen/logrus" + "github.com/grafana/grafana-openapi-client-go/models" "github.com/stretchr/testify/assert" + "log/slog" "testing" ) @@ -18,15 +18,15 @@ func TestLibraryElementsCRUD(t *testing.T) { defer cleanup() apiClient.DeleteAllDashboards(service.NewDashboardFilter("", "", "")) filtersEntity := service.NewDashboardFilter("", "", "") - log.Info("Exporting all Library Elements") + slog.Info("Exporting all Library Elements") apiClient.UploadLibraryElements(filtersEntity) - log.Info("Listing all library elements") + slog.Info("Listing all library elements") boards := 
apiClient.ListLibraryElements(filtersEntity) - log.Infof("Imported %d library elements", len(boards)) + slog.Info("Imported library elements", "count", len(boards)) var generalBoard *models.LibraryElementDTO var otherBoard *models.LibraryElementDTO for ndx, board := range boards { - log.Infof(board.Name) + slog.Info(board.Name) if slug.Make(board.Name) == "dashboard-makeover-extra-cleaning-duty-assignment-today" { generalBoard = boards[ndx] } @@ -42,7 +42,7 @@ func TestLibraryElementsCRUD(t *testing.T) { "Type": "stat", "UID": "VvzpJ5X7z", "Kind": int64(1)}) //Import Library Elements - log.Info("Importing Library Elements") + slog.Info("Importing Library Elements") list := apiClient.DownloadLibraryElements(filtersEntity) assert.Equal(t, len(list), len(boards)) //Export all Dashboards @@ -60,10 +60,10 @@ func TestLibraryElementsCRUD(t *testing.T) { //Delete All Dashboards apiClient.DeleteAllDashboards(service.NewDashboardFilter("", "", "")) - log.Info("Deleting Library Elements") + slog.Info("Deleting Library Elements") deleteList := apiClient.DeleteAllLibraryElements(filtersEntity) assert.Equal(t, len(deleteList), len(boards)) - log.Info("List Dashboards again") + slog.Info("List Dashboards again") boards = apiClient.ListLibraryElements(filtersEntity) assert.Equal(t, len(boards), 0) diff --git a/test/organizations_integration_test.go b/test/organizations_integration_test.go index cc4597f3..bc3c3066 100644 --- a/test/organizations_integration_test.go +++ b/test/organizations_integration_test.go @@ -2,7 +2,7 @@ package test import ( "github.com/esnet/gdg/internal/service" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" + "github.com/grafana/grafana-openapi-client-go/models" "golang.org/x/exp/slices" "os" "sort" @@ -16,7 +16,7 @@ func TestOrgsCrud(t *testing.T) { t.Skip("skipping integration test") } if os.Getenv("TEST_TOKEN_CONFIG") == "1" { - t.Skip("Skipping Token configuration, Organization CRUD requires Basic Auth") + t.Skip("Skipping Token 
configuration, Organization CRUD requires Basic Auth") } apiClient, _, cleanup := initTest(t, nil) defer cleanup() @@ -37,7 +37,7 @@ func TestOrgUserMembership(t *testing.T) { t.Skip("skipping integration test") } if os.Getenv("TEST_TOKEN_CONFIG") == "1" { - t.Skip("Skipping Token configuration, Organization CRUD requires Basic Auth") + t.Skip("Skipping Token configuration, Organization CRUD requires Basic Auth") } apiClient, _, cleanup := initTest(t, nil) defer cleanup() diff --git a/test/team_integration_test.go b/test/team_integration_test.go index c12504cc..4d7ddb0f 100644 --- a/test/team_integration_test.go +++ b/test/team_integration_test.go @@ -2,12 +2,12 @@ package test import ( "github.com/esnet/gdg/internal/service" - "github.com/esnet/grafana-swagger-api-golang/goclient/models" + "github.com/grafana/grafana-openapi-client-go/models" "golang.org/x/exp/maps" + "log/slog" "os" "testing" - log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" ) @@ -16,18 +16,18 @@ func TestTeamCRUD(t *testing.T) { t.Skip("skipping integration test") } if os.Getenv("TEST_TOKEN_CONFIG") == "1" { - t.Skip("Skipping Token configuration, Team and User CRUD requires Basic Auth") + t.Skip("Skipping Token configuration, Team and User CRUD requires Basic Auth") } filter := service.NewTeamFilter("") apiClient, _, cleanup := initTest(t, nil) defer cleanup() - log.Info("Exporting current user list") + slog.Info("Exporting current user list") apiClient.UploadUsers(service.NewUserFilter("")) users := apiClient.ListUsers(service.NewUserFilter("")) assert.Equal(t, len(users), 2) - log.Info("Exporting all teams") + slog.Info("Exporting all teams") apiClient.UploadTeams(filter) - log.Info("Listing all Teams") + slog.Info("Listing all Teams") teamsMap := apiClient.ListTeams(filter) teams := maps.Keys(teamsMap) assert.Equal(t, len(teams), 2) @@ -46,7 +46,7 @@ func TestTeamCRUD(t *testing.T) { assert.Equal(t, engineers[1].Login, "tux") assert.Equal(t, 
musicianTeam.Name, "musicians") //Import Teams - log.Info("Importing teams") + slog.Info("Importing teams") list := apiClient.DownloadTeams(filter) assert.Equal(t, len(list), len(teams)) //CleanUp diff --git a/test/users_integration_test.go b/test/users_integration_test.go index 2d4dbde8..021d0b6c 100644 --- a/test/users_integration_test.go +++ b/test/users_integration_test.go @@ -13,7 +13,7 @@ func TestUsers(t *testing.T) { t.Skip("skipping integration test") } if os.Getenv("TEST_TOKEN_CONFIG") == "1" { - t.Skip("Skipping Token configuration, Team and User CRUD requires Basic Auth") + t.Skip("Skipping Token configuration, Team and User CRUD requires Basic Auth") } apiClient, _, cleanup := initTest(t, nil) defer cleanup()