diff --git a/backend/ci_backends/ci_backends.go b/backend/ci_backends/ci_backends.go index 766c73b35..6c3afd568 100644 --- a/backend/ci_backends/ci_backends.go +++ b/backend/ci_backends/ci_backends.go @@ -14,6 +14,7 @@ type JenkinsCi struct{} type CiBackendOptions struct { GithubClientProvider utils.GithubClientProvider GithubInstallationId int64 + GithubAppId int64 GitlabProjectId int GitlabmergeRequestEventName string GitlabCIPipelineID string diff --git a/backend/ci_backends/provider.go b/backend/ci_backends/provider.go index 13ca5805e..4f8b699bd 100644 --- a/backend/ci_backends/provider.go +++ b/backend/ci_backends/provider.go @@ -13,7 +13,7 @@ type CiBackendProvider interface { type DefaultBackendProvider struct{} func (d DefaultBackendProvider) GetCiBackend(options CiBackendOptions) (CiBackend, error) { - client, _, err := utils.GetGithubClient(options.GithubClientProvider, options.GithubInstallationId, options.RepoFullName) + client, _, err := utils.GetGithubClientFromAppId(options.GithubClientProvider, options.GithubInstallationId, options.GithubAppId, options.RepoFullName) if err != nil { log.Printf("GetCiBackend: could not get github client: %v", err) return nil, fmt.Errorf("could not get github client: %v", err) diff --git a/backend/controllers/github.go b/backend/controllers/github.go index 2c4557270..9da47a340 100644 --- a/backend/controllers/github.go +++ b/backend/controllers/github.go @@ -396,6 +396,7 @@ func handlePushEvent(gh utils.GithubClientProvider, payload *github.PushEvent) e func handlePullRequestEvent(gh utils.GithubClientProvider, payload *github.PullRequestEvent, ciBackendProvider ci_backends.CiBackendProvider) error { installationId := *payload.Installation.ID + appId := *payload.Installation.AppID repoName := *payload.Repo.Name repoOwner := *payload.Repo.Owner.Login repoFullName := *payload.Repo.FullName @@ -591,6 +592,7 @@ func handlePullRequestEvent(gh utils.GithubClientProvider, payload *github.PullR ci_backends.CiBackendOptions{ 
GithubClientProvider: gh, GithubInstallationId: installationId, + GithubAppId: appId, RepoName: repoName, RepoOwner: repoOwner, RepoFullName: repoFullName, @@ -703,6 +705,7 @@ func getBatchType(jobs []orchestrator_scheduler.Job) orchestrator_scheduler.Digg func handleIssueCommentEvent(gh utils.GithubClientProvider, payload *github.IssueCommentEvent, ciBackendProvider ci_backends.CiBackendProvider) error { installationId := *payload.Installation.ID + appId := *payload.Installation.AppID repoName := *payload.Repo.Name repoOwner := *payload.Repo.Owner.Login repoFullName := *payload.Repo.FullName @@ -904,6 +907,7 @@ func handleIssueCommentEvent(gh utils.GithubClientProvider, payload *github.Issu ci_backends.CiBackendOptions{ GithubClientProvider: gh, GithubInstallationId: installationId, + GithubAppId: appId, RepoName: repoName, RepoOwner: repoOwner, RepoFullName: repoFullName, diff --git a/backend/utils/github.go b/backend/utils/github.go index fa833df37..be5e4c1e4 100644 --- a/backend/utils/github.go +++ b/backend/utils/github.go @@ -144,6 +144,12 @@ func GetGithubClient(gh GithubClientProvider, installationId int64, repoFullName ghClient, token, err := gh.Get(installation.GithubAppId, installation.GithubInstallationId) return ghClient, token, err } + +func GetGithubClientFromAppId(gh GithubClientProvider, installationId int64, githubAppId int64, repoFullName string) (*github.Client, *string, error) { + ghClient, token, err := gh.Get(githubAppId, installationId) + return ghClient, token, err +} + func GetGithubService(gh GithubClientProvider, installationId int64, repoFullName string, repoOwner string, repoName string) (*github2.GithubService, *string, error) { ghClient, token, err := GetGithubClient(gh, installationId, repoFullName) if err != nil { diff --git a/ee/drift/controllers/controllers.go b/ee/drift/controllers/controllers.go index a48e87e24..2b820dccb 100644 --- a/ee/drift/controllers/controllers.go +++ b/ee/drift/controllers/controllers.go @@ -1,7 +1,11 @@ 
package controllers -import "github.com/diggerhq/digger/next/utils" +import ( + "github.com/diggerhq/digger/backend/ci_backends" + "github.com/diggerhq/digger/backend/utils" +) type MainController struct { GithubClientProvider utils.GithubClientProvider + CiBackendProvider ci_backends.CiBackendProvider } diff --git a/ee/drift/controllers/drift.go b/ee/drift/controllers/drift.go new file mode 100644 index 000000000..ab5b5f9d2 --- /dev/null +++ b/ee/drift/controllers/drift.go @@ -0,0 +1,180 @@ +package controllers + +import ( + "fmt" + "github.com/diggerhq/digger/backend/ci_backends" + "github.com/diggerhq/digger/ee/drift/dbmodels" + services2 "github.com/diggerhq/digger/ee/drift/services" + "github.com/diggerhq/digger/ee/drift/utils" + "github.com/diggerhq/digger/libs/ci/generic" + dg_configuration "github.com/diggerhq/digger/libs/digger_config" + "github.com/diggerhq/digger/libs/scheduler" + "github.com/diggerhq/digger/libs/spec" + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "log" + "net/http" + "os" + "strconv" +) + +type TriggerDriftRunRequest struct { + ProjectId string `json:"project_id"` +} + +func (mc MainController) TriggerDriftRunForProject(c *gin.Context) { + var request TriggerDriftRunRequest + err := c.BindJSON(&request) + if err != nil { + log.Printf("Error binding JSON: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Error binding JSON"}) + return + } + projectId := request.ProjectId + + p := dbmodels.DB.Query.Project + project, err := dbmodels.DB.Query.Project.Where(p.ID.Eq(projectId)).First() + if err != nil { + log.Printf("could not find project %v: %v", projectId, err) + c.JSON(http.StatusBadRequest, gin.H{"error": "could not find project"}) + return + } + + r := dbmodels.DB.Query.Repo + repo, err := dbmodels.DB.Query.Repo.Where(r.ID.Eq(project.RepoID)).First() + if err != nil { + log.Printf("could not find repo: %v for project %v: %v", project.RepoID, project.ID, err) + c.JSON(http.StatusBadRequest, gin.H{"error": 
"could not find repo"}) + return + } + + orgId := repo.OrganisationID + issueNumber := 0 + repoFullName := repo.RepoFullName + repoOwner := repo.RepoOrganisation + repoName := repo.RepoName + githubAppId := repo.GithubAppID + installationid := repo.GithubInstallationID + installationid64, err := strconv.ParseInt(installationid, 10, 64) + cloneUrl := repo.CloneURL + branch := repo.DefaultBranch + command := "digger plan" + workflowFile := "digger_workflow.yml" + + if err != nil { + log.Printf("could not convert installationID to int64 %v", installationid) + c.JSON(http.StatusInternalServerError, gin.H{"error": "could not prarse installation id"}) + return + } + + _, _, config, _, err := utils.GetDiggerConfigForBranch(mc.GithubClientProvider, installationid64, repoFullName, repoOwner, repoName, cloneUrl, branch) + if err != nil { + log.Printf("Error loading digger config: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "error loading digger config"}) + return + } + + theProject := config.GetProject(project.Name) + if theProject == nil { + log.Printf("Could find project %v in digger yml", project.Name) + c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("could not find project %v in digger.yml", theProject)}) + return + } + projects := []dg_configuration.Project{*theProject} + + jobsForImpactedProjects, err := generic.CreateJobsForProjects(projects, command, "drift", repoFullName, "digger", config.Workflows, &issueNumber, nil, branch, branch) + if err != nil { + log.Printf("error converting digger project %v to job", project.Name, err) + c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("could not find project %v in digger.yml", theProject)}) + return + } + + jobToken, err := dbmodels.DB.CreateDiggerJobToken(orgId) + if err != nil { + log.Printf("Error creating job token: %v %v", project.Name, err) + c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("error creating job token")}) + return + } + + 
backendHostName := os.Getenv("DIGGER_HOSTNAME") + jobSpec := scheduler.JobToJson(jobsForImpactedProjects[0], "plan", "digger", branch, "", jobToken.Value, backendHostName, *theProject) + + spec := spec.Spec{ + JobId: uuid.NewString(), + CommentId: "", + Job: jobSpec, + Reporter: spec.ReporterSpec{ + ReportingStrategy: "noop", + }, + Lock: spec.LockSpec{ + LockType: "noop", + }, + Backend: spec.BackendSpec{ + BackendHostname: jobSpec.BackendHostname, + BackendOrganisationName: jobSpec.BackendOrganisationName, + BackendJobToken: jobSpec.BackendJobToken, + BackendType: "backend", + }, + VCS: spec.VcsSpec{ + VcsType: "noop", + Actor: "digger", + RepoFullname: repoFullName, + RepoOwner: repoOwner, + RepoName: repoName, + WorkflowFile: workflowFile, + }, + Variables: make([]spec.VariableSpec, 0), + Policy: spec.PolicySpec{ + PolicyType: "http", + }, + CommentUpdater: spec.CommentUpdaterSpec{ + CommentUpdaterType: dg_configuration.CommentRenderModeBasic, + }, + } + + runName, err := services2.GetRunNameFromJob(spec) + if err != nil { + log.Printf("Error creating ru name: %v %v", project.Name, err) + c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("error creating run name")}) + return + } + + vcsToken, err := services2.GetVCSToken("github", repoFullName, repoOwner, repoName, installationid64, mc.GithubClientProvider) + if err != nil { + log.Printf("Error creating vcs token: %v %v", project.Name, err) + c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("error creating vcs token")}) + return + } + + ciBackend, err := mc.CiBackendProvider.GetCiBackend( + ci_backends.CiBackendOptions{ + GithubClientProvider: mc.GithubClientProvider, + GithubInstallationId: installationid64, + GithubAppId: githubAppId, + RepoName: repoName, + RepoOwner: repoOwner, + RepoFullName: repoFullName, + }, + ) + if err != nil { + log.Printf("Error creating CI backend: %v %v", project.Name, err) + c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("error 
creating CI backend")}) + return + + } + + err = ciBackend.TriggerWorkflow(spec, *runName, *vcsToken) + if err != nil { + log.Printf("TriggerWorkflow err: %v\n", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("Trigger workflow error")}) + return + } + + //job.Status = orchestrator_scheduler.DiggerJobTriggered + //err = models.DB.UpdateDiggerJob(job) + //if err != nil { + // log.Printf("failed to Update digger job state: %v\n", err) + // return err + //} + +} diff --git a/ee/drift/controllers/github.go b/ee/drift/controllers/github.go index c87d745f8..070bc098f 100644 --- a/ee/drift/controllers/github.go +++ b/ee/drift/controllers/github.go @@ -153,7 +153,7 @@ func (mc MainController) GithubAppCallbackPage(c *gin.Context) { repoName := *repo.Name repoUrl := fmt.Sprintf("https://github.com/%v", repoFullName) - _, _, err = dbmodels.CreateOrGetDiggerRepoForGithubRepo(repoFullName, repoOwner, repoName, repoUrl, installationId, *installation.AppID, *installation.Account.ID, *installation.Account.Login) + _, _, err = dbmodels.CreateOrGetDiggerRepoForGithubRepo(repoFullName, repoOwner, repoName, repoUrl, installationId, *installation.AppID, *installation.Account.ID, *installation.Account.Login, defaultBranch, cloneUrl) if err != nil { log.Printf("createOrGetDiggerRepoForGithubRepo error: %v", err) c.String(http.StatusInternalServerError, "createOrGetDiggerRepoForGithubRepo error: %v", err) diff --git a/ee/drift/dbmodels/github.go b/ee/drift/dbmodels/github.go index dbcc55b56..11a82c66b 100644 --- a/ee/drift/dbmodels/github.go +++ b/ee/drift/dbmodels/github.go @@ -28,7 +28,7 @@ func (db *Database) GetGithubInstallationLinkForInstallationId(installationId st return &l, nil } -func CreateOrGetDiggerRepoForGithubRepo(ghRepoFullName string, ghRepoOrganisation string, ghRepoName string, ghRepoUrl string, installationId string, githubAppId int64, accountId int64, login string) (*model.Repo, *model.Organisation, error) { +func 
CreateOrGetDiggerRepoForGithubRepo(ghRepoFullName string, ghRepoOrganisation string, ghRepoName string, ghRepoUrl string, installationId string, githubAppId int64, accountId int64, login string, defaultBranch string, cloneUrl string) (*model.Repo, *model.Organisation, error) { link, err := DB.GetGithubInstallationLinkForInstallationId(installationId) if err != nil { log.Printf("Error fetching installation link: %v", err) @@ -63,7 +63,7 @@ func CreateOrGetDiggerRepoForGithubRepo(ghRepoFullName string, ghRepoOrganisatio return &existingRepo, org, nil } - repo, err := DB.CreateRepo(diggerRepoName, ghRepoFullName, ghRepoOrganisation, ghRepoName, ghRepoUrl, org, "", installationId, githubAppId, accountId, login) + repo, err := DB.CreateRepo(diggerRepoName, ghRepoFullName, ghRepoOrganisation, ghRepoName, ghRepoUrl, org, "", installationId, githubAppId, accountId, login, defaultBranch, cloneUrl) if err != nil { log.Printf("Error creating digger repo: %v", err) return nil, nil, err diff --git a/ee/drift/dbmodels/storage.go b/ee/drift/dbmodels/storage.go index c80f3168f..ba01fe926 100644 --- a/ee/drift/dbmodels/storage.go +++ b/ee/drift/dbmodels/storage.go @@ -58,7 +58,7 @@ func (db *Database) CreateGithubInstallationLink(orgId string, installationId st return &link, nil } -func (db *Database) CreateRepo(name string, repoFullName string, repoOrganisation string, repoName string, repoUrl string, org *model.Organisation, diggerConfig string, githubInstallationId string, githubAppId int64, accountId int64, login string) (*model.Repo, error) { +func (db *Database) CreateRepo(name string, repoFullName string, repoOrganisation string, repoName string, repoUrl string, org *model.Organisation, diggerConfig string, githubInstallationId string, githubAppId int64, accountId int64, login string, defaultBranch string, cloneUrl string) (*model.Repo, error) { var repo model.Repo // check if repo exist already, do nothing in this case result := db.GormDB.Where("name = ? 
AND organisation_id=?", name, org.ID).Find(&repo) @@ -83,6 +83,8 @@ func (db *Database) CreateRepo(name string, repoFullName string, repoOrganisatio GithubAppID: githubAppId, AccountID: accountId, Login: login, + DefaultBranch: defaultBranch, + CloneURL: cloneUrl, } result = db.GormDB.Save(&repo) if result.Error != nil { diff --git a/ee/drift/dbmodels/tokens.go b/ee/drift/dbmodels/tokens.go new file mode 100644 index 000000000..15b68b24e --- /dev/null +++ b/ee/drift/dbmodels/tokens.go @@ -0,0 +1,34 @@ +package dbmodels + +import ( + "github.com/diggerhq/digger/ee/drift/model" + "github.com/google/uuid" + "log" + "time" +) + +const ( + AccessPolicyType = "access" + AdminPolicyType = "admin" + CliJobAccessType = "cli_access" +) + +func (db *Database) CreateDiggerJobToken(organisationId string) (*model.DiggerCiJobToken, error) { + + // create a digger job token + // prefixing token to make easier to retire this type of tokens later + token := "cli:" + uuid.New().String() + jobToken := &model.DiggerCiJobToken{ + ID: uuid.NewString(), + Value: token, + OrganisationID: organisationId, + Type: CliJobAccessType, + Expiry: time.Now().Add(time.Hour * 2), // some jobs can take >30 mins (k8s cluster) + } + err := db.GormDB.Create(jobToken).Error + if err != nil { + log.Printf("failed to create token: %v", err) + return nil, err + } + return jobToken, nil +} diff --git a/ee/drift/main.go b/ee/drift/main.go index 445a3de78..3bf77949a 100644 --- a/ee/drift/main.go +++ b/ee/drift/main.go @@ -2,6 +2,7 @@ package main import ( "fmt" + "github.com/diggerhq/digger/backend/ci_backends" "github.com/diggerhq/digger/ee/drift/controllers" "github.com/diggerhq/digger/ee/drift/dbmodels" "github.com/diggerhq/digger/ee/drift/middleware" @@ -57,6 +58,7 @@ func main() { controller := controllers.MainController{ GithubClientProvider: next_utils.DiggerGithubRealClientProvider{}, + CiBackendProvider: ci_backends.DefaultBackendProvider{}, } r.GET("/ping", controller.Ping) @@ -72,6 +74,8 @@ func 
main() { r.POST("github-app-webhook", controller.GithubAppWebHook) r.GET("/github/callback_fe", middleware.WebhookAuth(), controller.GithubAppCallbackPage) + r.POST("/_internal/trigger_drift_for_project", middleware.WebhookAuth(), controller.TriggerDriftRunForProject) + port := os.Getenv("DIGGER_PORT") if port == "" { port = "3000" diff --git a/ee/drift/model/digger_ci_job_tokens.gen.go b/ee/drift/model/digger_ci_job_tokens.gen.go new file mode 100644 index 000000000..3eb6723fe --- /dev/null +++ b/ee/drift/model/digger_ci_job_tokens.gen.go @@ -0,0 +1,30 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +import ( + "time" + + "gorm.io/gorm" +) + +const TableNameDiggerCiJobToken = "digger_ci_job_tokens" + +// DiggerCiJobToken mapped from table +type DiggerCiJobToken struct { + ID string `gorm:"column:id;primaryKey" json:"id"` + CreatedAt time.Time `gorm:"column:created_at" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` + Value string `gorm:"column:value" json:"value"` + Expiry time.Time `gorm:"column:expiry" json:"expiry"` + OrganisationID string `gorm:"column:organisation_id" json:"organisation_id"` + Type string `gorm:"column:type" json:"type"` +} + +// TableName DiggerCiJobToken's table name +func (*DiggerCiJobToken) TableName() string { + return TableNameDiggerCiJobToken +} diff --git a/ee/drift/model/digger_ci_jobs.gen.go b/ee/drift/model/digger_ci_jobs.gen.go new file mode 100644 index 000000000..85e7adc1a --- /dev/null +++ b/ee/drift/model/digger_ci_jobs.gen.go @@ -0,0 +1,44 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" + + "gorm.io/gorm" +) + +const TableNameDiggerCiJob = "digger_ci_jobs" + +// DiggerCiJob mapped from table +type DiggerCiJob struct { + ID string `gorm:"column:id;primaryKey" json:"id"` + Spectype string `gorm:"column:spectype" json:"spectype"` + Commentid string `gorm:"column:commentid" json:"commentid"` + Runname string `gorm:"column:runname" json:"runname"` + Jobspec []uint8 `gorm:"column:jobspec" json:"jobspec"` + Reporterspec []uint8 `gorm:"column:reporterspec" json:"reporterspec"` + Commentupdaterspec []uint8 `gorm:"column:commentupdaterspec" json:"commentupdaterspec"` + Lockspec []uint8 `gorm:"column:lockspec" json:"lockspec"` + Backendspec []uint8 `gorm:"column:backendspec" json:"backendspec"` + Vcsspec []uint8 `gorm:"column:vcsspec" json:"vcsspec"` + Policyspec []uint8 `gorm:"column:policyspec" json:"policyspec"` + Variablesspec []uint8 `gorm:"column:variablesspec" json:"variablesspec"` + CreatedAt time.Time `gorm:"column:created_at" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` + WorkflowFile string `gorm:"column:workflow_file" json:"workflow_file"` + WorkflowURL string `gorm:"column:workflow_url" json:"workflow_url"` + Status string `gorm:"column:status" json:"status"` + ResourcesCreated int32 `gorm:"column:resources_created" json:"resources_created"` + ResourcesUpdated int32 `gorm:"column:resources_updated" json:"resources_updated"` + ResourcesDeleted int32 `gorm:"column:resources_deleted" json:"resources_deleted"` + ProjectID string `gorm:"column:project_id" json:"project_id"` +} + +// TableName DiggerCiJob's table name +func (*DiggerCiJob) TableName() string { + return TableNameDiggerCiJob +} diff --git a/ee/drift/model/org_settings.gen.go b/ee/drift/model/org_settings.gen.go new file mode 100644 index 000000000..b539ccbb9 --- /dev/null +++ b/ee/drift/model/org_settings.gen.go @@ -0,0 +1,26 @@ +// Code 
generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +import ( + "time" +) + +const TableNameOrgSetting = "org_settings" + +// OrgSetting mapped from table +type OrgSetting struct { + ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:now()" json:"created_at"` + ScheduleType string `gorm:"column:schedule_type" json:"schedule_type"` + Schedule string `gorm:"column:schedule" json:"schedule"` + SlackNotificationURL string `gorm:"column:slack_notification_url" json:"slack_notification_url"` + OrgID string `gorm:"column:org_id;not null" json:"org_id"` +} + +// TableName OrgSetting's table name +func (*OrgSetting) TableName() string { + return TableNameOrgSetting +} diff --git a/ee/drift/model/repos.gen.go b/ee/drift/model/repos.gen.go index 355149923..f94c328b2 100644 --- a/ee/drift/model/repos.gen.go +++ b/ee/drift/model/repos.gen.go @@ -29,6 +29,8 @@ type Repo struct { GithubAppID int64 `gorm:"column:github_app_id" json:"github_app_id"` AccountID int64 `gorm:"column:account_id" json:"account_id"` Login string `gorm:"column:login" json:"login"` + CloneURL string `gorm:"column:clone_url" json:"clone_url"` + DefaultBranch string `gorm:"column:default_branch" json:"default_branch"` } // TableName Repo's table name diff --git a/ee/drift/models_generated/digger_ci_job_tokens.gen.go b/ee/drift/models_generated/digger_ci_job_tokens.gen.go new file mode 100644 index 000000000..71f5fc8a7 --- /dev/null +++ b/ee/drift/models_generated/digger_ci_job_tokens.gen.go @@ -0,0 +1,408 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/ee/drift/model" +) + +func newDiggerCiJobToken(db *gorm.DB, opts ...gen.DOOption) diggerCiJobToken { + _diggerCiJobToken := diggerCiJobToken{} + + _diggerCiJobToken.diggerCiJobTokenDo.UseDB(db, opts...) + _diggerCiJobToken.diggerCiJobTokenDo.UseModel(&model.DiggerCiJobToken{}) + + tableName := _diggerCiJobToken.diggerCiJobTokenDo.TableName() + _diggerCiJobToken.ALL = field.NewAsterisk(tableName) + _diggerCiJobToken.ID = field.NewString(tableName, "id") + _diggerCiJobToken.CreatedAt = field.NewTime(tableName, "created_at") + _diggerCiJobToken.UpdatedAt = field.NewTime(tableName, "updated_at") + _diggerCiJobToken.DeletedAt = field.NewField(tableName, "deleted_at") + _diggerCiJobToken.Value = field.NewString(tableName, "value") + _diggerCiJobToken.Expiry = field.NewTime(tableName, "expiry") + _diggerCiJobToken.OrganisationID = field.NewString(tableName, "organisation_id") + _diggerCiJobToken.Type = field.NewString(tableName, "type") + + _diggerCiJobToken.fillFieldMap() + + return _diggerCiJobToken +} + +type diggerCiJobToken struct { + diggerCiJobTokenDo + + ALL field.Asterisk + ID field.String + CreatedAt field.Time + UpdatedAt field.Time + DeletedAt field.Field + Value field.String + Expiry field.Time + OrganisationID field.String + Type field.String + + fieldMap map[string]field.Expr +} + +func (d diggerCiJobToken) Table(newTableName string) *diggerCiJobToken { + d.diggerCiJobTokenDo.UseTable(newTableName) + return d.updateTableName(newTableName) +} + +func (d diggerCiJobToken) As(alias string) *diggerCiJobToken { + d.diggerCiJobTokenDo.DO = *(d.diggerCiJobTokenDo.As(alias).(*gen.DO)) + return d.updateTableName(alias) +} + +func (d *diggerCiJobToken) updateTableName(table string) *diggerCiJobToken { + d.ALL = field.NewAsterisk(table) + d.ID = 
field.NewString(table, "id") + d.CreatedAt = field.NewTime(table, "created_at") + d.UpdatedAt = field.NewTime(table, "updated_at") + d.DeletedAt = field.NewField(table, "deleted_at") + d.Value = field.NewString(table, "value") + d.Expiry = field.NewTime(table, "expiry") + d.OrganisationID = field.NewString(table, "organisation_id") + d.Type = field.NewString(table, "type") + + d.fillFieldMap() + + return d +} + +func (d *diggerCiJobToken) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := d.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (d *diggerCiJobToken) fillFieldMap() { + d.fieldMap = make(map[string]field.Expr, 8) + d.fieldMap["id"] = d.ID + d.fieldMap["created_at"] = d.CreatedAt + d.fieldMap["updated_at"] = d.UpdatedAt + d.fieldMap["deleted_at"] = d.DeletedAt + d.fieldMap["value"] = d.Value + d.fieldMap["expiry"] = d.Expiry + d.fieldMap["organisation_id"] = d.OrganisationID + d.fieldMap["type"] = d.Type +} + +func (d diggerCiJobToken) clone(db *gorm.DB) diggerCiJobToken { + d.diggerCiJobTokenDo.ReplaceConnPool(db.Statement.ConnPool) + return d +} + +func (d diggerCiJobToken) replaceDB(db *gorm.DB) diggerCiJobToken { + d.diggerCiJobTokenDo.ReplaceDB(db) + return d +} + +type diggerCiJobTokenDo struct{ gen.DO } + +type IDiggerCiJobTokenDo interface { + gen.SubQuery + Debug() IDiggerCiJobTokenDo + WithContext(ctx context.Context) IDiggerCiJobTokenDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IDiggerCiJobTokenDo + WriteDB() IDiggerCiJobTokenDo + As(alias string) gen.Dao + Session(config *gorm.Session) IDiggerCiJobTokenDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IDiggerCiJobTokenDo + Not(conds ...gen.Condition) IDiggerCiJobTokenDo + Or(conds ...gen.Condition) IDiggerCiJobTokenDo + Select(conds ...field.Expr) IDiggerCiJobTokenDo + Where(conds ...gen.Condition) IDiggerCiJobTokenDo + 
Order(conds ...field.Expr) IDiggerCiJobTokenDo + Distinct(cols ...field.Expr) IDiggerCiJobTokenDo + Omit(cols ...field.Expr) IDiggerCiJobTokenDo + Join(table schema.Tabler, on ...field.Expr) IDiggerCiJobTokenDo + LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerCiJobTokenDo + RightJoin(table schema.Tabler, on ...field.Expr) IDiggerCiJobTokenDo + Group(cols ...field.Expr) IDiggerCiJobTokenDo + Having(conds ...gen.Condition) IDiggerCiJobTokenDo + Limit(limit int) IDiggerCiJobTokenDo + Offset(offset int) IDiggerCiJobTokenDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerCiJobTokenDo + Unscoped() IDiggerCiJobTokenDo + Create(values ...*model.DiggerCiJobToken) error + CreateInBatches(values []*model.DiggerCiJobToken, batchSize int) error + Save(values ...*model.DiggerCiJobToken) error + First() (*model.DiggerCiJobToken, error) + Take() (*model.DiggerCiJobToken, error) + Last() (*model.DiggerCiJobToken, error) + Find() ([]*model.DiggerCiJobToken, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerCiJobToken, err error) + FindInBatches(result *[]*model.DiggerCiJobToken, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.DiggerCiJobToken) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IDiggerCiJobTokenDo + Assign(attrs ...field.AssignExpr) IDiggerCiJobTokenDo + Joins(fields ...field.RelationField) 
IDiggerCiJobTokenDo + Preload(fields ...field.RelationField) IDiggerCiJobTokenDo + FirstOrInit() (*model.DiggerCiJobToken, error) + FirstOrCreate() (*model.DiggerCiJobToken, error) + FindByPage(offset int, limit int) (result []*model.DiggerCiJobToken, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IDiggerCiJobTokenDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (d diggerCiJobTokenDo) Debug() IDiggerCiJobTokenDo { + return d.withDO(d.DO.Debug()) +} + +func (d diggerCiJobTokenDo) WithContext(ctx context.Context) IDiggerCiJobTokenDo { + return d.withDO(d.DO.WithContext(ctx)) +} + +func (d diggerCiJobTokenDo) ReadDB() IDiggerCiJobTokenDo { + return d.Clauses(dbresolver.Read) +} + +func (d diggerCiJobTokenDo) WriteDB() IDiggerCiJobTokenDo { + return d.Clauses(dbresolver.Write) +} + +func (d diggerCiJobTokenDo) Session(config *gorm.Session) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Session(config)) +} + +func (d diggerCiJobTokenDo) Clauses(conds ...clause.Expression) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Clauses(conds...)) +} + +func (d diggerCiJobTokenDo) Returning(value interface{}, columns ...string) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Returning(value, columns...)) +} + +func (d diggerCiJobTokenDo) Not(conds ...gen.Condition) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Not(conds...)) +} + +func (d diggerCiJobTokenDo) Or(conds ...gen.Condition) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Or(conds...)) +} + +func (d diggerCiJobTokenDo) Select(conds ...field.Expr) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Select(conds...)) +} + +func (d diggerCiJobTokenDo) Where(conds ...gen.Condition) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Where(conds...)) +} + +func (d diggerCiJobTokenDo) Order(conds ...field.Expr) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Order(conds...)) +} + +func (d 
diggerCiJobTokenDo) Distinct(cols ...field.Expr) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Distinct(cols...)) +} + +func (d diggerCiJobTokenDo) Omit(cols ...field.Expr) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Omit(cols...)) +} + +func (d diggerCiJobTokenDo) Join(table schema.Tabler, on ...field.Expr) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Join(table, on...)) +} + +func (d diggerCiJobTokenDo) LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerCiJobTokenDo { + return d.withDO(d.DO.LeftJoin(table, on...)) +} + +func (d diggerCiJobTokenDo) RightJoin(table schema.Tabler, on ...field.Expr) IDiggerCiJobTokenDo { + return d.withDO(d.DO.RightJoin(table, on...)) +} + +func (d diggerCiJobTokenDo) Group(cols ...field.Expr) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Group(cols...)) +} + +func (d diggerCiJobTokenDo) Having(conds ...gen.Condition) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Having(conds...)) +} + +func (d diggerCiJobTokenDo) Limit(limit int) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Limit(limit)) +} + +func (d diggerCiJobTokenDo) Offset(offset int) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Offset(offset)) +} + +func (d diggerCiJobTokenDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Scopes(funcs...)) +} + +func (d diggerCiJobTokenDo) Unscoped() IDiggerCiJobTokenDo { + return d.withDO(d.DO.Unscoped()) +} + +func (d diggerCiJobTokenDo) Create(values ...*model.DiggerCiJobToken) error { + if len(values) == 0 { + return nil + } + return d.DO.Create(values) +} + +func (d diggerCiJobTokenDo) CreateInBatches(values []*model.DiggerCiJobToken, batchSize int) error { + return d.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (d diggerCiJobTokenDo) Save(values ...*model.DiggerCiJobToken) error { + if len(values) == 0 { + return nil + } + return d.DO.Save(values) +} + +func (d diggerCiJobTokenDo) First() (*model.DiggerCiJobToken, error) { + if result, err := d.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.DiggerCiJobToken), nil + } +} + +func (d diggerCiJobTokenDo) Take() (*model.DiggerCiJobToken, error) { + if result, err := d.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.DiggerCiJobToken), nil + } +} + +func (d diggerCiJobTokenDo) Last() (*model.DiggerCiJobToken, error) { + if result, err := d.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.DiggerCiJobToken), nil + } +} + +func (d diggerCiJobTokenDo) Find() ([]*model.DiggerCiJobToken, error) { + result, err := d.DO.Find() + return result.([]*model.DiggerCiJobToken), err +} + +func (d diggerCiJobTokenDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerCiJobToken, err error) { + buf := make([]*model.DiggerCiJobToken, 0, batchSize) + err = d.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (d diggerCiJobTokenDo) FindInBatches(result *[]*model.DiggerCiJobToken, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return d.DO.FindInBatches(result, batchSize, fc) +} + +func (d diggerCiJobTokenDo) Attrs(attrs ...field.AssignExpr) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Attrs(attrs...)) +} + +func (d diggerCiJobTokenDo) Assign(attrs ...field.AssignExpr) IDiggerCiJobTokenDo { + return d.withDO(d.DO.Assign(attrs...)) +} + +func (d diggerCiJobTokenDo) Joins(fields ...field.RelationField) IDiggerCiJobTokenDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Joins(_f)) + } + return &d +} + +func (d diggerCiJobTokenDo) Preload(fields ...field.RelationField) IDiggerCiJobTokenDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Preload(_f)) + } + return &d +} + +func (d diggerCiJobTokenDo) FirstOrInit() (*model.DiggerCiJobToken, error) { + if result, err := d.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.DiggerCiJobToken), nil + } +} + +func (d diggerCiJobTokenDo) FirstOrCreate() (*model.DiggerCiJobToken, error) { + if result, err := d.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.DiggerCiJobToken), nil + } +} + +func (d diggerCiJobTokenDo) FindByPage(offset int, limit int) (result []*model.DiggerCiJobToken, count int64, err error) { + result, err = d.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = d.Offset(-1).Limit(-1).Count() + return +} + +func (d diggerCiJobTokenDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = d.Count() + if err != nil { + return + } + + err = d.Offset(offset).Limit(limit).Scan(result) + return +} + +func (d diggerCiJobTokenDo) Scan(result interface{}) (err error) { + return 
d.DO.Scan(result) +} + +func (d diggerCiJobTokenDo) Delete(models ...*model.DiggerCiJobToken) (result gen.ResultInfo, err error) { + return d.DO.Delete(models) +} + +func (d *diggerCiJobTokenDo) withDO(do gen.Dao) *diggerCiJobTokenDo { + d.DO = *do.(*gen.DO) + return d +} diff --git a/ee/drift/models_generated/digger_ci_jobs.gen.go b/ee/drift/models_generated/digger_ci_jobs.gen.go new file mode 100644 index 000000000..5d5a809fc --- /dev/null +++ b/ee/drift/models_generated/digger_ci_jobs.gen.go @@ -0,0 +1,464 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/ee/drift/model" +) + +func newDiggerCiJob(db *gorm.DB, opts ...gen.DOOption) diggerCiJob { + _diggerCiJob := diggerCiJob{} + + _diggerCiJob.diggerCiJobDo.UseDB(db, opts...) 
+ _diggerCiJob.diggerCiJobDo.UseModel(&model.DiggerCiJob{}) + + tableName := _diggerCiJob.diggerCiJobDo.TableName() + _diggerCiJob.ALL = field.NewAsterisk(tableName) + _diggerCiJob.ID = field.NewString(tableName, "id") + _diggerCiJob.Spectype = field.NewString(tableName, "spectype") + _diggerCiJob.Commentid = field.NewString(tableName, "commentid") + _diggerCiJob.Runname = field.NewString(tableName, "runname") + _diggerCiJob.Jobspec = field.NewField(tableName, "jobspec") + _diggerCiJob.Reporterspec = field.NewField(tableName, "reporterspec") + _diggerCiJob.Commentupdaterspec = field.NewField(tableName, "commentupdaterspec") + _diggerCiJob.Lockspec = field.NewField(tableName, "lockspec") + _diggerCiJob.Backendspec = field.NewField(tableName, "backendspec") + _diggerCiJob.Vcsspec = field.NewField(tableName, "vcsspec") + _diggerCiJob.Policyspec = field.NewField(tableName, "policyspec") + _diggerCiJob.Variablesspec = field.NewField(tableName, "variablesspec") + _diggerCiJob.CreatedAt = field.NewTime(tableName, "created_at") + _diggerCiJob.UpdatedAt = field.NewTime(tableName, "updated_at") + _diggerCiJob.DeletedAt = field.NewField(tableName, "deleted_at") + _diggerCiJob.WorkflowFile = field.NewString(tableName, "workflow_file") + _diggerCiJob.WorkflowURL = field.NewString(tableName, "workflow_url") + _diggerCiJob.Status = field.NewString(tableName, "status") + _diggerCiJob.ResourcesCreated = field.NewInt32(tableName, "resources_created") + _diggerCiJob.ResourcesUpdated = field.NewInt32(tableName, "resources_updated") + _diggerCiJob.ResourcesDeleted = field.NewInt32(tableName, "resources_deleted") + _diggerCiJob.ProjectID = field.NewString(tableName, "project_id") + + _diggerCiJob.fillFieldMap() + + return _diggerCiJob +} + +type diggerCiJob struct { + diggerCiJobDo + + ALL field.Asterisk + ID field.String + Spectype field.String + Commentid field.String + Runname field.String + Jobspec field.Field + Reporterspec field.Field + Commentupdaterspec field.Field + Lockspec 
field.Field + Backendspec field.Field + Vcsspec field.Field + Policyspec field.Field + Variablesspec field.Field + CreatedAt field.Time + UpdatedAt field.Time + DeletedAt field.Field + WorkflowFile field.String + WorkflowURL field.String + Status field.String + ResourcesCreated field.Int32 + ResourcesUpdated field.Int32 + ResourcesDeleted field.Int32 + ProjectID field.String + + fieldMap map[string]field.Expr +} + +func (d diggerCiJob) Table(newTableName string) *diggerCiJob { + d.diggerCiJobDo.UseTable(newTableName) + return d.updateTableName(newTableName) +} + +func (d diggerCiJob) As(alias string) *diggerCiJob { + d.diggerCiJobDo.DO = *(d.diggerCiJobDo.As(alias).(*gen.DO)) + return d.updateTableName(alias) +} + +func (d *diggerCiJob) updateTableName(table string) *diggerCiJob { + d.ALL = field.NewAsterisk(table) + d.ID = field.NewString(table, "id") + d.Spectype = field.NewString(table, "spectype") + d.Commentid = field.NewString(table, "commentid") + d.Runname = field.NewString(table, "runname") + d.Jobspec = field.NewField(table, "jobspec") + d.Reporterspec = field.NewField(table, "reporterspec") + d.Commentupdaterspec = field.NewField(table, "commentupdaterspec") + d.Lockspec = field.NewField(table, "lockspec") + d.Backendspec = field.NewField(table, "backendspec") + d.Vcsspec = field.NewField(table, "vcsspec") + d.Policyspec = field.NewField(table, "policyspec") + d.Variablesspec = field.NewField(table, "variablesspec") + d.CreatedAt = field.NewTime(table, "created_at") + d.UpdatedAt = field.NewTime(table, "updated_at") + d.DeletedAt = field.NewField(table, "deleted_at") + d.WorkflowFile = field.NewString(table, "workflow_file") + d.WorkflowURL = field.NewString(table, "workflow_url") + d.Status = field.NewString(table, "status") + d.ResourcesCreated = field.NewInt32(table, "resources_created") + d.ResourcesUpdated = field.NewInt32(table, "resources_updated") + d.ResourcesDeleted = field.NewInt32(table, "resources_deleted") + d.ProjectID = 
field.NewString(table, "project_id") + + d.fillFieldMap() + + return d +} + +func (d *diggerCiJob) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := d.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (d *diggerCiJob) fillFieldMap() { + d.fieldMap = make(map[string]field.Expr, 22) + d.fieldMap["id"] = d.ID + d.fieldMap["spectype"] = d.Spectype + d.fieldMap["commentid"] = d.Commentid + d.fieldMap["runname"] = d.Runname + d.fieldMap["jobspec"] = d.Jobspec + d.fieldMap["reporterspec"] = d.Reporterspec + d.fieldMap["commentupdaterspec"] = d.Commentupdaterspec + d.fieldMap["lockspec"] = d.Lockspec + d.fieldMap["backendspec"] = d.Backendspec + d.fieldMap["vcsspec"] = d.Vcsspec + d.fieldMap["policyspec"] = d.Policyspec + d.fieldMap["variablesspec"] = d.Variablesspec + d.fieldMap["created_at"] = d.CreatedAt + d.fieldMap["updated_at"] = d.UpdatedAt + d.fieldMap["deleted_at"] = d.DeletedAt + d.fieldMap["workflow_file"] = d.WorkflowFile + d.fieldMap["workflow_url"] = d.WorkflowURL + d.fieldMap["status"] = d.Status + d.fieldMap["resources_created"] = d.ResourcesCreated + d.fieldMap["resources_updated"] = d.ResourcesUpdated + d.fieldMap["resources_deleted"] = d.ResourcesDeleted + d.fieldMap["project_id"] = d.ProjectID +} + +func (d diggerCiJob) clone(db *gorm.DB) diggerCiJob { + d.diggerCiJobDo.ReplaceConnPool(db.Statement.ConnPool) + return d +} + +func (d diggerCiJob) replaceDB(db *gorm.DB) diggerCiJob { + d.diggerCiJobDo.ReplaceDB(db) + return d +} + +type diggerCiJobDo struct{ gen.DO } + +type IDiggerCiJobDo interface { + gen.SubQuery + Debug() IDiggerCiJobDo + WithContext(ctx context.Context) IDiggerCiJobDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IDiggerCiJobDo + WriteDB() IDiggerCiJobDo + As(alias string) gen.Dao + Session(config *gorm.Session) IDiggerCiJobDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds 
...clause.Expression) IDiggerCiJobDo + Not(conds ...gen.Condition) IDiggerCiJobDo + Or(conds ...gen.Condition) IDiggerCiJobDo + Select(conds ...field.Expr) IDiggerCiJobDo + Where(conds ...gen.Condition) IDiggerCiJobDo + Order(conds ...field.Expr) IDiggerCiJobDo + Distinct(cols ...field.Expr) IDiggerCiJobDo + Omit(cols ...field.Expr) IDiggerCiJobDo + Join(table schema.Tabler, on ...field.Expr) IDiggerCiJobDo + LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerCiJobDo + RightJoin(table schema.Tabler, on ...field.Expr) IDiggerCiJobDo + Group(cols ...field.Expr) IDiggerCiJobDo + Having(conds ...gen.Condition) IDiggerCiJobDo + Limit(limit int) IDiggerCiJobDo + Offset(offset int) IDiggerCiJobDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerCiJobDo + Unscoped() IDiggerCiJobDo + Create(values ...*model.DiggerCiJob) error + CreateInBatches(values []*model.DiggerCiJob, batchSize int) error + Save(values ...*model.DiggerCiJob) error + First() (*model.DiggerCiJob, error) + Take() (*model.DiggerCiJob, error) + Last() (*model.DiggerCiJob, error) + Find() ([]*model.DiggerCiJob, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerCiJob, err error) + FindInBatches(result *[]*model.DiggerCiJob, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.DiggerCiJob) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) 
IDiggerCiJobDo + Assign(attrs ...field.AssignExpr) IDiggerCiJobDo + Joins(fields ...field.RelationField) IDiggerCiJobDo + Preload(fields ...field.RelationField) IDiggerCiJobDo + FirstOrInit() (*model.DiggerCiJob, error) + FirstOrCreate() (*model.DiggerCiJob, error) + FindByPage(offset int, limit int) (result []*model.DiggerCiJob, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IDiggerCiJobDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (d diggerCiJobDo) Debug() IDiggerCiJobDo { + return d.withDO(d.DO.Debug()) +} + +func (d diggerCiJobDo) WithContext(ctx context.Context) IDiggerCiJobDo { + return d.withDO(d.DO.WithContext(ctx)) +} + +func (d diggerCiJobDo) ReadDB() IDiggerCiJobDo { + return d.Clauses(dbresolver.Read) +} + +func (d diggerCiJobDo) WriteDB() IDiggerCiJobDo { + return d.Clauses(dbresolver.Write) +} + +func (d diggerCiJobDo) Session(config *gorm.Session) IDiggerCiJobDo { + return d.withDO(d.DO.Session(config)) +} + +func (d diggerCiJobDo) Clauses(conds ...clause.Expression) IDiggerCiJobDo { + return d.withDO(d.DO.Clauses(conds...)) +} + +func (d diggerCiJobDo) Returning(value interface{}, columns ...string) IDiggerCiJobDo { + return d.withDO(d.DO.Returning(value, columns...)) +} + +func (d diggerCiJobDo) Not(conds ...gen.Condition) IDiggerCiJobDo { + return d.withDO(d.DO.Not(conds...)) +} + +func (d diggerCiJobDo) Or(conds ...gen.Condition) IDiggerCiJobDo { + return d.withDO(d.DO.Or(conds...)) +} + +func (d diggerCiJobDo) Select(conds ...field.Expr) IDiggerCiJobDo { + return d.withDO(d.DO.Select(conds...)) +} + +func (d diggerCiJobDo) Where(conds ...gen.Condition) IDiggerCiJobDo { + return d.withDO(d.DO.Where(conds...)) +} + +func (d diggerCiJobDo) Order(conds ...field.Expr) IDiggerCiJobDo { + return d.withDO(d.DO.Order(conds...)) +} + +func (d diggerCiJobDo) Distinct(cols ...field.Expr) IDiggerCiJobDo 
{ + return d.withDO(d.DO.Distinct(cols...)) +} + +func (d diggerCiJobDo) Omit(cols ...field.Expr) IDiggerCiJobDo { + return d.withDO(d.DO.Omit(cols...)) +} + +func (d diggerCiJobDo) Join(table schema.Tabler, on ...field.Expr) IDiggerCiJobDo { + return d.withDO(d.DO.Join(table, on...)) +} + +func (d diggerCiJobDo) LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerCiJobDo { + return d.withDO(d.DO.LeftJoin(table, on...)) +} + +func (d diggerCiJobDo) RightJoin(table schema.Tabler, on ...field.Expr) IDiggerCiJobDo { + return d.withDO(d.DO.RightJoin(table, on...)) +} + +func (d diggerCiJobDo) Group(cols ...field.Expr) IDiggerCiJobDo { + return d.withDO(d.DO.Group(cols...)) +} + +func (d diggerCiJobDo) Having(conds ...gen.Condition) IDiggerCiJobDo { + return d.withDO(d.DO.Having(conds...)) +} + +func (d diggerCiJobDo) Limit(limit int) IDiggerCiJobDo { + return d.withDO(d.DO.Limit(limit)) +} + +func (d diggerCiJobDo) Offset(offset int) IDiggerCiJobDo { + return d.withDO(d.DO.Offset(offset)) +} + +func (d diggerCiJobDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerCiJobDo { + return d.withDO(d.DO.Scopes(funcs...)) +} + +func (d diggerCiJobDo) Unscoped() IDiggerCiJobDo { + return d.withDO(d.DO.Unscoped()) +} + +func (d diggerCiJobDo) Create(values ...*model.DiggerCiJob) error { + if len(values) == 0 { + return nil + } + return d.DO.Create(values) +} + +func (d diggerCiJobDo) CreateInBatches(values []*model.DiggerCiJob, batchSize int) error { + return d.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (d diggerCiJobDo) Save(values ...*model.DiggerCiJob) error { + if len(values) == 0 { + return nil + } + return d.DO.Save(values) +} + +func (d diggerCiJobDo) First() (*model.DiggerCiJob, error) { + if result, err := d.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.DiggerCiJob), nil + } +} + +func (d diggerCiJobDo) Take() (*model.DiggerCiJob, error) { + if result, err := d.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.DiggerCiJob), nil + } +} + +func (d diggerCiJobDo) Last() (*model.DiggerCiJob, error) { + if result, err := d.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.DiggerCiJob), nil + } +} + +func (d diggerCiJobDo) Find() ([]*model.DiggerCiJob, error) { + result, err := d.DO.Find() + return result.([]*model.DiggerCiJob), err +} + +func (d diggerCiJobDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerCiJob, err error) { + buf := make([]*model.DiggerCiJob, 0, batchSize) + err = d.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (d diggerCiJobDo) FindInBatches(result *[]*model.DiggerCiJob, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return d.DO.FindInBatches(result, batchSize, fc) +} + +func (d diggerCiJobDo) Attrs(attrs ...field.AssignExpr) IDiggerCiJobDo { + return d.withDO(d.DO.Attrs(attrs...)) +} + +func (d diggerCiJobDo) Assign(attrs ...field.AssignExpr) IDiggerCiJobDo { + return d.withDO(d.DO.Assign(attrs...)) +} + +func (d diggerCiJobDo) Joins(fields ...field.RelationField) IDiggerCiJobDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Joins(_f)) + } + return &d +} + +func (d diggerCiJobDo) Preload(fields ...field.RelationField) IDiggerCiJobDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Preload(_f)) + } + return &d +} + +func (d diggerCiJobDo) FirstOrInit() (*model.DiggerCiJob, error) { + if result, err := d.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.DiggerCiJob), nil + } +} + +func (d diggerCiJobDo) FirstOrCreate() (*model.DiggerCiJob, error) { + if result, err := d.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.DiggerCiJob), nil + } +} + +func (d diggerCiJobDo) FindByPage(offset int, limit int) (result []*model.DiggerCiJob, count int64, err error) { + result, err = d.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = d.Offset(-1).Limit(-1).Count() + return +} + +func (d diggerCiJobDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = d.Count() + if err != nil { + return + } + + err = d.Offset(offset).Limit(limit).Scan(result) + return +} + +func (d diggerCiJobDo) Scan(result interface{}) (err error) { + return d.DO.Scan(result) +} + +func (d diggerCiJobDo) Delete(models ...*model.DiggerCiJob) (result gen.ResultInfo, err 
error) { + return d.DO.Delete(models) +} + +func (d *diggerCiJobDo) withDO(do gen.Dao) *diggerCiJobDo { + d.DO = *do.(*gen.DO) + return d +} diff --git a/ee/drift/models_generated/gen.go b/ee/drift/models_generated/gen.go index 69e7b24a7..987715c34 100644 --- a/ee/drift/models_generated/gen.go +++ b/ee/drift/models_generated/gen.go @@ -17,45 +17,53 @@ import ( var ( Q = new(Query) + DiggerCiJob *diggerCiJob + DiggerCiJobToken *diggerCiJobToken GithubAppInstallationLink *githubAppInstallationLink + OrgSetting *orgSetting Organisation *organisation Project *project Repo *repo User *user - UserSetting *userSetting ) func SetDefault(db *gorm.DB, opts ...gen.DOOption) { *Q = *Use(db, opts...) + DiggerCiJob = &Q.DiggerCiJob + DiggerCiJobToken = &Q.DiggerCiJobToken GithubAppInstallationLink = &Q.GithubAppInstallationLink + OrgSetting = &Q.OrgSetting Organisation = &Q.Organisation Project = &Q.Project Repo = &Q.Repo User = &Q.User - UserSetting = &Q.UserSetting } func Use(db *gorm.DB, opts ...gen.DOOption) *Query { return &Query{ db: db, + DiggerCiJob: newDiggerCiJob(db, opts...), + DiggerCiJobToken: newDiggerCiJobToken(db, opts...), GithubAppInstallationLink: newGithubAppInstallationLink(db, opts...), + OrgSetting: newOrgSetting(db, opts...), Organisation: newOrganisation(db, opts...), Project: newProject(db, opts...), Repo: newRepo(db, opts...), User: newUser(db, opts...), - UserSetting: newUserSetting(db, opts...), } } type Query struct { db *gorm.DB + DiggerCiJob diggerCiJob + DiggerCiJobToken diggerCiJobToken GithubAppInstallationLink githubAppInstallationLink + OrgSetting orgSetting Organisation organisation Project project Repo repo User user - UserSetting userSetting } func (q *Query) Available() bool { return q.db != nil } @@ -63,12 +71,14 @@ func (q *Query) Available() bool { return q.db != nil } func (q *Query) clone(db *gorm.DB) *Query { return &Query{ db: db, + DiggerCiJob: q.DiggerCiJob.clone(db), + DiggerCiJobToken: q.DiggerCiJobToken.clone(db), 
GithubAppInstallationLink: q.GithubAppInstallationLink.clone(db), + OrgSetting: q.OrgSetting.clone(db), Organisation: q.Organisation.clone(db), Project: q.Project.clone(db), Repo: q.Repo.clone(db), User: q.User.clone(db), - UserSetting: q.UserSetting.clone(db), } } @@ -83,32 +93,38 @@ func (q *Query) WriteDB() *Query { func (q *Query) ReplaceDB(db *gorm.DB) *Query { return &Query{ db: db, + DiggerCiJob: q.DiggerCiJob.replaceDB(db), + DiggerCiJobToken: q.DiggerCiJobToken.replaceDB(db), GithubAppInstallationLink: q.GithubAppInstallationLink.replaceDB(db), + OrgSetting: q.OrgSetting.replaceDB(db), Organisation: q.Organisation.replaceDB(db), Project: q.Project.replaceDB(db), Repo: q.Repo.replaceDB(db), User: q.User.replaceDB(db), - UserSetting: q.UserSetting.replaceDB(db), } } type queryCtx struct { + DiggerCiJob IDiggerCiJobDo + DiggerCiJobToken IDiggerCiJobTokenDo GithubAppInstallationLink IGithubAppInstallationLinkDo + OrgSetting IOrgSettingDo Organisation IOrganisationDo Project IProjectDo Repo IRepoDo User IUserDo - UserSetting IUserSettingDo } func (q *Query) WithContext(ctx context.Context) *queryCtx { return &queryCtx{ + DiggerCiJob: q.DiggerCiJob.WithContext(ctx), + DiggerCiJobToken: q.DiggerCiJobToken.WithContext(ctx), GithubAppInstallationLink: q.GithubAppInstallationLink.WithContext(ctx), + OrgSetting: q.OrgSetting.WithContext(ctx), Organisation: q.Organisation.WithContext(ctx), Project: q.Project.WithContext(ctx), Repo: q.Repo.WithContext(ctx), User: q.User.WithContext(ctx), - UserSetting: q.UserSetting.WithContext(ctx), } } diff --git a/ee/drift/models_generated/org_settings.gen.go b/ee/drift/models_generated/org_settings.gen.go new file mode 100644 index 000000000..f61f2f82e --- /dev/null +++ b/ee/drift/models_generated/org_settings.gen.go @@ -0,0 +1,400 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/ee/drift/model" +) + +func newOrgSetting(db *gorm.DB, opts ...gen.DOOption) orgSetting { + _orgSetting := orgSetting{} + + _orgSetting.orgSettingDo.UseDB(db, opts...) + _orgSetting.orgSettingDo.UseModel(&model.OrgSetting{}) + + tableName := _orgSetting.orgSettingDo.TableName() + _orgSetting.ALL = field.NewAsterisk(tableName) + _orgSetting.ID = field.NewInt64(tableName, "id") + _orgSetting.CreatedAt = field.NewTime(tableName, "created_at") + _orgSetting.ScheduleType = field.NewString(tableName, "schedule_type") + _orgSetting.Schedule = field.NewString(tableName, "schedule") + _orgSetting.SlackNotificationURL = field.NewString(tableName, "slack_notification_url") + _orgSetting.OrgID = field.NewString(tableName, "org_id") + + _orgSetting.fillFieldMap() + + return _orgSetting +} + +type orgSetting struct { + orgSettingDo + + ALL field.Asterisk + ID field.Int64 + CreatedAt field.Time + ScheduleType field.String + Schedule field.String + SlackNotificationURL field.String + OrgID field.String + + fieldMap map[string]field.Expr +} + +func (o orgSetting) Table(newTableName string) *orgSetting { + o.orgSettingDo.UseTable(newTableName) + return o.updateTableName(newTableName) +} + +func (o orgSetting) As(alias string) *orgSetting { + o.orgSettingDo.DO = *(o.orgSettingDo.As(alias).(*gen.DO)) + return o.updateTableName(alias) +} + +func (o *orgSetting) updateTableName(table string) *orgSetting { + o.ALL = field.NewAsterisk(table) + o.ID = field.NewInt64(table, "id") + o.CreatedAt = field.NewTime(table, "created_at") + o.ScheduleType = field.NewString(table, "schedule_type") + o.Schedule = field.NewString(table, "schedule") + o.SlackNotificationURL = field.NewString(table, "slack_notification_url") + o.OrgID = field.NewString(table, "org_id") + + 
o.fillFieldMap() + + return o +} + +func (o *orgSetting) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := o.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (o *orgSetting) fillFieldMap() { + o.fieldMap = make(map[string]field.Expr, 6) + o.fieldMap["id"] = o.ID + o.fieldMap["created_at"] = o.CreatedAt + o.fieldMap["schedule_type"] = o.ScheduleType + o.fieldMap["schedule"] = o.Schedule + o.fieldMap["slack_notification_url"] = o.SlackNotificationURL + o.fieldMap["org_id"] = o.OrgID +} + +func (o orgSetting) clone(db *gorm.DB) orgSetting { + o.orgSettingDo.ReplaceConnPool(db.Statement.ConnPool) + return o +} + +func (o orgSetting) replaceDB(db *gorm.DB) orgSetting { + o.orgSettingDo.ReplaceDB(db) + return o +} + +type orgSettingDo struct{ gen.DO } + +type IOrgSettingDo interface { + gen.SubQuery + Debug() IOrgSettingDo + WithContext(ctx context.Context) IOrgSettingDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IOrgSettingDo + WriteDB() IOrgSettingDo + As(alias string) gen.Dao + Session(config *gorm.Session) IOrgSettingDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IOrgSettingDo + Not(conds ...gen.Condition) IOrgSettingDo + Or(conds ...gen.Condition) IOrgSettingDo + Select(conds ...field.Expr) IOrgSettingDo + Where(conds ...gen.Condition) IOrgSettingDo + Order(conds ...field.Expr) IOrgSettingDo + Distinct(cols ...field.Expr) IOrgSettingDo + Omit(cols ...field.Expr) IOrgSettingDo + Join(table schema.Tabler, on ...field.Expr) IOrgSettingDo + LeftJoin(table schema.Tabler, on ...field.Expr) IOrgSettingDo + RightJoin(table schema.Tabler, on ...field.Expr) IOrgSettingDo + Group(cols ...field.Expr) IOrgSettingDo + Having(conds ...gen.Condition) IOrgSettingDo + Limit(limit int) IOrgSettingDo + Offset(offset int) IOrgSettingDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) 
IOrgSettingDo + Unscoped() IOrgSettingDo + Create(values ...*model.OrgSetting) error + CreateInBatches(values []*model.OrgSetting, batchSize int) error + Save(values ...*model.OrgSetting) error + First() (*model.OrgSetting, error) + Take() (*model.OrgSetting, error) + Last() (*model.OrgSetting, error) + Find() ([]*model.OrgSetting, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.OrgSetting, err error) + FindInBatches(result *[]*model.OrgSetting, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.OrgSetting) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IOrgSettingDo + Assign(attrs ...field.AssignExpr) IOrgSettingDo + Joins(fields ...field.RelationField) IOrgSettingDo + Preload(fields ...field.RelationField) IOrgSettingDo + FirstOrInit() (*model.OrgSetting, error) + FirstOrCreate() (*model.OrgSetting, error) + FindByPage(offset int, limit int) (result []*model.OrgSetting, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IOrgSettingDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (o orgSettingDo) Debug() IOrgSettingDo { + return o.withDO(o.DO.Debug()) +} + +func (o orgSettingDo) WithContext(ctx context.Context) IOrgSettingDo { + return o.withDO(o.DO.WithContext(ctx)) +} + +func (o 
orgSettingDo) ReadDB() IOrgSettingDo { + return o.Clauses(dbresolver.Read) +} + +func (o orgSettingDo) WriteDB() IOrgSettingDo { + return o.Clauses(dbresolver.Write) +} + +func (o orgSettingDo) Session(config *gorm.Session) IOrgSettingDo { + return o.withDO(o.DO.Session(config)) +} + +func (o orgSettingDo) Clauses(conds ...clause.Expression) IOrgSettingDo { + return o.withDO(o.DO.Clauses(conds...)) +} + +func (o orgSettingDo) Returning(value interface{}, columns ...string) IOrgSettingDo { + return o.withDO(o.DO.Returning(value, columns...)) +} + +func (o orgSettingDo) Not(conds ...gen.Condition) IOrgSettingDo { + return o.withDO(o.DO.Not(conds...)) +} + +func (o orgSettingDo) Or(conds ...gen.Condition) IOrgSettingDo { + return o.withDO(o.DO.Or(conds...)) +} + +func (o orgSettingDo) Select(conds ...field.Expr) IOrgSettingDo { + return o.withDO(o.DO.Select(conds...)) +} + +func (o orgSettingDo) Where(conds ...gen.Condition) IOrgSettingDo { + return o.withDO(o.DO.Where(conds...)) +} + +func (o orgSettingDo) Order(conds ...field.Expr) IOrgSettingDo { + return o.withDO(o.DO.Order(conds...)) +} + +func (o orgSettingDo) Distinct(cols ...field.Expr) IOrgSettingDo { + return o.withDO(o.DO.Distinct(cols...)) +} + +func (o orgSettingDo) Omit(cols ...field.Expr) IOrgSettingDo { + return o.withDO(o.DO.Omit(cols...)) +} + +func (o orgSettingDo) Join(table schema.Tabler, on ...field.Expr) IOrgSettingDo { + return o.withDO(o.DO.Join(table, on...)) +} + +func (o orgSettingDo) LeftJoin(table schema.Tabler, on ...field.Expr) IOrgSettingDo { + return o.withDO(o.DO.LeftJoin(table, on...)) +} + +func (o orgSettingDo) RightJoin(table schema.Tabler, on ...field.Expr) IOrgSettingDo { + return o.withDO(o.DO.RightJoin(table, on...)) +} + +func (o orgSettingDo) Group(cols ...field.Expr) IOrgSettingDo { + return o.withDO(o.DO.Group(cols...)) +} + +func (o orgSettingDo) Having(conds ...gen.Condition) IOrgSettingDo { + return o.withDO(o.DO.Having(conds...)) +} + +func (o orgSettingDo) 
Limit(limit int) IOrgSettingDo { + return o.withDO(o.DO.Limit(limit)) +} + +func (o orgSettingDo) Offset(offset int) IOrgSettingDo { + return o.withDO(o.DO.Offset(offset)) +} + +func (o orgSettingDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IOrgSettingDo { + return o.withDO(o.DO.Scopes(funcs...)) +} + +func (o orgSettingDo) Unscoped() IOrgSettingDo { + return o.withDO(o.DO.Unscoped()) +} + +func (o orgSettingDo) Create(values ...*model.OrgSetting) error { + if len(values) == 0 { + return nil + } + return o.DO.Create(values) +} + +func (o orgSettingDo) CreateInBatches(values []*model.OrgSetting, batchSize int) error { + return o.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (o orgSettingDo) Save(values ...*model.OrgSetting) error { + if len(values) == 0 { + return nil + } + return o.DO.Save(values) +} + +func (o orgSettingDo) First() (*model.OrgSetting, error) { + if result, err := o.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.OrgSetting), nil + } +} + +func (o orgSettingDo) Take() (*model.OrgSetting, error) { + if result, err := o.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.OrgSetting), nil + } +} + +func (o orgSettingDo) Last() (*model.OrgSetting, error) { + if result, err := o.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.OrgSetting), nil + } +} + +func (o orgSettingDo) Find() ([]*model.OrgSetting, error) { + result, err := o.DO.Find() + return result.([]*model.OrgSetting), err +} + +func (o orgSettingDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.OrgSetting, err error) { + buf := make([]*model.OrgSetting, 0, batchSize) + err = o.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, 
buf...) }() + return fc(tx, batch) + }) + return results, err +} + +func (o orgSettingDo) FindInBatches(result *[]*model.OrgSetting, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return o.DO.FindInBatches(result, batchSize, fc) +} + +func (o orgSettingDo) Attrs(attrs ...field.AssignExpr) IOrgSettingDo { + return o.withDO(o.DO.Attrs(attrs...)) +} + +func (o orgSettingDo) Assign(attrs ...field.AssignExpr) IOrgSettingDo { + return o.withDO(o.DO.Assign(attrs...)) +} + +func (o orgSettingDo) Joins(fields ...field.RelationField) IOrgSettingDo { + for _, _f := range fields { + o = *o.withDO(o.DO.Joins(_f)) + } + return &o +} + +func (o orgSettingDo) Preload(fields ...field.RelationField) IOrgSettingDo { + for _, _f := range fields { + o = *o.withDO(o.DO.Preload(_f)) + } + return &o +} + +func (o orgSettingDo) FirstOrInit() (*model.OrgSetting, error) { + if result, err := o.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.OrgSetting), nil + } +} + +func (o orgSettingDo) FirstOrCreate() (*model.OrgSetting, error) { + if result, err := o.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.OrgSetting), nil + } +} + +func (o orgSettingDo) FindByPage(offset int, limit int) (result []*model.OrgSetting, count int64, err error) { + result, err = o.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = o.Offset(-1).Limit(-1).Count() + return +} + +func (o orgSettingDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = o.Count() + if err != nil { + return + } + + err = o.Offset(offset).Limit(limit).Scan(result) + return +} + +func (o orgSettingDo) Scan(result interface{}) (err error) { + return o.DO.Scan(result) +} + +func (o orgSettingDo) Delete(models ...*model.OrgSetting) (result gen.ResultInfo, err error) { + 
return o.DO.Delete(models) +} + +func (o *orgSettingDo) withDO(do gen.Dao) *orgSettingDo { + o.DO = *do.(*gen.DO) + return o +} diff --git a/ee/drift/models_generated/repos.gen.go b/ee/drift/models_generated/repos.gen.go index 447edfe58..76c9b9c46 100644 --- a/ee/drift/models_generated/repos.gen.go +++ b/ee/drift/models_generated/repos.gen.go @@ -42,6 +42,8 @@ func newRepo(db *gorm.DB, opts ...gen.DOOption) repo { _repo.GithubAppID = field.NewInt64(tableName, "github_app_id") _repo.AccountID = field.NewInt64(tableName, "account_id") _repo.Login = field.NewString(tableName, "login") + _repo.CloneURL = field.NewString(tableName, "clone_url") + _repo.DefaultBranch = field.NewString(tableName, "default_branch") _repo.fillFieldMap() @@ -67,6 +69,8 @@ type repo struct { GithubAppID field.Int64 AccountID field.Int64 Login field.String + CloneURL field.String + DefaultBranch field.String fieldMap map[string]field.Expr } @@ -98,6 +102,8 @@ func (r *repo) updateTableName(table string) *repo { r.GithubAppID = field.NewInt64(table, "github_app_id") r.AccountID = field.NewInt64(table, "account_id") r.Login = field.NewString(table, "login") + r.CloneURL = field.NewString(table, "clone_url") + r.DefaultBranch = field.NewString(table, "default_branch") r.fillFieldMap() @@ -114,7 +120,7 @@ func (r *repo) GetFieldByName(fieldName string) (field.OrderExpr, bool) { } func (r *repo) fillFieldMap() { - r.fieldMap = make(map[string]field.Expr, 15) + r.fieldMap = make(map[string]field.Expr, 17) r.fieldMap["id"] = r.ID r.fieldMap["created_at"] = r.CreatedAt r.fieldMap["updated_at"] = r.UpdatedAt @@ -130,6 +136,8 @@ func (r *repo) fillFieldMap() { r.fieldMap["github_app_id"] = r.GithubAppID r.fieldMap["account_id"] = r.AccountID r.fieldMap["login"] = r.Login + r.fieldMap["clone_url"] = r.CloneURL + r.fieldMap["default_branch"] = r.DefaultBranch } func (r repo) clone(db *gorm.DB) repo { diff --git a/ee/drift/services/spec.go b/ee/drift/services/spec.go new file mode 100644 index 
000000000..f2170b704
--- /dev/null
+++ b/ee/drift/services/spec.go
@@ -0,0 +1,51 @@
+package services
+
+import (
+	"fmt"
+	utils2 "github.com/diggerhq/digger/backend/utils"
+	"github.com/diggerhq/digger/ee/drift/utils"
+	"github.com/diggerhq/digger/libs/spec"
+	"os"
+)
+
+// GetRunNameFromJob builds a display name for a drift run from the job spec,
+// e.g. "[abcd1234] digger plan my-project (driftapp)".
+func GetRunNameFromJob(spec spec.Spec) (*string, error) {
+	jobSpec := spec.Job
+	diggerCommand := fmt.Sprintf("digger %v", jobSpec.JobType)
+	// Guard against job ids shorter than 8 characters to avoid a slice panic.
+	jobIdShort := spec.JobId
+	if len(jobIdShort) > 8 {
+		jobIdShort = jobIdShort[:8]
+	}
+	projectName := jobSpec.ProjectName
+	runName := fmt.Sprintf("[%v] %v %v (driftapp)", jobIdShort, diggerCommand, projectName)
+	return &runName, nil
+}
+
+// GetVCSToken returns an access token for the given VCS provider.
+// For "github" it mints an installation token via the client provider;
+// for "gitlab" it reads DIGGER_GITLAB_ACCESS_TOKEN from the environment.
+func GetVCSToken(vcsType string, repoFullName string, repoOwner string, repoName string, installationId int64, gh utils2.GithubClientProvider) (*string, error) {
+	var token string
+	switch vcsType {
+	case "github":
+		_, ghToken, err := utils.GetGithubService(
+			gh,
+			installationId,
+			repoFullName,
+			repoOwner,
+			repoName,
+		)
+		if err != nil {
+			return nil, fmt.Errorf("TriggerWorkflow: could not retrieve token: %v", err)
+		}
+		token = *ghToken
+	case "gitlab":
+		token = os.Getenv("DIGGER_GITLAB_ACCESS_TOKEN")
+	default:
+		return nil, fmt.Errorf("unknown batch VCS: %v", vcsType)
+	}
+
+	return &token, nil
+}
diff --git a/ee/drift/utils/github.go b/ee/drift/utils/github.go
index 048d33114..475d58038 100644
--- a/ee/drift/utils/github.go
+++ b/ee/drift/utils/github.go
@@ -5,13 +5,17 @@ import (
 	"encoding/base64"
 	"fmt"
 	"github.com/bradleyfalzon/ghinstallation/v2"
+	utils2 "github.com/diggerhq/digger/backend/utils"
 	"github.com/diggerhq/digger/ee/drift/dbmodels"
 	github2 "github.com/diggerhq/digger/libs/ci/github"
+	dg_configuration "github.com/diggerhq/digger/libs/digger_config"
 	"github.com/diggerhq/digger/next/utils"
+	"github.com/dominikbraun/graph"
 	"github.com/google/go-github/v61/github"
 	"log"
 	net "net/http"
 	"os"
+	"path"
 	"strconv"
 )
 
@@ -85,3 +89,35 @@ func (gh
DiggerGithubRealClientProvider) Get(githubAppId int64, installationId i
 	}
 	return ghClient, &token, nil
 }
+
+// GetDiggerConfigForBranch clones the repo at the given branch and loads its digger.yml.
+func GetDiggerConfigForBranch(gh utils.GithubClientProvider, installationId int64, repoFullName string, repoOwner string, repoName string, cloneUrl string, branch string) (string, *github2.GithubService, *dg_configuration.DiggerConfig, graph.Graph[string, dg_configuration.Project], error) {
+	ghService, token, err := GetGithubService(gh, installationId, repoFullName, repoOwner, repoName)
+	if err != nil {
+		log.Printf("Error getting github service: %v", err)
+		return "", nil, nil, nil, fmt.Errorf("error getting github service")
+	}
+
+	var config *dg_configuration.DiggerConfig
+	var diggerYmlStr string
+	var dependencyGraph graph.Graph[string, dg_configuration.Project]
+	err = utils2.CloneGitRepoAndDoAction(cloneUrl, branch, *token, func(dir string) error {
+		diggerYmlBytes, err := os.ReadFile(path.Join(dir, "digger.yml"))
+		if err != nil {
+			return err
+		}
+		diggerYmlStr = string(diggerYmlBytes)
+		config, _, dependencyGraph, err = dg_configuration.LoadDiggerConfig(dir, true, nil)
+		if err != nil {
+			log.Printf("Error loading digger config: %v", err)
+			return err
+		}
+		return nil
+	})
+	if err != nil {
+		log.Printf("Error cloning and loading config: %v", err)
+		return "", nil, nil, nil, fmt.Errorf("error cloning and loading config")
+	}
+
+	log.Printf("Digger config loaded successfully\n")
+	return diggerYmlStr, ghService, config, dependencyGraph, nil
+}
diff --git a/next/services/scheduler.go b/next/services/scheduler.go
index 8d39d50ca..f060740e9 100644
--- a/next/services/scheduler.go
+++ b/next/services/scheduler.go
@@ -144,7 +144,6 @@ func CreateJobAndBatchForProjectFromBranch(gh utils.GithubClientProvider, projec
 	log.Printf("installation id is: %v", installationId)
 	var dgprojects = []dg_configuration.Project{dbmodels.ToDiggerProject(project)}
-	projectsGraph, err := dg_configuration.CreateProjectDependencyGraph(dgprojects)
 	var config
*dg_configuration.DiggerConfig = &dg_configuration.DiggerConfig{ ApplyAfterMerge: true,